gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.redis.internal.executor.hash;

import static org.assertj.core.api.Assertions.assertThat;

import java.util.EnumMap;

import org.assertj.core.data.Offset;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import redis.clients.jedis.Jedis;

import org.apache.geode.redis.RedisIntegrationTest;
import org.apache.geode.test.awaitility.GeodeAwaitility;

/**
 * Contains tests that measure the used memory of redis or radish and assert that the memory
 * overhead is equal to an expected memory overhead.
 *
 * @see MemoryOverheadIntegrationTest
 * @see #measureAndCheckPerEntryOverhead(AddEntryFunction, Measurement)
 */
public abstract class AbstractMemoryOverheadIntegrationTest implements RedisIntegrationTest {

  /** Number of entries added before measuring, to amortize fixed-size data structure costs. */
  private static final int WARM_UP_ENTRY_COUNT = 1000;
  /** Number of measured entries added per test. */
  private static final int TOTAL_ENTRY_COUNT = 5000;
  /** Memory is sampled once per this many entries. */
  private static final int SAMPLE_INTERVAL = 100;
  private static final int REDIS_CLIENT_TIMEOUT =
      Math.toIntExact(GeodeAwaitility.getTimeout().toMillis());

  /**
   * A value that force native redis to not use an optimized data structure.
   * Native redis uses an optimized data structure (a zip list) for very small hashes and sets. This
   * will allow us to force redis not to use a ziplist.
   */
  public static final String LARGE_STRING =
      "value_that_will_force_redis_to_not_use_a_ziplist______________________________________________________________";

  /**
   * Allowed variance in our measurements before the test fails. This allows us to be up to
   * 7 bytes off from the original measurement.
   */
  public static final long ALLOWED_BYTE_DIFFERENCE = 7L;

  protected Jedis jedis;

  /** The kinds of redis data structure whose per-entry overhead is measured. */
  protected enum Measurement {
    STRING,
    HASH,
    HASH_ENTRY,
    SET,
    SET_ENTRY
  }

  @Before
  public void setUp() {
    jedis = new Jedis("localhost", getPort(), REDIS_CLIENT_TIMEOUT);
  }

  @After
  public void tearDown() {
    jedis.flushAll();
    jedis.close();
  }

  /**
   * Subclasses should use this to return the expected per entry overhead for each measurement.
   *
   * @return A map with the expected overhead for each measurement type.
   */
  abstract EnumMap<Measurement, Integer> expectedPerEntryOverhead();

  /**
   * Return the total used memory on the server.
   */
  abstract long getUsedMemory();

  /**
   * Measure the overhead for each redis string that is added to the server.
   */
  @Test
  public void measureOverheadPerString() {
    // Function that adds a new redis string to the server
    final AddEntryFunction addStringFunction = uniqueString -> {
      String response = jedis.set(uniqueString, uniqueString);
      assertThat(response).isEqualTo("OK");

      // Note - jedis convert strings to bytes with the UTF-8 charset
      // Since the strings above are all ASCII, the length == the number of bytes.
      // Key and value are both uniqueString, so the user data is twice its length.
      return uniqueString.length() + uniqueString.length();
    };

    measureAndCheckPerEntryOverhead(addStringFunction, Measurement.STRING);
  }

  /**
   * Measure the overhead for each redis hash that is added to the server.
   */
  @Test
  public void measureOverheadPerHash() {
    // Function that adds a new redis hash to the server
    final AddEntryFunction addHashFunction = uniqueString -> {
      String mapKey = "key";
      Long response = jedis.hset(uniqueString, mapKey, LARGE_STRING);
      assertThat(response).isEqualTo(1);
      return uniqueString.length() + mapKey.length() + LARGE_STRING.length();
    };

    measureAndCheckPerEntryOverhead(addHashFunction, Measurement.HASH);
  }

  /**
   * Measure the overhead for each entry that is added to a redis hash. This
   * uses a single hash and adds additional fields to the hash and measures the overhead
   * of the additional fields.
   */
  @Test
  public void measureOverheadPerHashEntry() {
    // Function that adds an additional hash entry to a single redis hash
    final AddEntryFunction addHashEntryFunction = uniqueString -> {
      String valueString = String.format("%s value-%s", LARGE_STRING, uniqueString);
      Long response = jedis.hset("TestSet", uniqueString, valueString);
      assertThat(response).isEqualTo(1);
      return uniqueString.length() + valueString.length();
    };

    measureAndCheckPerEntryOverhead(addHashEntryFunction, Measurement.HASH_ENTRY);
  }

  /**
   * Measure the overhead for each redis set that is added to the server.
   */
  @Test
  public void measureOverheadPerSet() {
    // Function that adds a new redis set to the server
    final AddEntryFunction addSetFunction = uniqueString -> {
      Long response = jedis.sadd(uniqueString, LARGE_STRING);
      assertThat(response).isEqualTo(1);
      return uniqueString.length() + LARGE_STRING.length();
    };

    measureAndCheckPerEntryOverhead(addSetFunction, Measurement.SET);
  }

  /**
   * Measure the overhead for each entry that is added to a redis set. This
   * uses a single sets and adds additional fields to the hash and measures the overhead
   * of the additional fields.
   */
  @Test
  public void measureOverheadPerSetEntry() {
    // Function that adds a new entry to a single redis set
    final AddEntryFunction addSetEntryFunction = uniqueString -> {
      String valueString = String.format("%s value-%s", LARGE_STRING, uniqueString);
      Long response = jedis.sadd("TestSet", valueString);
      assertThat(response).isEqualTo(1);
      return valueString.length();
    };

    measureAndCheckPerEntryOverhead(addSetEntryFunction, Measurement.SET_ENTRY);
  }

  /**
   * Measures the per entry overhead of a particular operation. The passed in function
   * should add a single value to the server using the given uniqueString. This method
   * will call that function repeatedly and measure the memory use.
   *
   * This method measures the total memory use of the server (radish or redis)
   * before and after adding a certain number of entries, and computes the per entry overhead.
   * It asserts that the overhead matches the result of {@link #expectedPerEntryOverhead()} for
   * the given measurement
   *
   * @param addEntry A function that adds an entry to the server through some redis operation. The
   *        function should return the amount of actual user data added. This user data
   *        size will be subtracted out to compute the pure overhead of redis or radish
   *        data structures.
   * @param measurement Indicates what data structure we are measuring. Used to look up the expected
   *        memory usage.
   */
  private void measureAndCheckPerEntryOverhead(AddEntryFunction addEntry,
      Measurement measurement) {
    long expectedOverhead = expectedPerEntryOverhead().get(measurement);

    // Put some entries to make sure we initialize any constant size data structures. We are
    // just trying to measure the cost of each add entry operation.
    for (int i = 0; i < WARM_UP_ENTRY_COUNT; i++) {
      String uniqueString = String.format("warmup-%10d", i);
      addEntry.addEntryAndReturnDataSize(uniqueString);
    }

    // Perform measurements
    long baseline = getUsedMemory();
    long totalDataSize = 0;
    System.out.println("Measuring the per entry overhead for each " + measurement);
    // Fix: the newline belongs in the format string, not inside the last column header argument
    System.out.printf("%20s, %20s, %20s\n", "Used Memory", "Total Mem Per Entry",
        "Overhead Per Entry");
    long perEntryOverhead = 0;
    for (int i = 0; i < TOTAL_ENTRY_COUNT; i++) {
      String uniqueString = String.format("%10d", i);
      totalDataSize += addEntry.addEntryAndReturnDataSize(uniqueString);
      if (i % SAMPLE_INTERVAL == (SAMPLE_INTERVAL - 1)) {
        // Fix: after adding the entry at index i there are (i + 1) entries, not i.
        // Dividing by i skewed every sample (and the final asserted value) by ~1%.
        long entryCount = i + 1;
        long currentMemory = getUsedMemory() - baseline;
        long perEntryMemory = currentMemory / entryCount;
        perEntryOverhead = (currentMemory - totalDataSize) / entryCount;
        System.out.printf("%20d, %20d, %20d\n", currentMemory, perEntryMemory,
            perEntryOverhead);
      }
    }

    // These assertions compare the computed per entry overhead against result of the
    // expectedPerEntryOverhead function. Please look at that function for the expected values.
    // We allow these values to be off by up to ALLOWED_BYTE_DIFFERENCE bytes due to rounding
    // and measurement noise.
    assertThat(perEntryOverhead).withFailMessage(
        "The overhead per %s has increased from %s to %s. Please see if you can avoid introducing additional memory overhead.",
        measurement, expectedOverhead, perEntryOverhead)
        .isLessThanOrEqualTo(expectedOverhead + ALLOWED_BYTE_DIFFERENCE);
    assertThat(perEntryOverhead).withFailMessage(
        "The overhead per %s has decreased from %s to %s. Great job! Please update the expected value in this test.",
        measurement, expectedOverhead, perEntryOverhead)
        .isCloseTo(expectedOverhead, Offset.offset(ALLOWED_BYTE_DIFFERENCE));
  }

  @FunctionalInterface
  public interface AddEntryFunction {
    /**
     * Add an entry to the server.
     *
     * @param uniqueString A unique string than can be used to generate a unique key
     *        for the entry.
     * @return the size of the user data added by the operation
     */
    long addEntryAndReturnDataSize(String uniqueString);
  }
}
/*
 * Copyright 2001-2004 The Apache Software Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.commons.collections15;

import junit.framework.Test;
import junit.framework.TestSuite;
import org.apache.commons.collections15.map.AbstractTestMultiMap;
import org.apache.commons.collections15.multimap.MultiHashMap;

import java.util.*;

/**
 * Unit Tests for <code>MultiHashMap</code>.
 *
 * @author Matt Hall, John Watkinson, Unknown
 * @version $Revision: 1.1 $ $Date: 2005/10/11 17:05:39 $
 */
public class TestMultiHashMap extends AbstractTestMultiMap {

    public TestMultiHashMap(String testName) {
        super(testName);
    }

    // Standard JUnit 3 suite entry point used by the test runner.
    public static Test suite() {
        return new TestSuite(TestMultiHashMap.class);
    }

    // Allows running this test class standalone from the command line.
    public static void main(String args[]) {
        String[] testCaseName = {TestMultiHashMap.class.getName()};
        junit.textui.TestRunner.main(testCaseName);
    }

    // MutltiHashMap was introduced in Collections 2.x
    public String getCompatibilityVersion() {
        return "2";
    }

    // Factory used by the inherited AbstractTestMultiMap tests.
    public MultiMap makeEmptyMap() {
        return new MultiHashMap();
    }

    //----------------------------
    // Tests
    //----------------------------
    // Verifies put/get round-trips for all fixture pairs, that a missing key
    // returns null, and that clear() empties the map.
    public void testPutNGet() {
        MultiHashMap map = new MultiHashMap();
        loadMap(map);
        checkMap(map);
        assertTrue(map.get(new Integer(99)) == null);
        map.clear();
        assertTrue(map.size() == 0);
    }

    // containsValue must search across ALL values of ALL keys.
    public void testContainsValue() {
        MultiHashMap map = new MultiHashMap();
        loadMap(map);
        assertTrue(map.containsValue("uno"));
        assertTrue(map.containsValue("quatro"));
        assertTrue(map.containsValue("two"));
        assertTrue(!map.containsValue("uggaBugga"));
        map.clear();
    }

    // values() must flatten every value of every key into one collection.
    public void testValues() {
        MultiHashMap map = new MultiHashMap();
        loadMap(map);
        Collection vals = map.values();
        assertTrue(vals.size() == getFullSize());
        map.clear();
    }

    // Simple key/value fixture pair used to build the test data table below.
    static private class MapPair {
        MapPair(int key, String val) {
            mKey = new Integer(key);
            mValue = val;
        }

        Integer mKey = null;
        String mValue = null;
    }

    // Fixture table: row index == key, each row holds that key's values.
    // Note row 2 contains a duplicate value ("two" twice) on purpose.
    static private MapPair[][] sMapPairs =
            {{new MapPair(0, "zero")},
             {new MapPair(1, "one"), new MapPair(1, "ONE"), new MapPair(1, "uno")},
             {new MapPair(2, "two"), new MapPair(2, "two")},
             {new MapPair(3, "three"), new MapPair(3, "THREE"), new MapPair(3, "tres")},
             {new MapPair(4, "four"), new MapPair(4, "quatro")}};

    private void loadMap(MultiHashMap map) {
        // Set up so that we load the keys "randomly"
        // (i.e. we don't want to load int row-order, so that all like keys
        // load together. We want to mix it up...)
        int numRows = sMapPairs.length;
        int maxCols = 0;
        for (int ii = 0; ii < sMapPairs.length; ii++) {
            if (sMapPairs[ii].length > maxCols) {
                maxCols = sMapPairs[ii].length;
            }
        }
        // Column-major insertion so same-key puts are interleaved.
        for (int ii = 0; ii < maxCols; ii++) {
            for (int jj = 0; jj < numRows; jj++) {
                if (ii < sMapPairs[jj].length) {
                    map.put(sMapPairs[jj][ii].mKey, sMapPairs[jj][ii].mValue);
                    //---------------------------------------------------------
                }
            }
        }
        // size() counts distinct keys (rows), not total values.
        assertTrue(map.size() == sMapPairs.length);
    }

    private void checkMap(MultiHashMap map) {
        for (int ii = 0; ii < sMapPairs.length; ii++) {
            checkKeyList(map, ii);
        }
    }

    // Asserts that get(key) returns a Collection holding exactly the fixture
    // row's values for that key.
    private void checkKeyList(MultiHashMap map, int index) {
        assertTrue(index < sMapPairs.length);
        Integer key = sMapPairs[index][0].mKey;
        Object obj = map.get(key);
        //--------------------------
        assertTrue(obj != null);
        assertTrue(obj instanceof Collection);
        Collection keyList = (Collection) obj;
        assertTrue(keyList.size() == sMapPairs[index].length);
        Iterator iter = keyList.iterator();
        while (iter.hasNext()) {
            Object oval = iter.next();
            assertTrue(oval != null);
            assertTrue(oval instanceof String);
            String val = (String) oval;
            boolean foundIt = false;
            for (int ii = 0; ii < sMapPairs[index].length; ii++) {
                if (val.equals(sMapPairs[index][ii].mValue)) {
                    foundIt = true;
                }
            }
            assertTrue(foundIt);
        }
    }

    // Total number of values (not keys) in the fixture table.
    public int getFullSize() {
        int len = 0;
        for (int ii = 0; ii < sMapPairs.length; ii++) {
            len += sMapPairs[ii].length;
        }
        return len;
    }

    // Intentionally empty: disables the inherited test, which does not apply
    // to a multi-map's entry set.
    public void testEntrySetIterator() {
    }

    // Intentionally empty: disables the inherited test (see above).
    public void testEntrySetContainsProperMappings() {
    }

    public void testEntrySetIteratorHasProperMappings() {
        // override and ignore test -- it will fail when verifying the iterator for
        // the set contains the right value -- we're not returning the value, we're
        // returning a collection.
        // TODO: re-implement this test to ensure the values of the iterator match
        // the proper collection rather than the value the superclass is checking
        // for.
        return;
    }

    // Next methods are overriden because MultiHashMap values are always a
    // collection, and deviate from the Map contract because of this.
    // TODO: implement the tests to ensure that Map.get(Object) returns the
    // appropriate collection of values
    public void testMapGet() {
    }

    public void testMapPut() {
    }

    public void testMapPutAll() {
    }

    public void testMapRemove() {
    }

    // A map that had its only entry removed must equal a fresh empty map.
    public void testMapEquals() {
        MultiHashMap one = new MultiHashMap();
        Integer value = new Integer(1);
        one.put("One", value);
        one.remove("One", value);
        MultiHashMap two = new MultiHashMap();
        assertEquals(two, one);
    }

    public void testMapHashCode() {
    }

    // The verification for the map and its entry set must also be overridden
    // because the values are not going to be the same as the values in the
    // confirmed map (they're going to be collections15 of values instead).
    public void verifyMap() {
        // TODO: implement test to ensure that map is the same as confirmed if
        // its values were converted into collections15.
    }

    public void verifyEntrySet() {
        // TODO: implement test to ensure that each entry is the same as one in
        // the confirmed map, but with the value wrapped in a collection.
    }

    // The verification method must be overridden because MultiHashMap's
    // values() is not properly backed by the map (Bug 9573).
    public void verifyValues() {
        // update the values view to the latest version, then proceed to verify
        // as usual.
        values = map.values();
        super.verifyValues();
    }

    //-----------------------------------------------------------------------
    // getCollection(key) must be the same live collection returned by get(key).
    public void testGetCollection() {
        MultiHashMap map = new MultiHashMap();
        map.put("A", "AA");
        assertSame(map.get("A"), map.getCollection("A"));
    }

    // totalSize() counts values across all keys, unlike size() which counts keys.
    public void testTotalSize() {
        MultiHashMap map = new MultiHashMap();
        assertEquals(0, map.totalSize());
        map.put("A", "AA");
        assertEquals(1, map.totalSize());
        map.put("B", "BA");
        assertEquals(2, map.totalSize());
        map.put("B", "BB");
        assertEquals(3, map.totalSize());
        map.put("B", "BC");
        assertEquals(4, map.totalSize());
        map.remove("A");
        assertEquals(3, map.totalSize());
        map.remove("B", "BC");
        assertEquals(2, map.totalSize());
    }

    // size(key) counts the values mapped to one key only.
    public void testSize_Key() {
        MultiHashMap map = new MultiHashMap();
        assertEquals(0, map.size("A"));
        assertEquals(0, map.size("B"));
        map.put("A", "AA");
        assertEquals(1, map.size("A"));
        assertEquals(0, map.size("B"));
        map.put("B", "BA");
        assertEquals(1, map.size("A"));
        assertEquals(1, map.size("B"));
        map.put("B", "BB");
        assertEquals(1, map.size("A"));
        assertEquals(2, map.size("B"));
        map.put("B", "BC");
        assertEquals(1, map.size("A"));
        assertEquals(3, map.size("B"));
        map.remove("A");
        assertEquals(0, map.size("A"));
        assertEquals(3, map.size("B"));
        map.remove("B", "BC");
        assertEquals(0, map.size("A"));
        assertEquals(2, map.size("B"));
    }

    // iterator(key) iterates only that key's values; empty for an absent key.
    public void testIterator_Key() {
        MultiHashMap map = new MultiHashMap();
        assertEquals(false, map.iterator("A").hasNext());
        map.put("A", "AA");
        Iterator it = map.iterator("A");
        assertEquals(true, it.hasNext());
        it.next();
        assertEquals(false, it.hasNext());
    }

    // containsValue(key, value) is scoped to a single key.
    public void testContainsValue_Key() {
        MultiHashMap map = new MultiHashMap();
        assertEquals(false, map.containsValue("A", "AA"));
        assertEquals(false, map.containsValue("B", "BB"));
        map.put("A", "AA");
        assertEquals(true, map.containsValue("A", "AA"));
        assertEquals(false, map.containsValue("A", "AB"));
    }

    // putAll(key, collection) returns whether the map changed; null and empty
    // collections must be no-ops that return false.
    public void testPutAll_KeyCollection() {
        MultiHashMap map = new MultiHashMap();
        Collection coll = Arrays.asList(new Object[]{"X", "Y", "Z"});

        assertEquals(true, map.putAll("A", coll));
        assertEquals(3, map.size("A"));
        assertEquals(true, map.containsValue("A", "X"));
        assertEquals(true, map.containsValue("A", "Y"));
        assertEquals(true, map.containsValue("A", "Z"));

        assertEquals(false, map.putAll("A", null));
        assertEquals(3, map.size("A"));
        assertEquals(true, map.containsValue("A", "X"));
        assertEquals(true, map.containsValue("A", "Y"));
        assertEquals(true, map.containsValue("A", "Z"));

        assertEquals(false, map.putAll("A", new ArrayList()));
        assertEquals(3, map.size("A"));
        assertEquals(true, map.containsValue("A", "X"));
        assertEquals(true, map.containsValue("A", "Y"));
        assertEquals(true, map.containsValue("A", "Z"));

        coll = Arrays.asList(new Object[]{"M"});
        assertEquals(true, map.putAll("A", coll));
        assertEquals(4, map.size("A"));
        assertEquals(true, map.containsValue("A", "X"));
        assertEquals(true, map.containsValue("A", "Y"));
        assertEquals(true, map.containsValue("A", "Z"));
        assertEquals(true, map.containsValue("A", "M"));
    }

    // clone() must deep-copy the value collections: mutating the original after
    // cloning must not affect the clone.
    public void testClone() {
        MultiHashMap map = new MultiHashMap();
        map.put("A", "1");
        map.put("A", "2");
        Collection coll = (Collection) map.get("A");
        assertEquals(1, map.size());
        assertEquals(2, coll.size());

        MultiHashMap cloned = (MultiHashMap) map.clone();
        Collection clonedColl = (Collection) cloned.get("A");
        assertNotSame(map, cloned);
        assertNotSame(coll, clonedColl);
        assertEquals(1, map.size());
        assertEquals(2, coll.size());
        assertEquals(1, cloned.size());
        assertEquals(2, clonedColl.size());
        map.put("A", "3");
        assertEquals(1, map.size());
        assertEquals(3, coll.size());
        assertEquals(1, cloned.size());
        assertEquals(2, clonedColl.size());
    }

    // Copy constructor from another MultiHashMap must also deep-copy the
    // value collections (same independence guarantee as clone()).
    public void testConstructorCopy1() {
        MultiHashMap map = new MultiHashMap();
        map.put("A", "1");
        map.put("A", "2");
        Collection coll = (Collection) map.get("A");
        assertEquals(1, map.size());
        assertEquals(2, coll.size());

        MultiHashMap newMap = new MultiHashMap(map);
        Collection newColl = (Collection) newMap.get("A");
        assertNotSame(map, newMap);
        assertNotSame(coll, newColl);
        assertEquals(1, map.size());
        assertEquals(2, coll.size());
        assertEquals(1, newMap.size());
        assertEquals(2, newColl.size());
        map.put("A", "3");
        assertEquals(1, map.size());
        assertEquals(3, coll.size());
        assertEquals(1, newMap.size());
        assertEquals(2, newColl.size());
    }

    // Copy constructor from a plain Map must wrap each single value in its own
    // collection, and stay independent of later changes to the source map.
    public void testConstructorCopy2() {
        Map map = new HashMap();
        map.put("A", "1");
        map.put("B", "2");
        assertEquals(2, map.size());

        MultiHashMap newMap = new MultiHashMap(map);
        Collection newColl = (Collection) newMap.get("A");
        assertNotSame(map, newMap);
        assertEquals(2, map.size());
        assertEquals(2, newMap.size());
        assertEquals(1, newColl.size());
        map.put("A", "3");
        assertEquals(2, map.size());
        assertEquals(2, newMap.size());
        assertEquals(1, newColl.size());
        map.put("C", "4");
        assertEquals(3, map.size());
        assertEquals(2, newMap.size());
        assertEquals(1, newColl.size());
    }
}
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.pinpoint.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;

/**
 * <p>
 * Specifies the content, including message variables and attributes, to use in a message that's sent directly to an
 * endpoint.
 * </p>
 *
 * NOTE: this class is machine-generated (see the @Generated annotation below); do not hand-edit — changes will be
 * overwritten on the next code generation.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/pinpoint-2016-12-01/EndpointSendConfiguration" target="_top">AWS
 *      API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class EndpointSendConfiguration implements Serializable, Cloneable, StructuredPojo {

    /**
     * <p>
     * The body of the message. If specified, this value overrides the default message body.
     * </p>
     */
    private String bodyOverride;
    /**
     * <p>
     * A map of custom attributes to attach to the message for the address. For a push notification, this payload is
     * added to the data.pinpoint object. For an email or text message, this payload is added to email/SMS delivery
     * receipt event attributes.
     * </p>
     */
    private java.util.Map<String, String> context;
    /**
     * <p>
     * The raw, JSON-formatted string to use as the payload for the message. If specified, this value overrides the
     * message.
     * </p>
     */
    private String rawContent;
    /**
     * <p>
     * A map of the message variables to merge with the variables specified for the default message
     * (DefaultMessage.Substitutions). The variables specified in this map take precedence over all other variables.
     * </p>
     */
    private java.util.Map<String, java.util.List<String>> substitutions;
    /**
     * <p>
     * The title or subject line of the message. If specified, this value overrides the default message title or subject
     * line.
     * </p>
     */
    private String titleOverride;

    /**
     * <p>
     * The body of the message. If specified, this value overrides the default message body.
     * </p>
     *
     * @param bodyOverride
     *        The body of the message. If specified, this value overrides the default message body.
     */
    public void setBodyOverride(String bodyOverride) {
        this.bodyOverride = bodyOverride;
    }

    /**
     * <p>
     * The body of the message. If specified, this value overrides the default message body.
     * </p>
     *
     * @return The body of the message. If specified, this value overrides the default message body.
     */
    public String getBodyOverride() {
        return this.bodyOverride;
    }

    /**
     * <p>
     * The body of the message. If specified, this value overrides the default message body.
     * </p>
     *
     * @param bodyOverride
     *        The body of the message. If specified, this value overrides the default message body.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public EndpointSendConfiguration withBodyOverride(String bodyOverride) {
        setBodyOverride(bodyOverride);
        return this;
    }

    /**
     * <p>
     * A map of custom attributes to attach to the message for the address. For a push notification, this payload is
     * added to the data.pinpoint object. For an email or text message, this payload is added to email/SMS delivery
     * receipt event attributes.
     * </p>
     *
     * @return A map of custom attributes to attach to the message for the address. For a push notification, this
     *         payload is added to the data.pinpoint object. For an email or text message, this payload is added to
     *         email/SMS delivery receipt event attributes.
     */
    public java.util.Map<String, String> getContext() {
        return context;
    }

    /**
     * <p>
     * A map of custom attributes to attach to the message for the address. For a push notification, this payload is
     * added to the data.pinpoint object. For an email or text message, this payload is added to email/SMS delivery
     * receipt event attributes.
     * </p>
     *
     * @param context
     *        A map of custom attributes to attach to the message for the address. For a push notification, this payload
     *        is added to the data.pinpoint object. For an email or text message, this payload is added to email/SMS
     *        delivery receipt event attributes.
     */
    public void setContext(java.util.Map<String, String> context) {
        this.context = context;
    }

    /**
     * <p>
     * A map of custom attributes to attach to the message for the address. For a push notification, this payload is
     * added to the data.pinpoint object. For an email or text message, this payload is added to email/SMS delivery
     * receipt event attributes.
     * </p>
     *
     * @param context
     *        A map of custom attributes to attach to the message for the address. For a push notification, this payload
     *        is added to the data.pinpoint object. For an email or text message, this payload is added to email/SMS
     *        delivery receipt event attributes.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public EndpointSendConfiguration withContext(java.util.Map<String, String> context) {
        setContext(context);
        return this;
    }

    // Convenience single-entry adder; lazily creates the map and rejects duplicate keys.
    public EndpointSendConfiguration addContextEntry(String key, String value) {
        if (null == this.context) {
            this.context = new java.util.HashMap<String, String>();
        }
        if (this.context.containsKey(key))
            throw new IllegalArgumentException("Duplicated keys (" + key.toString() + ") are provided.");
        this.context.put(key, value);
        return this;
    }

    /**
     * Removes all the entries added into Context.
     *
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public EndpointSendConfiguration clearContextEntries() {
        this.context = null;
        return this;
    }

    /**
     * <p>
     * The raw, JSON-formatted string to use as the payload for the message. If specified, this value overrides the
     * message.
     * </p>
     *
     * @param rawContent
     *        The raw, JSON-formatted string to use as the payload for the message. If specified, this value overrides
     *        the message.
     */
    public void setRawContent(String rawContent) {
        this.rawContent = rawContent;
    }

    /**
     * <p>
     * The raw, JSON-formatted string to use as the payload for the message. If specified, this value overrides the
     * message.
     * </p>
     *
     * @return The raw, JSON-formatted string to use as the payload for the message. If specified, this value overrides
     *         the message.
     */
    public String getRawContent() {
        return this.rawContent;
    }

    /**
     * <p>
     * The raw, JSON-formatted string to use as the payload for the message. If specified, this value overrides the
     * message.
     * </p>
     *
     * @param rawContent
     *        The raw, JSON-formatted string to use as the payload for the message. If specified, this value overrides
     *        the message.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public EndpointSendConfiguration withRawContent(String rawContent) {
        setRawContent(rawContent);
        return this;
    }

    /**
     * <p>
     * A map of the message variables to merge with the variables specified for the default message
     * (DefaultMessage.Substitutions). The variables specified in this map take precedence over all other variables.
     * </p>
     *
     * @return A map of the message variables to merge with the variables specified for the default message
     *         (DefaultMessage.Substitutions). The variables specified in this map take precedence over all other
     *         variables.
     */
    public java.util.Map<String, java.util.List<String>> getSubstitutions() {
        return substitutions;
    }

    /**
     * <p>
     * A map of the message variables to merge with the variables specified for the default message
     * (DefaultMessage.Substitutions). The variables specified in this map take precedence over all other variables.
     * </p>
     *
     * @param substitutions
     *        A map of the message variables to merge with the variables specified for the default message
     *        (DefaultMessage.Substitutions). The variables specified in this map take precedence over all other
     *        variables.
     */
    public void setSubstitutions(java.util.Map<String, java.util.List<String>> substitutions) {
        this.substitutions = substitutions;
    }

    /**
     * <p>
     * A map of the message variables to merge with the variables specified for the default message
     * (DefaultMessage.Substitutions). The variables specified in this map take precedence over all other variables.
     * </p>
     *
     * @param substitutions
     *        A map of the message variables to merge with the variables specified for the default message
     *        (DefaultMessage.Substitutions). The variables specified in this map take precedence over all other
     *        variables.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public EndpointSendConfiguration withSubstitutions(java.util.Map<String, java.util.List<String>> substitutions) {
        setSubstitutions(substitutions);
        return this;
    }

    // Convenience single-entry adder; lazily creates the map and rejects duplicate keys.
    public EndpointSendConfiguration addSubstitutionsEntry(String key, java.util.List<String> value) {
        if (null == this.substitutions) {
            this.substitutions = new java.util.HashMap<String, java.util.List<String>>();
        }
        if (this.substitutions.containsKey(key))
            throw new IllegalArgumentException("Duplicated keys (" + key.toString() + ") are provided.");
        this.substitutions.put(key, value);
        return this;
    }

    /**
     * Removes all the entries added into Substitutions.
     *
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public EndpointSendConfiguration clearSubstitutionsEntries() {
        this.substitutions = null;
        return this;
    }

    /**
     * <p>
     * The title or subject line of the message. If specified, this value overrides the default message title or subject
     * line.
     * </p>
     *
     * @param titleOverride
     *        The title or subject line of the message. If specified, this value overrides the default message title or
     *        subject line.
     */
    public void setTitleOverride(String titleOverride) {
        this.titleOverride = titleOverride;
    }

    /**
     * <p>
     * The title or subject line of the message. If specified, this value overrides the default message title or subject
     * line.
     * </p>
     *
     * @return The title or subject line of the message. If specified, this value overrides the default message title or
     *         subject line.
     */
    public String getTitleOverride() {
        return this.titleOverride;
    }

    /**
     * <p>
     * The title or subject line of the message. If specified, this value overrides the default message title or subject
     * line.
     * </p>
     *
     * @param titleOverride
     *        The title or subject line of the message. If specified, this value overrides the default message title or
     *        subject line.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public EndpointSendConfiguration withTitleOverride(String titleOverride) {
        setTitleOverride(titleOverride);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getBodyOverride() != null)
            sb.append("BodyOverride: ").append(getBodyOverride()).append(",");
        if (getContext() != null)
            sb.append("Context: ").append(getContext()).append(",");
        if (getRawContent() != null)
            sb.append("RawContent: ").append(getRawContent()).append(",");
        if (getSubstitutions() != null)
            sb.append("Substitutions: ").append(getSubstitutions()).append(",");
        if (getTitleOverride() != null)
            sb.append("TitleOverride: ").append(getTitleOverride());
        sb.append("}");
        return sb.toString();
    }

    // Generated field-by-field equality: the XOR checks catch the "exactly one side null" case
    // for each property before the equals comparison.
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof EndpointSendConfiguration == false)
            return false;
        EndpointSendConfiguration other = (EndpointSendConfiguration) obj;
        if (other.getBodyOverride() == null ^ this.getBodyOverride() == null)
            return false;
        if (other.getBodyOverride() != null && other.getBodyOverride().equals(this.getBodyOverride()) == false)
            return false;
        if (other.getContext() == null ^ this.getContext() == null)
            return false;
        if (other.getContext() != null && other.getContext().equals(this.getContext()) == false)
            return false;
        if (other.getRawContent() == null ^ this.getRawContent() == null)
            return false;
        if (other.getRawContent() != null && other.getRawContent().equals(this.getRawContent()) == false)
            return false;
        if (other.getSubstitutions() == null ^ this.getSubstitutions() == null)
            return false;
        if (other.getSubstitutions() != null && other.getSubstitutions().equals(this.getSubstitutions()) == false)
            return false;
        if (other.getTitleOverride() == null ^ this.getTitleOverride() == null)
            return false;
        if (other.getTitleOverride() != null && other.getTitleOverride().equals(this.getTitleOverride()) == false)
            return false;
        return true;
    }

    // Generated 31-based hash over the same properties compared in equals().
    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getBodyOverride() == null) ? 0 : getBodyOverride().hashCode());
        hashCode = prime * hashCode + ((getContext() == null) ? 0 : getContext().hashCode());
        hashCode = prime * hashCode + ((getRawContent() == null) ? 0 : getRawContent().hashCode());
        hashCode = prime * hashCode + ((getSubstitutions() == null) ? 0 : getSubstitutions().hashCode());
        hashCode = prime * hashCode + ((getTitleOverride() == null) ? 0 : getTitleOverride().hashCode());
        return hashCode;
    }

    // Shallow clone via Object.clone(); Cloneable is implemented so the exception path is unreachable.
    @Override
    public EndpointSendConfiguration clone() {
        try {
            return (EndpointSendConfiguration) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.pinpoint.model.transform.EndpointSendConfigurationMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */

package org.apache.geode.modules.session.catalina;

import static org.apache.geode.cache.Region.SEPARATOR;
import static org.apache.geode.util.internal.UncheckedUtils.uncheckedCast;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.mockito.Mockito.RETURNS_DEEP_STUBS;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import javax.servlet.http.HttpSession;

import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentCaptor;

import org.apache.geode.Statistics;
import org.apache.geode.cache.AttributesMutator;
import org.apache.geode.cache.CacheListener;
import org.apache.geode.cache.DataPolicy;
import org.apache.geode.cache.InterestResultPolicy;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.RegionAttributes;
import org.apache.geode.cache.RegionShortcut;
import org.apache.geode.cache.client.ClientCache;
import org.apache.geode.cache.client.ClientRegionFactory;
import org.apache.geode.cache.client.ClientRegionShortcut;
import org.apache.geode.cache.client.internal.InternalClientCache;
import org.apache.geode.cache.client.internal.PoolImpl;
import org.apache.geode.cache.execute.Function;
import org.apache.geode.cache.execute.FunctionException;
import org.apache.geode.cache.execute.ResultCollector;
import org.apache.geode.internal.cache.GemFireCacheImpl;
import org.apache.geode.modules.session.catalina.callback.SessionExpirationCacheListener;
import org.apache.geode.modules.util.BootstrappingFunction;
import org.apache.geode.modules.util.CreateRegionFunction;
import org.apache.geode.modules.util.DebugCacheListener;
import org.apache.geode.modules.util.RegionConfiguration;
import org.apache.geode.modules.util.RegionStatus;
import org.apache.geode.modules.util.SessionCustomExpiry;
import org.apache.geode.modules.util.TouchPartitionedRegionEntriesFunction;
import org.apache.geode.modules.util.TouchReplicatedRegionEntriesFunction;

/**
 * Unit tests for {@link ClientServerSessionCache}, covering initialization (region
 * creation/retrieval on client and servers), touchSessions function dispatch for
 * partitioned vs. replicated regions, backing-cache availability, and interest
 * registration for the session region.
 *
 * NOTE(review): fixtures such as {@code sessionCache}, {@code sessionManager},
 * {@code emptyExecution}, {@code logger}, {@code sessionRegion}, {@code distributedSystem}
 * and {@code sessionRegionName} are inherited from {@code AbstractSessionCacheTest},
 * which is not visible in this file.
 */
public class ClientServerSessionCacheTest extends AbstractSessionCacheTest {

  // Mutable result list returned by the mocked ResultCollector; setUp seeds it with
  // RegionStatus.VALID and individual tests override it as needed.
  private final List<Object> regionStatusResultList = new ArrayList<>();
  // Mocked as GemFireCacheImpl so it can also be cast to InternalClientCache in setUp.
  private final ClientCache cache = mock(GemFireCacheImpl.class);
  private final ResultCollector<Object, List<Object>> collector =
      uncheckedCast(mock(ResultCollector.class));
  private final Statistics stats = mock(Statistics.class);
  private final ClientRegionFactory<String, HttpSession> regionFactory =
      uncheckedCast(mock(ClientRegionFactory.class));
  private final RegionAttributes<String, HttpSession> attributes =
      uncheckedCast(mock(RegionAttributes.class));

  /**
   * Builds a spied ClientServerSessionCache whose server-side function executions are
   * all stubbed to the shared {@code emptyExecution}, and wires happy-path defaults:
   * local caching enabled, default max-inactive interval, and a VALID region status
   * result from the server.
   */
  @Before
  public void setUp() {
    sessionCache = spy(new ClientServerSessionCache(sessionManager, cache));
    // Stub every server-execution entry point so no real function execution happens.
    doReturn(emptyExecution).when((ClientServerSessionCache) sessionCache)
        .getExecutionForFunctionOnServers();
    doReturn(emptyExecution).when((ClientServerSessionCache) sessionCache)
        .getExecutionForFunctionOnServersWithArguments(any());
    doReturn(emptyExecution).when((ClientServerSessionCache) sessionCache)
        .getExecutionForFunctionOnServerWithRegionConfiguration(any());
    doReturn(emptyExecution).when((ClientServerSessionCache) sessionCache)
        .getExecutionForFunctionOnRegionWithFilter(any());

    when(sessionManager.getLogger()).thenReturn(logger);
    when(sessionManager.getEnableLocalCache()).thenReturn(true);
    when(sessionManager.getRegionName()).thenReturn(sessionRegionName);
    when(sessionManager.getMaxInactiveInterval())
        .thenReturn(RegionConfiguration.DEFAULT_MAX_INACTIVE_INTERVAL);

    when(cache.getDistributedSystem()).thenReturn(distributedSystem);
    doReturn(regionFactory).when(cache)
        .createClientRegionFactory(ClientRegionShortcut.CACHING_PROXY_HEAP_LRU);
    doReturn(sessionRegion).when(regionFactory).create(any());
    when(((InternalClientCache) cache).isClient()).thenReturn(true);

    when(emptyExecution.execute(any(Function.class))).thenReturn(collector);
    when(emptyExecution.execute(any(String.class))).thenReturn(collector);

    when(collector.getResult()).thenReturn(regionStatusResultList);
    when(distributedSystem.createAtomicStatistics(any(), any())).thenReturn(stats);

    // Default server response: region creation/validation succeeded.
    regionStatusResultList.clear();
    regionStatusResultList.add(RegionStatus.VALID);
  }

  // Happy path: initialize() bootstraps the servers, creates the region server-side,
  // and creates a local CACHING_PROXY_HEAP_LRU region without expiration wiring
  // (max-inactive interval is the default).
  @Test
  public void initializeSessionCacheSucceeds() {
    sessionCache.initialize();

    verify(emptyExecution).execute(any(BootstrappingFunction.class));
    verify(emptyExecution).execute(CreateRegionFunction.ID);
    verify(cache).createClientRegionFactory(ClientRegionShortcut.CACHING_PROXY_HEAP_LRU);
    verify(regionFactory, times(0)).setStatisticsEnabled(true);
    verify(regionFactory, times(0)).setCustomEntryIdleTimeout(any(SessionCustomExpiry.class));
    verify(regionFactory, times(0)).addCacheListener(any(SessionExpirationCacheListener.class));
    verify(regionFactory).create(sessionRegionName);
  }

  // A FunctionException from the bootstrapping function is logged as a warning,
  // not rethrown.
  @Test
  public void bootstrappingFunctionThrowsException() {
    final FunctionException exception = new FunctionException();
    final ResultCollector<Object, List<Object>> exceptionCollector =
        uncheckedCast(mock(ResultCollector.class));

    when(emptyExecution.execute(new BootstrappingFunction())).thenReturn(exceptionCollector);
    when(exceptionCollector.getResult()).thenThrow(exception);

    sessionCache.initialize();

    verify(logger).warn("Caught unexpected exception:", exception);
  }

  // A failure creating the local region is logged fatally and surfaces as an
  // IllegalStateException wrapping the original cause.
  @Test
  public void createOrRetrieveRegionThrowsException() {
    final RuntimeException exception = new RuntimeException();
    doThrow(exception).when((ClientServerSessionCache) sessionCache).createLocalSessionRegion();

    assertThatThrownBy(() -> sessionCache.initialize()).hasCause(exception)
        .isInstanceOf(IllegalStateException.class);

    verify(logger).fatal("Unable to create or retrieve region", exception);
  }

  // An INVALID region status from the server turns into a fatal log plus an
  // IllegalStateException whose message names the failing region.
  @Test
  public void createRegionFunctionFailsOnServer() {
    final ArgumentCaptor<String> stringCaptor = ArgumentCaptor.forClass(String.class);
    regionStatusResultList.clear();
    regionStatusResultList.add(RegionStatus.INVALID);

    assertThatThrownBy(() -> sessionCache.initialize()).isInstanceOf(IllegalStateException.class)
        .hasCauseInstanceOf(IllegalStateException.class).hasMessageContaining(
            "An exception occurred on the server while attempting to create or validate region named "
                + sessionRegionName + ". See the server log for additional details.");

    verify(logger).fatal(stringCaptor.capture(), any(Exception.class));
    assertThat(stringCaptor.getValue()).isEqualTo("Unable to create or retrieve region");
  }

  // A non-default max-inactive interval must enable statistics, custom expiry,
  // and the expiration listener on the local region factory.
  @Test
  public void nonDefaultMaxTimeoutIntervalSetsExpirationDetails() {
    // Setting the mocked return value of getMaxInactiveInterval to something distinctly not equal
    // to the default
    when(sessionManager.getMaxInactiveInterval())
        .thenReturn(RegionConfiguration.DEFAULT_MAX_INACTIVE_INTERVAL + 1);

    sessionCache.initialize();

    verify(regionFactory).setStatisticsEnabled(true);
    verify(regionFactory).setCustomEntryIdleTimeout(any(SessionCustomExpiry.class));
    verify(regionFactory).addCacheListener(any(SessionExpirationCacheListener.class));
  }

  // With local caching disabled, a PROXY region is created and interest is
  // registered for all keys.
  @Test
  public void createLocalSessionRegionWithoutEnableLocalCache() {
    when(sessionManager.getEnableLocalCache()).thenReturn(false);
    doReturn(regionFactory).when(cache).createClientRegionFactory(ClientRegionShortcut.PROXY);
    when(regionFactory.create(sessionRegionName)).thenReturn(sessionRegion);

    sessionCache.initialize();

    verify(regionFactory).addCacheListener(any(SessionExpirationCacheListener.class));
    verify(sessionRegion).registerInterestForAllKeys(InterestResultPolicy.KEYS);
  }

  // If the session region already exists (with its expiration listener attached),
  // initialize() must not create it again locally or on the servers.
  @Test
  public void createOrRetrieveRegionWithNonNullSessionRegionDoesNotCreateRegion() {
    final CacheListener<String, HttpSession>[] cacheListeners =
        uncheckedCast(new CacheListener[] {new SessionExpirationCacheListener()});
    doReturn(sessionRegion).when(cache).getRegion(sessionRegionName);
    doReturn(attributes).when(sessionRegion).getAttributes();
    doReturn(cacheListeners).when(attributes).getCacheListeners();

    sessionCache.initialize();

    verify((ClientServerSessionCache) sessionCache, times(0)).createSessionRegionOnServers();
    verify((ClientServerSessionCache) sessionCache, times(0)).createLocalSessionRegion();
  }

  // An existing region lacking a SessionExpirationCacheListener gets one added via
  // its attributes mutator.
  @Test
  public void createOrRetrieveRegionWithNonNullSessionRegionAndNoSessionExpirationCacheListenerCreatesListener() {
    final CacheListener<String, HttpSession>[] cacheListeners =
        uncheckedCast(new CacheListener[] {new DebugCacheListener()});
    final AttributesMutator<String, HttpSession> attributesMutator =
        uncheckedCast(mock(AttributesMutator.class));
    doReturn(sessionRegion).when(cache).getRegion(sessionRegionName);
    doReturn(attributes).when(sessionRegion).getAttributes();
    doReturn(cacheListeners).when(attributes).getCacheListeners();
    doReturn(attributesMutator).when(sessionRegion).getAttributesMutator();

    sessionCache.initialize();

    verify(attributesMutator).addCacheListener(any(SessionExpirationCacheListener.class));
  }

  // An existing region with a non-EMPTY data policy registers interest for all keys.
  @Test
  public void createOrRetrieveRegionWithNonNullSessionProxyRegionRegistersInterestForAllKeys() {
    final CacheListener<String, HttpSession>[] cacheListeners =
        uncheckedCast(new CacheListener[] {new SessionExpirationCacheListener()});
    doReturn(sessionRegion).when(cache).getRegion(sessionRegionName);
    doReturn(attributes).when(sessionRegion).getAttributes();
    doReturn(cacheListeners).when(attributes).getCacheListeners();
    when(attributes.getDataPolicy()).thenReturn(DataPolicy.DEFAULT);

    sessionCache.initialize();

    verify(sessionRegion).registerInterestForAllKeys(InterestResultPolicy.KEYS);
  }

  // An existing region with an EMPTY data policy must NOT register interest.
  @Test
  public void createOrRetrieveRegionWithNonNullSessionProxyRegionNotRegistersInterestIfEmpty() {
    final CacheListener<String, HttpSession>[] cacheListeners =
        uncheckedCast(new CacheListener[] {new SessionExpirationCacheListener()});
    doReturn(sessionRegion).when(cache).getRegion(sessionRegionName);
    doReturn(attributes).when(sessionRegion).getAttributes();
    doReturn(cacheListeners).when(attributes).getCacheListeners();
    when(attributes.getDataPolicy()).thenReturn(DataPolicy.EMPTY);

    sessionCache.initialize();

    verify(sessionRegion, never()).registerInterestForAllKeys(InterestResultPolicy.KEYS);
  }

  // Partitioned region: touchSessions dispatches the PR touch function.
  @Test
  public void touchSessionsInvokesPRFunctionForPRAndDoesNotThrowExceptionWhenFunctionDoesNotThrowException() {
    final Set<String> sessionIds = new HashSet<>();

    when(sessionManager.getRegionAttributesId()).thenReturn(RegionShortcut.PARTITION.toString());

    sessionCache.touchSessions(sessionIds);

    verify(emptyExecution).execute(TouchPartitionedRegionEntriesFunction.ID);
  }

  // Partitioned region: a FunctionException from the touch function is logged as
  // a warning rather than propagated (despite the test's legacy name).
  @Test
  public void touchSessionsInvokesPRFunctionForPRAndThrowsExceptionWhenFunctionThrowsException() {
    final Set<String> sessionIds = new HashSet<>();
    final FunctionException exception = new FunctionException();
    final ResultCollector<Object, List<Object>> exceptionCollector =
        uncheckedCast(mock(ResultCollector.class));

    when(sessionManager.getRegionAttributesId()).thenReturn(RegionShortcut.PARTITION.toString());
    when(emptyExecution.execute(TouchPartitionedRegionEntriesFunction.ID))
        .thenReturn(exceptionCollector);
    when(exceptionCollector.getResult()).thenThrow(exception);

    sessionCache.touchSessions(sessionIds);

    verify(logger).warn("Caught unexpected exception:", exception);
  }

  // Replicated region: touchSessions dispatches the RR touch function.
  @Test
  public void touchSessionsInvokesRRFunctionForRRAndDoesNotThrowExceptionWhenFunctionDoesNotThrowException() {
    // Need to invoke this to set the session region
    when(regionFactory.create(sessionRegionName)).thenReturn(sessionRegion);
    sessionCache.initialize();

    final Set<String> sessionIds = new HashSet<>();
    when(sessionRegion.getFullPath()).thenReturn(SEPARATOR + sessionRegionName);
    when(sessionManager.getRegionAttributesId()).thenReturn(RegionShortcut.REPLICATE.toString());

    sessionCache.touchSessions(sessionIds);

    verify(emptyExecution).execute(TouchReplicatedRegionEntriesFunction.ID);
  }

  // Replicated region: a FunctionException from the touch function is logged as
  // a warning rather than propagated (despite the test's legacy name).
  @Test
  public void touchSessionsInvokesRRFunctionForRRAndThrowsExceptionWhenFunctionThrowsException() {
    // Need to invoke this to set the session region
    when(regionFactory.create(sessionRegionName)).thenReturn(sessionRegion);
    sessionCache.initialize();

    final Set<String> sessionIds = new HashSet<>();
    final FunctionException exception = new FunctionException();
    final ResultCollector<Object, List<Object>> exceptionCollector =
        uncheckedCast(mock(ResultCollector.class));
    when(sessionRegion.getFullPath()).thenReturn(SEPARATOR + sessionRegionName);
    when(sessionManager.getRegionAttributesId()).thenReturn(RegionShortcut.REPLICATE.toString());
    when(emptyExecution.execute(TouchReplicatedRegionEntriesFunction.ID))
        .thenReturn(exceptionCollector);
    when(exceptionCollector.getResult()).thenThrow(exception);

    sessionCache.touchSessions(sessionIds);

    verify(logger).warn("Caught unexpected exception:", exception);
  }

  // With commit-valve failfast disabled (default stub), the backing cache is
  // always reported available.
  @Test
  public void isBackingCacheEnabledReturnsTrueWhenCommitValveFailfastDisabled() {
    assertThat(sessionCache.isBackingCacheAvailable()).isTrue();
  }

  // With failfast enabled, availability mirrors the pool's primary-updater liveness.
  @Test
  public void isBackingCacheEnabledReturnsValueWhenCommitValveFailfastEnabled() {
    final boolean backingCacheEnabled = false;
    final PoolImpl pool = mock(PoolImpl.class);
    when(sessionManager.isCommitValveFailfastEnabled()).thenReturn(true);
    doReturn(pool).when((ClientServerSessionCache) sessionCache).findPoolInPoolManager();
    when(pool.isPrimaryUpdaterAlive()).thenReturn(backingCacheEnabled);

    assertThat(sessionCache.isBackingCacheAvailable()).isEqualTo(backingCacheEnabled);
  }

  // createLocalSessionRegionWithRegisterInterest registers interest on the newly
  // created region (deep-stubbed region, so attribute lookups return non-EMPTY defaults).
  @Test
  public void registerInterestForSessionRegion() {
    final SessionManager manager = mock(SessionManager.class);
    final ClientCache clientCache = mock(ClientCache.class);
    final Region<?, ?> region = mock(Region.class, RETURNS_DEEP_STUBS);
    final ClientServerSessionCache cache = spy(new ClientServerSessionCache(manager, clientCache));
    doReturn(region).when(cache).createLocalSessionRegion();

    cache.createLocalSessionRegionWithRegisterInterest();

    verify(region).registerInterestForAllKeys(InterestResultPolicy.KEYS);
  }

  // With an EMPTY data policy (no local storage), no interest is registered.
  @Test
  public void doesNotRegisterInterestIfLocalCacheNotEnabled() {
    final SessionManager manager = mock(SessionManager.class);
    final ClientCache clientCache = mock(ClientCache.class);
    final Region<?, ?> region = mock(Region.class);
    final RegionAttributes<?, ?> attributes = mock(RegionAttributes.class);
    final ClientServerSessionCache cache = spy(new ClientServerSessionCache(manager, clientCache));
    doReturn(region).when(cache).createLocalSessionRegion();
    doReturn(attributes).when(region).getAttributes();
    doReturn(DataPolicy.EMPTY).when(attributes).getDataPolicy();

    cache.createLocalSessionRegionWithRegisterInterest();

    verify(region, never()).registerInterestForAllKeys(InterestResultPolicy.KEYS);
  }
}
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package javax.xml.bind; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.UnsupportedEncodingException; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.net.URL; import java.util.Map; import java.util.Properties; import java.util.StringTokenizer; import java.util.logging.ConsoleHandler; import java.util.logging.Level; import java.util.logging.Logger; import java.security.AccessController; import java.security.PrivilegedAction; import static javax.xml.bind.JAXBContext.JAXB_CONTEXT_FACTORY; //import java.lang.reflect.InvocationTargetException; /** * This class is package private and therefore is not exposed as part of the * JAXB API. 
* * This code is designed to implement the JAXB 1.0 spec pluggability feature * * @author <ul><li>Ryan Shoemaker, Sun Microsystems, Inc.</li></ul> * @see JAXBContext */ class ContextFinder { private static final Logger logger; static { logger = Logger.getLogger("javax.xml.bind"); try { if (AccessController.doPrivileged(new GetPropertyAction("jaxb.debug")) != null) { // disconnect the logger from a bigger framework (if any) // and take the matters into our own hands logger.setUseParentHandlers(false); logger.setLevel(Level.ALL); ConsoleHandler handler = new ConsoleHandler(); handler.setLevel(Level.ALL); logger.addHandler(handler); } else { // don't change the setting of this logger // to honor what other frameworks // have done on configurations. } } catch(Throwable t) { // just to be extra safe. in particular System.getProperty may throw // SecurityException. } } /** * If the {@link InvocationTargetException} wraps an exception that shouldn't be wrapped, * throw the wrapped exception. */ private static void handleInvocationTargetException(InvocationTargetException x) throws JAXBException { Throwable t = x.getTargetException(); if( t != null ) { if( t instanceof JAXBException ) // one of our exceptions, just re-throw throw (JAXBException)t; if( t instanceof RuntimeException ) // avoid wrapping exceptions unnecessarily throw (RuntimeException)t; if( t instanceof Error ) throw (Error)t; } } /** * Determine if two types (JAXBContext in this case) will generate a ClassCastException. * * For example, (targetType)originalType * * @param originalType * The Class object of the type being cast * @param targetType * The Class object of the type that is being cast to * @return JAXBException to be thrown. */ private static JAXBException handleClassCastException(Class originalType, Class targetType) { final URL targetTypeURL = which(targetType); ClassLoader cl = originalType.getClassLoader() != null ? 
originalType.getClassLoader() : ClassLoader.getSystemClassLoader(); return new JAXBException(Messages.format(Messages.ILLEGAL_CAST, // we don't care where the impl class is, we want to know where JAXBContext lives in the impl // class' ClassLoader cl.getResource("javax/xml/bind/JAXBContext.class"), targetTypeURL)); } /** * Create an instance of a class using the specified ClassLoader */ static JAXBContext newInstance( String contextPath, String className, ClassLoader classLoader, Map properties ) throws JAXBException { try { Class spiClass = safeLoadClass(className,classLoader); /* * javax.xml.bind.context.factory points to a class which has a * static method called 'createContext' that * returns a javax.xml.JAXBContext. */ Object context = null; // first check the method that takes Map as the third parameter. // this is added in 2.0. try { Method m = spiClass.getMethod("createContext",String.class,ClassLoader.class,Map.class); // Throw an early exception instead of having an exception thrown in the createContext method if (m.getReturnType() != JAXBContext.class) { throw handleClassCastException(m.getReturnType(), JAXBContext.class); } // any failure in invoking this method would be considered fatal context = m.invoke(null,contextPath,classLoader,properties); } catch (NoSuchMethodException e) { // it's not an error for the provider not to have this method. } if(context==null) { // try the old method that doesn't take properties. compatible with 1.0. // it is an error for an implementation not to have both forms of the createContext method. 
Method m = spiClass.getMethod("createContext",String.class,ClassLoader.class); // Throw an early exception instead of having an exception thrown in the createContext method if (m.getReturnType() != JAXBContext.class) { throw handleClassCastException(m.getReturnType(), JAXBContext.class); } // any failure in invoking this method would be considered fatal context = m.invoke(null,contextPath,classLoader); } if(!(context instanceof JAXBContext)) { // the cast would fail, so generate an exception with a nice message throw handleClassCastException(context.getClass(), JAXBContext.class); } return (JAXBContext)context; } catch (ClassNotFoundException x) { throw new JAXBException( Messages.format( Messages.PROVIDER_NOT_FOUND, className ), x); } catch (InvocationTargetException x) { handleInvocationTargetException(x); // for other exceptions, wrap the internal target exception // with a JAXBException Throwable e = x; if(x.getTargetException()!=null) e = x.getTargetException(); throw new JAXBException( Messages.format( Messages.COULD_NOT_INSTANTIATE, className, e ), e ); } catch (RuntimeException x) { // avoid wrapping RuntimeException to JAXBException, // because it indicates a bug in this code. throw x; } catch (Exception x) { // can't catch JAXBException because the method is hidden behind // reflection. 
Root element collisions detected in the call to // createContext() are reported as JAXBExceptions - just re-throw it // some other type of exception - just wrap it throw new JAXBException( Messages.format( Messages.COULD_NOT_INSTANTIATE, className, x ), x); } } /** * Create an instance of a class using the specified ClassLoader */ static JAXBContext newInstance( Class[] classes, Map properties, String className) throws JAXBException { ClassLoader cl = Thread.currentThread().getContextClassLoader(); Class spi; try { spi = safeLoadClass(className,cl); } catch (ClassNotFoundException e) { throw new JAXBException(e); } if(logger.isLoggable(Level.FINE)) { // extra check to avoid costly which operation if not logged logger.fine("loaded "+className+" from "+which(spi)); } Method m; try { m = spi.getMethod("createContext", Class[].class, Map.class); } catch (NoSuchMethodException e) { throw new JAXBException(e); } // Fallback for JAXB 1.0 compatibility (at least JAXB TCK tests are using that feature) try { Object context = m.invoke(null, classes, properties); if(!(context instanceof JAXBContext)) { // the cast would fail, so generate an exception with a nice message throw handleClassCastException(context.getClass(), JAXBContext.class); } return (JAXBContext)context; } catch (IllegalAccessException e) { throw new JAXBException(e); } catch (InvocationTargetException e) { handleInvocationTargetException(e); Throwable x = e; if (e.getTargetException() != null) x = e.getTargetException(); throw new JAXBException(x); } } static JAXBContext find(String factoryId, String contextPath, ClassLoader classLoader, Map properties ) throws JAXBException { // TODO: do we want/need another layer of searching in $java.home/lib/jaxb.properties like JAXP? 
final String jaxbContextFQCN = JAXBContext.class.getName(); // search context path for jaxb.properties first StringBuilder propFileName; StringTokenizer packages = new StringTokenizer( contextPath, ":" ); String factoryClassName; if(!packages.hasMoreTokens()) // no context is specified throw new JAXBException(Messages.format(Messages.NO_PACKAGE_IN_CONTEXTPATH)); logger.fine("Searching jaxb.properties"); while( packages.hasMoreTokens() ) { String packageName = packages.nextToken(":").replace('.','/'); // com.acme.foo - > com/acme/foo/jaxb.properties propFileName = new StringBuilder().append(packageName).append("/jaxb.properties"); Properties props = loadJAXBProperties( classLoader, propFileName.toString() ); if (props != null) { if (props.containsKey(factoryId)) { factoryClassName = props.getProperty(factoryId); return newInstance( contextPath, factoryClassName, classLoader, properties ); } else { throw new JAXBException(Messages.format(Messages.MISSING_PROPERTY, packageName, factoryId)); } } } logger.fine("Searching the system property"); // search for a system property second (javax.xml.bind.JAXBContext) factoryClassName = AccessController.doPrivileged(new GetPropertyAction(jaxbContextFQCN)); if( factoryClassName != null ) { return newInstance( contextPath, factoryClassName, classLoader, properties ); } logger.fine("Searching META-INF/services"); // search META-INF services next BufferedReader r; try { final StringBuilder resource = new StringBuilder().append("META-INF/services/").append(jaxbContextFQCN); final InputStream resourceStream = classLoader.getResourceAsStream(resource.toString()); if (resourceStream != null) { r = new BufferedReader(new InputStreamReader(resourceStream, "UTF-8")); factoryClassName = r.readLine().trim(); r.close(); return newInstance(contextPath, factoryClassName, classLoader, properties); } else { logger.fine("Unable to load:" + resource.toString()); } } catch (UnsupportedEncodingException e) { // should never happen throw new 
JAXBException(e); } catch (IOException e) { throw new JAXBException(e); } // else no provider found logger.fine("Trying to create the platform default provider"); return newInstance(contextPath, PLATFORM_DEFAULT_FACTORY_CLASS, classLoader, properties); } // TODO: log each step in the look up process static JAXBContext find( Class[] classes, Map properties ) throws JAXBException { // TODO: do we want/need another layer of searching in $java.home/lib/jaxb.properties like JAXP? final String jaxbContextFQCN = JAXBContext.class.getName(); String factoryClassName; // search for jaxb.properties in the class loader of each class first for (final Class c : classes) { // this classloader is used only to load jaxb.properties, so doing this should be safe. ClassLoader classLoader = AccessController.doPrivileged(new PrivilegedAction<ClassLoader>() { public ClassLoader run() { return c.getClassLoader(); } }); Package pkg = c.getPackage(); if(pkg==null) continue; // this is possible for primitives, arrays, and classes that are loaded by poorly implemented ClassLoaders String packageName = pkg.getName().replace('.', '/'); // TODO: do we want to optimize away searching the same package? 
org.Foo, org.Bar, com.Baz // classes from the same package might come from different class loades, so it might be a bad idea // TODO: it's easier to look things up from the class // c.getResourceAsStream("jaxb.properties"); // build the resource name and use the property loader code String resourceName = packageName+"/jaxb.properties"; logger.fine("Trying to locate "+resourceName); Properties props = loadJAXBProperties(classLoader, resourceName); if (props == null) { logger.fine(" not found"); } else { logger.fine(" found"); if (props.containsKey(JAXB_CONTEXT_FACTORY)) { // trim() seems redundant, but adding to satisfy customer complaint factoryClassName = props.getProperty(JAXB_CONTEXT_FACTORY).trim(); return newInstance(classes, properties, factoryClassName); } else { throw new JAXBException(Messages.format(Messages.MISSING_PROPERTY, packageName, JAXB_CONTEXT_FACTORY)); } } } // search for a system property second (javax.xml.bind.JAXBContext) logger.fine("Checking system property "+jaxbContextFQCN); factoryClassName = AccessController.doPrivileged(new GetPropertyAction(jaxbContextFQCN)); if( factoryClassName != null ) { logger.fine(" found "+factoryClassName); return newInstance( classes, properties, factoryClassName ); } logger.fine(" not found"); // search META-INF services next logger.fine("Checking META-INF/services"); BufferedReader r; try { final String resource = new StringBuilder("META-INF/services/").append(jaxbContextFQCN).toString(); ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); URL resourceURL; if(classLoader==null) resourceURL = ClassLoader.getSystemResource(resource); else resourceURL = classLoader.getResource(resource); if (resourceURL != null) { logger.fine("Reading "+resourceURL); r = new BufferedReader(new InputStreamReader(resourceURL.openStream(), "UTF-8")); factoryClassName = r.readLine().trim(); return newInstance(classes, properties, factoryClassName); } else { logger.fine("Unable to find: " + resource); } } 
catch (UnsupportedEncodingException e) { // should never happen throw new JAXBException(e); } catch (IOException e) { throw new JAXBException(e); } // else no provider found logger.fine("Trying to create the platform default provider"); return newInstance(classes, properties, PLATFORM_DEFAULT_FACTORY_CLASS); } private static Properties loadJAXBProperties( ClassLoader classLoader, String propFileName ) throws JAXBException { Properties props = null; try { URL url; if(classLoader==null) url = ClassLoader.getSystemResource(propFileName); else url = classLoader.getResource( propFileName ); if( url != null ) { logger.fine("loading props from "+url); props = new Properties(); InputStream is = url.openStream(); props.load( is ); is.close(); } } catch( IOException ioe ) { logger.log(Level.FINE,"Unable to load "+propFileName,ioe); throw new JAXBException( ioe.toString(), ioe ); } return props; } /** * Search the given ClassLoader for an instance of the specified class and * return a string representation of the URL that points to the resource. * * @param clazz * The class to search for * @param loader * The ClassLoader to search. If this parameter is null, then the * system class loader will be searched * @return * the URL for the class or null if it wasn't found */ static URL which(Class clazz, ClassLoader loader) { String classnameAsResource = clazz.getName().replace('.', '/') + ".class"; if(loader == null) { loader = ClassLoader.getSystemClassLoader(); } return loader.getResource(classnameAsResource); } /** * Get the URL for the Class from it's ClassLoader. * * Convenience method for {@link #which(Class, ClassLoader)}. * * Equivalent to calling: which(clazz, clazz.getClassLoader()) * * @param clazz * The class to search for * @return * the URL for the class or null if it wasn't found */ static URL which(Class clazz) { return which(clazz, clazz.getClassLoader()); } /** * When JAXB is in J2SE, rt.jar has to have a JAXB implementation. 
* However, rt.jar cannot have META-INF/services/javax.xml.bind.JAXBContext * because if it has, it will take precedence over any file that applications have * in their jar files. * * <p> * When the user bundles his own JAXB implementation, we'd like to use it, and we * want the platform default to be used only when there's no other JAXB provider. * * <p> * For this reason, we have to hard-code the class name into the API. */ private static final String PLATFORM_DEFAULT_FACTORY_CLASS = "com.sun.xml.internal.bind.v2.ContextFactory"; /** * Loads the class, provided that the calling thread has an access to the class being loaded. */ private static Class safeLoadClass(String className, ClassLoader classLoader) throws ClassNotFoundException { // using Osig locator to load the spi class try { Class spiClass = org.apache.servicemix.specs.locator.OsgiLocator.locate(JAXBContext.class); if (spiClass != null) { return spiClass; } } catch (Throwable t) { } logger.fine("Trying to load "+className); try { // make sure that the current thread has an access to the package of the given name. SecurityManager s = System.getSecurityManager(); if (s != null) { int i = className.lastIndexOf('.'); if (i != -1) { s.checkPackageAccess(className.substring(0,i)); } } if (classLoader == null) { return Class.forName(className); } else { return classLoader.loadClass(className); } } catch (SecurityException se) { // anyone can access the platform default factory class without permission if (PLATFORM_DEFAULT_FACTORY_CLASS.equals(className)) { return Class.forName(className); } throw se; } } }
/* * Copyright &copy 2014-2016 NetApp, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* * DO NOT EDIT THIS CODE BY HAND! It has been generated with jsvcgen. */ package com.solidfire.element.api; import com.solidfire.gson.Gson; import com.solidfire.core.client.Attributes; import com.solidfire.gson.annotations.SerializedName; import com.solidfire.core.annotation.Since; import com.solidfire.core.javautil.Optional; import java.io.Serializable; import java.util.Arrays; import java.util.HashMap; import java.util.Objects; /** * DeleteVolumesRequest * DeleteVolumes marks multiple (up to 500) active volumes for deletion. * Once marked, the volumes are purged (permanently deleted) after the cleanup interval elapses. * The cleanup interval can be set in the SetClusterSettings method. * For more information on using this method, see SetClusterSettings on page 1. * After making a request to delete volumes, any active iSCSI connections to the volumes are immediately terminated * and no further connections are allowed while the volumes are in this state. * A marked volume is not returned in target discovery requests. * Any snapshots of a volume that has been marked for deletion are not affected. * Snapshots are kept until the volume is purged from the system. * If a volume is marked for deletion and has a bulk volume read or bulk volume write operation in progress, * the bulk volume read or write operation is stopped. 
* If the volumes you delete are paired with a volume, replication between the paired volumes is suspended * and no data is transferred to them or from them while in a deleted state. * The remote volumes the deleted volumes were paired with enter into a PausedMisconfigured state * and data is no longer sent to them or from the deleted volumes. * Until the deleted volumes are purged, they can be restored and data transfers resume. * If the deleted volumes are purged from the system, the volumes they were paired with enter into a * StoppedMisconfigured state and the volume pairing status is removed. * The purged volumes become permanently unavailable. **/ public class DeleteVolumesRequest implements Serializable { public static final long serialVersionUID = 885185871504564557L; @SerializedName("accountIDs") private Optional<Long[]> accountIDs; @SerializedName("volumeAccessGroupIDs") private Optional<Long[]> volumeAccessGroupIDs; @SerializedName("volumeIDs") private Optional<Long[]> volumeIDs; // empty constructor @Since("7.0") public DeleteVolumesRequest() {} // parameterized constructor @Since("7.0") public DeleteVolumesRequest( Optional<Long[]> accountIDs, Optional<Long[]> volumeAccessGroupIDs, Optional<Long[]> volumeIDs ) { this.accountIDs = (accountIDs == null) ? Optional.<Long[]>empty() : accountIDs; this.volumeAccessGroupIDs = (volumeAccessGroupIDs == null) ? Optional.<Long[]>empty() : volumeAccessGroupIDs; this.volumeIDs = (volumeIDs == null) ? Optional.<Long[]>empty() : volumeIDs; } /** * A list of account IDs. All volumes from these accounts are deleted from the system. **/ public Optional<Long[]> getAccountIDs() { return this.accountIDs; } public void setAccountIDs(Optional<Long[]> accountIDs) { this.accountIDs = (accountIDs == null) ? Optional.<Long[]>empty() : accountIDs; } /** * A list of volume access group IDs. All of the volumes from all of the volume access groups you specify in this list are deleted from the system. 
**/ public Optional<Long[]> getVolumeAccessGroupIDs() { return this.volumeAccessGroupIDs; } public void setVolumeAccessGroupIDs(Optional<Long[]> volumeAccessGroupIDs) { this.volumeAccessGroupIDs = (volumeAccessGroupIDs == null) ? Optional.<Long[]>empty() : volumeAccessGroupIDs; } /** * The list of IDs of the volumes to delete from the system. **/ public Optional<Long[]> getVolumeIDs() { return this.volumeIDs; } public void setVolumeIDs(Optional<Long[]> volumeIDs) { this.volumeIDs = (volumeIDs == null) ? Optional.<Long[]>empty() : volumeIDs; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; DeleteVolumesRequest that = (DeleteVolumesRequest) o; return Objects.equals(accountIDs, that.accountIDs) && Objects.equals(volumeAccessGroupIDs, that.volumeAccessGroupIDs) && Objects.equals(volumeIDs, that.volumeIDs); } @Override public int hashCode() { return Objects.hash( accountIDs,volumeAccessGroupIDs,volumeIDs ); } public java.util.Map<String, Object> toMap() { java.util.Map<String, Object> map = new HashMap<>(); map.put("accountIDs", accountIDs); map.put("volumeAccessGroupIDs", volumeAccessGroupIDs); map.put("volumeIDs", volumeIDs); return map; } @Override public String toString() { final StringBuilder sb = new StringBuilder(); Gson gson = new Gson(); sb.append( "{ " ); if(null != accountIDs && accountIDs.isPresent()){ sb.append(" accountIDs : ").append(gson.toJson(accountIDs)).append(","); } else{ sb.append(" accountIDs : ").append("null").append(","); } if(null != volumeAccessGroupIDs && volumeAccessGroupIDs.isPresent()){ sb.append(" volumeAccessGroupIDs : ").append(gson.toJson(volumeAccessGroupIDs)).append(","); } else{ sb.append(" volumeAccessGroupIDs : ").append("null").append(","); } if(null != volumeIDs && volumeIDs.isPresent()){ sb.append(" volumeIDs : ").append(gson.toJson(volumeIDs)).append(","); } else{ sb.append(" volumeIDs : ").append("null").append(","); } sb.append( " }" ); 
if(sb.lastIndexOf(", }") != -1) sb.deleteCharAt(sb.lastIndexOf(", }")); return sb.toString(); } public static Builder builder() { return new Builder(); } public final Builder asBuilder() { return new Builder().buildFrom(this); } public static class Builder { private Optional<Long[]> accountIDs; private Optional<Long[]> volumeAccessGroupIDs; private Optional<Long[]> volumeIDs; private Builder() { } public DeleteVolumesRequest build() { return new DeleteVolumesRequest ( this.accountIDs, this.volumeAccessGroupIDs, this.volumeIDs); } private DeleteVolumesRequest.Builder buildFrom(final DeleteVolumesRequest req) { this.accountIDs = req.accountIDs; this.volumeAccessGroupIDs = req.volumeAccessGroupIDs; this.volumeIDs = req.volumeIDs; return this; } public DeleteVolumesRequest.Builder optionalAccountIDs(final Long[] accountIDs) { this.accountIDs = (accountIDs == null) ? Optional.<Long[]>empty() : Optional.of(accountIDs); return this; } public DeleteVolumesRequest.Builder optionalVolumeAccessGroupIDs(final Long[] volumeAccessGroupIDs) { this.volumeAccessGroupIDs = (volumeAccessGroupIDs == null) ? Optional.<Long[]>empty() : Optional.of(volumeAccessGroupIDs); return this; } public DeleteVolumesRequest.Builder optionalVolumeIDs(final Long[] volumeIDs) { this.volumeIDs = (volumeIDs == null) ? Optional.<Long[]>empty() : Optional.of(volumeIDs); return this; } } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.myfaces.renderkit.html; import java.io.IOException; import java.text.MessageFormat; import java.util.ArrayList; import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; import javax.faces.component.UIComponent; import javax.faces.component.UIOutput; import javax.faces.component.UIParameter; import javax.faces.component.UIViewRoot; import javax.faces.component.html.HtmlOutputFormat; import javax.faces.context.FacesContext; import javax.faces.context.ResponseWriter; import org.apache.myfaces.buildtools.maven2.plugin.builder.annotation.JSFRenderer; import org.apache.myfaces.shared.renderkit.JSFAttr; import org.apache.myfaces.shared.renderkit.RendererUtils; import org.apache.myfaces.shared.renderkit.html.CommonPropertyConstants; import org.apache.myfaces.shared.renderkit.html.CommonPropertyUtils; import org.apache.myfaces.shared.renderkit.html.HTML; import org.apache.myfaces.shared.renderkit.html.HtmlRenderer; import org.apache.myfaces.shared.renderkit.html.HtmlRendererUtils; /** * * @author Manfred Geiler (latest modification by $Author: lu4242 $) * @author Thomas Spiegl * @version $Revision: 1364636 $ $Date: 2012-07-23 09:00:12 -0500 (Mon, 23 Jul 
2012) $ */ @JSFRenderer(renderKitId = "HTML_BASIC", family = "javax.faces.Output", type = "javax.faces.Format") public class HtmlFormatRenderer extends HtmlRenderer { //private static final Log log = LogFactory.getLog(HtmlFormatRenderer.class); private static final Logger log = Logger.getLogger(HtmlFormatRenderer.class.getName()); private static final Object[] EMPTY_ARGS = new Object[0]; @Override protected boolean isCommonPropertiesOptimizationEnabled(FacesContext facesContext) { return true; } @Override protected boolean isCommonEventsOptimizationEnabled(FacesContext facesContext) { return true; } @Override public void encodeBegin(FacesContext facesContext, UIComponent uiComponent) throws IOException { } @Override public void encodeChildren(FacesContext facescontext, UIComponent uicomponent) throws IOException { } @Override public void encodeEnd(FacesContext facesContext, UIComponent component) throws IOException { RendererUtils.checkParamValidity(facesContext, component, UIOutput.class); String text = getOutputFormatText(facesContext, component); boolean escape; if (component instanceof HtmlOutputFormat) { escape = ((HtmlOutputFormat) component).isEscape(); } else { escape = RendererUtils.getBooleanAttribute(component, JSFAttr.ESCAPE_ATTR, true); } if (text != null) { ResponseWriter writer = facesContext.getResponseWriter(); boolean span = false; if (isCommonPropertiesOptimizationEnabled(facesContext)) { long commonPropertiesMarked = CommonPropertyUtils.getCommonPropertiesMarked(component); if ( (commonPropertiesMarked & ~(CommonPropertyConstants.ESCAPE_PROP)) > 0) { span = true; writer.startElement(HTML.SPAN_ELEM, component); HtmlRendererUtils.writeIdIfNecessary(writer, component, facesContext); } else if (CommonPropertyUtils.isIdRenderingNecessary(component)) { span = true; writer.startElement(HTML.SPAN_ELEM, component); writer.writeAttribute(HTML.ID_ATTR, component.getClientId(facesContext), null); } CommonPropertyUtils.renderUniversalProperties(writer, 
commonPropertiesMarked, component); CommonPropertyUtils.renderStyleProperties(writer, commonPropertiesMarked, component); } else { if(component.getId()!=null && !component.getId().startsWith(UIViewRoot.UNIQUE_ID_PREFIX)) { span = true; writer.startElement(HTML.SPAN_ELEM, component); HtmlRendererUtils.writeIdIfNecessary(writer, component, facesContext); HtmlRendererUtils.renderHTMLAttributes(writer, component, HTML.COMMON_PASSTROUGH_ATTRIBUTES); } else { span = HtmlRendererUtils.renderHTMLAttributesWithOptionalStartElement(writer,component, HTML.SPAN_ELEM,HTML.COMMON_PASSTROUGH_ATTRIBUTES); } } if (escape) { if (log.isLoggable(Level.FINE)) { log.fine("renderOutputText writing '" + text + "'"); } writer.writeText(text, org.apache.myfaces.shared.renderkit.JSFAttr.VALUE_ATTR); } else { writer.write(text); } if(span) { writer.endElement(org.apache.myfaces.shared.renderkit.html.HTML.SPAN_ELEM); } } } private String getOutputFormatText(FacesContext facesContext, UIComponent htmlOutputFormat) { String pattern = RendererUtils.getStringValue(facesContext, htmlOutputFormat); Object[] args; if (htmlOutputFormat.getChildCount() == 0) { args = EMPTY_ARGS; } else { List<Object> argsList = null; if (htmlOutputFormat.getChildCount() > 0) { List<UIParameter> validParams = HtmlRendererUtils.getValidUIParameterChildren( facesContext, htmlOutputFormat.getChildren(), false, false, false); for (int i = 0, size = validParams.size(); i < size; i++) { UIParameter param = validParams.get(i); if (argsList == null) { argsList = new ArrayList<Object>(); } argsList.add(param.getValue()); } } if (argsList != null) { args = argsList.toArray(new Object[argsList.size()]); } else { args = EMPTY_ARGS; } } MessageFormat format = new MessageFormat(pattern, facesContext.getViewRoot().getLocale()); try { return format.format(args); } catch (Exception e) { log.log(Level.SEVERE, "Error formatting message of component " + htmlOutputFormat.getClientId(facesContext)); return ""; } } }
package org.openprovenance.prov.json; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.Hashtable; import java.util.List; import java.util.Map; import javax.xml.datatype.XMLGregorianCalendar; import org.openprovenance.prov.model.Entry; import org.openprovenance.prov.model.Key; import org.openprovenance.prov.model.ActedOnBehalfOf; import org.openprovenance.prov.model.Activity; import org.openprovenance.prov.model.Agent; import org.openprovenance.prov.model.AlternateOf; import org.openprovenance.prov.model.Attribute; import org.openprovenance.prov.model.DerivedByInsertionFrom; import org.openprovenance.prov.model.DerivedByRemovalFrom; import org.openprovenance.prov.model.DictionaryMembership; import org.openprovenance.prov.model.Document; import org.openprovenance.prov.model.Entity; import org.openprovenance.prov.model.HadMember; import org.openprovenance.prov.model.LangString; import org.openprovenance.prov.model.MentionOf; import org.openprovenance.prov.model.ModelConstructor; import org.openprovenance.prov.model.Name; import org.openprovenance.prov.model.Bundle; import org.openprovenance.prov.model.Namespace; import org.openprovenance.prov.model.QualifiedName; import org.openprovenance.prov.model.SpecializationOf; import org.openprovenance.prov.model.Statement; import org.openprovenance.prov.model.Used; import org.openprovenance.prov.model.WasAssociatedWith; import org.openprovenance.prov.model.WasAttributedTo; import org.openprovenance.prov.model.WasDerivedFrom; import org.openprovenance.prov.model.WasEndedBy; import org.openprovenance.prov.model.WasGeneratedBy; import org.openprovenance.prov.model.WasInfluencedBy; import org.openprovenance.prov.model.WasInformedBy; import org.openprovenance.prov.model.WasInvalidatedBy; import org.openprovenance.prov.model.WasStartedBy; /** * @author Trung Dong Huynh * * Constructing a JSON structure from a Document that * follows the PROV-JSON representation. 
* */ public class JSONConstructor implements ModelConstructor { private class JsonProvRecord { String type; String id; List<Object[]> attributes; public JsonProvRecord(String type, String id, List<Object[]> attributes) { this.type = type; this.id = id; this.attributes = attributes; } } private Namespace documentNamespace = null; private Namespace currentNamespace = null; private Map<String, Object> documentBundles = new HashMap<String, Object>(); private List<JsonProvRecord> documentRecords = new ArrayList<JsonProvRecord>(); private List<JsonProvRecord> currentRecords = documentRecords; final private Name name; public JSONConstructor(Name name) { this.name=name; } public Map<String, Object> getJSONStructure() { // Build the document-level structure Map<String, Object> document = getJSONStructure(documentRecords, documentNamespace); if (!documentBundles.isEmpty()) document.put("bundle", documentBundles); return document; } public Map<String, Object> getJSONStructure(List<JsonProvRecord> records, Namespace namespace) { Map<String, Object> bundle = new HashMap<String, Object>(); Hashtable<String, String> prefixes = new Hashtable<String, String>(namespace.getPrefixes()); if (namespace.getDefaultNamespace() != null) { prefixes.put("default", namespace.getDefaultNamespace()); } if (!prefixes.isEmpty()) bundle.put("prefix", prefixes); for (Object o : records) { if (o == null) continue; JsonProvRecord record = (JsonProvRecord) o; String type = record.type; @SuppressWarnings("unchecked") Map<Object, Object> structure = (Map<Object, Object>) bundle.get(type); if (structure == null) { structure = new HashMap<Object, Object>(); bundle.put(type, structure); } Map<Object, Object> hash = new HashMap<Object, Object>(); List<Object[]> tuples = (List<Object[]>) record.attributes; for (Object[] tuple : tuples) { Object attribute = tuple[0]; Object value = tuple[1]; if (hash.containsKey(attribute)) { Object existing = hash.get(attribute); if (existing instanceof List) { // Already a 
multi-value attribute @SuppressWarnings("unchecked") List<Object> values = (List<Object>) existing; values.add(value); } else { // A multi-value list needs to be created List<Object> values = new ArrayList<Object>(); values.add(existing); values.add(value); hash.put(attribute, values); } } else { hash.put(attribute, value); } } if (structure.containsKey(record.id)) { Object existing = structure.get(record.id); if (existing instanceof List) { @SuppressWarnings("unchecked") List<Object> values = (List<Object>) existing; values.add(hash); } else { // A multi-value list needs to be created List<Object> values = new ArrayList<Object>(); values.add(existing); values.add(hash); structure.put(record.id, values); } } else structure.put(record.id, hash); } return bundle; } private static final Map<String, Integer> countMap = new HashMap<String, Integer>(); private static String getBlankID(String type) { if (!countMap.containsKey(type)) { countMap.put(type, 0); } int count = countMap.get(type); count += 1; countMap.put(type, count); return "_:" + type + count; } private Object[] tuple(Object o1, Object o2) { Object[] tuple = { o1, o2 }; return tuple; } private Object typedLiteral(String value, String datatype, String lang) { // Converting default types to JSON primitives if (datatype == "xsd:string" && lang == null) return value; if (datatype == "xsd:double") return Double.parseDouble(value); if (datatype == "xsd:int") return Integer.parseInt(value); if (datatype == "xsd:boolean") return Boolean.parseBoolean(value); // Creating a typed literal structure Map<String, String> result = new HashMap<String, String>(); result.put("$", value); if (datatype != null) { result.put("type", datatype); } if (lang != null) { result.put("lang", lang); } return result; } private String convertValueToString(Object value, Object convertedValue) { if (convertedValue instanceof String) { return (String) convertedValue; } if (convertedValue instanceof QualifiedName) return 
currentNamespace.qualifiedNameToString((QualifiedName) convertedValue); if (convertedValue instanceof LangString) { LangString iStr = (LangString) convertedValue; return iStr.getValue(); } if (convertedValue instanceof byte[]) { return (String) value; } return (String) value; } private Object convertValue(Object value) { if (value instanceof String || value instanceof Double || value instanceof Integer || value instanceof Boolean) return value; if (value instanceof QualifiedName) { return typedLiteral(currentNamespace.qualifiedNameToString((QualifiedName) value), "xsd:QName", null); } if (value instanceof LangString) { LangString iStr = (LangString) value; String lang = iStr.getLang(); if (lang != null) // If 'lang' is defined return typedLiteral(iStr.getValue(), "prov:InternationalizedString", lang); else return iStr.getValue(); } throw new RuntimeException("Cannot convert this value: " + value.toString()); } private Object[] convertAttribute(Attribute attr) { String attrName = currentNamespace.qualifiedNameToString(attr.getElementName()); Object value = attr.getValue(); QualifiedName type = attr.getType(); String datatype = currentNamespace.qualifiedNameToString(type); Object attrValue; if (value instanceof QualifiedName) { attrValue = typedLiteral(currentNamespace.qualifiedNameToString((QualifiedName) value), datatype, null); } else if (value instanceof LangString) { LangString iStr = (LangString) value; String lang = iStr.getLang(); if (lang != null) { // If 'lang' is defined attrValue = typedLiteral(iStr.getValue(), "prov:InternationalizedString", lang); } else { // Otherwise, just return the string attrValue = iStr.getValue(); } } else { attrValue = typedLiteral(value.toString(), datatype, null); } return tuple(attrName, attrValue); } private List<Object[]> convertAttributes(Collection<Attribute> attrs) { List<Object[]> result = new ArrayList<Object[]>(); if (attrs != null) for (Attribute attr : attrs) { result.add(convertAttribute(attr)); } return result; } 
@Override public Entity newEntity(QualifiedName id, Collection<Attribute> attributes) { List<Object[]> attrs = convertAttributes(attributes); JsonProvRecord record = new JsonProvRecord("entity", currentNamespace.qualifiedNameToString(id), attrs); this.currentRecords.add(record); return null; } @Override public Activity newActivity(QualifiedName id, XMLGregorianCalendar startTime, XMLGregorianCalendar endTime, Collection<Attribute> attributes) { List<Object[]> attrs = convertAttributes(attributes); if (startTime != null) { attrs.add(tuple("prov:startTime", startTime.toXMLFormat())); } if (endTime != null) { attrs.add(tuple("prov:endTime", endTime.toXMLFormat())); } JsonProvRecord record = new JsonProvRecord("activity", currentNamespace.qualifiedNameToString(id), attrs); this.currentRecords.add(record); return null; } @Override public Agent newAgent(QualifiedName id, Collection<Attribute> attributes) { List<Object[]> attrs = convertAttributes(attributes); JsonProvRecord record = new JsonProvRecord("agent", currentNamespace.qualifiedNameToString(id), attrs); this.currentRecords.add(record); return null; } @Override public Used newUsed(QualifiedName id, QualifiedName activity, QualifiedName entity, XMLGregorianCalendar time, Collection<Attribute> attributes) { List<Object[]> attrs = convertAttributes(attributes); if (activity != null) attrs.add(tuple("prov:activity", currentNamespace.qualifiedNameToString(activity))); if (entity != null) attrs.add(tuple("prov:entity", currentNamespace.qualifiedNameToString(entity))); if (time != null) attrs.add(tuple("prov:time", time.toXMLFormat())); String recordID = (id != null) ? 
currentNamespace.qualifiedNameToString(id) : getBlankID("u"); JsonProvRecord record = new JsonProvRecord("used", recordID, attrs); this.currentRecords.add(record); return null; } @Override public WasGeneratedBy newWasGeneratedBy(QualifiedName id, QualifiedName entity, QualifiedName activity, XMLGregorianCalendar time, Collection<Attribute> attributes) { List<Object[]> attrs = convertAttributes(attributes); if (entity != null) attrs.add(tuple("prov:entity", currentNamespace.qualifiedNameToString(entity))); if (activity != null) attrs.add(tuple("prov:activity", currentNamespace.qualifiedNameToString(activity))); if (time != null) attrs.add(tuple("prov:time", time.toXMLFormat())); String recordID = (id != null) ? currentNamespace.qualifiedNameToString(id) : getBlankID("wGB"); JsonProvRecord record = new JsonProvRecord("wasGeneratedBy", recordID, attrs); this.currentRecords.add(record); return null; } @Override public WasInvalidatedBy newWasInvalidatedBy(QualifiedName id, QualifiedName entity, QualifiedName activity, XMLGregorianCalendar time, Collection<Attribute> attributes) { List<Object[]> attrs = convertAttributes(attributes); if (entity != null) attrs.add(tuple("prov:entity", currentNamespace.qualifiedNameToString(entity))); if (activity != null) attrs.add(tuple("prov:activity", currentNamespace.qualifiedNameToString(activity))); if (time != null) attrs.add(tuple("prov:time", time.toXMLFormat())); String recordID = (id != null) ? 
currentNamespace.qualifiedNameToString(id) : getBlankID("wIB"); JsonProvRecord record = new JsonProvRecord("wasInvalidatedBy", recordID, attrs); this.currentRecords.add(record); return null; } @Override public WasStartedBy newWasStartedBy(QualifiedName id, QualifiedName activity, QualifiedName trigger, QualifiedName starter, XMLGregorianCalendar time, Collection<Attribute> attributes) { List<Object[]> attrs = convertAttributes(attributes); if (activity != null) attrs.add(tuple("prov:activity", currentNamespace.qualifiedNameToString(activity))); if (trigger != null) attrs.add(tuple("prov:trigger", currentNamespace.qualifiedNameToString(trigger))); if (starter != null) attrs.add(tuple("prov:starter", currentNamespace.qualifiedNameToString(starter))); if (time != null) attrs.add(tuple("prov:time", time.toXMLFormat())); String recordID = (id != null) ? currentNamespace.qualifiedNameToString(id) : getBlankID("wSB"); JsonProvRecord record = new JsonProvRecord("wasStartedBy", recordID, attrs); this.currentRecords.add(record); return null; } @Override public WasEndedBy newWasEndedBy(QualifiedName id, QualifiedName activity, QualifiedName trigger, QualifiedName ender, XMLGregorianCalendar time, Collection<Attribute> attributes) { List<Object[]> attrs = convertAttributes(attributes); if (activity != null) attrs.add(tuple("prov:activity", currentNamespace.qualifiedNameToString(activity))); if (trigger != null) attrs.add(tuple("prov:trigger", currentNamespace.qualifiedNameToString(trigger))); if (ender != null) attrs.add(tuple("prov:ender", currentNamespace.qualifiedNameToString(ender))); if (time != null) attrs.add(tuple("prov:time", time.toXMLFormat())); String recordID = (id != null) ? 
currentNamespace.qualifiedNameToString(id) : getBlankID("wEB"); JsonProvRecord record = new JsonProvRecord("wasEndedBy", recordID, attrs); this.currentRecords.add(record); return null; } @Override public WasDerivedFrom newWasDerivedFrom(QualifiedName id, QualifiedName e2, QualifiedName e1, QualifiedName activity, QualifiedName generation, QualifiedName usage, Collection<Attribute> attributes) { List<Object[]> attrs = convertAttributes(attributes); if (e2 != null) attrs.add(tuple("prov:generatedEntity", currentNamespace.qualifiedNameToString(e2))); if (e1 != null) attrs.add(tuple("prov:usedEntity", currentNamespace.qualifiedNameToString(e1))); if (activity != null) attrs.add(tuple("prov:activity", currentNamespace.qualifiedNameToString(activity))); if (generation != null) attrs.add(tuple("prov:generation", currentNamespace.qualifiedNameToString(generation))); if (usage != null) attrs.add(tuple("prov:usage", currentNamespace.qualifiedNameToString(usage))); String recordID = (id != null) ? currentNamespace.qualifiedNameToString(id) : getBlankID("wDF"); JsonProvRecord record = new JsonProvRecord("wasDerivedFrom", recordID, attrs); this.currentRecords.add(record); return null; } @Override public WasAssociatedWith newWasAssociatedWith(QualifiedName id, QualifiedName a, QualifiedName ag, QualifiedName plan, Collection<Attribute> attributes) { List<Object[]> attrs = convertAttributes(attributes); if (a != null) attrs.add(tuple("prov:activity", currentNamespace.qualifiedNameToString(a))); if (ag != null) attrs.add(tuple("prov:agent", currentNamespace.qualifiedNameToString(ag))); if (plan != null) attrs.add(tuple("prov:plan", currentNamespace.qualifiedNameToString(plan))); String recordID = (id != null) ? 
currentNamespace.qualifiedNameToString(id) : getBlankID("wAW"); JsonProvRecord record = new JsonProvRecord("wasAssociatedWith", recordID, attrs); this.currentRecords.add(record); return null; } @Override public WasAttributedTo newWasAttributedTo(QualifiedName id, QualifiedName e, QualifiedName ag, Collection<Attribute> attributes) { List<Object[]> attrs = convertAttributes(attributes); if (e != null) attrs.add(tuple("prov:entity", currentNamespace.qualifiedNameToString(e))); if (ag != null) attrs.add(tuple("prov:agent", currentNamespace.qualifiedNameToString(ag))); String recordID = (id != null) ? currentNamespace.qualifiedNameToString(id) : getBlankID("wAT"); JsonProvRecord record = new JsonProvRecord("wasAttributedTo", recordID, attrs); this.currentRecords.add(record); return null; } @Override public ActedOnBehalfOf newActedOnBehalfOf(QualifiedName id, QualifiedName ag2, QualifiedName ag1, QualifiedName a, Collection<Attribute> attributes) { List<Object[]> attrs = convertAttributes(attributes); if (ag2 != null) attrs.add(tuple("prov:delegate", currentNamespace.qualifiedNameToString(ag2))); if (ag1 != null) attrs.add(tuple("prov:responsible", currentNamespace.qualifiedNameToString(ag1))); if (a != null) attrs.add(tuple("prov:activity", currentNamespace.qualifiedNameToString(a))); String recordID = (id != null) ? currentNamespace.qualifiedNameToString(id) : getBlankID("aOBO"); JsonProvRecord record = new JsonProvRecord("actedOnBehalfOf", recordID, attrs); this.currentRecords.add(record); return null; } @Override public WasInformedBy newWasInformedBy(QualifiedName id, QualifiedName a2, QualifiedName a1, Collection<Attribute> attributes) { List<Object[]> attrs = convertAttributes(attributes); if (a2 != null) attrs.add(tuple("prov:informed", currentNamespace.qualifiedNameToString(a2))); if (a1 != null) attrs.add(tuple("prov:informant", currentNamespace.qualifiedNameToString(a1))); String recordID = (id != null) ? 
currentNamespace.qualifiedNameToString(id) : getBlankID("Infm"); JsonProvRecord record = new JsonProvRecord("wasInformedBy", recordID, attrs); this.currentRecords.add(record); return null; } @Override public WasInfluencedBy newWasInfluencedBy(QualifiedName id, QualifiedName a2, QualifiedName a1, Collection<Attribute> attributes) { List<Object[]> attrs = convertAttributes(attributes); if (a2 != null) attrs.add(tuple("prov:influencee", currentNamespace.qualifiedNameToString(a2))); if (a1 != null) attrs.add(tuple("prov:influencer", currentNamespace.qualifiedNameToString(a1))); String recordID = (id != null) ? currentNamespace.qualifiedNameToString(id) : getBlankID("Infl"); JsonProvRecord record = new JsonProvRecord("wasInfluencedBy", recordID, attrs); this.currentRecords.add(record); return null; } @Override public AlternateOf newAlternateOf(QualifiedName e2, QualifiedName e1) { List<Object[]> attrs = new ArrayList<Object[]>(); if (e2 != null) attrs.add(tuple("prov:alternate2", currentNamespace.qualifiedNameToString(e2))); if (e1 != null) attrs.add(tuple("prov:alternate1", currentNamespace.qualifiedNameToString(e1))); String recordID = getBlankID("aO"); JsonProvRecord record = new JsonProvRecord("alternateOf", recordID, attrs); this.currentRecords.add(record); return null; } @Override public SpecializationOf newSpecializationOf(QualifiedName e2, QualifiedName e1) { List<Object[]> attrs = new ArrayList<Object[]>(); if (e2 != null) attrs.add(tuple("prov:specificEntity", currentNamespace.qualifiedNameToString(e2))); if (e1 != null) attrs.add(tuple("prov:generalEntity", currentNamespace.qualifiedNameToString(e1))); String recordID = getBlankID("sO"); JsonProvRecord record = new JsonProvRecord("specializationOf", recordID, attrs); this.currentRecords.add(record); return null; } @Override public MentionOf newMentionOf(QualifiedName e2, QualifiedName e1, QualifiedName b) { List<Object[]> attrs = new ArrayList<Object[]>(); if (e2 != null) 
// NOTE(review): the statements below are the tail of a newMentionOf(...)
// method whose signature lies before this chunk; kept untouched.
        attrs.add(tuple("prov:specificEntity", currentNamespace.qualifiedNameToString(e2)));
        if (e1 != null)
            attrs.add(tuple("prov:generalEntity", currentNamespace.qualifiedNameToString(e1)));
        if (b != null)
            attrs.add(tuple("prov:bundle", currentNamespace.qualifiedNameToString(b)));
        // Statements without their own id get a generated blank id ("mO" = mentionOf)
        String recordID = getBlankID("mO");
        JsonProvRecord record = new JsonProvRecord("mentionOf", recordID, attrs);
        this.currentRecords.add(record);
        return null;
    }

    /**
     * Collects a prov:hadMember statement (collection {@code c} contains the
     * entities {@code e}) as a JsonProvRecord. Always returns null: this
     * visitor only accumulates records, it does not build model objects.
     */
    @Override
    public HadMember newHadMember(QualifiedName c, Collection<QualifiedName> e) {
        List<Object[]> attrs = new ArrayList<Object[]>();
        if (c != null)
            attrs.add(tuple("prov:collection", currentNamespace.qualifiedNameToString(c)));
        if (e != null && !e.isEmpty()) {
            // Serialise every member entity's qualified name into one attribute
            List<String> entityList = new ArrayList<String>();
            for (QualifiedName entity : e)
                entityList.add(currentNamespace.qualifiedNameToString(entity));
            attrs.add(tuple("prov:entity", entityList));
        }
        String recordID = getBlankID("hM");
        JsonProvRecord record = new JsonProvRecord("hadMember", recordID, attrs);
        this.currentRecords.add(record);
        return null;
    }

    @Override
    public Document newDocument(Namespace namespaces, Collection<Statement> statements,
            Collection<Bundle> bundles) {
        // This class only collects statements into a structure ready for JSON
        // conversion; no new document will be returned.
        return null;
    }

    /**
     * Closes the current bundle: converts the records collected so far into
     * the JSON structure, stores it under the bundle's qualified name, then
     * restores the document-level record list and namespace.
     */
    @Override
    public Bundle newNamedBundle(QualifiedName id, Namespace namespaces,
            Collection<Statement> statements) {
        Object bundle = getJSONStructure(currentRecords, currentNamespace);
        documentBundles.put(currentNamespace.qualifiedNameToString(id), bundle);
        // Reset to document-level records and namespaces
        currentRecords = documentRecords;
        currentNamespace = documentNamespace;
        return null;
    }

    /** Begins a new document: document namespace becomes the current one. */
    @Override
    public void startDocument(Namespace namespace) {
        documentNamespace = namespace;
        currentNamespace = documentNamespace;
    }

    /** Begins a bundle: switch namespace and start a fresh record list. */
    @Override
    public void startBundle(QualifiedName bundleId, Namespace namespaces) {
        currentNamespace = namespaces;
        // Create a new, separate list of records for the bundle
        currentRecords = new ArrayList<JsonProvRecord>();
    }

    /**
     * Encodes a dictionary key-entity set either compactly (a map keyed by the
     * stringified key value, possible only when all keys share one datatype
     * and none is a LangString) or as a generic list of
     * {"$": entity, "key": value} pairs.
     */
    private Object encodeKeyEntitySet(List<Entry> keyEntitySet) {
        // Check for the types of keys
        boolean isAllKeyOfSameDatatype = true;
        Key firstKey = keyEntitySet.get(0).getKey();
        QualifiedName firstKeyClass = firstKey.getType();
        if (name.PROV_LANG_STRING.equals(firstKeyClass)) {
            // LangString cannot be encoded in the compact form
            isAllKeyOfSameDatatype=false;
        }
        if (isAllKeyOfSameDatatype) {
            for (Entry pair : keyEntitySet) {
                QualifiedName keyClass = pair.getKey().getType();
                // NOTE(review): reference comparison (!=) — this assumes equal
                // datatypes are always the same QualifiedName instance; confirm,
                // otherwise equal-but-distinct instances force the generic form.
                if (keyClass != firstKeyClass) {
                    isAllKeyOfSameDatatype = false;
                    break;
                }
            }
        }
        if (isAllKeyOfSameDatatype) {
            // encode as a dictionary
            Map<Object, String> dictionary = new HashMap<Object, String>();
            String keyDatatype = currentNamespace.qualifiedNameToString(keyEntitySet.get(0).getKey().getType());
            // TODO This does not conform with PROV-JSON !!!
            dictionary.put("$key-datatype", keyDatatype);
            for (Entry pair : keyEntitySet) {
                // String key = convertValueToString(pair.key);
                String key = convertValueToString(pair.getKey().getValue(), pair.getKey().getConvertedValue());
                String entity = currentNamespace.qualifiedNameToString(pair.getEntity());
                dictionary.put(key, entity);
            }
            return dictionary;
        }
        // encode as a generic list of key-value pairs
        // NOTE(review): Hashtable rejects null keys/values — verify
        // convertValue(...) can never yield null here.
        List<Map<String, Object>> values = new ArrayList<Map<String, Object>>(
                keyEntitySet.size());
        for (Entry pair : keyEntitySet) {
            Object entity = currentNamespace.qualifiedNameToString(pair.getEntity());
            Map<String, Object> item = new Hashtable<String, Object>();
            item.put("$", entity);
            item.put("key", convertValue(pair.getKey().getConvertedValue()));
            values.add(item);
        }
        return values;
    }

    /** Collects a derivedByInsertionFrom statement (Dictionary extension). */
    @Override
    public DerivedByInsertionFrom newDerivedByInsertionFrom(QualifiedName id,
            QualifiedName after, QualifiedName before, List<Entry> keyEntitySet,
            Collection<Attribute> attributes) {
        List<Object[]> attrs = convertAttributes(attributes);
        if (after != null)
            attrs.add(tuple("prov:after",
                    currentNamespace.qualifiedNameToString(after)));
        if (before != null)
            attrs.add(tuple("prov:before",
                    currentNamespace.qualifiedNameToString(before)));
        if (keyEntitySet != null && !keyEntitySet.isEmpty()) {
            attrs.add(tuple("prov:key-entity-set", encodeKeyEntitySet(keyEntitySet)));
        }
        // Use the statement's own id when present, otherwise a blank id
        String recordID = (id != null) ?
                currentNamespace.qualifiedNameToString(id) : getBlankID("dBIF");
        JsonProvRecord record = new JsonProvRecord("derivedByInsertionFrom", recordID, attrs);
        this.currentRecords.add(record);
        return null;
    }

    /** Collects a derivedByRemovalFrom statement (Dictionary extension). */
    @Override
    public DerivedByRemovalFrom newDerivedByRemovalFrom(QualifiedName id,
            QualifiedName after, QualifiedName before, List<Key> keys,
            Collection<Attribute> attributes) {
        List<Object[]> attrs = convertAttributes(attributes);
        if (after != null)
            attrs.add(tuple("prov:after",
                    currentNamespace.qualifiedNameToString(after)));
        if (before != null)
            attrs.add(tuple("prov:before",
                    currentNamespace.qualifiedNameToString(before)));
        if (keys != null && !keys.isEmpty()) {
            List<Object> values = new ArrayList<Object>(keys.size());
            for (Key key : keys) {
                values.add(convertValue(key.getValue()));
            }
            attrs.add(tuple("prov:key-set", values));
        }
        String recordID = (id != null) ?
                currentNamespace.qualifiedNameToString(id) : getBlankID("dBRF");
        JsonProvRecord record = new JsonProvRecord("derivedByRemovalFrom", recordID, attrs);
        this.currentRecords.add(record);
        return null;
    }

    /** Collects a hadDictionaryMember statement (Dictionary extension). */
    @Override
    public DictionaryMembership newDictionaryMembership(QualifiedName dict,
            List<Entry> keyEntitySet) {
        List<Object[]> attrs = new ArrayList<Object[]>();
        if (dict != null)
            attrs.add(tuple("prov:dictionary", currentNamespace.qualifiedNameToString(dict)));
        if (keyEntitySet != null && !keyEntitySet.isEmpty()) {
            attrs.add(tuple("prov:key-entity-set", encodeKeyEntitySet(keyEntitySet)));
        }
        String recordID = getBlankID("hDM");
        JsonProvRecord record = new JsonProvRecord("hadDictionaryMember", recordID, attrs);
        this.currentRecords.add(record);
        return null;
    }

    // Qualified-name construction is handled elsewhere; unused by this collector.
    @Override
    public QualifiedName newQualifiedName(String namespace, String local, String prefix) {
        return null;
    }
}
/*
 * Copyright 2000-2014 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.execution.testframework;

import com.intellij.execution.JavaExecutionUtil;
import com.intellij.execution.Location;
import com.intellij.execution.actions.ConfigurationContext;
import com.intellij.execution.configurations.ConfigurationType;
import com.intellij.execution.configurations.ModuleBasedConfiguration;
import com.intellij.execution.junit.JavaRunConfigurationProducerBase;
import com.intellij.execution.junit2.PsiMemberParameterizedLocation;
import com.intellij.execution.junit2.info.MethodLocation;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.actionSystem.LangDataKeys;
import com.intellij.openapi.editor.Caret;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.search.PsiElementProcessor;
import com.intellij.psi.util.ClassUtil;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.containers.ContainerUtil;

import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

/**
 * Base producer for "pattern"-style test run configurations: configurations
 * that run a set of test classes/methods identified by qualified-name
 * patterns rather than a single class. Subclasses define what counts as a
 * test class / test method for their framework.
 */
public abstract class AbstractPatternBasedConfigurationProducer<T extends ModuleBasedConfiguration>
        extends JavaRunConfigurationProducerBase<T> implements Cloneable{

    /** Framework-specific check: is this class a test class? */
    protected abstract boolean isTestClass(PsiClass psiClass);

    /** Framework-specific check: is this element a test method? */
    protected abstract boolean isTestMethod(boolean checkAbstract, PsiElement psiElement);

    public AbstractPatternBasedConfigurationProducer(ConfigurationType configurationType) {
        super(configurationType);
    }

    /**
     * Resolves the module that hosts the classes named in {@code patterns},
     * using {@link #isTestClass} to filter candidates.
     */
    public Module findModule(ModuleBasedConfiguration configuration, Module contextModule,
            Set<String> patterns) {
        return JavaExecutionUtil.findModule(contextModule, patterns, configuration.getProject(),
                new Condition<PsiClass>() {
            @Override
            public boolean value(PsiClass psiClass) {
                return isTestClass(psiClass);
            }
        });
    }

    /**
     * Returns true when the current context selects more than one test member
     * (multi-selection in the project view, multiple carets, etc.).
     * Collection is capped at 2 elements — we only need "more than one".
     */
    public boolean isMultipleElementsSelected(ConfigurationContext context) {
        final DataContext dataContext = context.getDataContext();
        if (TestsUIUtil.isMultipleSelectionImpossible(dataContext)) return false;
        final LinkedHashSet<String> classes = new LinkedHashSet<String>();
        final PsiElementProcessor.CollectElementsWithLimit<PsiElement> processor =
                new PsiElementProcessor.CollectElementsWithLimit<PsiElement>(2);
        final PsiElement[] locationElements = collectLocationElements(classes, dataContext);
        if (locationElements != null) {
            collectTestMembers(locationElements, false, false, processor);
        }
        else {
            collectContextElements(dataContext, false, false, classes, processor);
        }
        return processor.getCollection().size() > 1;
    }

    /**
     * Returns true when the patterns derived from the current context match the
     * configuration's existing {@code patterns}. For a single-element pattern
     * that names a method ("Class,method") the context method must also match.
     */
    public boolean isConfiguredFromContext(ConfigurationContext context, Set<String> patterns) {
        final LinkedHashSet<String> classes = new LinkedHashSet<String>();
        final DataContext dataContext = context.getDataContext();
        if (TestsUIUtil.isMultipleSelectionImpossible(dataContext)) {
            return false;
        }
        final PsiElement[] locationElements = collectLocationElements(classes, dataContext);
        if (locationElements == null) {
            collectContextElements(dataContext, true, false, classes,
                    new PsiElementProcessor.CollectElements<PsiElement>());
        }
        if (Comparing.equal(classes, patterns)) {
            if (patterns.size() == 1) {
                final String pattern = patterns.iterator().next();
                if (!pattern.contains(",")) {
                    // Single class pattern: only configured-from-context when the
                    // caret is not inside a specific test method.
                    final PsiMethod method = PsiTreeUtil.getParentOfType(
                            CommonDataKeys.PSI_ELEMENT.getData(dataContext), PsiMethod.class);
                    return method != null && isTestMethod(false, method);
                }
            }
            return true;
        }
        return false;
    }

    /**
     * Collects the pattern strings for the current selection into
     * {@code classes} and returns a representative element, or null when the
     * selection yields at most one member (no "pattern" needed).
     */
    public PsiElement checkPatterns(ConfigurationContext context, LinkedHashSet<String> classes) {
        PsiElement[] result;
        final DataContext dataContext = context.getDataContext();
        if (TestsUIUtil.isMultipleSelectionImpossible(dataContext)) {
            return null;
        }
        final PsiElement[] locationElements = collectLocationElements(classes, dataContext);
        PsiElementProcessor.CollectElements<PsiElement> processor =
                new PsiElementProcessor.CollectElements<PsiElement>();
        if (locationElements != null) {
            collectTestMembers(locationElements, false, true, processor);
            result = processor.toArray();
        }
        else if (collectContextElements(dataContext, true, true, classes, processor)) {
            result = processor.toArray();
        }
        else {
            return null;
        }
        if (result.length <= 1) {
            return null;
        }
        return result[0];
    }

    /**
     * Feeds every test member reachable from {@code psiElements} (classes in a
     * file, a single class, a method, a package behind a directory) into
     * {@code collectingProcessor}. Stops early as soon as the processor's
     * {@code execute} returns false. With {@code checkIsTest} false, public
     * members are collected instead of framework-detected tests.
     */
    public void collectTestMembers(PsiElement[] psiElements, boolean checkAbstract,
            boolean checkIsTest,
            PsiElementProcessor.CollectElements<PsiElement> collectingProcessor) {
        for (PsiElement psiElement : psiElements) {
            if (psiElement instanceof PsiClassOwner) {
                final PsiClass[] classes = ((PsiClassOwner)psiElement).getClasses();
                for (PsiClass aClass : classes) {
                    if ((!checkIsTest && aClass.hasModifierProperty(PsiModifier.PUBLIC) ||
                            checkIsTest && isTestClass(aClass)) &&
                            !collectingProcessor.execute(aClass)) {
                        return;
                    }
                }
            }
            else if (psiElement instanceof PsiClass) {
                if ((!checkIsTest && ((PsiClass)psiElement).hasModifierProperty(PsiModifier.PUBLIC) ||
                        checkIsTest && isTestClass((PsiClass)psiElement)) &&
                        !collectingProcessor.execute(psiElement)) {
                    return;
                }
            }
            else if (psiElement instanceof PsiMethod) {
                if (checkIsTest && isTestMethod(checkAbstract, psiElement) &&
                        !collectingProcessor.execute(psiElement)) {
                    return;
                }
                if (!checkIsTest) {
                    final PsiClass containingClass = ((PsiMethod)psiElement).getContainingClass();
                    if (containingClass != null &&
                            containingClass.hasModifierProperty(PsiModifier.PUBLIC) &&
                            !collectingProcessor.execute(psiElement)) {
                        return;
                    }
                }
            }
            else if (psiElement instanceof PsiDirectory) {
                final PsiPackage aPackage =
                        JavaDirectoryService.getInstance().getPackage((PsiDirectory)psiElement);
                if (aPackage != null && !collectingProcessor.execute(aPackage)) {
                    return;
                }
            }
        }
    }

    /**
     * Gathers candidate elements from the data context (selected PSI elements,
     * multiple carets in the editor, or selected virtual files) and collects
     * their test members. Returns true when more than one pattern resulted.
     */
    private boolean collectContextElements(DataContext dataContext, boolean checkAbstract,
            boolean checkIsTest, LinkedHashSet<String> classes,
            PsiElementProcessor.CollectElements<PsiElement> processor) {
        PsiElement[] elements = LangDataKeys.PSI_ELEMENT_ARRAY.getData(dataContext);
        if (elements != null) {
            return collectTestMembers(elements, checkAbstract, checkIsTest, processor, classes);
        }
        else {
            final Editor editor = CommonDataKeys.EDITOR.getData(dataContext);
            if (editor != null) {
                // Multi-caret selection: treat each caret's enclosing method as selected
                final List<Caret> allCarets = editor.getCaretModel().getAllCarets();
                if (allCarets.size() > 1) {
                    final PsiFile editorFile = CommonDataKeys.PSI_FILE.getData(dataContext);
                    if (editorFile != null) {
                        final Set<PsiMethod> methods = new LinkedHashSet<PsiMethod>();
                        for (Caret caret : allCarets) {
                            ContainerUtil.addIfNotNull(methods, PsiTreeUtil.getParentOfType(
                                    editorFile.findElementAt(caret.getOffset()), PsiMethod.class));
                        }
                        if (!methods.isEmpty()) {
                            return collectTestMembers(methods.toArray(new PsiElement[0]),
                                    checkAbstract, checkIsTest, processor, classes);
                        }
                    }
                }
            }
            final PsiElement element = CommonDataKeys.PSI_ELEMENT.getData(dataContext);
            final VirtualFile[] files = CommonDataKeys.VIRTUAL_FILE_ARRAY.getData(dataContext);
            if (files != null) {
                Project project = CommonDataKeys.PROJECT.getData(dataContext);
                if (project != null) {
                    final PsiManager psiManager = PsiManager.getInstance(project);
                    for (VirtualFile file : files) {
                        final PsiFile psiFile = psiManager.findFile(file);
                        if (psiFile instanceof PsiClassOwner) {
                            PsiClass[] psiClasses = ((PsiClassOwner)psiFile).getClasses();
                            if (element != null && psiClasses.length > 0) {
                                // Narrow to the class that actually contains the context element
                                for (PsiClass aClass : psiClasses) {
                                    if (PsiTreeUtil.isAncestor(aClass, element, false)) {
                                        psiClasses = new PsiClass[] {aClass};
                                        break;
                                    }
                                }
                            }
                            collectTestMembers(psiClasses, checkAbstract, checkIsTest, processor);
                            for (PsiElement psiMember : processor.getCollection()) {
                                // NOTE(review): getQualifiedName() may be null for local/anonymous
                                // classes — a null could end up in `classes`; confirm callers cope.
                                classes.add(((PsiClass)psiMember).getQualifiedName());
                            }
                        }
                    }
                    return true;
                }
            }
        }
        return false;
    }

    /** Collects members and their pattern names; true when >1 pattern found. */
    private boolean collectTestMembers(PsiElement[] elements, boolean checkAbstract,
            boolean checkIsTest, PsiElementProcessor.CollectElements<PsiElement> processor,
            LinkedHashSet<String> classes) {
        collectTestMembers(elements, checkAbstract, checkIsTest, processor);
        for (PsiElement psiClass : processor.getCollection()) {
            classes.add(getQName(psiClass));
        }
        return classes.size() > 1;
    }

    /**
     * Extracts elements from Location-based selection (e.g. test-runner tree);
     * null when the data context carries no locations.
     */
    private static PsiElement[] collectLocationElements(LinkedHashSet<String> classes,
            DataContext dataContext) {
        final Location<?>[] locations = Location.DATA_KEYS.getData(dataContext);
        if (locations != null) {
            List<PsiElement> elements = new ArrayList<PsiElement>();
            for (Location<?> location : locations) {
                final PsiElement psiElement = location.getPsiElement();
                classes.add(getQName(psiElement, location));
                elements.add(psiElement);
            }
            return elements.toArray(new PsiElement[elements.size()]);
        }
        return null;
    }

    public static String getQName(PsiElement psiMember) {
        return getQName(psiMember, null);
    }

    /**
     * Pattern string for a member: JVM class name for classes,
     * "ClassName,memberName" for members (the containing class is taken from
     * the location when it is a method/parameterized location), package name
     * for packages. Asserts on anything else.
     */
    public static String getQName(PsiElement psiMember, Location location) {
        if (psiMember instanceof PsiClass) {
            return ClassUtil.getJVMClassName((PsiClass)psiMember);
        }
        else if (psiMember instanceof PsiMember) {
            final PsiClass containingClass = location instanceof MethodLocation
                    ? ((MethodLocation)location).getContainingClass()
                    : location instanceof PsiMemberParameterizedLocation
                            ? ((PsiMemberParameterizedLocation)location).getContainingClass()
                            : ((PsiMember)psiMember).getContainingClass();
            assert containingClass != null;
            return ClassUtil.getJVMClassName(containingClass) + "," + ((PsiMember)psiMember).getName();
        }
        else if (psiMember instanceof PsiPackage) {
            return ((PsiPackage)psiMember).getQualifiedName();
        }
        assert false;
        return null;
    }
}
/**
 * Helios, OpenSource Monitoring
 * Brought to you by the Helios Development Group
 *
 * Copyright 2015, Helios Development Group and individual contributors
 * as indicated by the @author tags. See the copyright.txt file in the
 * distribution for a full listing of individual contributors.
 *
 * This is free software; you can redistribute it and/or modify it
 * under the terms of the GNU Lesser General Public License as
 * published by the Free Software Foundation; either version 2.1 of
 * the License, or (at your option) any later version.
 *
 * This software is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this software; if not, write to the Free
 * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
 * 02110-1301 USA, or see the FSF site: http://www.fsf.org.
 *
 */
package com.heliosapm.tsdblite.metric;

import io.netty.channel.Channel;

import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import javax.management.MBeanServer;
import javax.management.MBeanServerDelegate;
import javax.management.MBeanServerNotification;
import javax.management.Notification;
import javax.management.NotificationListener;
import javax.management.ObjectName;

import org.cliffc.high_scale_lib.NonBlockingHashMap;
import org.cliffc.high_scale_lib.NonBlockingHashMapLong;
import org.cliffc.high_scale_lib.NonBlockingHashSet;

import com.heliosapm.tsdblite.events.Event;
import com.heliosapm.tsdblite.jmx.Util;
import com.heliosapm.tsdblite.metric.AppMetric.SubNotif;
import com.heliosapm.utils.collections.FluentMap;
import com.heliosapm.utils.jmx.JMXHelper;

/**
 * <p>Title: MetricSubscription</p>
 * <p>Description: Represents a subscription to submission events for one metric (ObjectName) pattern</p>
 * <p>Company: Helios Development Group LLC</p>
 * @author Whitehead (nwhitehead AT heliosdev DOT org)
 * <p><code>com.heliosapm.tsdblite.metric.MetricSubscription</code></p>
 */
public class MetricSubscription implements NotificationListener {
	/** The pattern of the metric subscribed to */
	protected final ObjectName pattern;
	/** A set of the subscribed channels */
	protected final NonBlockingHashMap<String, Channel> subscribedChannels;
	/** A set of the known actual ObjectNames */
	protected final NonBlockingHashSet<ObjectName> objectNames;
	/** A ref to the Metric MBeanServer */
	protected final MBeanServer metricServer;
	/** The long hash code of the pattern */
	protected final long key;

	/** A MetricSubscription place holder */
	protected static final MetricSubscription PLACEHOLDER = new MetricSubscription();
	/** A map of all MetricSubscriptions keyed by the long hash code of the ObjectName pattern */
	protected static final NonBlockingHashMapLong<MetricSubscription> subscriptions =
			new NonBlockingHashMapLong<MetricSubscription>(128, false);

	/** Callback invoked when a new matching metric is registered */
	protected final NotificationListener onNew = new NotificationListener(){
		@Override
		public void handleNotification(final Notification notification, final Object handback) {
			final ObjectName on = ((MBeanServerNotification)notification).getMBeanName();
			// add() is the dedup: only the first sighting of an ObjectName
			// registers a listener and notifies subscribers.
			if(objectNames.add(on)) {
				JMXHelper.addNotificationListener(metricServer, on, this, null, null);
				final Map<String, Object> data = FluentMap
						.newMap(String.class, Object.class)
						.fput(Event.KEY, Event.NEWMETRIC.code)
						.fput(Event.DATA, new String[]{on.toString()}).map();
				for(Channel channel: subscribedChannels.values()) {
					channel.writeAndFlush(data);
				}
			}
		}
	};

	/** Callback invoked when a subscribed matching metric is unregistered */
	protected final NotificationListener onUnReg = new NotificationListener(){
		@Override
		public void handleNotification(final Notification notification, final Object handback) {
			final ObjectName on = ((MBeanServerNotification)notification).getMBeanName();
			if(objectNames.remove(on)) {
				final Map<String, Object> data = FluentMap
						.newMap(String.class, Object.class)
						.fput(Event.KEY, Event.RETIREDMETRIC.code)
						.fput(Event.DATA, new String[]{on.toString()}).map();
				for(Channel channel: subscribedChannels.values()) {
					channel.writeAndFlush(data);
				}
			}
		}
	};

	/**
	 * Retrieves the MetricSubscription for the passed pattern
	 * @param pattern The pattern to subscribe to
	 * @return the MetricSubscription for the passed pattern
	 */
	public static MetricSubscription get(final ObjectName pattern) {
		if(pattern==null) throw new IllegalArgumentException("The passed pattern was null");
		final long key = Util.hashCode(pattern);
		// NOTE(review): two threads can both observe PLACEHOLDER here and each
		// construct a MetricSubscription (each registering MBeanServer
		// listeners); the loser of replace() is leaked. Confirm whether this
		// race is acceptable or needs a creation lock / computeIfAbsent.
		MetricSubscription sub = subscriptions.putIfAbsent(key, PLACEHOLDER);
		if(sub==null || sub==PLACEHOLDER) {
			sub = new MetricSubscription(pattern, key);
			subscriptions.replace(key, sub);
		}
		return sub;
	}

	/**
	 * Creates a live subscription: registers MBean (un)registration listeners
	 * for the pattern and seeds {@code objectNames} with currently-registered
	 * matches, listening on each.
	 */
	private MetricSubscription(final ObjectName pattern, final long key) {
		this.pattern = pattern;
		subscribedChannels = new NonBlockingHashMap<String, Channel>();
		this.key = key;
		objectNames = new NonBlockingHashSet<ObjectName>();
		metricServer = MetricCache.getInstance().getMetricServer();
		JMXHelper.addMBeanRegistrationListener(metricServer, this.pattern, onNew, 0);
		JMXHelper.addMBeanUnregistrationListener(metricServer, this.pattern, onUnReg, 0);
		for(ObjectName on: metricServer.queryNames(pattern, null)) {
			if(objectNames.add(on)) {
				JMXHelper.addNotificationListener(metricServer, on, this, null, null);
			}
		}
	}

	/** Placeholder-only constructor: all fields null/0, never used as a real subscription. */
	private MetricSubscription() {
		pattern = null;
		subscribedChannels = null;
		key = 0;
		objectNames = null;
		metricServer = null;
	}

	/**
	 * Adds a channel to this subscription
	 * @param channel the channel to add
	 */
	public void subscribe(final Channel channel) {
		if(channel==null) throw new IllegalArgumentException("The passed channel was null");
		if(subscribedChannels.putIfAbsent(channel.id().asLongText(), channel)==null) {
			// First-time subscriber gets a snapshot of the currently known metrics
			final Set<String> initial = new HashSet<String>(objectNames.size());
			for(ObjectName on: objectNames) {
				initial.add(on.toString());
			}
			// NOTE(review): unlike the other write sites, the FluentMap here is
			// flushed without a trailing .map() — verify the channel pipeline
			// accepts the FluentMap itself, otherwise this is a bug.
			channel.writeAndFlush(FluentMap.newMap(String.class, Object.class).fput(Event.KEY, Event.NEWSUBMISSION.code).fput(Event.DATA, initial));
		}
	}

	/**
	 * Removes a channel from this subscription
	 * @param channel the channel to remove
	 */
	public void remove(final Channel channel) {
		if(channel==null) throw new IllegalArgumentException("The passed channel was null");
		subscribedChannels.remove(channel.id().asLongText());
		if(subscribedChannels.isEmpty()) {
			// Last subscriber gone: retire the subscription and best-effort
			// detach the MBeanServer listeners (failures intentionally ignored).
			subscriptions.remove(key);
			try { metricServer.removeNotificationListener(MBeanServerDelegate.DELEGATE_NAME, onNew); } catch (Exception x) {/* No Op */}
			try { metricServer.removeNotificationListener(MBeanServerDelegate.DELEGATE_NAME, onUnReg); } catch (Exception x) {/* No Op */}
		}
	}

	/**
	 * {@inheritDoc}
	 * @see javax.management.NotificationListener#handleNotification(javax.management.Notification, java.lang.Object)
	 */
	@Override
	public void handleNotification(final Notification notification, final Object handback) {
		// Fan a metric submission event out to every subscribed channel
		final Map<String, Object> data = FluentMap
				.newMap(String.class, Object.class)
				.fput(Event.KEY, Event.NEWSUBMISSION.code)
				.fput(Event.DATA, notification.getUserData()).map();
		for(Channel channel: subscribedChannels.values()) {
			channel.writeAndFlush(data);
		}
	}

	/**
	 * {@inheritDoc}
	 * @see java.lang.Object#hashCode()
	 */
	@Override
	public int hashCode() {
		final int prime = 31;
		int result = 1;
		result = prime * result + (int) (key ^ (key >>> 32));
		return result;
	}

	/**
	 * {@inheritDoc}
	 * @see java.lang.Object#equals(java.lang.Object)
	 */
	@Override
	public boolean equals(Object obj) {
		if (this == obj)
			return true;
		if (obj == null)
			return false;
		if (!(obj instanceof MetricSubscription))
			return false;
		MetricSubscription other = (MetricSubscription) obj;
		if (key != other.key)
			return false;
		return true;
	}
}
package i5.las2peer.httpConnector.client;

import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.StringWriter;
import java.io.Writer;
import java.lang.reflect.Constructor;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import i5.las2peer.httpConnector.coder.InvalidCodingException;
import i5.las2peer.httpConnector.coder.ParamCoder;
import i5.las2peer.httpConnector.coder.ParamDecoder;

/**
 * The connector client is the basic class for accessing a remote LAS2peer server via the http connector within any java
 * application.
 *
 */
public class Client {

	public static final int DEFAULT_PORT = 8080;
	// NOTE(review): 60 * 60 * 10 = 36,000 ms = 36 seconds, not the "10 minutes"
	// the original comment claimed (10 min would be 600,000 ms) — confirm intent.
	public static final long DEFAULT_TIMEOUT_MS = 60 * 60 * 10;
	public static final long DEFAULT_OUTDATE_S = 60 * 60 * 24; // 1 day
	// Polling parameters used by readHttpContent while waiting for the body
	public static final int WAIT_INTERVAL = 200;
	public static final int MAX_WAIT = 10 * 1000;

	public static final String DEFAULT_CODER = "i5.las2peer.httpConnector.coder.XmlCoder";
	public static final String DEFAULT_DECODER = "i5.las2peer.httpConnector.coder.XmlDecoder";

	private String coderClass = DEFAULT_CODER;
	private String decoderClass = DEFAULT_DECODER;

	private String sHost;                         // target host name
	private int iPort = DEFAULT_PORT;             // connector port
	private String sUser = null;                  // login name (null = anonymous)
	private String sPasswd = null;                // password
	private long lTimeOutMs = DEFAULT_TIMEOUT_MS; // requested session timeout
	private boolean bConnected = false;           // true while a session is open
	private String sSessionId = null;             // server-assigned session id
	// private boolean bUsePersistent = false;
	private long lOutdateS = DEFAULT_OUTDATE_S;   // requested session outdate time
	private boolean bTryPersistent = false;       // request a persistent session?
	private boolean bIsPersistent = false;        // is the open session persistent?
	private boolean bUseHttps = false;            // use https instead of http

	/**
	 * Constructor
	 *
	 * @param host target host name
	 *
	 */
	public Client(String host) {
		sHost = host;
	}

	/**
	 * Constructor
	 *
	 * @param host target host name
	 * @param port http connector port on the target host
	 *
	 */
	public Client(String host, int port) {
		sHost = host;
		iPort = port;
	}

	/**
	 * Constructor
	 *
	 * @param host target host name
	 * @param port http connector port on the target host
	 * @param user login name
	 * @param passwd password
	 *
	 */
	public Client(String host, int port, String user, String passwd) {
		this(host, port);
		sUser = user;
		sPasswd = passwd;
	}

	/**
	 * Constructor for reataching to a remote session
	 *
	 * @param host a String
	 * @param port an int
	 * @param user a String
	 * @param passwd a String
	 * @param session a String
	 *
	 */
	public Client(String host, int port, String user, String passwd, String session) {
		this(host, port, user, passwd);
		this.sSessionId = session;
		this.bTryPersistent = true;
	}

	/**
	 * Constructor
	 *
	 * @param host target host name
	 * @param port http connector port on the target host
	 * @param timeout timeout of the generated session in milliseconds
	 *
	 */
	public Client(String host, int port, long timeout) {
		sHost = host;
		iPort = port;
		lTimeOutMs = timeout;
	}

	/**
	 * Constructor
	 *
	 * @param host target host name
	 * @param timeout timeout of the generated session in milliseconds
	 *
	 */
	public Client(String host, long timeout) {
		sHost = host;
		lTimeOutMs = timeout;
	}

	/**
	 * Constructor
	 *
	 * @param host target host name
	 * @param port http connector port on the target host
	 * @param timeout timeout of the generated session in milliseconds
	 * @param user login name
	 * @param passwd password
	 *
	 */
	public Client(String host, int port, long timeout, String user, String passwd) {
		sHost = host;
		iPort = port;
		lTimeOutMs = timeout;
		sUser = user;
		sPasswd = passwd;
	}

	/**
	 * set the password for the current user
	 *
	 * @param passwd new password to use
	 *
	 * @exception ConnectorClientException client is currently connected
	 *
	 */
	public void setPasswd(String passwd) throws ConnectorClientException {
		if (bConnected)
			throw new ConnectorClientException("Don't change the client setting during a session!");
		sPasswd = passwd;
	}

	/**
	 *
	 * @return name of the LAS2peer user
	 *
	 */
	public String getUser() {
		return sUser;
	}

	/**
	 * set the user login name for connecting to the remote las server
	 *
	 * @param user a String
	 *
	 * @exception ConnectorClientException client is currently connected
	 *
	 */
	public void setUser(String user) throws ConnectorClientException {
		if (bConnected)
			throw new ConnectorClientException("Don't change the client setting during a session!");
		sUser = user;
	}

	/**
	 *
	 * @return name or ip of the las server host
	 *
	 */
	public String getHost() {
		return sHost;
	}

	/**
	 * Set the target host of the las server to connect to
	 *
	 * @param host new hostname
	 *
	 * @exception ConnectorClientException client is currently connected
	 *
	 */
	public void setHost(String host) throws ConnectorClientException {
		if (bConnected)
			throw new ConnectorClientException("Don't change the client setting during a session!");
		sHost = host;
	}

	/**
	 *
	 * @return the number of the used port at the las server
	 *
	 */
	public int getPort() {
		return iPort;
	}

	/**
	 * Set the port of the connector
	 *
	 * @param port new port number
	 *
	 * @exception ConnectorClientException client is currently connected
	 *
	 */
	public void setPort(int port) throws ConnectorClientException {
		if (bConnected)
			throw new ConnectorClientException("Don't change the client setting during a session!");
		iPort = port;
	}

	/**
	 * set the flag if the client is to try to open persistent sessions
	 *
	 * @param tryP a boolean
	 *
	 * @exception ConnectorClientException client is currently connected
	 *
	 */
	public void setTryPersistent(boolean tryP) throws ConnectorClientException {
		if (bConnected)
			throw new ConnectorClientException("Don't change the client setting during a session!");
		bTryPersistent = tryP;
	}

	/**
	 * returns of the client tries to open persistent sessions
	 *
	 * @return a boolean
	 *
	 */
	public boolean getTryPersistent() {
		return bTryPersistent;
	}

	/**
	 * returns true if the client is connected and the session is persistent
	 *
	 * @return a boolean
	 *
	 */
	public boolean isPersistent() {
		return bConnected && bIsPersistent;
	}

	/**
	 * return the (tried or real) session timeout in ms
	 *
	 * @return a long
	 *
	 */
	public long getSessionTimeout() {
		return lTimeOutMs;
	}

	/**
	 * Set the session timeout value to be requested on the next connection opening
	 *
	 * @param time timespan in ms
	 *
	 * @exception ConnectorClientException client is currently connected
	 *
	 */
	public void setSessionTimeout(long time) throws ConnectorClientException {
		if (bConnected)
			throw new ConnectorClientException("Don't change the client setting during a session!");
		lTimeOutMs = time;
	}

	/**
	 * return the (tried or real) session outdate time in s
	 *
	 * @return a long
	 *
	 */
	public long getSessionOutdate() {
		return lOutdateS;
	}

	/**
	 * set the outedate time to use for opening new sessions
	 *
	 * @param outdate timespan in s
	 *
	 * @exception ConnectorClientException client is currently connected
	 *
	 */
	public void setSessionOutdate(long outdate) throws ConnectorClientException {
		if (bConnected)
			throw new ConnectorClientException("Don't change the client setting during a session!");
		lOutdateS = outdate;
	}

	/**
	 * returns if the client uses an https connection to the remote las
	 *
	 * @return a boolean
	 *
	 */
	public boolean isUsingHttps() {
		return bUseHttps;
	}

	/**
	 * change the setting for the usage of the https protocol
	 *
	 * @param use a boolean
	 *
	 * @exception ConnectorClientException
	 *
	 */
	public void setUseHttps(boolean use) throws ConnectorClientException {
		if (bConnected)
			throw new ConnectorClientException("Don't change client settings during a session!");
		bUseHttps = use;
	}

	/**
	 * Tries to connect to the LAS2peer server with the current connection data.
	 *
	 * This method will be implicitly called on an attempt to use a not existing connection.
	 *
	 * @exception AuthenticationFailedException
	 * @exception UnableToConnectException
	 *
	 */
	public void connect() throws AuthenticationFailedException, UnableToConnectException {
		if (bConnected)
			return;
		// A stored session id means we are reattaching to a persistent session
		if (sSessionId != null)
			reattachToSession();
		else
			createSession();
	}

	/**
	 * Tries to open a new session with the current connection data.
	 * Falls back to a non-persistent session when a persistent one fails.
	 *
	 * NOTE(review): credentials are sent as URL query parameters — they may end
	 * up in server logs/proxies; consider POST or an Authorization header.
	 *
	 * @exception UnableToConnectException
	 *
	 */
	private void createSession() throws UnableToConnectException, AuthenticationFailedException {
		if (bConnected)
			return;
		bIsPersistent = false;
		String sProtocol = (bUseHttps) ? "https://" : "http://";
		String sTimeout = "timeout=" + lTimeOutMs;
		try {
			String sUrl = "";
			if (sUser == null)
				sUrl = sProtocol + sHost + ":" + iPort + "/createsession?" + sTimeout;
			else
				sUrl = sProtocol + sHost + ":" + iPort + "/createsession?user=" + sUser + "&passwd=" + sPasswd + "&"
						+ sTimeout;
			if (bTryPersistent) {
				// NOTE(review): in the anonymous branch the URL already contains
				// "?", so appending another "?" here yields a malformed query
				// ("...?timeout=X?persistent=1..."); should likely be "&".
				if (sUser == null)
					sUrl += "?";
				else
					sUrl += "&";
				sUrl += "persistent=1&outdate=" + lOutdateS;
			}
			URL url = new URL(sUrl);
			HttpURLConnection conn = (HttpURLConnection) url.openConnection();
			int length = conn.getContentLength();
			if (conn.getResponseCode() == HttpURLConnection.HTTP_UNAUTHORIZED)
				throw new AuthenticationFailedException();
			String type = conn.getContentType();
			if (!"text/xml".equals(type))
				throw new UnableToConnectException("Invalid Server answer: " + type + " - is this a LAS2peer server?");
			String content = readHttpContent((InputStream) conn.getContent(), length);
			try {
				interpretSessionContent(content);
			} catch (InvalidServerAnswerException e) {
				throw new UnableToConnectException("Problems interpreting server response to create session request!", e);
			}
			bConnected = true;
		} catch (IOException e) {
			throw new UnableToConnectException(e);
		} catch (UnableToConnectException e) {
			if (bTryPersistent) {
				// try a non-persistent session!
				bTryPersistent = false;
				createSession();
			} else {
				throw e;
			}
		}
	}

	/**
	 * tries to reattach to an existing (persitent) session using the current connection data
	 *
	 */
	private void reattachToSession() throws UnableToConnectException {
		if (bConnected)
			return;
		String sProtocol = (bUseHttps) ? "https://" : "http://";
		if (sUser == null || sPasswd == null)
			throw new UnableToConnectException("No user / Password given for reattaching");
		if (sSessionId == null)
			throw new UnableToConnectException("No session id given for reattaching");
		try {
			URL url = new URL(sProtocol + sHost + ":" + iPort + "/attachsession?user=" + sUser + "&passwd=" + sPasswd
					+ "&SESSION=" + sSessionId);
			HttpURLConnection conn = (HttpURLConnection) url.openConnection();
			if (conn.getResponseCode() != HttpURLConnection.HTTP_OK)
				throw new UnableToConnectException(
						"Unable to connect to remote session - response code: " + conn.getResponseCode());
			bConnected = true;
			bIsPersistent = true;
		} catch (IOException e) {
			throw new UnableToConnectException("IOException during connection attempt!", e);
		}
	}

	/**
	 * disconnects an open connection
	 *
	 * @exception InvalidServerAnswerException
	 * @exception UnableToConnectException
	 *
	 */
	public void disconnect() throws InvalidServerAnswerException, UnableToConnectException {
		if (!bConnected)
			return;
		String sProtocol = (bUseHttps) ? "https://" : "http://";
		try {
			URL url = new URL(sProtocol + sHost + ":" + iPort + "/closesession?SESSION=" + sSessionId);
			HttpURLConnection conn = (HttpURLConnection) url.openConnection();
			// int length = conn.getContentLength();
			if (conn.getResponseCode() == HttpURLConnection.HTTP_PRECON_FAILED) {
				// session expired
			} else if (conn.getResponseCode() != HttpURLConnection.HTTP_OK) {
				throw new InvalidServerAnswerException("Returncode from server: " + conn.getResponseCode());
			}
			bConnected = false;
			sSessionId = null;
		} catch (IOException e) {
			// treat a broken connection as disconnected, but still report it
			bConnected = false;
			throw new UnableToConnectException(e);
		}
	}

	/**
	 * tries to detach from the current session
	 *
	 * @exception InvalidServerAnswerException
	 * @exception ConnectorClientException
	 *
	 */
	public void detach() throws InvalidServerAnswerException, ConnectorClientException {
		if (!bConnected)
			return;
		String sProtocol = (bUseHttps) ? "https://" : "http://";
		try {
			URL url = new URL(sProtocol + sHost + ":" + iPort + "/detachsession?SESSION=" + sSessionId);
			HttpURLConnection conn = (HttpURLConnection) url.openConnection();
			if (conn.getResponseCode() != HttpURLConnection.HTTP_OK)
				throw new ConnectorClientException("Unable to detach - response code: " + conn.getResponseCode());
			else
				bConnected = false;
		} catch (IOException e) {
			throw new ConnectorClientException("Unable to detach - IOException in connection", e);
		}
	}

	/**
	 * reads the content of an http answer into a resulting string the length of the expected content if given by the
	 * length parameter
	 *
	 * NOTE(review): busy-waits on available() in WAIT_INTERVAL steps up to
	 * MAX_WAIT, and decodes bytes with the platform default charset; also
	 * assumes length == char count — verify for multi-byte responses.
	 *
	 * @param content an InputStream
	 * @param length an int
	 *
	 * @return a String
	 *
	 */
	private String readHttpContent(InputStream content, int length) throws UnableToConnectException, IOException {
		int iWait = 0;
		while (content.available() < length) {
			iWait += WAIT_INTERVAL;
			try {
				Thread.sleep(WAIT_INTERVAL);
			} catch (InterruptedException e) {
			}
			if (iWait >= MAX_WAIT)
				throw new UnableToConnectException("Timeout at " + iWait + " milliseconds!");
		}
		InputStreamReader isr = new InputStreamReader(content);
		char[] con = new char[length];
		isr.read(con);
		String sContent = new String(con);
		return sContent;
	}

	/**
	 * Set the code class to be used for encoding message parameters
	 *
	 * @param className a String
	 *
	 */
	public void setCoderClass(String className) {
		coderClass = className;
	}

	/**
	 * returns the currently used coder class
	 *
	 * @return a String
	 *
	 */
	public String getCoder() {
		return coderClass;
	}

	/**
	 * returns if the client is currently connected
	 *
	 * @return a boolean
	 *
	 */
	public boolean isConnected() {
		return isConnected(false);
	}

	/**
	 * returns if the client is currently connected
	 *
	 * depending on the tryTouch parameter a touchSession is invoked before returning the connective flag
	 *
	 * @param tryTouch a boolean
	 *
	 * @return a boolean
	 *
	 */
	public boolean isConnected(boolean tryTouch) {
		try {
			if (tryTouch)
				touchSession();
		} catch (ConnectorClientException e) {
			bConnected = false;
			return false;
		}
		return bConnected;
	}

	/**
	 * returns the id of the currently used session at the server
	 *
	 * @return a String
	 *
	 */
	public String getSessionId() {
		return sSessionId;
	}

	/**
	 * returns the timeout in milliseconds of the currently open session.
	 *
	 * @return a long
	 *
	 */
	public long getTimeoutMs() {
		return lTimeOutMs;
	}

	/**
	 * Invokes a service method at the server. If not connected a connections attempt will be performed. The result of
	 * the call will be returned as an object.
	 *
	 *
	 * @param service a String
	 * @param method a String
	 * @param params an Object[]
	 *
	 * @return an Object
	 *
	 * @exception UnableToConnectException
	 * @exception AuthenticationFailedException
	 * @exception TimeoutException
	 * @exception ParameterTypeNotImplementedException
	 * @exception ServerErrorException
	 * @exception AccessDeniedException
	 * @exception NotFoundException
	 * @exception ConnectorClientException
	 *
	 */
	// NOTE(review): the method below continues past the end of this chunk;
	// its signature is kept as-is.
	public Object invoke(String service, String method, Object... 
params) throws UnableToConnectException, AuthenticationFailedException, TimeoutException, ServerErrorException, AccessDeniedException, NotFoundException, ConnectorClientException { if (!bConnected) connect(); try { URL url = new URL((bUseHttps) ? "https" : "http", sHost, iPort, "/" + service + "/" + method + "?SESSION=" + sSessionId); HttpURLConnection connection = (HttpURLConnection) url.openConnection(); connection.setRequestProperty("Content-Type", "text/xml"); if (params != null && params.length > 0) { String paramCode = getParameterCoding(params); // ok, code the parameters into a post call connection.setDoOutput(true); OutputStreamWriter osw = new OutputStreamWriter(connection.getOutputStream()); osw.write(paramCode); osw.flush(); osw.close(); } int responseCode = connection.getResponseCode(); if (responseCode == HttpURLConnection.HTTP_FORBIDDEN) throw new AccessDeniedException(); if (responseCode == HttpURLConnection.HTTP_NOT_FOUND // method unavailable || responseCode == 503) // service unavailable throw new NotFoundException(); if (responseCode == HttpURLConnection.HTTP_INTERNAL_ERROR) { String mess = "Remote Exception during invocation"; try { if ("text/xml".equals(connection.getContentType())) { // we have a object array response describing the exception Object[] result = (Object[]) interpretInvocationResult( (InputStream) connection.getErrorStream()); throw new ServerErrorException((Exception) result[3]); } else // simple text message (to stay compatible to older versions of the connector mess = readHttpContent((InputStream) connection.getErrorStream(), connection.getContentLength()); } catch (ServerErrorException e) { throw e; } catch (Exception e) { e.printStackTrace(); mess += "Unable to create cause Exception: " + e; } throw new ServerErrorException(mess); } if (responseCode == HttpURLConnection.HTTP_PRECON_FAILED) throw new TimeoutException(); if (responseCode == HttpURLConnection.HTTP_NO_CONTENT) return null; if (responseCode == 
HttpURLConnection.HTTP_NOT_ACCEPTABLE) throw new ConnectorClientException("The server could no read the invocation parameters!"); if (responseCode == HttpURLConnection.HTTP_NOT_IMPLEMENTED) { // String mess = ""; //readHttpContent( (InputStream) connection.getContent(), // connection.getContentLength() ); throw new ReturnTypeNotImplementedException(); } String type = connection.getContentType(); // int length = connection.getContentLength(); if (!"text/xml".equals(type)) throw new ConnectorClientException( "Problems to interpret the server's answer - content type not text/xml!"); Object result = interpretInvocationResult((InputStream) connection.getContent()); return result; } catch (IOException e) { bConnected = false; throw new UnableToConnectException("IOException with the connection!", e); } } /** * writes an encoding of the object parameter array to the outputStream * * @param params an Object[] * * @exception ParameterTypeNotImplementedException * @exception IOException * @exception ConnectorClientException * */ public String getParameterCoding(Object[] params) throws ParameterTypeNotImplementedException, IOException, ConnectorClientException { try { ParamCoder coder = null; StringWriter sw = new StringWriter(); try { @SuppressWarnings("rawtypes") Constructor constr = Class.forName(coderClass).getConstructor(new Class[] { Writer.class }); coder = (ParamCoder) constr.newInstance(new Object[] { sw }); } catch (Exception e) { throw new ConnectorClientException("Unable to loadecoader!", e); } coder.header(params.length); for (int i = 0; i < params.length; i++) { coder.write(params[i]); } coder.footer(); return sw.toString(); } catch (i5.las2peer.httpConnector.coder.ParameterTypeNotImplementedException e) { throw new ParameterTypeNotImplementedException( "One or more of the invocation parameters could not be coded for transfer!", e); } } /** * tries to touch the session at the server * * @exception ConnectorClientException * */ public void touchSession() throws 
ConnectorClientException { if (!bConnected) return; try { URL url = new URL(bUseHttps ? "https" : "http", sHost, iPort, "/touchsession?SESSION=" + sSessionId); HttpURLConnection conn = (HttpURLConnection) url.openConnection(); // int length = conn.getContentLength(); int response = conn.getResponseCode(); if (response == 401) { // 401 = UNAUTHORIZED throw new ConnectorClientException("The Session I tried to access does not exist!!"); } else if (response != HttpURLConnection.HTTP_NO_CONTENT) { throw new ConnectorClientException("Unkown Answer!"); } // ok, touch was successfull } catch (IOException e) { bConnected = false; throw new ConnectorClientException("I/O-Exception in the http connection!", e); } } /** * interprets the content of a open session request an sets the attributes of the connection * * @param content a String * * @exception InvalidServerAnswerException * */ private void interpretSessionContent(String content) throws InvalidServerAnswerException { String[] lines = content.split("\\s*\r?\n\\s*"); if (!lines[0].matches("<\\?xml\\s+version=\"1.0\"\\s*\\?>")) throw new InvalidServerAnswerException("answer is not xml conform ( lacking header )"); if (lines[1].matches("<session persistent=\"true\">")) bIsPersistent = true; else if (!lines[1].trim().matches("<session>")) throw new InvalidServerAnswerException( "answer has not the expected root node (<session>)" + lines[1] + "..." 
+ content); Matcher m = Pattern.compile("<id>([^>]+)</id>").matcher(lines[2]); if (m.matches()) { sSessionId = m.group(1); } else throw new InvalidServerAnswerException("first element of session is not the id!"); m = Pattern.compile("<timeout>([0-9]+)</timeout>").matcher(lines[3]); if (m.matches()) { lTimeOutMs = Long.valueOf(m.group(1)).longValue(); } else throw new InvalidServerAnswerException("Second element of session is not the timeout!"); m = Pattern.compile("<outdate>([0-9]+)</outdate>").matcher(lines[4]); if (m.matches()) { lOutdateS = Long.valueOf(m.group(1)).longValue(); } } /** * tries to interpret the content of the urlConnections (given as InputStream) either as a single object or an * array. * * @param content an InputStream * * @return an Object * * @exception ConnectorClientException * */ private Object interpretInvocationResult(InputStream content) throws ConnectorClientException { ParamDecoder decoder = null; try { @SuppressWarnings("rawtypes") Constructor constr = Class.forName(decoderClass).getConstructor(new Class[] { InputStream.class }); decoder = (ParamDecoder) constr.newInstance(new Object[] { content }); } catch (Exception e) { throw new ConnectorClientException("Unable to instanciate decoder class " + decoderClass + "!", e); } Object result = null; try { int count = decoder.checkHeader(); if (count != 1) result = decoder.decodeArray(); else result = decoder.decodeSingle(); decoder.checkFooter(); } catch (IOException e) { throw new ConnectorClientException("Error with the connections input stream", e); } catch (InvalidCodingException e) { throw new ConnectorClientException("Response of the server is not interpretable as an Object!", e); } return result; } }
/**
 * Copyright 2014 isandlaTech
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.psem2m.isolates.ui.admin.panels;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.swing.table.AbstractTableModel;

/**
 * Simple mutable holder wrapping one entity displayed by the table model.
 *
 * @author ogattaz
 *
 * @param <T> the entity type held by the bean
 */
class CEntityBean<T> {

    private T pEntity;

    /**
     * @param aEntity the wrapped entity
     */
    public CEntityBean(final T aEntity) {

        super();
        pEntity = aEntity;
    }

    /**
     * @return the wrapped entity
     */
    public T getEntity() {

        return pEntity;
    }

    /**
     * @param entity the new wrapped entity
     */
    public void setEntity(final T entity) {

        pEntity = entity;
    }
}

/**
 * Swing table model backed by a list of string rows plus a key-indexed map of
 * entity beans. Row data is produced by the associated {@code CJPanelTable}
 * ({@code buildRowData}/{@code acceptRow}); one column (pColKeyIdx) acts as the
 * unique row key. All mutating and reading accessors are synchronized on this
 * model instance.
 *
 * @author ogattaz
 */
public abstract class CTableModel<T> extends AbstractTableModel {

    private static final long serialVersionUID = -3434843033520198107L;

    // index of the column whose value is the unique key of a row
    private final int pColKeyIdx;

    // the rows, in display order; each row is one String per column
    private final List<String[]> pList = new ArrayList<String[]>();

    // key -> entity bean, parallel to pList
    private final Map<String, CEntityBean<T>> pMap = new HashMap<String, CEntityBean<T>>();

    // panel that knows how to turn entities into row data / text infos
    private final CJPanelTable<T> pPanel;

    // the column titles (also defines the column count)
    private final String[] pTitles;

    /**
     * @param aPanel the owning panel used to build/accept rows
     * @param aTitles the column titles
     * @param aColKeyIdx index of the column used as unique row key
     */
    public CTableModel(final CJPanelTable<T> aPanel, final String[] aTitles, final int aColKeyIdx) {

        super();
        pPanel = aPanel;
        pTitles = aTitles;
        pColKeyIdx = aColKeyIdx;
        traceDebug("%15s| ColKeyIdx=[%d], Titles=[%s]", "<init>", pColKeyIdx, Arrays.asList(pTitles));
    }

    /**
     * Appends one entity as a new row, if the panel accepts it.
     *
     * Caller must hold the model lock.
     *
     * @param aEntity the entity to add
     * @return the index of the new row, or -1 if the row was rejected
     */
    private int addEntity(final T aEntity) {

        final String[] wData = pPanel.buildRowData(aEntity);
        if (pPanel.acceptRow(aEntity, wData)) {
            pList.add(wData);
            final String wKey = getRowKey(wData);
            pMap.put(wKey, buildEntityBean(aEntity));
            traceDebug("%15s| adds row=[%s]", "addEntity", wKey);
            return pList.size() - 1;
        }
        return -1;
    }

    /**
     * Adds one entity and notifies the table listeners.
     *
     * @param aEntity the entity to add (null is ignored)
     * @return true if a row was actually inserted
     */
    public synchronized boolean addRow(final T aEntity) {

        if (aEntity == null) {
            return false;
        }
        traceDebug("%15s| NbExistingRows=[%d], NbRowsToAdd=[%d]", "addRow", pList.size(), 1);
        int wFirstRow = -1;
        int wLastRow = -1;
        // NOTE(review): the method is already synchronized, this inner block is
        // redundant but harmless (reentrant lock on the same monitor)
        synchronized (this) {
            wFirstRow = pList.size();
            wLastRow = addEntity(aEntity);
        }
        // Notifies all listeners that rows in the range [firstRow, lastRow],
        // inclusive, have been inserted.
        if (wLastRow > -1) {
            traceDebug(
                    "%15s| fireTableRowsInserted FirstRow=[%d], wLastRow=[%d]",
                    "addRow", wFirstRow, wLastRow);
            fireTableRowsInserted(wFirstRow, wLastRow);
            return true;
        }
        return false;
    }

    /**
     * Adds a batch of entities and fires a single insertion event covering the
     * whole inserted range.
     *
     * @param aEntities the entities to add (null or empty is ignored)
     * @return true if at least one row was inserted
     */
    public synchronized boolean addRows(final T[] aEntities) {

        if (aEntities == null || aEntities.length == 0) {
            return false;
        }
        traceDebug("%15s| NbExistingRows=[%d], NbRowsToAdd=[%d]", "addRows", pList.size(), aEntities.length);
        int wFirstRow = -1;
        int wLastRow = -1;
        int wNbAdded = 0;
        synchronized (this) {
            wFirstRow = pList.size();
            for (final T wEntity : aEntities) {
                final int wNewRowIdx = addEntity(wEntity);
                if (wNewRowIdx != -1) {
                    wLastRow = wNewRowIdx;
                    wNbAdded++;
                }
            }
        }
        // Notifies all listeners that rows in the range [firstRow, lastRow],
        // inclusive, have been inserted.
        if (wLastRow > -1) {
            traceDebug(
                    "%15s| fireTableRowsInserted FirstRow=[%d], wLastRow=[%d] wNbAdded=[%d]",
                    "addRows", wFirstRow, wLastRow, wNbAdded);
            fireTableRowsInserted(wFirstRow, wLastRow);
            return true;
        }
        return false;
    }

    /**
     * Factory hook - subclasses may wrap entities differently.
     *
     * @param aEntity the entity to wrap
     * @return a new bean holding the entity
     */
    CEntityBean<T> buildEntityBean(final T aEntity) {

        return new CEntityBean<T>(aEntity);
    }

    /**
     * @param aRowIdx the row index
     * @return the panel's text infos for the row's entity, or an error string
     *         when the index is out of bounds
     */
    String buildTextInfos(final int aRowIdx) {

        if (aRowIdx < 0 || aRowIdx > getRowCount() - 1) {
            return String.format("OutBound row index ! RowIdx=[%d] max=[%d]", aRowIdx, getRowCount() - 1);
        }
        return pPanel.buildTextInfos(getEntity(aRowIdx));
    }

    /**
     * Clears all internal state. No table event is fired - intended for
     * component teardown only.
     */
    public synchronized void destroy() {

        pMap.clear();
        pList.clear();
    }

    /*
     * (non-Javadoc)
     *
     * @see javax.swing.table.TableModel#getColumnCount()
     */
    @Override
    public int getColumnCount() {

        return pTitles.length;
    }

    /*
     * (non-Javadoc)
     *
     * @see javax.swing.table.AbstractTableModel#getColumnName(int)
     */
    @Override
    public String getColumnName(final int aColIdx) {

        return pTitles[aColIdx];
    }

    /**
     * @param aRowIdx the row index
     * @return the entity displayed at that row
     */
    T getEntity(final int aRowIdx) {

        return getEntityBean(aRowIdx).getEntity();
    }

    /**
     * @param aRowIdx the row index
     * @return the bean of the entity displayed at that row
     */
    synchronized CEntityBean<T> getEntityBean(final int aRowIdx) {

        return pMap.get(getRowKey(pList.get(aRowIdx)));
    }

    /*
     * (non-Javadoc)
     *
     * @see javax.swing.table.TableModel#getRowCount()
     */
    @Override
    public synchronized int getRowCount() {

        return pList.size();
    }

    /**
     * @param aRowData one row's cell values
     * @return the value of the key column of that row
     */
    private String getRowKey(final String[] aRowData) {

        return aRowData[pColKeyIdx];
    }

    /*
     * (non-Javadoc)
     *
     * @see javax.swing.table.TableModel#getValueAt(int, int)
     */
    @Override
    public synchronized Object getValueAt(final int row, final int col) {

        // traceDebug("%15s| NbExistingRows=[%d] row=[%d] col=[%d]",
        // "getValueAt",
        // pList.size(), row, col);

        // defensive bounds check: returns null instead of throwing for stale
        // indices coming from Swing repaints
        if (row > -1 && row < pList.size()) {
            final String[] rowContent = pList.get(row);
            if (rowContent != null && col > -1 && col < rowContent.length) {
                return rowContent[col];
            }
        }
        return null;
    }

    /**
     * Linear search for the row whose key column equals the given key.
     *
     * @param aKey the row key
     * @return the row index, or -1 if not found
     */
    synchronized int indexOf(final String aKey) {

        int wRowIdx = -1;
        int wI = 0;
        for (final String[] wRowData : pList) {
            if (wRowData[pColKeyIdx].equals(aKey)) {
                wRowIdx = wI;
                break;
            }
            wI++;
        }
        return wRowIdx;
    }

    /*
     * (non-Javadoc)
     *
     * @see javax.swing.table.AbstractTableModel#isCellEditable(int, int)
     */
    @Override
    public boolean isCellEditable(final int row, final int col) {

        return false;
    }

    /**
     * Removes every row and fires a single deletion event over the old range.
     */
    public synchronized void removeAllRows() {

        traceDebug("%15s| NbExistingRows=[%d]", "removeAllRows", pList.size());
        if (pList.size() > 0) {
            final int wFirstRow = 0;
            // the range must be computed BEFORE clearing the list
            final int wLastRow = pList.size() - 1;
            pMap.clear();
            pList.clear();
            traceDebug(
                    "%15s| fireTableRowsDeleted FirstRow=[%d], wLastRow=[%d]",
                    "removeAllRows", wFirstRow, wLastRow);
            fireTableRowsDeleted(wFirstRow, wLastRow);
        }
    }

    /**
     * Removes the row displaying the given entity (matched via its row key) and
     * notifies the listeners.
     *
     * @param aEntity the entity whose row must be removed
     */
    public synchronized void removeRow(final T aEntity) {

        traceDebug("%15s| NbExistingRows=[%d]", "removeRow", pList.size());
        final int wRowIdx;
        synchronized (this) {
            final String wKey = pPanel.buildRowKey(aEntity);
            wRowIdx = indexOf(wKey);
            if (wRowIdx > -1) {
                pMap.remove(wKey);
                pList.remove(wRowIdx);
            }
        }
        if (wRowIdx > -1) {
            traceDebug(
                    "%15s| fireTableRowsDeleted FirstRow=[%d], wLastRow=[%d]",
                    "removeRow", wRowIdx, wRowIdx);
            fireTableRowsDeleted(wRowIdx, wRowIdx);
        }
    }

    /**
     * Updates the row displaying the given entity in place (matched via its row
     * key), or appends it as a new row if no existing row matches.
     *
     * @param aEntity the entity to set
     */
    public synchronized void setRow(final T aEntity) {

        traceDebug("%15s| NbExistingRows=[%d]", "setRow", pList.size());
        int wRowIdx = -1;
        synchronized (this) {
            final String[] wData = pPanel.buildRowData(aEntity);
            final String wKey = getRowKey(wData);
            wRowIdx = indexOf(wKey);
            if (wRowIdx > -1) {
                // overwrite the cells of the existing row one by one
                int wI = 0;
                for (final String wCellData : wData) {
                    pList.get(wRowIdx)[wI] = wCellData;
                    wI++;
                }
                pMap.put(wKey, buildEntityBean(aEntity));
                traceDebug("%15s| sets row=[%s]", "setRow", wKey);
            }
        }
        if (wRowIdx > -1) {
            traceDebug(
                    "%15s| fireTableRowsUpdated FirstRow=[%d], wLastRow=[%d]",
                    "setRow", wRowIdx, wRowIdx);
            fireTableRowsUpdated(wRowIdx, wRowIdx);
        } else {
            addRow(aEntity);
        }
    }

    /**
     * Replaces the whole content of the model with the given entities.
     *
     * @param aEntities the new content
     */
    public synchronized void setRows(final T[] aEntities) {

        traceDebug("%15s| NbExistingRows=[%d]", "setRows", pList.size());
        synchronized (this) {
            removeAllRows();
            addRows(aEntities);
        }
    }

    /*
     * (non-Javadoc)
     *
     * @see javax.swing.table.AbstractTableModel#setValueAt(java.lang.Object,
     * int, int)
     */
    @Override
    public synchronized void setValueAt(final Object value, final int row, final int col) {

        final String[] rowContent = pList.get(row);
        if (rowContent != null && col > -1 && col < rowContent.length) {
            pList.get(row)[col] = (String) value;
            fireTableCellUpdated(row, col);
        }
    }

    /**
     * Debug trace hook - currently disabled (no-op).
     *
     * @param aFormat a format string
     * @param aArgs the format arguments
     */
    private void traceDebug(final String aFormat, final Object... aArgs) {

        // System.out.println(String.format("[%25s] %s", pPanel.getClass()
        // .getSimpleName(), String.format(aFormat, aArgs)));
    }
}
/*
 * Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hazelcast.internal.adapter;

import com.hazelcast.cache.HazelcastExpiryPolicy;
import com.hazelcast.cache.ICache;
import com.hazelcast.cache.impl.HazelcastServerCachingProvider;
import com.hazelcast.config.CacheConfig;
import com.hazelcast.config.CacheConfiguration;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.ICompletableFuture;
import com.hazelcast.query.TruePredicate;
import com.hazelcast.test.HazelcastParallelClassRunner;
import com.hazelcast.test.HazelcastTestSupport;
import com.hazelcast.test.TestHazelcastInstanceFactory;
import com.hazelcast.test.annotation.ParallelTest;
import com.hazelcast.test.annotation.QuickTest;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;

import javax.cache.CacheManager;
import javax.cache.configuration.FactoryBuilder;
import javax.cache.expiry.ExpiryPolicy;
import javax.cache.processor.EntryProcessorResult;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

import static com.hazelcast.cache.impl.HazelcastServerCachingProvider.createCachingProvider;
import static java.util.Arrays.asList;
import static java.util.Collections.singleton;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

/**
 * Verifies that {@code ICacheDataStructureAdapter} delegates each
 * DataStructureAdapter operation to the underlying {@link ICache} correctly,
 * and that operations which the ICache adapter does not support throw
 * {@code MethodNotAvailableException}.
 */
@RunWith(HazelcastParallelClassRunner.class)
@Category({QuickTest.class, ParallelTest.class})
public class ICacheDataStructureAdapterTest extends HazelcastTestSupport {

    // plain cache for most tests
    private ICache<Integer, String> cache;
    // read-through cache backed by ICacheCacheLoader, for the loadAll tests
    private ICache<Integer, String> cacheWithLoader;

    private ICacheDataStructureAdapter<Integer, String> adapter;
    private ICacheDataStructureAdapter<Integer, String> adapterWithLoader;

    @Before
    public void setUp() {
        // one member, two caches: a plain one and a read-through one
        TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory();
        HazelcastInstance hazelcastInstance = factory.newHazelcastInstance();
        HazelcastServerCachingProvider cachingProvider = createCachingProvider(hazelcastInstance);
        CacheManager cacheManager = cachingProvider.getCacheManager();

        CacheConfig<Integer, String> cacheConfig = new CacheConfig<Integer, String>();
        CacheConfiguration<Integer, String> cacheConfigWithLoader = new CacheConfig<Integer, String>()
                .setReadThrough(true)
                .setCacheLoaderFactory(FactoryBuilder.factoryOf(ICacheCacheLoader.class));

        cache = (ICache<Integer, String>) cacheManager.createCache("CacheDataStructureAdapterTest", cacheConfig);
        cacheWithLoader = (ICache<Integer, String>) cacheManager.createCache("CacheDataStructureAdapterLoaderTest",
                cacheConfigWithLoader);

        adapter = new ICacheDataStructureAdapter<Integer, String>(cache);
        adapterWithLoader = new ICacheDataStructureAdapter<Integer, String>(cacheWithLoader);
    }

    @Test
    public void testSize() {
        cache.put(23, "foo");
        cache.put(42, "bar");

        assertEquals(2, adapter.size());
    }

    @Test
    public void testGet() {
        cache.put(42, "foobar");

        String result = adapter.get(42);
        assertEquals("foobar", result);
    }

    @Test
    public void testGetAsync() throws Exception {
        cache.put(42, "foobar");

        Future<String> future = adapter.getAsync(42);
        String result = future.get();
        assertEquals("foobar", result);
    }

    @Test
    public void testSet() {
        adapter.set(23, "test");

        assertEquals("test", cache.get(23));
    }

    @Test
    public void testSetAsync() throws Exception {
        cache.put(42, "oldValue");

        ICompletableFuture<Void> future = adapter.setAsync(42, "newValue");
        Void oldValue = future.get();

        // setAsync never returns the previous value
        assertNull(oldValue);
        assertEquals("newValue", cache.get(42));
    }

    @Test(expected = MethodNotAvailableException.class)
    public void testSetAsyncWithTtl() {
        adapter.setAsync(42, "value", 1, TimeUnit.MILLISECONDS);
    }

    @Test
    public void testSetAsyncWithExpiryPolicy() throws Exception {
        ExpiryPolicy expiryPolicy = new HazelcastExpiryPolicy(1000, 1, 1, TimeUnit.MILLISECONDS);
        adapter.setAsync(42, "value", expiryPolicy).get();

        // the entry may already have expired on a slow machine, so only assert
        // the value/expiry when the entry is still present
        String value = cache.get(42);
        if (value != null) {
            assertEquals("value", value);

            sleepMillis(1100);
            assertNull(cache.get(42));
        }
    }

    @Test
    public void testPut() {
        cache.put(42, "oldValue");

        String oldValue = adapter.put(42, "newValue");

        assertEquals("oldValue", oldValue);
        assertEquals("newValue", cache.get(42));
    }

    @Test
    public void testPutAsync() throws Exception {
        cache.put(42, "oldValue");

        ICompletableFuture<String> future = adapter.putAsync(42, "newValue");
        String oldValue = future.get();

        assertEquals("oldValue", oldValue);
        assertEquals("newValue", cache.get(42));
    }

    @Test(expected = MethodNotAvailableException.class)
    public void testPutAsyncWithTtl() {
        adapter.putAsync(42, "value", 1, TimeUnit.MILLISECONDS);
    }

    @Test
    public void testPutAsyncWithExpiryPolicy() throws Exception {
        cache.put(42, "oldValue");

        ExpiryPolicy expiryPolicy = new HazelcastExpiryPolicy(1000, 1, 1, TimeUnit.MILLISECONDS);
        ICompletableFuture<String> future = adapter.putAsync(42, "newValue", expiryPolicy);
        String oldValue = future.get();
        String newValue = cache.get(42);

        assertEquals("oldValue", oldValue);
        // same expiry-tolerant pattern as testSetAsyncWithExpiryPolicy
        if (newValue != null) {
            assertEquals("newValue", newValue);

            sleepMillis(1100);
            assertNull(cache.get(42));
        }
    }

    @Test(expected = MethodNotAvailableException.class)
    public void testPutTransient() {
        adapter.putTransient(42, "value", 1, TimeUnit.MILLISECONDS);
    }

    @Test
    public void testPutIfAbsent() {
        cache.put(42, "oldValue");

        assertTrue(adapter.putIfAbsent(23, "newValue"));
        assertFalse(adapter.putIfAbsent(42, "newValue"));

        assertEquals("newValue", cache.get(23));
        assertEquals("oldValue", cache.get(42));
    }

    @Test
    public void testPutIfAbsentAsync() throws Exception {
        cache.put(42, "oldValue");

        assertTrue(adapter.putIfAbsentAsync(23, "newValue").get());
        assertFalse(adapter.putIfAbsentAsync(42, "newValue").get());

        assertEquals("newValue", cache.get(23));
        assertEquals("oldValue", cache.get(42));
    }

    @Test
    public void testReplace() {
        cache.put(42, "oldValue");

        String oldValue = adapter.replace(42, "newValue");

        assertEquals("oldValue", oldValue);
        assertEquals("newValue", cache.get(42));
    }

    @Test
    public void testReplaceWithOldValue() {
        cache.put(42, "oldValue");

        // replace must fail for a non-matching expected value
        assertFalse(adapter.replace(42, "foobar", "newValue"));
        assertTrue(adapter.replace(42, "oldValue", "newValue"));

        assertEquals("newValue", cache.get(42));
    }

    @Test
    public void testRemove() {
        cache.put(23, "value-23");
        assertTrue(cache.containsKey(23));

        assertEquals("value-23", adapter.remove(23));
        assertFalse(cache.containsKey(23));
    }

    @Test
    public void testRemoveWithOldValue() {
        cache.put(23, "value-23");
        assertTrue(cache.containsKey(23));

        // remove must fail for a non-matching expected value
        assertFalse(adapter.remove(23, "foobar"));
        assertTrue(adapter.remove(23, "value-23"));
        assertFalse(cache.containsKey(23));
    }

    @Test
    public void testRemoveAsync() throws Exception {
        cache.put(23, "value-23");
        assertTrue(cache.containsKey(23));

        String value = adapter.removeAsync(23).get();
        assertEquals("value-23", value);

        assertFalse(cache.containsKey(23));
    }

    @Test
    public void testDelete() {
        cache.put(23, "value-23");
        assertTrue(cache.containsKey(23));

        adapter.delete(23);
        assertFalse(cache.containsKey(23));
    }

    @Test
    public void testDeleteAsync() throws Exception {
        cache.put(23, "value-23");
        assertTrue(cache.containsKey(23));

        adapter.deleteAsync(23).get();
        assertFalse(cache.containsKey(23));
    }

    @Test(expected = MethodNotAvailableException.class)
    public void testEvict() {
        adapter.evict(23);
    }

    @Test
    public void testInvoke() {
        cache.put(23, "value-23");
        cache.put(42, "value-42");

        // the entry processor replaces "value" by "newValue" in the stored value
        String result = adapter.invoke(23, new ICacheReplaceEntryProcessor(), "value", "newValue");
        assertEquals("newValue-23", result);

        assertEquals("newValue-23", cache.get(23));
        assertEquals("value-42", cache.get(42));
    }

    @Test(expected = MethodNotAvailableException.class)
    public void testExecuteOnKey() {
        adapter.executeOnKey(23, new IMapReplaceEntryProcessor("value", "newValue"));
    }

    @Test(expected = MethodNotAvailableException.class)
    public void testExecuteOnKeys() {
        Set<Integer> keys = new HashSet<Integer>(singleton(23));
        adapter.executeOnKeys(keys, new IMapReplaceEntryProcessor("value", "newValue"));
    }

    @Test(expected = MethodNotAvailableException.class)
    public void testExecuteOnEntries() {
        adapter.executeOnEntries(new IMapReplaceEntryProcessor("value", "newValue"));
    }

    @Test(expected = MethodNotAvailableException.class)
    public void testExecuteOnEntriesWithPredicate() {
        adapter.executeOnEntries(new IMapReplaceEntryProcessor("value", "newValue"), TruePredicate.INSTANCE);
    }

    @Test
    public void testContainsKey() {
        cache.put(23, "value-23");

        assertTrue(adapter.containsKey(23));
        assertFalse(adapter.containsKey(42));
    }

    @Test(expected = MethodNotAvailableException.class)
    public void testLoadAll() {
        adapterWithLoader.loadAll(true);
    }

    @Test(expected = MethodNotAvailableException.class)
    public void testLoadAllWithKeys() {
        adapterWithLoader.loadAll(Collections.<Integer>emptySet(), true);
    }

    @Test
    public void testLoadAllWithListener() {
        ICacheCompletionListener listener = new ICacheCompletionListener();
        cacheWithLoader.put(23, "value-23");

        adapterWithLoader.loadAll(Collections.singleton(23), true, listener);
        listener.await();

        // NOTE(review): the reloaded value is expected to be produced by
        // ICacheCacheLoader (defined elsewhere) - presumably it rewrites
        // "value-23" to "newValue-23"; verify against that class
        assertEquals("newValue-23", cacheWithLoader.get(23));
    }

    @Test
    public void testGetAll() {
        cache.put(23, "value-23");
        cache.put(42, "value-42");

        Map<Integer, String> expectedResult = new HashMap<Integer, String>();
        expectedResult.put(23, "value-23");
        expectedResult.put(42, "value-42");

        Map<Integer, String> result = adapter.getAll(expectedResult.keySet());
        assertEquals(expectedResult, result);
    }

    @Test
    public void testPutAll() {
        Map<Integer, String> expectedResult = new HashMap<Integer, String>();
        expectedResult.put(23, "value-23");
        expectedResult.put(42, "value-42");

        adapter.putAll(expectedResult);

        assertEquals(expectedResult.size(), cache.size());
        for (Integer key : expectedResult.keySet()) {
            assertTrue(cache.containsKey(key));
        }
    }

    @Test
    public void testRemoveAll() {
        cache.put(23, "value-23");
        cache.put(42, "value-42");

        adapter.removeAll();

        assertEquals(0, cache.size());
    }

    @Test
    public void testRemoveAllWithKeys() {
        cache.put(23, "value-23");
        cache.put(42, "value-42");

        adapter.removeAll(singleton(42));

        assertEquals(1, cache.size());
        assertTrue(cache.containsKey(23));
        assertFalse(cache.containsKey(42));
    }

    @Test(expected = MethodNotAvailableException.class)
    public void testEvictAll() {
        adapter.evictAll();
    }

    @Test
    public void testInvokeAll() {
        cache.put(23, "value-23");
        cache.put(42, "value-42");
        cache.put(65, "value-65");

        // 88 is absent: the result map must only contain processed keys
        Set<Integer> keys = new HashSet<Integer>(asList(23, 65, 88));
        Map<Integer, EntryProcessorResult<String>> resultMap = adapter.invokeAll(keys,
                new ICacheReplaceEntryProcessor(), "value", "newValue");

        assertEquals(2, resultMap.size());
        assertEquals("newValue-23", resultMap.get(23).get());
        assertEquals("newValue-65", resultMap.get(65).get());

        assertEquals("newValue-23", cache.get(23));
        assertEquals("value-42", cache.get(42));
        assertEquals("newValue-65", cache.get(65));
        assertNull(cache.get(88));
    }

    @Test
    public void testClear() {
        cache.put(23, "foobar");

        adapter.clear();

        assertEquals(0, cache.size());
    }

    @Test
    public void testClose() {
        adapter.close();

        assertTrue(cache.isClosed());
    }

    @Test
    public void testDestroy() {
        adapter.destroy();

        assertTrue(cache.isDestroyed());
    }

    @Test(expected = MethodNotAvailableException.class)
    public void testGetLocalMapStats() {
        adapter.getLocalMapStats();
    }
}
/*
 * Copyright (c) 2008-2016 Haulmont.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package com.haulmont.cuba.core.entity;

import com.google.common.base.MoreObjects;
import com.haulmont.bali.util.Dom4j;
import com.haulmont.chile.core.annotations.MetaProperty;
import com.haulmont.chile.core.annotations.NamePattern;
import com.haulmont.cuba.core.app.scheduled.MethodParameterInfo;
import com.haulmont.cuba.core.entity.annotation.SystemLevel;
import com.haulmont.cuba.core.global.PersistenceHelper;
import org.apache.commons.lang3.StringUtils;
import org.dom4j.Document;
import org.dom4j.DocumentHelper;
import org.dom4j.Element;

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Table;
import javax.persistence.Transient;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

/**
 * Entity that stores information about a scheduled task: what to call (bean
 * method, class or script), when and how often to run it, and bookkeeping
 * about the last execution.
 */
@Entity(name = "sys$ScheduledTask")
@Table(name = "SYS_SCHEDULED_TASK")
@NamePattern("#name|beanName,methodName,className,scriptName")
@SystemLevel
public class ScheduledTask extends BaseUuidEntity implements Creatable, Updatable, SoftDelete, TenantEntity {

    private static final long serialVersionUID = -2330884126746644884L;

    // ScheduledTask has no @Version field because it is locked pessimistically when processed.
    // Moreover unfortunately OpenJPA issues a lot of unnecessary "select version from ..." when loads versioned
    // objects with PESSIMISTIC lock type.

    // --- audit columns (Creatable / Updatable / SoftDelete) ---

    @Column(name = "CREATE_TS")
    protected Date createTs;

    @Column(name = "CREATED_BY", length = 50)
    protected String createdBy;

    @Column(name = "UPDATE_TS")
    protected Date updateTs;

    @Column(name = "UPDATED_BY", length = 50)
    protected String updatedBy;

    @Column(name = "DELETE_TS")
    protected Date deleteTs;

    @Column(name = "DELETED_BY", length = 50)
    protected String deletedBy;

    // stored as the id of ScheduledTaskDefinedBy (see getDefinedBy/setDefinedBy)
    @Column(name = "DEFINED_BY")
    protected String definedBy;

    @SystemLevel
    @Column(name = "SYS_TENANT_ID")
    protected String sysTenantId;

    // --- what to execute: either bean+method, a class, or a script ---

    @Column(name = "BEAN_NAME")
    protected String beanName;

    @Column(name = "METHOD_NAME")
    protected String methodName;

    @Column(name = "CLASS_NAME")
    protected String className;

    @Column(name = "SCRIPT_NAME")
    protected String scriptName;

    @Column(name = "USER_NAME")
    protected String userName;

    @Column(name = "IS_SINGLETON")
    protected Boolean singleton;

    @Column(name = "IS_ACTIVE")
    protected Boolean active;

    // --- scheduling parameters ---

    @Column(name = "PERIOD_")
    protected Integer period;

    @Column(name = "TIMEOUT")
    protected Integer timeout;

    @Column(name = "START_DATE")
    protected Date startDate;

    @Column(name = "CRON")
    protected String cron;

    // stored as the id of SchedulingType (see the corresponding accessors)
    @Column(name = "SCHEDULING_TYPE")
    protected String schedulingType;

    @Column(name = "TIME_FRAME")
    protected Integer timeFrame;

    @Column(name = "START_DELAY")
    protected Integer startDelay;

    @Column(name = "PERMITTED_SERVERS")
    protected String permittedServers;

    @Column(name = "LOG_START")
    protected Boolean logStart;

    @Column(name = "LOG_FINISH")
    protected Boolean logFinish;

    // --- last execution bookkeeping ---

    @Column(name = "LAST_START_TIME")
    protected Date lastStartTime;

    @Column(name = "LAST_START_SERVER")
    protected String lastStartServer;

    @Column(name = "METHOD_PARAMS")
    protected String methodParamsXml;

    @Column(name = "DESCRIPTION", length = 1000)
    protected String description;

    //the following field is part of private API, please do not use it
    @Transient
    protected volatile long currentStartTimestamp;

    @Override
    public Date getCreateTs() {
        return createTs;
    }

    @Override
    public void setCreateTs(Date createTs) {
        this.createTs = createTs;
    }

    @Override
    public String getCreatedBy() {
        return createdBy;
    }

    @Override
    public void setCreatedBy(String createdBy) {
        this.createdBy = createdBy;
    }

    @Override
    public Date getUpdateTs() {
        return updateTs;
    }

    @Override
    public void setUpdateTs(Date updateTs) {
        this.updateTs = updateTs;
    }

    @Override
    public String getUpdatedBy() {
        return updatedBy;
    }

    @Override
    public void setUpdatedBy(String updatedBy) {
        this.updatedBy = updatedBy;
    }

    // soft delete: an entity is "deleted" when deleteTs is set
    @Override
    public Boolean isDeleted() {
        return deleteTs != null;
    }

    @Override
    public Date getDeleteTs() {
        return deleteTs;
    }

    @Override
    public void setDeleteTs(Date deleteTs) {
        this.deleteTs = deleteTs;
    }

    @Override
    public String getDeletedBy() {
        return deletedBy;
    }

    public String getSysTenantId() {
        return sysTenantId;
    }

    public void setSysTenantId(String sysTenantId) {
        this.sysTenantId = sysTenantId;
    }

    @Override
    public void setDeletedBy(String deletedBy) {
        this.deletedBy = deletedBy;
    }

    public String getBeanName() {
        return beanName;
    }

    public void setBeanName(String beanName) {
        this.beanName = beanName;
    }

    public String getMethodName() {
        return methodName;
    }

    public void setMethodName(String methodName) {
        this.methodName = methodName;
    }

    public String getUserName() {
        return userName;
    }

    public void setUserName(String userName) {
        this.userName = userName;
    }

    public Boolean getSingleton() {
        return singleton;
    }

    public void setSingleton(Boolean singleton) {
        this.singleton = singleton;
    }

    public Integer getTimeout() {
        return timeout;
    }

    public void setTimeout(Integer timeout) {
        this.timeout = timeout;
    }

    public Boolean getActive() {
        return active;
    }

    public void setActive(Boolean active) {
        this.active = active;
    }

    public Integer getPeriod() {
        return period;
    }

    public void setPeriod(Integer period) {
        this.period = period;
    }

    public Date getStartDate() {
        return startDate;
    }

    public void setStartDate(Date startDate) {
        this.startDate = startDate;
    }

    public Integer getTimeFrame() {
        return timeFrame;
    }

    public void setTimeFrame(Integer timeFrame) {
        this.timeFrame = timeFrame;
    }

    public Integer getStartDelay() {
        return startDelay;
    }

    public void setStartDelay(Integer startDelay) {
        this.startDelay = startDelay;
    }

    public String getPermittedServers() {
        return permittedServers;
    }

    public void setPermittedServers(String permittedServers) {
        this.permittedServers = permittedServers;
    }

    public Boolean getLogStart() {
        return logStart;
    }

    public void setLogStart(Boolean logStart) {
        this.logStart = logStart;
    }

    public Boolean getLogFinish() {
        return logFinish;
    }

    public void setLogFinish(Boolean logFinish) {
        this.logFinish = logFinish;
    }

    public Date getLastStartTime() {
        return lastStartTime;
    }

    public void setLastStartTime(Date lastStartTime) {
        this.lastStartTime = lastStartTime;
    }

    // convenience accessor: epoch millis of the last start, 0 if never started
    public long getLastStart() {
        return lastStartTime == null ? 0 : lastStartTime.getTime();
    }

    public String getLastStartServer() {
        return lastStartServer;
    }

    public void setLastStartServer(String lastStartServer) {
        this.lastStartServer = lastStartServer;
    }

    // definedBy is persisted as the enum id string
    public ScheduledTaskDefinedBy getDefinedBy() {
        return ScheduledTaskDefinedBy.fromId(definedBy);
    }

    public void setDefinedBy(ScheduledTaskDefinedBy definedBy) {
        this.definedBy = ScheduledTaskDefinedBy.getId(definedBy);
    }

    public String getClassName() {
        return className;
    }

    public void setClassName(String className) {
        this.className = className;
    }

    public String getScriptName() {
        return scriptName;
    }

    public void setScriptName(String scriptName) {
        this.scriptName = scriptName;
    }

    public String getMethodParamsXml() {
        return methodParamsXml;
    }

    public void setMethodParamsXml(String methodParamsXml) {
        this.methodParamsXml = methodParamsXml;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public String getCron() {
        return cron;
    }

    public void setCron(String cron) {
        this.cron = cron;
    }

    // NOTE(review): method body is cut off at the end of this chunk - the
    // remainder continues in the next chunk of the file
    public SchedulingType getSchedulingType() {
        return
SchedulingType.fromId(schedulingType); } public void setSchedulingType(SchedulingType schedulingType) { this.schedulingType = SchedulingType.getId(schedulingType); } public long getCurrentStartTimestamp() { return currentStartTimestamp; } public void setCurrentStartTimestamp(long currentStartTimestamp) { this.currentStartTimestamp = currentStartTimestamp; } public List<MethodParameterInfo> getMethodParameters() { ArrayList<MethodParameterInfo> result = new ArrayList<>(); String xml = getMethodParamsXml(); if (!StringUtils.isBlank(xml)) { Document doc = Dom4j.readDocument(xml); List<Element> elements = Dom4j.elements(doc.getRootElement(), "param"); for (Element paramEl : elements) { String typeName = paramEl.attributeValue("type"); String name = paramEl.attributeValue("name"); Object value = paramEl.getText(); result.add(new MethodParameterInfo(typeName, name, value)); } } return result; } public void updateMethodParameters(List<MethodParameterInfo> params) { Document doc = DocumentHelper.createDocument(); Element paramsEl = doc.addElement("params"); for (MethodParameterInfo param : params) { Element paramEl = paramsEl.addElement("param"); paramEl.addAttribute("type", param.getType().getName()); paramEl.addAttribute("name", param.getName()); paramEl.setText(param.getValue() != null ? param.getValue().toString() : ""); } setMethodParamsXml(Dom4j.writeDocument(doc, true)); } @MetaProperty public String name() { if (beanName != null && methodName != null) { return beanName + "." 
+ methodName; } else if (className != null) { return className; } else { return scriptName; } } @Override public String toString() { MoreObjects.ToStringHelper builder = MoreObjects.toStringHelper("ScheduledTask") .omitNullValues() .addValue(id); String[] fields = new String[]{"beanName", "methodName", "className", "scriptName", "singleton", "period", "cron", "startDate"}; for (String field : fields) { if (PersistenceHelper.isLoaded(this, field)) { Object value = getValue(field); if (value instanceof Date) { DateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS"); value = df.format(value); } builder.add(field, value); } } return builder.toString(); } @MetaProperty public String getMethodParametersString() { StringBuilder sb = new StringBuilder(); int count = 0; List<MethodParameterInfo> parameters = getMethodParameters(); for (MethodParameterInfo param : parameters) { sb.append(param.getType().getSimpleName()) .append(" ") .append(param.getName()) .append(" = ") .append(param.getValue()); if (++count != parameters.size()) sb.append(", "); } return sb.toString(); } }
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
// NOTE(review): generated source — regenerate via AutoRest instead of hand-editing.

package com.azure.resourcemanager.iothub.models;

import com.azure.core.annotation.Fluent;
import com.azure.core.util.logging.ClientLogger;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;

/** The properties related to service bus topic endpoint types. */
@Fluent
public final class RoutingServiceBusTopicEndpointProperties {
    @JsonIgnore private final ClientLogger logger = new ClientLogger(RoutingServiceBusTopicEndpointProperties.class);

    /*
     * Id of the service bus topic endpoint
     */
    @JsonProperty(value = "id")
    private String id;

    /*
     * The connection string of the service bus topic endpoint.
     */
    @JsonProperty(value = "connectionString")
    private String connectionString;

    /*
     * The url of the service bus topic endpoint. It must include the protocol
     * sb://
     */
    @JsonProperty(value = "endpointUri")
    private String endpointUri;

    /*
     * Queue name on the service bus topic
     */
    @JsonProperty(value = "entityPath")
    private String entityPath;

    /*
     * Method used to authenticate against the service bus topic endpoint
     */
    @JsonProperty(value = "authenticationType")
    private AuthenticationType authenticationType;

    /*
     * Managed identity properties of routing service bus topic endpoint.
     */
    @JsonProperty(value = "identity")
    private ManagedIdentity identity;

    /*
     * The name that identifies this endpoint. The name can only include
     * alphanumeric characters, periods, underscores, hyphens and has a maximum
     * length of 64 characters. The following names are reserved: events,
     * fileNotifications, $default. Endpoint names must be unique across
     * endpoint types. The name need not be the same as the actual topic name.
     */
    @JsonProperty(value = "name", required = true)
    private String name;

    /*
     * The subscription identifier of the service bus topic endpoint.
     */
    @JsonProperty(value = "subscriptionId")
    private String subscriptionId;

    /*
     * The name of the resource group of the service bus topic endpoint.
     */
    @JsonProperty(value = "resourceGroup")
    private String resourceGroup;

    /**
     * Get the id property: Id of the service bus topic endpoint.
     *
     * @return the id value.
     */
    public String id() {
        return this.id;
    }

    /**
     * Set the id property: Id of the service bus topic endpoint.
     *
     * @param id the id value to set.
     * @return the RoutingServiceBusTopicEndpointProperties object itself.
     */
    public RoutingServiceBusTopicEndpointProperties withId(String id) {
        this.id = id;
        return this;
    }

    /**
     * Get the connectionString property: The connection string of the service bus topic endpoint.
     *
     * @return the connectionString value.
     */
    public String connectionString() {
        return this.connectionString;
    }

    /**
     * Set the connectionString property: The connection string of the service bus topic endpoint.
     *
     * @param connectionString the connectionString value to set.
     * @return the RoutingServiceBusTopicEndpointProperties object itself.
     */
    public RoutingServiceBusTopicEndpointProperties withConnectionString(String connectionString) {
        this.connectionString = connectionString;
        return this;
    }

    /**
     * Get the endpointUri property: The url of the service bus topic endpoint. It must include the protocol sb://.
     *
     * @return the endpointUri value.
     */
    public String endpointUri() {
        return this.endpointUri;
    }

    /**
     * Set the endpointUri property: The url of the service bus topic endpoint. It must include the protocol sb://.
     *
     * @param endpointUri the endpointUri value to set.
     * @return the RoutingServiceBusTopicEndpointProperties object itself.
     */
    public RoutingServiceBusTopicEndpointProperties withEndpointUri(String endpointUri) {
        this.endpointUri = endpointUri;
        return this;
    }

    /**
     * Get the entityPath property: Queue name on the service bus topic.
     *
     * @return the entityPath value.
     */
    public String entityPath() {
        return this.entityPath;
    }

    /**
     * Set the entityPath property: Queue name on the service bus topic.
     *
     * @param entityPath the entityPath value to set.
     * @return the RoutingServiceBusTopicEndpointProperties object itself.
     */
    public RoutingServiceBusTopicEndpointProperties withEntityPath(String entityPath) {
        this.entityPath = entityPath;
        return this;
    }

    /**
     * Get the authenticationType property: Method used to authenticate against the service bus topic endpoint.
     *
     * @return the authenticationType value.
     */
    public AuthenticationType authenticationType() {
        return this.authenticationType;
    }

    /**
     * Set the authenticationType property: Method used to authenticate against the service bus topic endpoint.
     *
     * @param authenticationType the authenticationType value to set.
     * @return the RoutingServiceBusTopicEndpointProperties object itself.
     */
    public RoutingServiceBusTopicEndpointProperties withAuthenticationType(AuthenticationType authenticationType) {
        this.authenticationType = authenticationType;
        return this;
    }

    /**
     * Get the identity property: Managed identity properties of routing service bus topic endpoint.
     *
     * @return the identity value.
     */
    public ManagedIdentity identity() {
        return this.identity;
    }

    /**
     * Set the identity property: Managed identity properties of routing service bus topic endpoint.
     *
     * @param identity the identity value to set.
     * @return the RoutingServiceBusTopicEndpointProperties object itself.
     */
    public RoutingServiceBusTopicEndpointProperties withIdentity(ManagedIdentity identity) {
        this.identity = identity;
        return this;
    }

    /**
     * Get the name property: The name that identifies this endpoint. The name can only include alphanumeric characters,
     * periods, underscores, hyphens and has a maximum length of 64 characters. The following names are reserved:
     * events, fileNotifications, $default. Endpoint names must be unique across endpoint types. The name need not be
     * the same as the actual topic name.
     *
     * @return the name value.
     */
    public String name() {
        return this.name;
    }

    /**
     * Set the name property: The name that identifies this endpoint. The name can only include alphanumeric characters,
     * periods, underscores, hyphens and has a maximum length of 64 characters. The following names are reserved:
     * events, fileNotifications, $default. Endpoint names must be unique across endpoint types. The name need not be
     * the same as the actual topic name.
     *
     * @param name the name value to set.
     * @return the RoutingServiceBusTopicEndpointProperties object itself.
     */
    public RoutingServiceBusTopicEndpointProperties withName(String name) {
        this.name = name;
        return this;
    }

    /**
     * Get the subscriptionId property: The subscription identifier of the service bus topic endpoint.
     *
     * @return the subscriptionId value.
     */
    public String subscriptionId() {
        return this.subscriptionId;
    }

    /**
     * Set the subscriptionId property: The subscription identifier of the service bus topic endpoint.
     *
     * @param subscriptionId the subscriptionId value to set.
     * @return the RoutingServiceBusTopicEndpointProperties object itself.
     */
    public RoutingServiceBusTopicEndpointProperties withSubscriptionId(String subscriptionId) {
        this.subscriptionId = subscriptionId;
        return this;
    }

    /**
     * Get the resourceGroup property: The name of the resource group of the service bus topic endpoint.
     *
     * @return the resourceGroup value.
     */
    public String resourceGroup() {
        return this.resourceGroup;
    }

    /**
     * Set the resourceGroup property: The name of the resource group of the service bus topic endpoint.
     *
     * @param resourceGroup the resourceGroup value to set.
     * @return the RoutingServiceBusTopicEndpointProperties object itself.
     */
    public RoutingServiceBusTopicEndpointProperties withResourceGroup(String resourceGroup) {
        this.resourceGroup = resourceGroup;
        return this;
    }

    /**
     * Validates the instance.
     *
     * @throws IllegalArgumentException thrown if the instance is not valid.
     */
    public void validate() {
        // Recursively validate the nested identity, then enforce the only required field (name)
        if (identity() != null) {
            identity().validate();
        }
        if (name() == null) {
            throw logger
                .logExceptionAsError(
                    new IllegalArgumentException(
                        "Missing required property name in model RoutingServiceBusTopicEndpointProperties"));
        }
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License
 */
package org.apache.hadoop.hbase.regionserver.wal;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.io.util.Dictionary;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.WritableUtils;

import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import org.apache.hadoop.hbase.wal.WAL;
import org.apache.hadoop.hbase.wal.WALFactory;
import org.apache.hadoop.hbase.wal.WALProvider;

/**
 * A set of static functions for running our custom WAL compression/decompression.
 * Also contains a command line tool to compress and uncompress WALs.
 */
@InterfaceAudience.Private
public class Compressor {
  /**
   * Command line tool to compress and uncompress WALs.
   */
  public static void main(String[] args) throws IOException {
    if (args.length != 2 || args[0].equals("--help") || args[0].equals("-h")) {
      printHelp();
      System.exit(-1);
    }

    Path inputPath = new Path(args[0]);
    Path outputPath = new Path(args[1]);

    transformFile(inputPath, outputPath);
  }

  private static void printHelp() {
    System.err.println("usage: Compressor <input> <output>");
    System.err.println("If <input> WAL is compressed, <output> will be decompressed.");
    System.err.println("If <input> WAL is uncompressed, <output> will be compressed.");
    return;
  }

  // Copies a WAL entry-by-entry, writing the output with the opposite
  // compression setting of the input (compressed -> uncompressed and vice versa).
  private static void transformFile(Path input, Path output)
      throws IOException {
    Configuration conf = HBaseConfiguration.create();

    FileSystem inFS = input.getFileSystem(conf);
    FileSystem outFS = output.getFileSystem(conf);

    WAL.Reader in = WALFactory.createReaderIgnoreCustomClass(inFS, input, conf);
    WALProvider.Writer out = null;

    try {
      if (!(in instanceof ReaderBase)) {
        System.err.println("Cannot proceed, invalid reader type: " + in.getClass().getName());
        return;
      }
      // Detect the input's compression state and invert it for the writer
      boolean compress = ((ReaderBase)in).hasCompression();
      conf.setBoolean(HConstants.ENABLE_WAL_COMPRESSION, !compress);
      out = WALFactory.createWALWriter(outFS, output, conf);

      WAL.Entry e = null;
      while ((e = in.next()) != null) out.append(e);
    } finally {
      in.close();
      if (out != null) {
        out.close();
        out = null;
      }
    }
  }

  /**
   * Reads the next compressed entry and returns it as a byte array
   *
   * Wire format: a status byte; if it equals Dictionary.NOT_IN_DICTIONARY,
   * a vint length followed by the raw bytes (which are added to the dictionary);
   * otherwise the status byte is the high byte of a two-byte dictionary index.
   *
   * @param in the DataInput to read from
   * @param dict the dictionary we use for our read.
   * @return the uncompressed array.
   */
  @Deprecated
  static byte[] readCompressed(DataInput in, Dictionary dict)
      throws IOException {
    byte status = in.readByte();

    if (status == Dictionary.NOT_IN_DICTIONARY) {
      int length = WritableUtils.readVInt(in);
      // if this isn't in the dictionary, we need to add to the dictionary.
      byte[] arr = new byte[length];
      in.readFully(arr);
      if (dict != null) dict.addEntry(arr, 0, length);
      return arr;
    } else {
      // Status here is the higher-order byte of index of the dictionary entry
      // (when its not Dictionary.NOT_IN_DICTIONARY -- dictionary indices are
      // shorts).
      short dictIdx = toShort(status, in.readByte());
      byte[] entry = dict.getEntry(dictIdx);
      if (entry == null) {
        throw new IOException("Missing dictionary entry for index "
            + dictIdx);
      }
      return entry;
    }
  }

  /**
   * Reads a compressed entry into an array.
   * The output into the array ends up length-prefixed.
   *
   * @param to the array to write into
   * @param offset array offset to start writing to
   * @param in the DataInput to read from
   * @param dict the dictionary to use for compression
   *
   * @return the length of the uncompressed data
   */
  @Deprecated
  static int uncompressIntoArray(byte[] to, int offset, DataInput in,
      Dictionary dict) throws IOException {
    byte status = in.readByte();

    if (status == Dictionary.NOT_IN_DICTIONARY) {
      // status byte indicating that data to be read is not in dictionary.
      // if this isn't in the dictionary, we need to add to the dictionary.
      int length = WritableUtils.readVInt(in);
      in.readFully(to, offset, length);
      dict.addEntry(to, offset, length);
      return length;
    } else {
      // the status byte also acts as the higher order byte of the dictionary
      // entry
      short dictIdx = toShort(status, in.readByte());
      byte[] entry;
      try {
        entry = dict.getEntry(dictIdx);
      } catch (Exception ex) {
        throw new IOException("Unable to uncompress the log entry", ex);
      }
      if (entry == null) {
        throw new IOException("Missing dictionary entry for index "
            + dictIdx);
      }
      // now we write the uncompressed value.
      Bytes.putBytes(to, offset, entry, 0, entry.length);
      return entry.length;
    }
  }

  /**
   * Compresses and writes an array to a DataOutput
   *
   * If the data is found in the dictionary, only the two-byte dictionary
   * index is written; otherwise the NOT_IN_DICTIONARY status byte, a vint
   * length, and the raw bytes are written.
   *
   * @param data the array to write.
   * @param out the DataOutput to write into
   * @param dict the dictionary to use for compression
   */
  @Deprecated
  static void writeCompressed(byte[] data, int offset, int length,
      DataOutput out, Dictionary dict)
      throws IOException {
    short dictIdx = Dictionary.NOT_IN_DICTIONARY;
    if (dict != null) {
      dictIdx = dict.findEntry(data, offset, length);
    }
    if (dictIdx == Dictionary.NOT_IN_DICTIONARY) {
      // not in dict
      out.writeByte(Dictionary.NOT_IN_DICTIONARY);
      WritableUtils.writeVInt(out, length);
      out.write(data, offset, length);
    } else {
      out.writeShort(dictIdx);
    }
  }

  // Reassembles a big-endian short from two bytes; dictionary indices must be
  // non-negative (the precondition guards against a corrupt index).
  static short toShort(byte hi, byte lo) {
    short s = (short) (((hi & 0xFF) << 8) | (lo & 0xFF));
    Preconditions.checkArgument(s >= 0);
    return s;
  }
}
/*
 * Copyright (c) 2007 David Crawshaw <david@zentus.com>
 *
 * Permission to use, copy, modify, and/or distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 */
package org.sqlite;

import java.io.IOException;
import java.io.Reader;
import java.math.BigDecimal;
import java.sql.Date;
import java.sql.ParameterMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Time;
import java.sql.Timestamp;
import java.sql.Types;
import java.util.Calendar;

/**
 * SQLite implementation of {@link PreparedStatement} (and its own
 * {@link ParameterMetaData}). Bound parameter values are accumulated in the
 * inherited {@code batch} array; {@code batchPos} marks the start of the
 * current row of parameters.
 */
final class PrepStmt extends Stmt
    implements PreparedStatement, ParameterMetaData, Codes
{
    private int columnCount;
    private int paramCount;

    /**
     * Prepares the given SQL against the database, caches the column and
     * parameter counts, and allocates the initial batch row.
     */
    PrepStmt(Conn conn, String sql) throws SQLException {
        super(conn);

        this.sql = sql;
        db.prepare(this);
        rs.colsMeta = db.column_names(pointer);
        columnCount = db.column_count(pointer);
        paramCount = db.bind_parameter_count(pointer);
        batch = new Object[paramCount];
        batchPos = 0;
    }

    /** Resets the statement and discards all bound parameter values. */
    public void clearParameters() throws SQLException {
        checkOpen();
        db.reset(pointer);
        clearBatch();
    }

    // Safety net: release the native statement handle if the object is GC'd
    // without an explicit close().
    @Override
    protected void finalize() throws SQLException {
        close();
    }

    /**
     * Executes the statement with the currently bound parameters.
     *
     * @return true if the statement produces a result set.
     */
    public boolean execute() throws SQLException {
        checkOpen();
        rs.close();
        db.reset(pointer);
        resultsWaiting = db.execute(this, batch);
        return columnCount != 0;
    }

    /**
     * Executes a query with the currently bound parameters.
     *
     * @throws SQLException if the statement does not return results.
     */
    public ResultSet executeQuery() throws SQLException {
        checkOpen();
        if (columnCount == 0)
            throw new SQLException("query does not return results");
        rs.close();
        db.reset(pointer);
        resultsWaiting = db.execute(this, batch);
        return getResultSet();
    }

    /**
     * Executes an update with the currently bound parameters.
     *
     * @throws SQLException if the statement returns results.
     */
    public int executeUpdate() throws SQLException {
        checkOpen();
        if (columnCount != 0)
            throw new SQLException("query returns results");
        rs.close();
        db.reset(pointer);
        return db.executeUpdate(this, batch);
    }

    /** Executes all batched parameter rows, then clears the batch. */
    @Override
    public int[] executeBatch() throws SQLException {
        if (batchPos == 0)
            return new int[] {};
        try {
            return db.executeBatch(pointer, batchPos / paramCount, batch);
        } finally {
            clearBatch();
        }
    }

    @Override
    public int getUpdateCount() throws SQLException {
        checkOpen();
        if (pointer == 0 || resultsWaiting)
            return -1;
        return db.changes();
    }

    /**
     * Finalizes the current parameter row and starts a new one. The batch
     * array grows by doubling; the previous row's values are copied forward
     * so unbound positions default to the prior row's values.
     */
    public void addBatch() throws SQLException {
        checkOpen();
        batchPos += paramCount;
        if (batchPos + paramCount > batch.length) {
            Object[] nb = new Object[batch.length * 2];
            System.arraycopy(batch, 0, nb, 0, batch.length);
            batch = nb;
        }
        System.arraycopy(batch, batchPos - paramCount, batch, batchPos, paramCount);
    }


    // ParameterMetaData FUNCTIONS //////////////////////////////////

    public ParameterMetaData getParameterMetaData() {
        return this;
    }

    public int getParameterCount() throws SQLException {
        checkOpen();
        return paramCount;
    }

    // SQLite is dynamically typed, so all parameters are reported as VARCHAR
    public String getParameterClassName(int param) throws SQLException {
        checkOpen();
        return "java.lang.String";
    }

    public String getParameterTypeName(int pos) {
        return "VARCHAR";
    }

    public int getParameterType(int pos) {
        return Types.VARCHAR;
    }

    public int getParameterMode(int pos) {
        return parameterModeIn;
    }

    public int getPrecision(int pos) {
        return 0;
    }

    public int getScale(int pos) {
        return 0;
    }

    public int isNullable(int pos) {
        return parameterNullable;
    }

    public boolean isSigned(int pos) {
        return true;
    }

    public Statement getStatement() {
        return this;
    }


    // PARAMETER FUNCTIONS //////////////////////////////////////////

    /**
     * Stores a value at 1-based position {@code pos} in the current batch row.
     */
    private void batch(int pos, Object value) throws SQLException {
        checkOpen();
        if (batch == null)
            batch = new Object[paramCount];
        batch[batchPos + pos - 1] = value;
    }

    // BigDecimal is bound as its string representation to avoid precision loss
    public void setBigDecimal(int pos, BigDecimal value) throws SQLException {
        batch(pos, value == null ? null : value.toString());
    }

    public void setBoolean(int pos, boolean value) throws SQLException {
        setInt(pos, value ? 1 : 0);
    }

    public void setByte(int pos, byte value) throws SQLException {
        setInt(pos, value);
    }

    public void setBytes(int pos, byte[] value) throws SQLException {
        batch(pos, value);
    }

    // valueOf instead of the deprecated boxing constructors (identical behavior)
    public void setDouble(int pos, double value) throws SQLException {
        batch(pos, Double.valueOf(value));
    }

    public void setFloat(int pos, float value) throws SQLException {
        batch(pos, Float.valueOf(value));
    }

    public void setInt(int pos, int value) throws SQLException {
        batch(pos, Integer.valueOf(value));
    }

    public void setLong(int pos, long value) throws SQLException {
        batch(pos, Long.valueOf(value));
    }

    public void setNull(int pos, int u1) throws SQLException {
        setNull(pos, u1, null);
    }

    public void setNull(int pos, int u1, String u2) throws SQLException {
        batch(pos, null);
    }

    /**
     * Binds an arbitrary object, mapping temporal types to epoch millis,
     * numeric wrappers to themselves, and anything unrecognized to its
     * toString() value.
     */
    public void setObject(int pos, Object value) throws SQLException {
        if (value == null)
            batch(pos, null);
        else if (value instanceof java.util.Date)
            batch(pos, Long.valueOf(((java.util.Date) value).getTime()));
        else if (value instanceof Date)
            batch(pos, Long.valueOf(((Date) value).getTime()));
        else if (value instanceof Time)
            batch(pos, Long.valueOf(((Time) value).getTime()));
        else if (value instanceof Timestamp)
            batch(pos, Long.valueOf(((Timestamp) value).getTime()));
        else if (value instanceof Long)
            batch(pos, value);
        else if (value instanceof Integer)
            batch(pos, value);
        else if (value instanceof Short)
            batch(pos, Integer.valueOf(((Short) value).intValue()));
        else if (value instanceof Float)
            batch(pos, value);
        else if (value instanceof Double)
            batch(pos, value);
        else if (value instanceof Boolean)
            setBoolean(pos, ((Boolean) value).booleanValue());
        else if (value instanceof byte[])
            batch(pos, value);
        else if (value instanceof BigDecimal)
            setBigDecimal(pos, (BigDecimal) value);
        else
            batch(pos, value.toString());
    }

    public void setObject(int p, Object v, int t) throws SQLException {
        setObject(p, v);
    }

    public void setObject(int p, Object v, int t, int s) throws SQLException {
        setObject(p, v);
    }

    public void setShort(int pos, short value) throws SQLException {
        setInt(pos, value);
    }

    public void setString(int pos, String value) throws SQLException {
        batch(pos, value);
    }

    /**
     * Drains the reader into a String and binds it. The resulting
     * SQLException preserves the underlying IOException as its cause.
     */
    public void setCharacterStream(int pos, Reader reader, int length) throws SQLException {
        try {
            // copy chars from reader into a buffer
            StringBuilder sb = new StringBuilder();
            char[] cbuf = new char[8192];
            int cnt;

            while ((cnt = reader.read(cbuf)) > 0) {
                sb.append(cbuf, 0, cnt);
            }

            // set as string
            setString(pos, sb.toString());
        } catch (IOException e) {
            SQLException se = new SQLException(
                "Cannot read from character stream, exception message: " + e.getMessage());
            se.initCause(e);
            throw se;
        }
    }

    // Temporal setters delegate to setObject, which stores epoch millis;
    // the Calendar variants ignore the calendar (no timezone adjustment).
    public void setDate(int pos, Date x) throws SQLException {
        setObject(pos, x);
    }

    public void setDate(int pos, Date x, Calendar cal) throws SQLException {
        setObject(pos, x);
    }

    public void setTime(int pos, Time x) throws SQLException {
        setObject(pos, x);
    }

    public void setTime(int pos, Time x, Calendar cal) throws SQLException {
        setObject(pos, x);
    }

    public void setTimestamp(int pos, Timestamp x) throws SQLException {
        setObject(pos, x);
    }

    public void setTimestamp(int pos, Timestamp x, Calendar cal) throws SQLException {
        setObject(pos, x);
    }

    public ResultSetMetaData getMetaData() throws SQLException {
        checkOpen();
        return rs;
    }


    // UNUSED ///////////////////////////////////////////////////////

    @Override
    public boolean execute(String sql) throws SQLException {
        throw unused();
    }

    @Override
    public int executeUpdate(String sql) throws SQLException {
        throw unused();
    }

    @Override
    public ResultSet executeQuery(String sql) throws SQLException {
        throw unused();
    }

    @Override
    public void addBatch(String sql) throws SQLException {
        throw unused();
    }

    private SQLException unused() {
        return new SQLException("not supported by PreparedStatment");
    }
}
/*
 * Copyright 2002-2021 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.web.socket.sockjs.client;

import java.net.URI;
import java.security.Principal;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import org.springframework.http.HttpHeaders;
import org.springframework.lang.Nullable;
import org.springframework.scheduling.TaskScheduler;
import org.springframework.util.Assert;
import org.springframework.util.concurrent.ListenableFutureCallback;
import org.springframework.util.concurrent.SettableListenableFuture;
import org.springframework.web.socket.WebSocketHandler;
import org.springframework.web.socket.WebSocketSession;
import org.springframework.web.socket.sockjs.SockJsTransportFailureException;
import org.springframework.web.socket.sockjs.frame.SockJsMessageCodec;
import org.springframework.web.socket.sockjs.transport.TransportType;

/**
 * A default implementation of {@link TransportRequest}.
 *
 * @author Rossen Stoyanchev
 * @since 4.1
 */
class DefaultTransportRequest implements TransportRequest {

	private static final Log logger = LogFactory.getLog(DefaultTransportRequest.class);


	private final SockJsUrlInfo sockJsUrlInfo;

	private final HttpHeaders handshakeHeaders;

	private final HttpHeaders httpRequestHeaders;

	private final Transport transport;

	private final TransportType serverTransportType;

	private final SockJsMessageCodec codec;

	@Nullable
	private Principal user;

	private long timeoutValue;

	@Nullable
	private TaskScheduler timeoutScheduler;

	// Tasks to run if the connect attempt times out; see ConnectCallback.handleFailure
	private final List<Runnable> timeoutTasks = new ArrayList<>();

	// Next transport to try if this one fails; null means this is the last option
	@Nullable
	private DefaultTransportRequest fallbackRequest;


	public DefaultTransportRequest(SockJsUrlInfo sockJsUrlInfo,
			@Nullable HttpHeaders handshakeHeaders, @Nullable HttpHeaders httpRequestHeaders,
			Transport transport, TransportType serverTransportType, SockJsMessageCodec codec) {

		Assert.notNull(sockJsUrlInfo, "SockJsUrlInfo is required");
		Assert.notNull(transport, "Transport is required");
		Assert.notNull(serverTransportType, "TransportType is required");
		Assert.notNull(codec, "SockJsMessageCodec is required");
		this.sockJsUrlInfo = sockJsUrlInfo;
		this.handshakeHeaders = (handshakeHeaders != null ? handshakeHeaders : new HttpHeaders());
		this.httpRequestHeaders = (httpRequestHeaders != null ? httpRequestHeaders : new HttpHeaders());
		this.transport = transport;
		this.serverTransportType = serverTransportType;
		this.codec = codec;
	}


	@Override
	public SockJsUrlInfo getSockJsUrlInfo() {
		return this.sockJsUrlInfo;
	}

	@Override
	public HttpHeaders getHandshakeHeaders() {
		return this.handshakeHeaders;
	}

	@Override
	public HttpHeaders getHttpRequestHeaders() {
		return this.httpRequestHeaders;
	}

	@Override
	public URI getTransportUrl() {
		return this.sockJsUrlInfo.getTransportUrl(this.serverTransportType);
	}

	public void setUser(Principal user) {
		this.user = user;
	}

	@Override
	@Nullable
	public Principal getUser() {
		return this.user;
	}

	@Override
	public SockJsMessageCodec getMessageCodec() {
		return this.codec;
	}

	public void setTimeoutValue(long timeoutValue) {
		this.timeoutValue = timeoutValue;
	}

	public void setTimeoutScheduler(TaskScheduler scheduler) {
		this.timeoutScheduler = scheduler;
	}

	@Override
	public void addTimeoutTask(Runnable runnable) {
		this.timeoutTasks.add(runnable);
	}

	public void setFallbackRequest(DefaultTransportRequest fallbackRequest) {
		this.fallbackRequest = fallbackRequest;
	}

	/**
	 * Initiates the connect attempt: schedules the timeout task (if a scheduler
	 * is configured) and registers a ConnectCallback on the transport's future.
	 */
	public void connect(WebSocketHandler handler, SettableListenableFuture<WebSocketSession> future) {
		if (logger.isTraceEnabled()) {
			logger.trace("Starting " + this);
		}
		ConnectCallback connectCallback = new ConnectCallback(handler, future);
		scheduleConnectTimeoutTask(connectCallback);
		this.transport.connect(this, handler).addCallback(connectCallback);
	}

	// The ConnectCallback doubles as the timeout Runnable; its handled flag
	// ensures that success, failure and timeout are mutually exclusive.
	private void scheduleConnectTimeoutTask(ConnectCallback connectHandler) {
		if (this.timeoutScheduler != null) {
			if (logger.isTraceEnabled()) {
				logger.trace("Scheduling connect to time out after " + this.timeoutValue + " ms.");
			}
			Date timeoutDate = new Date(System.currentTimeMillis() + this.timeoutValue);
			this.timeoutScheduler.schedule(connectHandler, timeoutDate);
		}
		else if (logger.isTraceEnabled()) {
			logger.trace("Connect timeout task not scheduled (no TaskScheduler configured).");
		}
	}


	@Override
	public String toString() {
		return "TransportRequest[url=" + getTransportUrl() + "]";
	}


	/**
	 * Updates the given (global) future based success or failure to connect for
	 * the entire SockJS request regardless of which transport actually managed
	 * to connect. Also implements {@code Runnable} to handle a scheduled timeout
	 * callback.
	 */
	private class ConnectCallback implements ListenableFutureCallback<WebSocketSession>, Runnable {

		private final WebSocketHandler handler;

		private final SettableListenableFuture<WebSocketSession> future;

		// Guarantees the outcome (success/failure/timeout) is processed at most once
		private final AtomicBoolean handled = new AtomicBoolean();

		public ConnectCallback(WebSocketHandler handler, SettableListenableFuture<WebSocketSession> future) {
			this.handler = handler;
			this.future = future;
		}

		@Override
		public void onSuccess(@Nullable WebSocketSession session) {
			if (this.handled.compareAndSet(false, true)) {
				this.future.set(session);
			}
			else if (logger.isErrorEnabled()) {
				logger.error("Connect success/failure already handled for " + DefaultTransportRequest.this);
			}
		}

		@Override
		public void onFailure(Throwable ex) {
			handleFailure(ex, false);
		}

		// Invoked by the TaskScheduler when the connect timeout elapses
		@Override
		public void run() {
			handleFailure(null, true);
		}

		// On timeout, wraps the failure in a SockJsTransportFailureException;
		// then either retries via the fallback request or fails the global future.
		private void handleFailure(@Nullable Throwable ex, boolean isTimeoutFailure) {
			if (this.handled.compareAndSet(false, true)) {
				if (isTimeoutFailure) {
					String message = "Connect timed out for " + DefaultTransportRequest.this;
					logger.error(message);
					ex = new SockJsTransportFailureException(message, getSockJsUrlInfo().getSessionId(), ex);
				}
				if (fallbackRequest != null) {
					logger.error(DefaultTransportRequest.this + " failed. Falling back on next transport.", ex);
					fallbackRequest.connect(this.handler, this.future);
				}
				else {
					logger.error("No more fallback transports after " + DefaultTransportRequest.this, ex);
					if (ex != null) {
						this.future.setException(ex);
					}
				}
				if (isTimeoutFailure) {
					try {
						for (Runnable runnable : timeoutTasks) {
							runnable.run();
						}
					}
					catch (Throwable ex2) {
						logger.error("Transport failed to run timeout tasks for " + DefaultTransportRequest.this, ex2);
					}
				}
			}
			else {
				logger.error("Connect success/failure events already took place for " +
						DefaultTransportRequest.this + ". Ignoring this additional failure event.", ex);
			}
		}
	}

}
/*
 * Copyright 2015 NEC Corporation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.o3project.odenos.remoteobject.manager.component;

import org.o3project.odenos.core.component.Component;
import org.o3project.odenos.core.component.SystemManagerInterface;
import org.o3project.odenos.core.manager.system.event.ComponentManagerChanged;
import org.o3project.odenos.remoteobject.ObjectProperty;
import org.o3project.odenos.remoteobject.RemoteObject;
import org.o3project.odenos.remoteobject.RequestParser;
import org.o3project.odenos.remoteobject.manager.ObjectPropertyList;
import org.o3project.odenos.remoteobject.manager.component.event.ComponentChanged;
import org.o3project.odenos.remoteobject.message.Event;
import org.o3project.odenos.remoteobject.message.MessageBodyUnpacker.ParseBodyException;
import org.o3project.odenos.remoteobject.message.Request;
import org.o3project.odenos.remoteobject.message.Response;
import org.o3project.odenos.remoteobject.messagingclient.MessageDispatcher;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.lang.reflect.Constructor;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;

/**
 * ComponentManager class manages components life-cycle.
 *
 * <p>It registers itself with the system manager, keeps a registry of
 * component classes (by simple class name) and of live component instances
 * (by object ID), and serves REST-style requests to list, create, inspect,
 * and delete components. It also tracks sibling component managers via
 * {@link ComponentManagerChanged} events.
 */
public class ComponentManager extends RemoteObject {
  private static final Logger log = LoggerFactory.getLogger(ComponentManager.class);

  // Interface to the system manager for registration and peer discovery.
  protected SystemManagerInterface sysMngIf;
  // Registered component types, keyed by the class's simple name.
  protected Map<String, Class<? extends Component>> componentClasses;
  // Live component instances, keyed by object ID.
  protected Map<String, Component> components;
  // Dispatch table mapping (method, path pattern) to a handler callback.
  private final RequestParser<IActionCallback> parser;

  // Path-parameter name used in "components/<comp_id>" routes.
  private static final String COMP_ID = "comp_id";

  /**
   * Constructor.
   * @param objectId object ID.
   * @param baseUri base URI.
   * @param dispatcher Message Dispatcher object.
   * @deprecated {@link #ComponentManager(String, MessageDispatcher)}
   */
  @Deprecated
  public ComponentManager(String objectId, String baseUri,
      MessageDispatcher dispatcher) {
    // baseUri is ignored; delegate to the supported constructor.
    this(objectId, dispatcher);
  }

  /**
   * Constructor.
   * @param objectId object ID.
   * @param dispatcher Message Dispatcher object.
   */
  public ComponentManager(String objectId, MessageDispatcher dispatcher) {
    super(objectId, dispatcher);
    componentClasses = new HashMap<String, Class<? extends Component>>();
    components = new HashMap<String, Component>();
    // The manager is immediately considered RUNNING once constructed.
    this.getProperty().setObjectState(ObjectProperty.State.RUNNING);
    this.sysMngIf = new SystemManagerInterface(dispatcher, objectId);
    parser = this.createParser();
  }

  /**
   * Register to system manager.
   * Registers this manager, hooks up the event manager, subscribes to
   * ComponentManagerChanged events, and registers all peer managers.
   * @throws Exception if an error occurs.
   */
  public void registerToSystemManager() throws Exception {
    Response rsp = this.sysMngIf.putComponentMng(objectProperty);
    if (!rsp.statusCode.equals(Response.OK)) {
      throw new Exception("failed 'PUT component_managers'");
    }
    this.registerEventManager();
    this.subscribeEvents();
    this.registerComponentManagers();
    return;
  }

  /**
   * Looks up the event manager via the system manager and registers it as a
   * remote object.
   * @throws Exception if lookup or registration fails.
   */
  private void registerEventManager() throws Exception {
    String eventManagerId = this.getEventManagerId();
    ObjectProperty eventMngObj = this.sysMngIf.getObject(eventManagerId);
    if (eventMngObj == null) {
      log.error("Internal Error to Get objects/" + eventManagerId);
      // NOTE(review): message-less Exception loses context for callers — consider
      // new Exception("...") with a description; verify callers don't depend on this.
      throw new Exception();
    }

    try {
      this.addRemoteObject(eventManagerId);
    } catch (IOException e) {
      log.error("Failure to addRemoteObject");
      // NOTE(review): cause 'e' is dropped here; consider chaining it.
      throw new Exception();
    }
    return;
  }

  /**
   * Subscribes to ComponentManagerChanged events published by the system manager.
   * @throws Exception if the subscription is not acknowledged with OK.
   */
  private void subscribeEvents() throws Exception {
    this.eventSubscription.addFilter(
        this.getSystemManagerId(), ComponentManagerChanged.TYPE);
    Response rsp = this.applyEventSubscription();
    if (!rsp.statusCode.equals(Response.OK)) {
      log.error("Can't subscribe the ComponentManagerChanged.");
      throw new Exception();
    }
  }

  /**
   * Fetches the current list of component managers from the system manager and
   * registers each peer (excluding this manager itself).
   * @throws Exception if the list cannot be fetched or a registration fails.
   */
  private void registerComponentManagers() throws Exception {
    ObjectPropertyList componentManagers = this.sysMngIf.getComponentMngs();
    if (componentManagers == null) {
      log.error("Internal Error to Get component_managers.");
      throw new Exception();
    }
    for (ObjectProperty componentManager : componentManagers) {
      this.registerOtherComponentManager(componentManager);
    }
    return;
  }

  /**
   * Registers one peer component manager as a remote object; self is skipped.
   * @param componentManager peer manager's property.
   * @throws Exception if registration fails.
   */
  private void registerOtherComponentManager(ObjectProperty componentManager)
      throws Exception {
    if (this.getObjectId().equals(componentManager.getObjectId())) {
      // Never register ourselves as a peer.
      return;
    }
    try {
      this.addRemoteObject(componentManager.getObjectId());
    } catch (IOException e) {
      log.error("Failure to addRemoteObject");
      // NOTE(review): cause 'e' is dropped here; consider chaining it.
      throw new Exception();
    }
    return;
  }

  /**
   * Unregisters a peer component manager's remote object.
   * @param objectId peer manager's object ID.
   * @throws Exception if removal fails.
   */
  private void unregisterComponentManager(String objectId) throws Exception {
    try {
      this.removeRemoteObject(objectId);
    } catch (IOException e) {
      log.error("Failure to unregisterComponentManager");
      throw new Exception();
    }
    return;
  }

  /**
   * Register a type of component.
   * Records the class under its simple name and appends the type to the
   * comma-separated COMPONENT_TYPES property. Duplicate registrations are ignored.
   * @param component class of component.
   */
  public void registerComponentType(Class<? extends Component> component) {
    String componentType = component.getSimpleName();
    if (!componentClasses.containsKey(componentType)) {
      componentClasses.put(componentType, component);
      String componentTypes = this.objectProperty.getProperty(
          ObjectProperty.PropertyNames.COMPONENT_TYPES);
      if (componentTypes == null) {
        componentTypes = componentType;
      } else {
        componentTypes += "," + componentType;
      }
      this.objectProperty.setProperty(
          ObjectProperty.PropertyNames.COMPONENT_TYPES, componentTypes);
    }
  }

  /**
   * Dispatches an incoming request through the parser's rule table.
   * Unknown routes, handler exceptions, and null handler results all map to
   * BAD_REQUEST.
   */
  @Override
  protected Response onRequest(Request request) {
    log.debug("onRequest: " + request.method + ", " + request.path);
    RequestParser<IActionCallback>.ParsedRequest parsed = parser
        .parse(request);
    Response response = null;
    IActionCallback callback = parsed.getResult();
    if (callback == null) {
      return new Response(Response.BAD_REQUEST, null);
    }
    try {
      response = callback.process(parsed);
    } catch (Exception e) {
      log.error("Exception Request: " + request.method + ", " + request.path);
      response = new Response(Response.BAD_REQUEST, null);
    }
    if (response == null) {
      response = new Response(Response.BAD_REQUEST, null);
    }
    return response;
  }

  /**
   * Builds the routing table:
   * GET component_types, GET components, GET/PUT/DELETE components/&lt;comp_id&gt;.
   */
  private RequestParser<IActionCallback> createParser() {
    return new RequestParser<IActionCallback>() {
      {
        addRule(Request.Method.GET,
            "component_types",
            new IActionCallback() {
              @Override
              public Response process(
                  final RequestParser<IActionCallback>.ParsedRequest parsed) {
                return getComponentTypes();
              }
            });
        addRule(Request.Method.GET,
            "components",
            new IActionCallback() {
              @Override
              public Response process(
                  final RequestParser<IActionCallback>.ParsedRequest parsed) {
                return getComponents();
              }
            });
        addRule(Request.Method.GET,
            "components/<comp_id>",
            new IActionCallback() {
              @Override
              public Response process(
                  final RequestParser<IActionCallback>.ParsedRequest parsed) {
                return getComponentId(parsed.getParam(COMP_ID));
              }
            });
        addRule(Request.Method.PUT,
            "components/<comp_id>",
            new IActionCallback() {
              @Override
              public Response process(
                  final RequestParser<IActionCallback>.ParsedRequest parsed)
                  throws ParseBodyException {
                return putComponentId(
                    parsed.getParam(COMP_ID),
                    parsed.getRequest().getBody(ObjectProperty.class));
              }
            });
        addRule(Request.Method.DELETE,
            "components/<comp_id>",
            new IActionCallback() {
              @Override
              public Response process(
                  final RequestParser<IActionCallback>.ParsedRequest parsed) {
                return deleteComponentId(parsed
                    .getParam(COMP_ID));
              }
            });
      }
    };
  }

  /**
   * Lists metadata for every registered component type.
   * For each registered class a throwaway instance is created via reflection
   * (null dispatcher) solely to read its type/super-type/connection-types/
   * description properties. Classes with the newer (String, MessageDispatcher)
   * constructor are preferred; the legacy (String, String, MessageDispatcher)
   * constructor is the fallback.
   */
  protected Response getComponentTypes() {
    Map<String, ComponentType> compTypes = new HashMap<>();

    for (String cmType : componentClasses.keySet()) {
      Component component = null;
      String objectId = String.format("%s_%s", this.getObjectId(), cmType);
      try {
        Class<? extends Component> componentClass = componentClasses.get(cmType);
        Constructor<? extends Component> ct = null;
        try {
          ct = componentClass.getConstructor(String.class, MessageDispatcher.class);
          component = ct.newInstance(objectId, null);
        } catch (NoSuchMethodException e) {
          // Legacy constructor signature with an explicit base URI.
          ct = componentClass.getConstructor(String.class, String.class, MessageDispatcher.class);
          component = ct.newInstance(objectId, objectProperty.getBaseUri(), null);
        }

        ObjectProperty objProp = component.getProperty();
        String type = objProp.getProperty(
            ObjectProperty.PropertyNames.OBJECT_TYPE);
        String superType = objProp.getProperty(
            ObjectProperty.PropertyNames.OBJECT_SUPER_TYPE);

        // CONNECTION_TYPES is encoded as "name1:type1,name2:type2,...".
        Map<String, String> connectionTypes = new HashMap<>();
        String connectionTypesStr = objProp.getProperty(
            ObjectProperty.PropertyNames.CONNECTION_TYPES);
        String[] connStrList = connectionTypesStr.split(",");
        for (String connTypeElem : connStrList) {
          String[] connTypeElemList = connTypeElem.split(":");
          if (connTypeElemList.length == 2) {
            connectionTypes.put(connTypeElemList[0],
                connTypeElemList[1]);
          }
        }

        String description = objProp.getProperty(
            ObjectProperty.PropertyNames.DESCRIPTION);

        compTypes.put(type,
            new ComponentType(type, superType, connectionTypes, description));

      } catch (Exception e) {
        return new Response(Response.INTERNAL_SERVER_ERROR, e.getMessage());
      }
    }

    return new Response(Response.OK, compTypes);
  }

  /** Returns the properties of all live components, keyed by object ID. */
  protected Response getComponents() {
    Map<String, ObjectProperty> componentsObj = new HashMap<String, ObjectProperty>();

    for (Entry<String, Component> e : components.entrySet()) {
      componentsObj.put(e.getKey(), e.getValue().getProperty());
    }

    return new Response(Response.OK, componentsObj);
  }

  /** Returns the property of a single component, or NOT_FOUND. */
  protected Response getComponentId(String objectId) {
    Component component = components.get(objectId);
    if (component == null) {
      return new Response(Response.NOT_FOUND, null);
    }

    return new Response(Response.OK, component.getProperty());
  }

  /**
   * Creates a component instance of the requested type under the given ID.
   * Validates body presence, known type, and ID uniqueness; instantiates via
   * reflection (preferring the two-arg constructor); sets state to RUNNING or
   * ERROR based on onInitialize(); then publishes a ComponentChanged "add" event.
   * @return CREATED with the component's property on success; BAD_REQUEST,
   *         CONFLICT, or INTERNAL_SERVER_ERROR otherwise.
   */
  protected Response putComponentId(
      String objectId, ObjectProperty prop) {
    if (prop == null) {
      return new Response(Response.BAD_REQUEST,
          "Bad format: ObjectProperty is expected");
    }
    if (!componentClasses.containsKey(prop.getObjectType())) {
      return new Response(Response.BAD_REQUEST,
          "Error unknown type ");
    }
    if (components.containsKey(objectId)) {
      return new Response(Response.CONFLICT,
          "Component is already created");
    }
    prop.setProperty(
        ObjectProperty.PropertyNames.OBJECT_ID, objectId);

    Component component = null;
    try {
      Class<? extends Component> componentClass = componentClasses
          .get(prop.getObjectType());
      Constructor<? extends Component> ct = null;
      try {
        ct = componentClass.getConstructor(String.class, MessageDispatcher.class);
        component = ct.newInstance(objectId, messageDispatcher);
      } catch (NoSuchMethodException e) {
        // Legacy constructor signature with an explicit base URI.
        ct = componentClass.getConstructor(String.class, String.class, MessageDispatcher.class);
        component = ct.newInstance(objectId, objectProperty.getBaseUri(), messageDispatcher);
      }
      components.put(objectId, component);

      if (component.onInitialize(prop)) {
        component.setState(ObjectProperty.State.RUNNING);
      } else {
        component.setState(ObjectProperty.State.ERROR);
      }
    } catch (Exception e) {
      return new Response(Response.INTERNAL_SERVER_ERROR, e.getMessage());
    }

    componentChanged(ComponentChanged.Action.add.name(), null, prop);
    return new Response(Response.CREATED, component.getProperty());
  }

  /**
   * Finalizes and removes a component, publishing a ComponentChanged "delete"
   * event with the pre-deletion property. Deleting an unknown ID is a no-op OK.
   */
  protected Response deleteComponentId(String objectId) {
    if (components.containsKey(objectId)) {
      Component component = components.get(objectId);
      // Clone the property before finalization so the event carries the old state.
      ObjectProperty prev = (ObjectProperty) component
          .getProperty().clone();
      component.onFinalize();
      components.remove(objectId);
      componentChanged(ComponentChanged.Action.delete.name(), prev, null);
    }
    return new Response(Response.OK, null);
  }

  /**
   * Publishes a ComponentChanged event; publish failures are logged, not thrown.
   * @param action add/delete action name.
   * @param prev property before the change (null for add).
   * @param curr property after the change (null for delete).
   */
  protected final void componentChanged(final String action,
      final ObjectProperty prev,
      final ObjectProperty curr) {
    ComponentChanged msg = new ComponentChanged(action, prev, curr);
    try {
      publishEvent(ComponentChanged.TYPE, msg);
    } catch (Exception e) {
      log.error("Failed to ComponentChanged");
    }
  }

  /**
   * Reacts to ComponentManagerChanged events by (un)registering peer managers.
   * Other event types are ignored; handler errors are logged, not rethrown.
   */
  @Override
  protected void onEvent(Event event) {
    if (event.eventType.equals(
        ComponentManagerChanged.TYPE)) {
      try {
        ComponentManagerChanged prop = event
            .getBody(ComponentManagerChanged.class);
        if (prop.action().equals(
            ComponentManagerChanged.Action.add.name())) {
          this.registerOtherComponentManager(prop.curr());
        } else if (prop.action().equals(
            ComponentManagerChanged.Action.delete.name())) {
          this.unregisterComponentManager(prop.prev().getObjectId());
        }
      } catch (Exception e) {
        log.error("Can't register ComponentManager.", e);
      }
    }
  }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.translog;

import joptsimple.OptionParser;
import joptsimple.OptionSet;
import joptsimple.OptionSpec;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexCommit;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.Lock;
import org.apache.lucene.store.LockObtainFailedException;
import org.apache.lucene.store.NativeFSLockFactory;
import org.apache.lucene.store.OutputStreamDataOutput;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.cli.EnvironmentAwareCommand;
import org.elasticsearch.cli.Terminal;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.index.seqno.SequenceNumbers;

import java.io.IOException;
import java.nio.channels.Channels;
import java.nio.channels.FileChannel;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.nio.file.StandardOpenOption;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * CLI command that replaces a shard's translog with a new, empty one.
 *
 * <p>This is a destructive last-resort recovery tool: any operations still in
 * the existing translog files are lost. The new translog reuses the generation
 * and UUID recorded in the shard's last Lucene commit so the shard can start.
 */
public class TruncateTranslogCommand extends EnvironmentAwareCommand {

    // -d / --dir : path to the shard's translog directory (required).
    private final OptionSpec<String> translogFolder;
    // -b / --batch : skip the interactive deletion confirmation.
    private final OptionSpec<Void> batchMode;

    public TruncateTranslogCommand() {
        super("Truncates a translog to create a new, empty translog");
        this.translogFolder = parser.acceptsAll(Arrays.asList("d", "dir"),
            "Translog Directory location on disk")
            .withRequiredArg()
            .required();
        this.batchMode = parser.acceptsAll(Arrays.asList("b", "batch"),
            "Enable batch mode explicitly, automatic confirmation of warnings");
    }

    // Visible for testing
    public OptionParser getParser() {
        return this.parser;
    }

    @Override
    protected void printAdditionalHelp(Terminal terminal) {
        terminal.println("This tool truncates the translog and translog");
        terminal.println("checkpoint files to create a new translog");
    }

    /** Resolves the --dir option value into a Path. */
    @SuppressForbidden(reason = "Necessary to use the path passed in")
    private Path getTranslogPath(OptionSet options) {
        return PathUtils.get(translogFolder.value(options), "", "");
    }

    /**
     * Main entry point. Order matters:
     * validate paths → take the shard's write lock (proves Elasticsearch is not
     * running) → warn the user → read generation/UUID from the last Lucene
     * commit → write new empty translog+checkpoint to temp files → delete the
     * old files → atomically move the new files into place → fsync the directory.
     * @throws ElasticsearchException on any validation, lock, or I/O failure.
     */
    @Override
    protected void execute(Terminal terminal, OptionSet options, Environment env) throws Exception {
        boolean batch = options.has(batchMode);

        Path translogPath = getTranslogPath(options);
        // The shard's Lucene index is expected as a sibling "index" directory.
        Path idxLocation = translogPath.getParent().resolve("index");

        if (Files.exists(translogPath) == false || Files.isDirectory(translogPath) == false) {
            throw new ElasticsearchException("translog directory [" + translogPath + "], must exist and be a directory");
        }

        if (Files.exists(idxLocation) == false || Files.isDirectory(idxLocation) == false) {
            throw new ElasticsearchException("unable to find a shard at [" + idxLocation + "], which must exist and be a directory");
        }

        // Hold the lock open for the duration of the tool running
        try (Directory dir = FSDirectory.open(idxLocation, NativeFSLockFactory.INSTANCE);
             Lock writeLock = dir.obtainLock(IndexWriter.WRITE_LOCK_NAME)) {
            Set<Path> translogFiles;
            try {
                terminal.println("Checking existing translog files");
                translogFiles = filesInDirectory(translogPath);
            } catch (IOException e) {
                terminal.println("encountered IOException while listing directory, aborting...");
                throw new ElasticsearchException("failed to find existing translog files", e);
            }

            // Warn about ES being stopped and files being deleted
            warnAboutDeletingFiles(terminal, translogFiles, batch);

            List<IndexCommit> commits;
            try {
                terminal.println("Reading translog UUID information from Lucene commit from shard at [" + idxLocation + "]");
                commits = DirectoryReader.listCommits(dir);
            } catch (IndexNotFoundException infe) {
                throw new ElasticsearchException("unable to find a valid shard at [" + idxLocation + "]", infe);
            }

            // Retrieve the generation and UUID from the existing data
            // (the last commit is the most recent one).
            Map<String, String> commitData = commits.get(commits.size() - 1).getUserData();
            String translogGeneration = commitData.get(Translog.TRANSLOG_GENERATION_KEY);
            String translogUUID = commitData.get(Translog.TRANSLOG_UUID_KEY);
            if (translogGeneration == null || translogUUID == null) {
                throw new ElasticsearchException("shard must have a valid translog generation and UUID but got: [{}] and: [{}]",
                    translogGeneration, translogUUID);
            }
            terminal.println("Translog Generation: " + translogGeneration);
            terminal.println("Translog UUID : " + translogUUID);

            // Write to temp files first so the real files are only ever replaced atomically.
            Path tempEmptyCheckpoint = translogPath.resolve("temp-" + Translog.CHECKPOINT_FILE_NAME);
            Path realEmptyCheckpoint = translogPath.resolve(Translog.CHECKPOINT_FILE_NAME);
            Path tempEmptyTranslog = translogPath.resolve("temp-" + Translog.TRANSLOG_FILE_PREFIX +
                translogGeneration + Translog.TRANSLOG_FILE_SUFFIX);
            Path realEmptyTranslog = translogPath.resolve(Translog.TRANSLOG_FILE_PREFIX +
                translogGeneration + Translog.TRANSLOG_FILE_SUFFIX);

            // Write empty checkpoint and translog to empty files
            long gen = Long.parseLong(translogGeneration);
            int translogLen = writeEmptyTranslog(tempEmptyTranslog, translogUUID);
            writeEmptyCheckpoint(tempEmptyCheckpoint, translogLen, gen);

            terminal.println("Removing existing translog files");
            IOUtils.rm(translogFiles.toArray(new Path[]{}));

            terminal.println("Creating new empty checkpoint at [" + realEmptyCheckpoint + "]");
            Files.move(tempEmptyCheckpoint, realEmptyCheckpoint, StandardCopyOption.ATOMIC_MOVE);
            terminal.println("Creating new empty translog at [" + realEmptyTranslog + "]");
            Files.move(tempEmptyTranslog, realEmptyTranslog, StandardCopyOption.ATOMIC_MOVE);

            // Fsync the translog directory after rename
            IOUtils.fsync(translogPath, true);

        } catch (LockObtainFailedException lofe) {
            // Lock held means a live node (or another tool) owns the shard.
            // NOTE(review): 'lofe' is not chained as the cause — consider passing it through.
            throw new ElasticsearchException("Failed to lock shard's directory at [" + idxLocation + "], is Elasticsearch still running?");
        }

        terminal.println("Done.");
    }

    /** Write a checkpoint file to the given location with the given generation */
    public static void writeEmptyCheckpoint(Path filename, int translogLength, long translogGeneration) throws IOException {
        Checkpoint emptyCheckpoint = Checkpoint.emptyTranslogCheckpoint(translogLength, translogGeneration,
            SequenceNumbers.UNASSIGNED_SEQ_NO, translogGeneration);
        // CREATE_NEW ensures we never clobber an existing checkpoint at this path.
        Checkpoint.write(FileChannel::open, filename, emptyCheckpoint,
            StandardOpenOption.WRITE, StandardOpenOption.READ, StandardOpenOption.CREATE_NEW);
        // fsync with metadata here to make sure.
        IOUtils.fsync(filename, false);
    }

    /**
     * Write a translog containing the given translog UUID to the given location. Returns the number of bytes written.
     */
    public static int writeEmptyTranslog(Path filename, String translogUUID) throws IOException {
        final BytesRef translogRef = new BytesRef(translogUUID);
        try (FileChannel fc = FileChannel.open(filename, StandardOpenOption.WRITE, StandardOpenOption.READ, StandardOpenOption.CREATE_NEW);
             OutputStreamDataOutput out = new OutputStreamDataOutput(Channels.newOutputStream(fc))) {
            TranslogWriter.writeHeader(out, translogRef);
            // Force the header to disk before the file is moved into place.
            fc.force(true);
        }
        return TranslogWriter.getHeaderLength(translogRef.length);
    }

    /** Show a warning about deleting files, asking for a confirmation if {@code batchMode} is false */
    public static void warnAboutDeletingFiles(Terminal terminal, Set<Path> files, boolean batchMode) {
        terminal.println("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!");
        terminal.println("! WARNING: Elasticsearch MUST be stopped before running this tool !");
        terminal.println("! !");
        terminal.println("! WARNING: Documents inside of translog files will be lost !");
        terminal.println("! !");
        terminal.println("! WARNING: The following files will be DELETED! !");
        terminal.println("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!");
        for (Path file : files) {
            terminal.println("--> " + file);
        }
        terminal.println("");
        if (batchMode == false) {
            // Anything other than "y"/"Y" aborts the whole operation.
            String text = terminal.readText("Continue and DELETE files? [y/N] ");
            if (!text.equalsIgnoreCase("y")) {
                throw new ElasticsearchException("aborted by user");
            }
        }
    }

    /** Return a Set of all files in a given directory */
    public static Set<Path> filesInDirectory(Path directory) throws IOException {
        Set<Path> files = new HashSet<>();
        try (DirectoryStream<Path> stream = Files.newDirectoryStream(directory)) {
            for (Path file : stream) {
                files.add(file);
            }
        }
        return files;
    }
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.compute.model; /** * [Input Only] Specifies the parameters for a new disk that will be created alongside the new * instance. Use initialization parameters to create boot disks or local SSDs attached to the new * instance. This property is mutually exclusive with the source property; you can only define one * or the other, but not both. * * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is * transmitted over HTTP when working with the Compute Engine API. For a detailed explanation see: * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a> * </p> * * @author Google, Inc. */ @SuppressWarnings("javadoc") public final class AttachedDiskInitializeParams extends com.google.api.client.json.GenericJson { /** * An optional description. Provide this property when creating the disk. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String description; /** * Specifies the disk name. If not specified, the default is to use the name of the instance. 
If a * disk with the same name already exists in the given region, the existing disk is attached to * the new instance and the new disk is not created. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String diskName; /** * Specifies the size of the disk in base-2 GB. The size must be at least 10 GB. If you specify a * sourceImage, which is required for boot disks, the default size is the size of the sourceImage. * If you do not specify a sourceImage, the default disk size is 500 GB. * The value may be {@code null}. */ @com.google.api.client.util.Key @com.google.api.client.json.JsonString private java.lang.Long diskSizeGb; /** * Specifies the disk type to use to create the instance. If not specified, the default is pd- * standard, specified using the full URL. For example: * https://www.googleapis.com/compute/v1/projects/project/zones/zone /diskTypes/pd-standard For a * full list of acceptable values, see Persistent disk types. If you define this field, you can * provide either the full or partial URL. For example, the following are valid values: - * https://www.googleapis.com/compute/v1/projects/project/zones/zone /diskTypes/diskType - * projects/project/zones/zone/diskTypes/diskType - zones/zone/diskTypes/diskType Note that for * InstanceTemplate, this is the name of the disk type, not URL. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String diskType; /** * A list of features to enable on the guest operating system. Applicable only for bootable * images. Read Enabling guest operating system features to see a list of available options. Guest * OS features are applied by merging initializeParams.guestOsFeatures and disks.guestOsFeatures * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<GuestOsFeature> guestOsFeatures; /** * Labels to apply to this disk. These can be later modified by the disks.setLabels method. 
This * field is only applicable for persistent disks. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.Map<String, java.lang.String> labels; /** * A list of publicly visible licenses. Reserved for Google's use. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<java.lang.String> licenses; /** * Indicates whether or not the disk can be read/write attached to more than one instance. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean multiWriter; /** * Specifies which action to take on instance update with this disk. Default is to use the * existing disk. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String onUpdateAction; /** * Indicates how many IOPS to provision for the disk. This sets the number of I/O operations per * second that the disk can handle. Values must be between 10,000 and 120,000. For more details, * see the Extreme persistent disk documentation. * The value may be {@code null}. */ @com.google.api.client.util.Key @com.google.api.client.json.JsonString private java.lang.Long provisionedIops; /** * Resource policies applied to this disk for automatic snapshot creations. Specified using the * full or partial URL. For instance template, specify only the resource policy name. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<java.lang.String> resourcePolicies; /** * The source image to create this disk. When creating a new instance, one of * initializeParams.sourceImage or initializeParams.sourceSnapshot or disks.source is required * except for local SSD. To create a disk with one of the public operating system images, specify * the image by its family name. 
For example, specify family/debian-9 to use the latest Debian 9 image:
 * projects/debian-cloud/global/images/family/debian-9. Alternatively, use a specific version of a
 * public operating system image: projects/debian-cloud/global/images/debian-9-stretch-vYYYYMMDD.
 * To use a custom image that you created, specify global/images/my-custom-image, or specify its
 * image family (which resolves to the latest version in that family):
 * global/images/family/my-image-family. If the source image is deleted later, this field will not
 * be set.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.String sourceImage;

/**
 * The customer-supplied encryption key of the source image. Required if the source image is
 * protected by a customer-supplied encryption key. Instance templates do not store
 * customer-supplied encryption keys, so you cannot create disks for instances in a managed
 * instance group if the source images are encrypted with your own keys.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private CustomerEncryptionKey sourceImageEncryptionKey;

/**
 * The source snapshot to create this disk. When creating a new instance, one of
 * initializeParams.sourceSnapshot, initializeParams.sourceImage or disks.source is required
 * except for local SSD. Specify the snapshot in the format global/snapshots/my-backup. If the
 * source snapshot is deleted later, this field will not be set.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.String sourceSnapshot;

/**
 * The customer-supplied encryption key of the source snapshot.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private CustomerEncryptionKey sourceSnapshotEncryptionKey;

/**
 * An optional description. Provide this property when creating the disk.
 * @return value or {@code null} for none
 */
public java.lang.String getDescription() {
  return description;
}

/**
 * An optional description. Provide this property when creating the disk.
 * @param description description or {@code null} for none
 */
public AttachedDiskInitializeParams setDescription(java.lang.String description) {
  this.description = description;
  return this;
}

/**
 * Specifies the disk name. If not specified, the default is to use the name of the instance. If
 * a disk with the same name already exists in the given region, the existing disk is attached to
 * the new instance and the new disk is not created.
 * @return value or {@code null} for none
 */
public java.lang.String getDiskName() {
  return diskName;
}

/**
 * Specifies the disk name. If not specified, the default is to use the name of the instance.
 * @param diskName diskName or {@code null} for none
 */
public AttachedDiskInitializeParams setDiskName(java.lang.String diskName) {
  this.diskName = diskName;
  return this;
}

/**
 * Specifies the size of the disk in base-2 GB. The size must be at least 10 GB. If you specify a
 * sourceImage, which is required for boot disks, the default size is the size of the
 * sourceImage. If you do not specify a sourceImage, the default disk size is 500 GB.
 * @return value or {@code null} for none
 */
public java.lang.Long getDiskSizeGb() {
  return diskSizeGb;
}

/**
 * Specifies the size of the disk in base-2 GB. The size must be at least 10 GB.
 * @param diskSizeGb diskSizeGb or {@code null} for none
 */
public AttachedDiskInitializeParams setDiskSizeGb(java.lang.Long diskSizeGb) {
  this.diskSizeGb = diskSizeGb;
  return this;
}

/**
 * Specifies the disk type used to create the instance (default: pd-standard). Accepts a full or
 * partial URL, e.g. projects/project/zones/zone/diskTypes/diskType or
 * zones/zone/diskTypes/diskType. Note that for InstanceTemplate, this is the name of the disk
 * type, not a URL.
 * @return value or {@code null} for none
 */
public java.lang.String getDiskType() {
  return diskType;
}

/**
 * Specifies the disk type used to create the instance (default: pd-standard). Accepts a full or
 * partial URL; for InstanceTemplate, this is the name of the disk type, not a URL.
 * @param diskType diskType or {@code null} for none
 */
public AttachedDiskInitializeParams setDiskType(java.lang.String diskType) {
  this.diskType = diskType;
  return this;
}

/**
 * A list of features to enable on the guest operating system. Applicable only for bootable
 * images. Guest OS features are applied by merging initializeParams.guestOsFeatures and
 * disks.guestOsFeatures.
 * @return value or {@code null} for none
 */
public java.util.List<GuestOsFeature> getGuestOsFeatures() {
  return guestOsFeatures;
}

/**
 * A list of features to enable on the guest operating system. Applicable only for bootable
 * images.
 * @param guestOsFeatures guestOsFeatures or {@code null} for none
 */
public AttachedDiskInitializeParams setGuestOsFeatures(java.util.List<GuestOsFeature> guestOsFeatures) {
  this.guestOsFeatures = guestOsFeatures;
  return this;
}

/**
 * Labels to apply to this disk. These can be later modified by the disks.setLabels method. Only
 * applicable for persistent disks.
 * @return value or {@code null} for none
 */
public java.util.Map<String, java.lang.String> getLabels() {
  return labels;
}

/**
 * Labels to apply to this disk. Only applicable for persistent disks.
 * @param labels labels or {@code null} for none
 */
public AttachedDiskInitializeParams setLabels(java.util.Map<String, java.lang.String> labels) {
  this.labels = labels;
  return this;
}

/**
 * A list of publicly visible licenses. Reserved for Google's use.
 * @return value or {@code null} for none
 */
public java.util.List<java.lang.String> getLicenses() {
  return licenses;
}

/**
 * A list of publicly visible licenses. Reserved for Google's use.
 * @param licenses licenses or {@code null} for none
 */
public AttachedDiskInitializeParams setLicenses(java.util.List<java.lang.String> licenses) {
  this.licenses = licenses;
  return this;
}

/**
 * Indicates whether or not the disk can be read/write attached to more than one instance.
 * @return value or {@code null} for none
 */
public java.lang.Boolean getMultiWriter() {
  return multiWriter;
}

/**
 * Indicates whether or not the disk can be read/write attached to more than one instance.
 * @param multiWriter multiWriter or {@code null} for none
 */
public AttachedDiskInitializeParams setMultiWriter(java.lang.Boolean multiWriter) {
  this.multiWriter = multiWriter;
  return this;
}

/**
 * Specifies which action to take on instance update with this disk. Default is to use the
 * existing disk.
 * @return value or {@code null} for none
 */
public java.lang.String getOnUpdateAction() {
  return onUpdateAction;
}

/**
 * Specifies which action to take on instance update with this disk. Default is to use the
 * existing disk.
 * @param onUpdateAction onUpdateAction or {@code null} for none
 */
public AttachedDiskInitializeParams setOnUpdateAction(java.lang.String onUpdateAction) {
  this.onUpdateAction = onUpdateAction;
  return this;
}

/**
 * Indicates how many IOPS to provision for the disk. Values must be between 10,000 and 120,000.
 * See the Extreme persistent disk documentation for details.
 * @return value or {@code null} for none
 */
public java.lang.Long getProvisionedIops() {
  return provisionedIops;
}

/**
 * Indicates how many IOPS to provision for the disk. Values must be between 10,000 and 120,000.
 * @param provisionedIops provisionedIops or {@code null} for none
 */
public AttachedDiskInitializeParams setProvisionedIops(java.lang.Long provisionedIops) {
  this.provisionedIops = provisionedIops;
  return this;
}

/**
 * Resource policies applied to this disk for automatic snapshot creations. Specified using the
 * full or partial URL. For instance template, specify only the resource policy name.
 * @return value or {@code null} for none
 */
public java.util.List<java.lang.String> getResourcePolicies() {
  return resourcePolicies;
}

/**
 * Resource policies applied to this disk for automatic snapshot creations.
 * @param resourcePolicies resourcePolicies or {@code null} for none
 */
public AttachedDiskInitializeParams setResourcePolicies(java.util.List<java.lang.String> resourcePolicies) {
  this.resourcePolicies = resourcePolicies;
  return this;
}

/**
 * The source image to create this disk. When creating a new instance, one of
 * initializeParams.sourceImage, initializeParams.sourceSnapshot or disks.source is required
 * except for local SSD. Accepts a public image family (e.g.
 * projects/debian-cloud/global/images/family/debian-9), a specific public image version, a
 * custom image (global/images/my-custom-image), or a custom image family
 * (global/images/family/my-image-family). If the source image is deleted later, this field will
 * not be set.
 * @return value or {@code null} for none
 */
public java.lang.String getSourceImage() {
  return sourceImage;
}

/**
 * The source image to create this disk. See {@link #getSourceImage()} for the accepted formats.
 * @param sourceImage sourceImage or {@code null} for none
 */
public AttachedDiskInitializeParams setSourceImage(java.lang.String sourceImage) {
  this.sourceImage = sourceImage;
  return this;
}

/**
 * The customer-supplied encryption key of the source image. Required if the source image is
 * protected by a customer-supplied encryption key. Instance templates do not store
 * customer-supplied encryption keys, so you cannot create disks for instances in a managed
 * instance group if the source images are encrypted with your own keys.
 * @return value or {@code null} for none
 */
public CustomerEncryptionKey getSourceImageEncryptionKey() {
  return sourceImageEncryptionKey;
}

/**
 * The customer-supplied encryption key of the source image.
 * @param sourceImageEncryptionKey sourceImageEncryptionKey or {@code null} for none
 */
public AttachedDiskInitializeParams setSourceImageEncryptionKey(CustomerEncryptionKey sourceImageEncryptionKey) {
  this.sourceImageEncryptionKey = sourceImageEncryptionKey;
  return this;
}

/**
 * The source snapshot to create this disk. When creating a new instance, one of
 * initializeParams.sourceSnapshot, initializeParams.sourceImage or disks.source is required
 * except for local SSD. Specify the snapshot as global/snapshots/my-backup. If the source
 * snapshot is deleted later, this field will not be set.
 * @return value or {@code null} for none
 */
public java.lang.String getSourceSnapshot() {
  return sourceSnapshot;
}

/**
 * The source snapshot to create this disk, e.g. global/snapshots/my-backup.
 * @param sourceSnapshot sourceSnapshot or {@code null} for none
 */
public AttachedDiskInitializeParams setSourceSnapshot(java.lang.String sourceSnapshot) {
  this.sourceSnapshot = sourceSnapshot;
  return this;
}

/**
 * The customer-supplied encryption key of the source snapshot.
 * @return value or {@code null} for none
 */
public CustomerEncryptionKey getSourceSnapshotEncryptionKey() {
  return sourceSnapshotEncryptionKey;
}

/**
 * The customer-supplied encryption key of the source snapshot.
 * @param sourceSnapshotEncryptionKey sourceSnapshotEncryptionKey or {@code null} for none
 */
public AttachedDiskInitializeParams setSourceSnapshotEncryptionKey(CustomerEncryptionKey sourceSnapshotEncryptionKey) {
  this.sourceSnapshotEncryptionKey = sourceSnapshotEncryptionKey;
  return this;
}

@Override
public AttachedDiskInitializeParams set(String fieldName, Object value) {
  return (AttachedDiskInitializeParams) super.set(fieldName, value);
}

@Override
public AttachedDiskInitializeParams clone() {
  return (AttachedDiskInitializeParams) super.clone();
}

}
package org.rabix.bindings.draft2.helper; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.rabix.bindings.model.FileValue; import org.rabix.common.helper.ChecksumHelper; import org.rabix.common.helper.ChecksumHelper.HashAlgorithm; public class Draft2FileValueHelper extends Draft2BeanHelper { private static final String KEY_NAME = "name"; private static final String KEY_PATH = "path"; private static final String KEY_LOCATION = "location"; private static final String KEY_SIZE = "size"; private static final String KEY_CHECKSUM = "checksum"; private static final String KEY_METADATA = "metadata"; private static final String KEY_CONTENTS = "contents"; private static final String KEY_ORIGINAL_PATH = "originalPath"; private static final String KEY_SECONDARY_FILES = "secondaryFiles"; private static final int CONTENTS_NUMBER_OF_BYTES = 65536; public static void setFileType(Object raw) { setValue(Draft2SchemaHelper.KEY_JOB_TYPE, Draft2SchemaHelper.TYPE_JOB_FILE, raw); } public static String getName(Object raw) { return getValue(KEY_NAME, raw); } public static void setName(String name, Object raw) { setValue(KEY_NAME, name, raw); } public static void setSize(Long size, Object raw) { setValue(KEY_SIZE, size, raw); } public static Long getSize(Object raw) { Object number = getValue(KEY_SIZE, raw); if (number == null) { return null; } if (number instanceof Integer) { return new Long(number.toString()); } return (Long) number; } public static void setChecksum(File file, Object raw, HashAlgorithm hashAlgorithm) { if (!file.exists()) { throw new RuntimeException("Missing file " + file); } String checksum = ChecksumHelper.checksum(file, hashAlgorithm); if (checksum != null) { setValue(KEY_CHECKSUM, checksum, raw); } } public static void setChecksum(String checksum, 
Object raw) { setValue(KEY_CHECKSUM, checksum, raw); } public static void setContents(Object raw) throws IOException { String contents = loadContents(raw); setValue(KEY_CONTENTS, contents, raw); } public static String getContents(Object raw) { return getValue(KEY_CONTENTS, raw); } private static void setContents(String contents, Object raw) { setValue(KEY_CONTENTS, contents, raw); } public static String getChecksum(Object raw) { return getValue(KEY_CHECKSUM, raw); } public static String getPath(Object raw) { return getValue(KEY_PATH, raw); } public static void setPath(String path, Object raw) { setValue(KEY_PATH, path, raw); } public static String getLocation(Object raw) { return getValue(KEY_LOCATION, raw); } public static void setLocation(String location, Object raw) { setValue(KEY_LOCATION, location, raw); } public static void setOriginalPath(String path, Object raw) { setValue(KEY_ORIGINAL_PATH, path, raw); } public static String getOriginalPath(Object raw) { return getValue(KEY_ORIGINAL_PATH, raw); } public static void setMetadata(Object metadata, Object raw) { setValue(KEY_METADATA, metadata, raw); } public static Map<String, Object> getMetadata(Object raw) { return getValue(KEY_METADATA, raw); } public static void setSecondaryFiles(List<?> secondaryFiles, Object raw) { setValue(KEY_SECONDARY_FILES, secondaryFiles, raw); } public static List<Map<String, Object>> getSecondaryFiles(Object raw) { return getValue(KEY_SECONDARY_FILES, raw); } /** * Extract paths from unknown data */ public static Set<String> flattenPaths(Object value) { Set<String> paths = new HashSet<>(); if (value == null) { return paths; } else if (Draft2SchemaHelper.isFileFromValue(value)) { paths.add(getPath(value)); List<Map<String, Object>> secondaryFiles = getSecondaryFiles(value); if (secondaryFiles != null) { paths.addAll(flattenPaths(secondaryFiles)); } return paths; } else if (value instanceof List<?>) { for (Object subvalue : ((List<?>) value)) { paths.addAll(flattenPaths(subvalue)); 
} return paths; } else if (value instanceof Map<?, ?>) { for (Object subvalue : ((Map<?, ?>) value).values()) { paths.addAll(flattenPaths(subvalue)); } } return paths; } /** * Load first CONTENTS_NUMBER_OF_BYTES bytes from file */ private static String loadContents(Object fileData) throws IOException { String path = Draft2FileValueHelper.getPath(fileData); InputStream is = null; try { File file = new File(path); is = new FileInputStream(file); byte[] buffer = new byte[Math.min(CONTENTS_NUMBER_OF_BYTES, (int) file.length())]; is.read(buffer); return new String(buffer, "UTF-8"); } finally { if (is != null) { try { is.close(); } catch (IOException e) { // do nothing } } } } public static FileValue createFileValue(Object value) { String path = Draft2FileValueHelper.getPath(value); String name = Draft2FileValueHelper.getName(value); String contents = Draft2FileValueHelper.getContents(value); String location = Draft2FileValueHelper.getLocation(value); String checksum = Draft2FileValueHelper.getChecksum(value); Long size = Draft2FileValueHelper.getSize(value); Map<String, Object> properties = new HashMap<>(); properties.put(Draft2BindingHelper.KEY_SBG_METADATA, Draft2FileValueHelper.getMetadata(value)); List<FileValue> secondaryFiles = new ArrayList<>(); List<Map<String, Object>> secondaryFileValues = Draft2FileValueHelper.getSecondaryFiles(value); if (secondaryFileValues != null) { for (Map<String, Object> secondaryFileValue : secondaryFileValues) { secondaryFiles.add(createFileValue(secondaryFileValue)); } } return new FileValue(size, path, location, checksum, secondaryFiles, properties, name, null, contents); } public static Map<String, Object> createFileRaw(FileValue fileValue) { Map<String, Object> raw = new HashMap<>(); setFileType(raw); setPath(fileValue.getPath(), raw); setName(fileValue.getName(), raw); setLocation(fileValue.getLocation(), raw); setChecksum(fileValue.getChecksum(), raw); setSize(fileValue.getSize(), raw); setContents(fileValue.getContents(), 
raw); Map<String, Object> properties = fileValue.getProperties(); if (properties != null) { setMetadata(properties.get(Draft2BindingHelper.KEY_SBG_METADATA), raw); } List<FileValue> secondaryFileValues = fileValue.getSecondaryFiles(); if (secondaryFileValues != null) { List<Map<String, Object>> secondaryFilesRaw = new ArrayList<>(); for (FileValue secondaryFileValue : secondaryFileValues) { secondaryFilesRaw.add(createFileRaw(secondaryFileValue)); } setSecondaryFiles(secondaryFilesRaw, raw); } return raw; } }
/** * Copyright 2012 Twitter, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.twitter.elephanttwin.util; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.Reader; import java.io.StringWriter; import java.util.Deque; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.log4j.LogManager; import org.apache.log4j.Logger; import com.esotericsoftware.yamlbeans.YamlException; import com.esotericsoftware.yamlbeans.YamlReader; import com.esotericsoftware.yamlbeans.YamlWriter; import com.google.common.base.Function; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import com.google.common.collect.Maps; /** * Configuration Map that can have nested sub elements and which can be initialized from * various forms of Yaml sources. * * @author Dmitriy Ryaboy */ public class YamlConfig { public static final Logger LOG = LogManager.getLogger(YamlConfig.class); /** * This is used for pattern matching in the transform method when a key is passed that is meant * to be a reference to a list item transform (i.e. 
foo.bar[2]) * @see YamlConfig.transform(YamlConfig) **/ private static final Pattern PATTERN = Pattern.compile("(.+)\\[(\\d+|APPEND)\\]"); /** * Reference Type */ public static enum RefType { SCALAR, MAP, LIST; } protected final Map<String, Object> config; public YamlConfig() { this(null); } /** * Builds a new YamlConfig object from the items in the map. * @param config */ public YamlConfig(Map<String, Object> config) { this.config = config == null ? new HashMap<String, Object>() : config; } /** * Loads a new YamlConfig from the given Reader * @param rdr * @return */ public static YamlConfig load(Reader rdr) { YamlReader reader = new YamlReader(rdr); return loadFromReader(reader); } /** * Loads a new YamlConfig from the given InputStream * @param in * @return */ public static YamlConfig load(InputStream in) { YamlReader reader = new YamlReader(new InputStreamReader(in)); return loadFromReader(reader); } /** * Loads a new YamlConfig from the given String * @param yaml * @return */ public static YamlConfig load(String yaml) { YamlReader reader = new YamlReader(yaml); return loadFromReader(reader); } /** * Returns a new YamlConfig loaded from the given YamlReader * @param reader * @return */ @SuppressWarnings("unchecked") private static YamlConfig loadFromReader(YamlReader reader) { try { return new YamlConfig(reader.read(Map.class)); } catch (YamlException e) { LOG.error("Could not read config", e); return null; } } /** * Loads the YamlConfig from the given file * @param filename * @return */ public static YamlConfig loadFile(String filename) { FileInputStream istream = null; try { istream = new FileInputStream(filename); return load(istream); } catch (FileNotFoundException e) { LOG.error("Failed loading yaml file " + filename, e); return null; } finally { if (istream != null) { try { istream.close(); } catch (IOException e) { // } } } } /** * Transforms the fields from <code>fromConf</code> into this object. 
Transforming fields is done * by iterating over all of the top-level key names passed and overridding each. See * <code>transform(String key, Object value)</code>. * @param transformPlan The configs to transform the YamlConfig with * @throws IllegalArgumentException if a key doesn't already exist */ public void transform(YamlConfig transformPlan) { //for each top-level overriden param for (String key : transformPlan.getRawConfig().keySet()) { transform(key, transformPlan.get(key)); } } /** * Merges <code>value</code> into the <code>YamlConfig</code> object at location <code>key</code>. * The transform key supports a dot-notation to override any key value at any depth of the * YamlConfig. For example: * <ul> * <li><i>foo</i> Override the top-level <i>foo</i> value with the contents of <i>foo</i></li> * <li><i>foo.bar.bat</i> Override the node at <i>foo.bar.bat</i> the assigned value</li> * <li><i>foo.bar[2].bat</i> Override the value of <i>bat</i> in the 3rd item of the list, or add * a 3rd element if the list is of size 2. * <li><i>foo.bar[APPEND].bat</i> Appends <i>bat</i> at the end of the list. * </ul> * @param key The key of the configs to transform * @param value The config object to merge in. * @throws IllegalArgumentException if a param doesn't already exist */ @SuppressWarnings("unchecked") public void transform(String key, Object value) { Preconditions.checkNotNull(key, "Can not call transform with a null key"); //for key aa.bb.cc, transpose into a deque of: // aa // bb // cc Deque<String> keyTokensDeque = new LinkedList<String>(); for (String token : key.split("\\.")) { keyTokensDeque.addLast(token); } recursiveTransform(this, keyTokensDeque, key, value); } /** * Pops a token from keyTokensDeque and if we're at a leaf, adds the value to the config. * Otherwise, pull out the referenced YamlConfig and recurse one level deeper to the next node. 
*/ @SuppressWarnings("unchecked") private void recursiveTransform(YamlConfig thisConfig, Deque<String> keyTokensDeque, String key, Object value) { Preconditions.checkState(keyTokensDeque.size() > 0, "Trying to pop more key tokens than exist for " + key); String token = keyTokensDeque.pop(); boolean atLeaf = keyTokensDeque.size() == 0; // tokens support a dot notation to insert items at arbitrary locations in the config tree. // traverse down each level of the branch passed until we find the config to override Matcher m = PATTERN.matcher(token); // matches if we're overriding an item in a list, i.e., foo[1] if (m.matches()) { String targetSection = m.group(1); // First, make sure we have list to add to List listObject = (List) thisConfig.get(targetSection); if (listObject == null) { throw new IllegalArgumentException("YAML list does not exist for key: " + key); } int itemIndex = "APPEND".equals(m.group(2)) ? listObject.size() : Integer.parseInt(m.group(2)); // then we have to determine if we're dealing with a list of Strings or a List of YamlConfigs. // if we're at a leaf and the value is a string, then it should a List of Strings. if (atLeaf) { List list = (value instanceof String) ? thisConfig.getList(targetSection) : (List) thisConfig.getRawConfig().get(targetSection); // are we appending a new item to the list or updating an existing item if (itemIndex == list.size()) { list.add(value); } else if (itemIndex > list.size()) { throw new IllegalArgumentException("Transform: trying to insert at index " + itemIndex + " into a list of size " + list.size() + ". Transform key=" + key); } else { list.set(itemIndex, value); } return; //traversal complete } else { List<Map<String, Object>> list = (List<Map<String, Object>>) thisConfig.getRawConfig().get(targetSection); // are we appending a new YamlConfig to the list or modifying an existing one? 
YamlConfig listItemYaml; if (itemIndex == list.size()) { listItemYaml = YamlConfig.load(""); list.add(listItemYaml.getRawConfig()); } else if (itemIndex > list.size()) { throw new IllegalArgumentException("YAML map config list index out of range: " + key); } else { listItemYaml = new YamlConfig(list.get(itemIndex)); } recursiveTransform(listItemYaml, keyTokensDeque, key, value); } } else { // if at the leaf node, update config and we're done if (atLeaf) { thisConfig.getRawConfig().put(token, value); return; // traversal complete } // adding a new node if one does not exist if (thisConfig.getNestedConfig(token) == null) { thisConfig.getRawConfig().put(token, new HashMap<String, Object>()); } recursiveTransform(thisConfig.getNestedConfig(token), keyTokensDeque, key, value); } } public static YamlConfig loadResource(String resource) { return load(YamlConfig.class.getResourceAsStream(resource)); } /** * Returns the RefType item associated with key if present or null * @param key * @return */ public RefType getRefType(String key) { RefType result = null; if (config.containsKey(key)) { Object obj = config.get(key); if (obj instanceof List<?>) { result = RefType.LIST; } else if (obj instanceof Map<?, ?>) { result = RefType.MAP; } else { result = RefType.SCALAR; } } return result; } public Object get(String key) { return config.get(key); } /** * When we have a key without a value like: * * <p> key: * * <p>The value is an empty string. * * @param key * @return true if the key has no contents or does not exist. 
*/ public boolean isEmptyKey(String key) { if (config.containsKey(key)) { if (config.get(key).getClass() == String.class && ((String) config.get(key)).length() == 0) { return true; } return false; } return true; } /** * Returns a String config element if present or a default value * @param key * @param defaultValue * @return */ public String getString(String key, String defaultValue) { if (config.containsKey(key)) { return (String) config.get(key); } else { return defaultValue; } } /** * Retrieves the String value associated with key from the config. * @param key * @return */ public String getString(String key) { return getString(key, null); } /** * Retrieves the required String value associated with key from the * config, with the pre-condition that it is not null. * @param key * @return value associated with key as String */ public String getRequiredString(String key) { Preconditions.checkState(config.containsKey(key), key + " is not set!"); return (String) config.get(key); } /** * Returns an Integer config element if present or a default value * @param key * @param defaultValue * @return */ public Integer getInt(String key, Integer defaultValue) { if (config.containsKey(key)) { Object val = config.get(key); return val instanceof Number ? ((Number) val).intValue() : Integer.parseInt((String) val); } else { return defaultValue; } } public Integer getInt(String key) { return getInt(key, null); } /** * Returns a Boolean config element if present or a default value * @param key * @param defaultValue * @return */ public Boolean getBoolean(String key, Boolean defaultValue) { if (config.containsKey(key)) { Object val = config.get(key); return val instanceof Boolean ? 
(Boolean) val : Boolean.valueOf((String) val);
    } else {
      return defaultValue;
    }
  }

  /** Returns the Boolean config element for key, or null when the key is absent. */
  public Boolean getBoolean(String key) {
    return getBoolean(key, null);
  }

  /**
   * Returns a Long config element if present or a default value.
   * @param key config key
   * @param defaultValue returned when key is not present
   * @return Numbers are converted via longValue(); anything else is parsed from its String form
   */
  public Long getLong(String key, Long defaultValue) {
    if (config.containsKey(key)) {
      Object val = config.get(key);
      // The YAML parser may already have produced a Number; otherwise parse the raw String.
      return val instanceof Number ? ((Number) val).longValue() : Long.parseLong((String) val);
    } else {
      return defaultValue;
    }
  }

  /** Returns the Long config element for key, or null when the key is absent. */
  public Long getLong(String key) {
    return getLong(key, null);
  }

  /**
   * Returns a list of config elements associated with key, with an optional default value
   * if key is not found.
   * @param key config key
   * @param defaultValue returned when key is not present
   * @return the list value, or defaultValue
   */
  @SuppressWarnings("unchecked")
  public List<String> getList(String key, List<String> defaultValue) {
    if (config.containsKey(key)) {
      // NOTE(review): unchecked cast — assumes the YAML value really is a List of Strings;
      // a non-list value would surface as a ClassCastException at the caller.
      return (List<String>) config.get(key);
    } else {
      return defaultValue;
    }
  }

  /**
   * Returns a List of config elements associated with key, or null when absent.
   * @param key config key
   * @return the list value, or null
   */
  public List<String> getList(String key) {
    return getList(key, null);
  }

  /**
   * Returns a nested List of YamlConfig objects, or null when the key is absent.
   * @param key config key
   * @return a lazily transformed view (Guava Lists.transform) over the underlying maps, or null
   */
  @SuppressWarnings("unchecked")
  public List<YamlConfig> getNestedList(String key) {
    if (config.containsKey(key)) {
      List<Map<String, Object>> list = (List<Map<String, Object>>) config.get(key);
      // Wrap each raw map in a YamlConfig; the result is a lazy view over the backing list.
      return Lists.transform(list, new Function<Map<String, Object>, YamlConfig>() {
        @Override
        public YamlConfig apply(Map<String, Object> m) {
          return new YamlConfig(m);
        }
      });
    } else {
      return null;
    }
  }

  /**
   * Returns nested YamlConfig object for key, or a new, empty one.
   * @param key config key
   * @return the nested config, never null
   */
  public YamlConfig getNestedOrEmptyConfig(String key) {
    YamlConfig nestedConfig = getNestedConfig(key);
    if (nestedConfig == null) {
      nestedConfig = new YamlConfig(new HashMap<String, Object>());
    }
    return nestedConfig;
  }

  /**
   * Returns nested YamlConfig object for key, or null.
   * @param key config key
   * @return the nested config, or null when the key is absent
   */
  @SuppressWarnings("unchecked")
  public YamlConfig getNestedConfig(String key) {
    if (config.containsKey(key)) {
      return new YamlConfig((Map<String, Object>) config.get(key));
    } else {
      return null;
    }
  }

  /**
   * Returns a map of configuration key/value pairs, created from a nested YamlConfig object.
   * @param key config key
   * @param defaultValue used when no nested config is found for key
   * @return a new map of String values, or defaultValue
   */
  public Map<String, String> getNestedStringMap(String key, Map<String, String> defaultValue) {
    YamlConfig nestedConfig = getNestedConfig(key);
    if (nestedConfig != null) {
      Map<String, String> map = Maps.newHashMap();
      // Flatten the nested config into plain String values.
      for (String subKey : nestedConfig.getKeys()) {
        map.put(subKey, nestedConfig.getString(subKey));
      }
      return map;
    } else {
      return defaultValue;
    }
  }

  /** Returns a map of configuration key/value pairs for key, or null when absent. */
  public Map<String, String> getNestedStringMap(String key) {
    return getNestedStringMap(key, null);
  }

  /** Returns the set of top-level keys in this config. */
  public Set<String> getKeys() {
    return config.keySet();
  }

  /**
   * Does this config contain the given key?
   * @param key config key
   * @return true when the key is present
   */
  public boolean containsKey(String key) {
    return config.containsKey(key);
  }

  /**
   * Because this little wrapper class doesn't implement everything...
   * @return the raw backing map (mutations through it are visible to this config)
   */
  public Map<String, Object> getRawConfig() {
    return config;
  }

  /** Serializes this config back to YAML text; returns "" if writing fails. */
  @Override
  public String toString() {
    StringWriter stringWriter = new StringWriter();
    YamlWriter writer = new YamlWriter(stringWriter);
    try {
      writer.write(config);
      writer.close();
    } catch (YamlException e) {
      LOG.error(e);
      return "";
    }
    return stringWriter.toString();
  }

  /**
   * Dirty test main: loads a config from the file named by args[0] (or stdin) and echoes it.
   * @param args argument array
   */
  public static void main(String[] args) {
    YamlConfig conf = null;
    if (args.length > 0) {
      System.err.println("reading from file " + args[0]);
      conf = YamlConfig.loadFile(args[0]);
    } else {
      System.err.println("reading from stdin");
      conf = YamlConfig.load(System.in);
    }
    if (conf != null) {
      System.out.println(conf.toString());
    } else {
      System.err.println("Failed to load the config.");
    }
  }
}
/*
 * Copyright (c) 2000, 2013, Oracle and/or its affiliates. All rights reserved.
 * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
 */

// -- This file was mechanically generated: Do not edit! -- //
// NOTE(review): generated JDK source (direct, byte-SWAPPED DoubleBuffer view).
// Comments below are review annotations only; the code itself is untouched.

package java.nio;

import java.io.FileDescriptor;
import sun.misc.Cleaner;
import sun.misc.Unsafe;
import sun.misc.VM;
import sun.nio.ch.DirectBuffer;

class DirectDoubleBufferS extends DoubleBuffer implements DirectBuffer {

    // Cached unsafe-access object
    protected static final Unsafe unsafe = Bits.unsafe();

    // Cached array base offset
    private static final long arrayBaseOffset = (long)unsafe.arrayBaseOffset(double[].class);

    // Cached unaligned-access capability
    protected static final boolean unaligned = Bits.unaligned();

    // Base address, used in all indexing calculations
    // NOTE: moved up to Buffer.java for speed in JNI GetDirectBufferAddress
    //    protected long address;

    // An object attached to this buffer. If this buffer is a view of another
    // buffer then we use this field to keep a reference to that buffer to
    // ensure that its memory isn't freed before we are done with it.
    private final Object att;

    public Object attachment() {
        return att;
    }

    public Cleaner cleaner() { return null; }

    // For duplicates and slices
    //
    DirectDoubleBufferS(DirectBuffer db,         // package-private
                        int mark, int pos, int lim, int cap,
                        int off)
    {
        super(mark, pos, lim, cap);
        address = db.address() + off;
        att = db;
    }

    public DoubleBuffer slice() {
        int pos = this.position();
        int lim = this.limit();
        assert (pos <= lim);
        int rem = (pos <= lim ? lim - pos : 0);
        // Byte offset of the slice start: element index << 3 (8 bytes per double).
        int off = (pos << 3);
        assert (off >= 0);
        return new DirectDoubleBufferS(this, -1, 0, rem, rem, off);
    }

    public DoubleBuffer duplicate() {
        return new DirectDoubleBufferS(this,
                                       this.markValue(),
                                       this.position(),
                                       this.limit(),
                                       this.capacity(),
                                       0);
    }

    public DoubleBuffer asReadOnlyBuffer() {
        return new DirectDoubleBufferRS(this,
                                        this.markValue(),
                                        this.position(),
                                        this.limit(),
                                        this.capacity(),
                                        0);
    }

    public long address() {
        return address;
    }

    // Absolute native byte address of element i.
    private long ix(int i) {
        return address + ((long)i << 3);
    }

    public double get() {
        // "S" variant: the raw long is byte-swapped before reinterpreting as a double.
        return Double.longBitsToDouble(Bits.swap(unsafe.getLong(ix(nextGetIndex()))));
    }

    public double get(int i) {
        return Double.longBitsToDouble(Bits.swap(unsafe.getLong(ix(checkIndex(i)))));
    }

    public DoubleBuffer get(double[] dst, int offset, int length) {
        // Bulk copy only pays off above the JNI copy threshold; otherwise defer
        // to the element-by-element loop in the superclass.
        if (((long)length << 3) > Bits.JNI_COPY_TO_ARRAY_THRESHOLD) {
            checkBounds(offset, length, dst.length);
            int pos = position();
            int lim = limit();
            assert (pos <= lim);
            int rem = (pos <= lim ? lim - pos : 0);
            if (length > rem)
                throw new BufferUnderflowException();
            if (order() != ByteOrder.nativeOrder())
                // Non-native order: copy with per-element swapping.
                Bits.copyToLongArray(ix(pos), dst,
                                     (long)offset << 3,
                                     (long)length << 3);
            else
                Bits.copyToArray(ix(pos), dst, arrayBaseOffset,
                                 (long)offset << 3,
                                 (long)length << 3);
            position(pos + length);
        } else {
            super.get(dst, offset, length);
        }
        return this;
    }

    public DoubleBuffer put(double x) {
        unsafe.putLong(ix(nextPutIndex()), Bits.swap(Double.doubleToRawLongBits(x)));
        return this;
    }

    public DoubleBuffer put(int i, double x) {
        unsafe.putLong(ix(checkIndex(i)), Bits.swap(Double.doubleToRawLongBits(x)));
        return this;
    }

    public DoubleBuffer put(DoubleBuffer src) {
        if (src instanceof DirectDoubleBufferS) {
            // Direct-to-direct: raw memory copy (both sides share the swapped layout).
            if (src == this)
                throw new IllegalArgumentException();
            DirectDoubleBufferS sb = (DirectDoubleBufferS)src;
            int spos = sb.position();
            int slim = sb.limit();
            assert (spos <= slim);
            int srem = (spos <= slim ? slim - spos : 0);
            int pos = position();
            int lim = limit();
            assert (pos <= lim);
            int rem = (pos <= lim ? lim - pos : 0);
            if (srem > rem)
                throw new BufferOverflowException();
            unsafe.copyMemory(sb.ix(spos), ix(pos), (long)srem << 3);
            sb.position(spos + srem);
            position(pos + srem);
        } else if (src.hb != null) {
            // Heap-backed source: bulk array put.
            int spos = src.position();
            int slim = src.limit();
            assert (spos <= slim);
            int srem = (spos <= slim ? slim - spos : 0);
            put(src.hb, src.offset + spos, srem);
            src.position(spos + srem);
        } else {
            super.put(src);
        }
        return this;
    }

    public DoubleBuffer put(double[] src, int offset, int length) {
        if (((long)length << 3) > Bits.JNI_COPY_FROM_ARRAY_THRESHOLD) {
            checkBounds(offset, length, src.length);
            int pos = position();
            int lim = limit();
            assert (pos <= lim);
            int rem = (pos <= lim ? lim - pos : 0);
            if (length > rem)
                throw new BufferOverflowException();
            if (order() != ByteOrder.nativeOrder())
                Bits.copyFromLongArray(src,
                                       (long)offset << 3,
                                       ix(pos),
                                       (long)length << 3);
            else
                Bits.copyFromArray(src, arrayBaseOffset,
                                   (long)offset << 3,
                                   ix(pos),
                                   (long)length << 3);
            position(pos + length);
        } else {
            super.put(src, offset, length);
        }
        return this;
    }

    public DoubleBuffer compact() {
        int pos = position();
        int lim = limit();
        assert (pos <= lim);
        int rem = (pos <= lim ? lim - pos : 0);
        // Move the unread elements to the front, then open the buffer for writing.
        unsafe.copyMemory(ix(pos), ix(0), (long)rem << 3);
        position(rem);
        limit(capacity());
        discardMark();
        return this;
    }

    public boolean isDirect() {
        return true;
    }

    public boolean isReadOnly() {
        return false;
    }

    public ByteOrder order() {
        // Swapped view: logical order is the opposite of the platform's native order.
        return ((ByteOrder.nativeOrder() == ByteOrder.BIG_ENDIAN)
                ? ByteOrder.LITTLE_ENDIAN : ByteOrder.BIG_ENDIAN);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.rave.portal.repository.impl;

import org.apache.rave.model.Page;
import org.apache.rave.model.Region;
import org.apache.rave.model.RegionWidget;
import org.apache.rave.portal.model.impl.RegionWidgetImpl;
import org.apache.rave.portal.repository.MongoPageOperations;
import org.apache.rave.portal.repository.RegionWidgetRepository;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.stereotype.Repository;

import java.util.ArrayList;
import java.util.List;

import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

/**
 * RegionWidgetRepository backed by MongoDB. RegionWidgets are not stored as
 * top-level documents: they are embedded inside the regions of Page documents,
 * so every operation here loads the owning Page, mutates it in memory, and
 * saves the whole Page back through the MongoPageOperations template.
 */
@Repository
public class MongoDbRegionWidgetRepository implements RegionWidgetRepository {

    @Autowired
    private MongoPageOperations template;

    @Override
    public Class<? extends RegionWidget> getType() {
        return RegionWidgetImpl.class;
    }

    /** Looks up a RegionWidget by id via its owning Page. */
    @Override
    public RegionWidget get(String id) {
        Page page = getPageByRegionWidgetId(id);
        return getRegionWidgetById(page, id);
    }

    /** Inserts when the item has no id yet, otherwise replaces it in place. */
    @Override
    public RegionWidget save(RegionWidget item) {
        return item.getId() == null ? addNewRegionWidget(item) : updateRegionWidget(item);
    }

    /** Removes the widget from its owning page and persists the page. */
    @Override
    public void delete(RegionWidget item) {
        Page page = getPageByRegionWidgetId(item.getId());
        replaceOrRemoveWidget(page, item, false);
        template.save(page);
    }

    /**
     * Collects every RegionWidget from every Page's top-level regions.
     * NOTE(review): loads all pages into memory — fine for admin-style listings,
     * expensive on large datasets. Sub-pages are NOT traversed here, unlike the
     * by-id lookups below; confirm whether sub-page widgets should be included.
     */
    @Override
    public List<RegionWidget> getAll(){
        Query q = new Query();
        List<Page> allPages = template.find(q);
        List<RegionWidget> regionWidgets = new ArrayList<RegionWidget>();
        for(Page page: allPages){
            List<Region> regions = page.getRegions();
            if(regions != null){
                for(Region region : regions) {
                    List<RegionWidget> rws = region.getRegionWidgets();
                    if(rws != null){
                        for(RegionWidget rw : rws){
                            regionWidgets.add(rw);
                        }
                    }
                }
            }
        }
        return regionWidgets;
    }

    /**
     * Pages over the (fully materialized) result of getAll().
     * NOTE(review): if offset exceeds the total size, subList throws
     * IndexOutOfBoundsException — presumably callers always pass a valid offset;
     * verify against the service layer.
     */
    @Override
    public List<RegionWidget> getLimitedList(int offset, int pageSize) {
        List<RegionWidget> regionWidgets = this.getAll();
        // Clamp the end index so the last (short) page is returned without error.
        int end = regionWidgets.size() < offset + pageSize ? regionWidgets.size() : offset + pageSize;
        return regionWidgets.subList(offset, end);
    }

    @Override
    public int getCountAll() {
        return this.getAll().size();
    }

    /** Replaces the widget inside its owning page, saves, and returns the saved copy. */
    private RegionWidget updateRegionWidget(RegionWidget item) {
        RegionWidget savedWidget;
        Page page = getPageByRegionWidgetId(item.getId());
        replaceOrRemoveWidget(page, item, true);
        Page saved = template.save(page);
        savedWidget = getRegionWidgetById(saved, item.getId());
        return savedWidget;
    }

    /**
     * Appends the widget to its parent region, saves the page, and returns the
     * persisted instance.
     * NOTE(review): the returned element is indexed by the PRE-save parent
     * region's size; this is correct only if save() preserves widget ordering
     * and count — appears to be the assumption throughout this class.
     */
    private RegionWidget addNewRegionWidget(RegionWidget item) {
        Page page = getPageFromRepository(item);
        Region parent = getRegionById(item.getRegion().getId(), page.getRegions());
        if(parent == null) throw new IllegalStateException("Unable to find parent for page");
        parent.getRegionWidgets().add(item);
        Page saved = template.save(page);
        return getRegionById(parent.getId(), saved.getRegions()).getRegionWidgets().get(parent.getRegionWidgets().size() -1);
    }

    /** Depth-first search for a widget id across a page's regions and its sub-pages. */
    private RegionWidget getRegionWidgetById(Page page, String id) {
        List<Region> regions = page.getRegions();
        RegionWidget regionWidget = getRegionWidget(id, regions);
        if(regionWidget == null && page.getSubPages() != null) {
            for(Page subPage : page.getSubPages()) {
                regionWidget = getRegionWidgetById(subPage, id);
                if(regionWidget != null) break;
            }
        }
        return regionWidget;
    }

    /** Linear scan of the given regions for a widget with the given id, or null. */
    private RegionWidget getRegionWidget(String id, List<Region> regions) {
        for(Region region : regions) {
            for(RegionWidget widget : region.getRegionWidgets()) {
                if(widget.getId().equals(id)) {
                    return widget;
                }
            }
        }
        return null;
    }

    /**
     * Removes (and, when replace is true, re-inserts at the same index) the widget
     * within the page or one of its sub-pages.
     * @return the widget's index within its region
     * @throws IllegalStateException when the widget is not found anywhere in the page
     */
    private int replaceOrRemoveWidget(Page page, RegionWidget item, boolean replace) {
        List<Region> regions = page.getRegions();
        int index = replaceOrRemoveWidget(item, replace, regions);
        if(index == -1 && page.getSubPages() != null) {
            for(Page subPage : page.getSubPages()) {
                index = replaceOrRemoveWidget(item, replace, subPage.getRegions());
                if(index != -1) break;
            }
        }
        if(index == -1) throw new IllegalStateException("Widget does not exist in parent page regions");
        return index;
    }

    /** Worker for the overload above; returns -1 when the widget is not in these regions. */
    private int replaceOrRemoveWidget(RegionWidget item, boolean replace, List<Region> regions) {
        for(Region region : regions) {
            List<RegionWidget> regionWidgets = region.getRegionWidgets();
            for(int i=0; i< regionWidgets.size(); i++) {
                if(regionWidgets.get(i).getId().equals(item.getId())) {
                    // Remove first, then (optionally) put the replacement at the same slot.
                    regionWidgets.remove(i);
                    if(replace) {
                        regionWidgets.add(i, item);
                    }
                    return i;
                }
            }
        }
        return -1;
    }

    /** Finds a region by id within the given list, or null. */
    private Region getRegionById(String id, List<Region> regions) {
        for(Region region: regions) {
            if(id.equals(region.getId())) {
                return region;
            }
        }
        return null;
    }

    /** Resolves the owning Page via the widget's region -> page -> id chain. */
    private Page getPageFromRepository(RegionWidget item) {
        if(item.getRegion() != null && item.getRegion().getPage() != null && item.getRegion().getPage().getId() != null) {
            return template.get(item.getRegion().getPage().getId());
        } else {
            throw new IllegalStateException("Unable to find page for region");
        }
    }

    /** Queries for the Page embedding the widget id, in top-level regions or sub-pages. */
    private Page getPageByRegionWidgetId(String id) {
        Criteria criteria = getRegionWidgetIdCriteria(id);
        return template.findOne(query(new Criteria().orOperator(criteria, where("subPages").elemMatch(criteria))));
    }

    /** Mongo criteria matching a page whose regions embed a widget with the given _id. */
    private Criteria getRegionWidgetIdCriteria(String id) {
        return where("regions").elemMatch(where("regionWidgets").elemMatch(where("_id").is(id)));
    }

    public void setTemplate(MongoPageOperations template) {
        this.template = template;
    }
}
/**
 * Copyright (c) 2015-present, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 */

package com.facebook.react.views.view;

import javax.annotation.Nullable;

import java.util.Locale;
import java.util.Map;

import android.graphics.Color;
import android.os.Build;
import android.view.View;

import com.facebook.csslayout.CSSConstants;
import com.facebook.csslayout.Spacing;
import com.facebook.react.bridge.JSApplicationIllegalArgumentException;
import com.facebook.react.bridge.ReadableArray;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.common.MapBuilder;
import com.facebook.react.common.annotations.VisibleForTesting;
import com.facebook.react.uimanager.CatalystStylesDiffMap;
import com.facebook.react.uimanager.PixelUtil;
import com.facebook.react.uimanager.PointerEvents;
import com.facebook.react.uimanager.ReactProp;
import com.facebook.react.uimanager.ReactPropGroup;
import com.facebook.react.uimanager.ThemedReactContext;
import com.facebook.react.uimanager.ViewGroupManager;
import com.facebook.react.uimanager.ViewProps;

/**
 * View manager for AndroidViews (plain React Views).
 */
public class ReactViewManager extends ViewGroupManager<ReactViewGroup> {

  @VisibleForTesting
  public static final String REACT_CLASS = ViewProps.VIEW_CLASS_NAME;

  // Index order must match the name order of the @ReactPropGroup arrays below.
  private static final int[] SPACING_TYPES = {
      Spacing.ALL, Spacing.LEFT, Spacing.RIGHT, Spacing.TOP, Spacing.BOTTOM,
  };
  private static final int CMD_HOTSPOT_UPDATE = 1;
  private static final int CMD_SET_PRESSED = 2;
  // Scratch buffer for getLocationOnScreen.
  // NOTE(review): static mutable state — safe only if commands are always
  // dispatched on a single (UI) thread; confirm against the UIManager contract.
  private static final int[] sLocationBuf = new int[2];

  @ReactProp(name = "accessible")
  public void setAccessible(ReactViewGroup view, boolean accessible) {
    view.setFocusable(accessible);
  }

  @ReactProp(name = "borderRadius")
  public void setBorderRadius(ReactViewGroup view, float borderRadius) {
    // JS passes DIPs; the view works in pixels.
    view.setBorderRadius(PixelUtil.toPixelFromDIP(borderRadius));
  }

  @ReactProp(name = "borderStyle")
  public void setBorderStyle(ReactViewGroup view, @Nullable String borderStyle) {
    view.setBorderStyle(borderStyle);
  }

  @ReactProp(name = "pointerEvents")
  public void setPointerEvents(ReactViewGroup view, @Nullable String pointerEventsStr) {
    if (pointerEventsStr != null) {
      // Map e.g. "box-none" -> PointerEvents.BOX_NONE.
      PointerEvents pointerEvents =
          PointerEvents.valueOf(pointerEventsStr.toUpperCase(Locale.US).replace("-", "_"));
      view.setPointerEvents(pointerEvents);
    }
  }

  @ReactProp(name = "nativeBackgroundAndroid")
  public void setNativeBackground(ReactViewGroup view, @Nullable ReadableMap bg) {
    // null clears the drawable; otherwise build one from the JS description.
    view.setTranslucentBackgroundDrawable(bg == null ?
            null : ReactDrawableHelper.createDrawableFromJSDescription(view.getContext(), bg));
  }

  @ReactProp(name = ViewProps.BORDER_WIDTH, defaultFloat = CSSConstants.UNDEFINED)
  public void setBorderWidth(ReactViewGroup view, float width) {
    if (!CSSConstants.isUndefined(width)) {
      width = PixelUtil.toPixelFromDIP(width);
    }
    view.setBorderWidth(Spacing.ALL, width);
  }

  @ReactProp(name = ReactClippingViewGroupHelper.PROP_REMOVE_CLIPPED_SUBVIEWS)
  public void setRemoveClippedSubviews(ReactViewGroup view, boolean removeClippedSubviews) {
    view.setRemoveClippedSubviews(removeClippedSubviews);
  }

  @ReactProp(name = ViewProps.NEEDS_OFFSCREEN_ALPHA_COMPOSITING)
  public void setNeedsOffscreenAlphaCompositing(
      ReactViewGroup view,
      boolean needsOffscreenAlphaCompositing) {
    view.setNeedsOffscreenAlphaCompositing(needsOffscreenAlphaCompositing);
  }

  @ReactPropGroup(names = {
      ViewProps.BORDER_WIDTH,
      ViewProps.BORDER_LEFT_WIDTH,
      ViewProps.BORDER_RIGHT_WIDTH,
      ViewProps.BORDER_TOP_WIDTH,
      ViewProps.BORDER_BOTTOM_WIDTH,
  }, defaultFloat = CSSConstants.UNDEFINED)
  public void setBorderWidth(ReactViewGroup view, int index, float width) {
    if (!CSSConstants.isUndefined(width)) {
      width = PixelUtil.toPixelFromDIP(width);
    }
    // index maps into SPACING_TYPES in the same order as the names array.
    view.setBorderWidth(SPACING_TYPES[index], width);
  }

  @ReactPropGroup(names = {
      "borderColor", "borderLeftColor", "borderRightColor", "borderTopColor", "borderBottomColor"
  }, customType = "Color")
  public void setBorderColor(ReactViewGroup view, int index, Integer color) {
    // null color resets to the undefined sentinel.
    view.setBorderColor(
        SPACING_TYPES[index],
        color == null ? CSSConstants.UNDEFINED : (float) color);
  }

  @Override
  public String getName() {
    return REACT_CLASS;
  }

  @Override
  public ReactViewGroup createViewInstance(ThemedReactContext context) {
    return new ReactViewGroup(context);
  }

  /** Maps JS command names to the integer ids handled in receiveCommand. */
  @Override
  public Map<String, Integer> getCommandsMap() {
    return MapBuilder.of("hotspotUpdate", CMD_HOTSPOT_UPDATE, "setPressed", CMD_SET_PRESSED);
  }

  @Override
  public void receiveCommand(ReactViewGroup root, int commandId, @Nullable ReadableArray args) {
    switch (commandId) {
      case CMD_HOTSPOT_UPDATE: {
        if (args == null || args.size() != 2) {
          throw new JSApplicationIllegalArgumentException(
              "Illegal number of arguments for 'updateHotspot' command");
        }
        // drawableHotspotChanged is API 21+ only.
        if (Build.VERSION.SDK_INT >= 21) {
          root.getLocationOnScreen(sLocationBuf);
          // Convert DIP coordinates to pixels relative to the view's screen position.
          float x = PixelUtil.toPixelFromDIP(args.getDouble(0)) - sLocationBuf[0];
          float y = PixelUtil.toPixelFromDIP(args.getDouble(1)) - sLocationBuf[1];
          root.drawableHotspotChanged(x, y);
        }
        break;
      }
      case CMD_SET_PRESSED: {
        if (args == null || args.size() != 1) {
          throw new JSApplicationIllegalArgumentException(
              "Illegal number of arguments for 'setPressed' command");
        }
        root.setPressed(args.getBoolean(0));
        break;
      }
    }
  }

  @Override
  public void addView(ReactViewGroup parent, View child, int index) {
    // When subview clipping is on, the parent tracks all children itself and
    // only attaches the visible ones.
    boolean removeClippedSubviews = parent.getRemoveClippedSubviews();
    if (removeClippedSubviews) {
      parent.addViewWithSubviewClippingEnabled(child, index);
    } else {
      parent.addView(child, index);
    }
  }

  @Override
  public int getChildCount(ReactViewGroup parent) {
    boolean removeClippedSubviews = parent.getRemoveClippedSubviews();
    if (removeClippedSubviews) {
      // Logical child count, including currently clipped-out children.
      return parent.getAllChildrenCount();
    } else {
      return parent.getChildCount();
    }
  }

  @Override
  public View getChildAt(ReactViewGroup parent, int index) {
    boolean removeClippedSubviews = parent.getRemoveClippedSubviews();
    if (removeClippedSubviews) {
      return parent.getChildAtWithSubviewClippingEnabled(index);
    } else {
      return parent.getChildAt(index);
    }
  }

  @Override
  public void removeViewAt(ReactViewGroup parent, int index) {
    boolean removeClippedSubviews = parent.getRemoveClippedSubviews();
    if (removeClippedSubviews) {
      View child = getChildAt(parent, index);
      // The child may be clipped out (detached); only detach when actually attached.
      if (child.getParent() != null) {
        parent.removeView(child);
      }
      parent.removeViewWithSubviewClippingEnabled(child);
    } else {
      parent.removeViewAt(index);
    }
  }
}
/**
 * Copyright (C) 2014-2015 LinkedIn Corp. (pinot-core@linkedin.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.linkedin.pinot.core.data.manager.realtime;

import com.linkedin.pinot.core.data.extractors.FieldExtractorFactory;
import com.linkedin.pinot.core.data.extractors.PlainFieldExtractor;
import java.io.File;
import java.util.List;
import java.util.Map;
import java.util.TimerTask;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.configuration.plist.PropertyListConfiguration;
import org.apache.commons.io.FileUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.util.concurrent.Uninterruptibles;
import com.linkedin.pinot.common.config.AbstractTableConfig;
import com.linkedin.pinot.common.config.IndexingConfig;
import com.linkedin.pinot.common.data.Schema;
import com.linkedin.pinot.common.metadata.instance.InstanceZKMetadata;
import com.linkedin.pinot.common.metadata.segment.IndexLoadingConfigMetadata;
import com.linkedin.pinot.common.metadata.segment.RealtimeSegmentZKMetadata;
import com.linkedin.pinot.common.metrics.ServerGauge;
import com.linkedin.pinot.common.metrics.ServerMeter;
import com.linkedin.pinot.common.metrics.ServerMetrics;
import com.linkedin.pinot.common.segment.ReadMode;
import com.linkedin.pinot.common.utils.CommonConstants.Segment.Realtime.Status;
import com.linkedin.pinot.common.utils.CommonConstants.Segment.SegmentType;
import com.linkedin.pinot.core.data.GenericRow;
import com.linkedin.pinot.core.data.manager.offline.SegmentDataManager;
import com.linkedin.pinot.core.indexsegment.IndexSegment;
import com.linkedin.pinot.core.realtime.StreamProvider;
import com.linkedin.pinot.core.realtime.StreamProviderConfig;
import com.linkedin.pinot.core.realtime.StreamProviderFactory;
import com.linkedin.pinot.core.realtime.converter.RealtimeSegmentConverter;
import com.linkedin.pinot.core.realtime.impl.RealtimeSegmentImpl;
import com.linkedin.pinot.core.realtime.impl.kafka.KafkaHighLevelStreamProviderConfig;
import com.linkedin.pinot.core.segment.index.loader.Loaders;

/**
 * Manages a single consuming realtime segment: starts a Kafka (high-level) stream
 * consumer, indexes rows into an in-memory RealtimeSegmentImpl until a size/time
 * threshold is hit, converts the segment to the offline format, commits Kafka
 * offsets, and notifies the table manager so Helix can mark the segment done.
 */
public class RealtimeSegmentDataManager extends SegmentDataManager {
  private static final Logger LOGGER = LoggerFactory.getLogger(RealtimeSegmentDataManager.class);
  private final static long ONE_MINUTE_IN_MILLSEC = 1000 * 60;

  private final String tableName;
  private final String segmentName;
  private final Schema schema;
  private final PlainFieldExtractor extractor;
  private final RealtimeSegmentZKMetadata segmentMetatdaZk;
  private final StreamProviderConfig kafkaStreamProviderConfig;
  private final StreamProvider kafkaStreamProvider;
  private final File resourceDir;
  private final File resourceTmpDir;
  private final Object lock = new Object();
  private RealtimeSegmentImpl realtimeSegment;
  private final String tableStreamName;

  // Consumption start time; used to compute the time-based flush threshold.
  private final long start = System.currentTimeMillis();
  private long segmentEndTimeThreshold;
  private AtomicLong lastUpdatedRawDocuments = new AtomicLong(0);

  // Flags checked by the indexing loop; written from other threads, hence volatile.
  private volatile boolean keepIndexing = true;
  private volatile boolean isShuttingDown = false;

  private TimerTask segmentStatusTask;
  private final ServerMetrics serverMetrics;
  private final RealtimeTableDataManager notifier;
  private Thread indexingThread;

  private final String sortedColumn;
  private final List<String> invertedIndexColumns;
  // Per-segment logger; replaced with a segment-specific one in the constructor.
  private Logger segmentLogger = LOGGER;

  // An instance of this class exists only for the duration of the realtime segment
  // that is currently being consumed.
  // Once the segment is committed, the segment is handled by OfflineSegmentDataManager
  public RealtimeSegmentDataManager(final RealtimeSegmentZKMetadata segmentMetadata,
      final AbstractTableConfig tableConfig, InstanceZKMetadata instanceMetadata,
      RealtimeTableDataManager realtimeResourceManager, final String resourceDataDir,
      final ReadMode mode, final Schema schema, final ServerMetrics serverMetrics)
      throws Exception {
    super();
    this.schema = schema;
    this.extractor = (PlainFieldExtractor) FieldExtractorFactory.getPlainFieldExtractor(schema);
    this.serverMetrics = serverMetrics;
    this.segmentName = segmentMetadata.getSegmentName();
    this.tableName = tableConfig.getTableName();

    // Pick the sorted column, if one is configured AND exists in the schema.
    IndexingConfig indexingConfig = tableConfig.getIndexingConfig();
    if (indexingConfig.getSortedColumn().isEmpty()) {
      LOGGER.info("RealtimeDataResourceZKMetadata contains no information about sorted column for segment {}",
          segmentName);
      this.sortedColumn = null;
    } else {
      String firstSortedColumn = indexingConfig.getSortedColumn().get(0);
      if (this.schema.hasColumn(firstSortedColumn)) {
        LOGGER.info("Setting sorted column name: {} from RealtimeDataResourceZKMetadata for segment {}",
            firstSortedColumn, segmentName);
        this.sortedColumn = firstSortedColumn;
      } else {
        LOGGER.warn(
            "Sorted column name: {} from RealtimeDataResourceZKMetadata is not existed in schema for segment {}.",
            firstSortedColumn, segmentName);
        this.sortedColumn = null;
      }
    }
    //inverted index columns
    invertedIndexColumns = indexingConfig.getInvertedIndexColumns();
    this.segmentMetatdaZk = segmentMetadata;

    // create and init stream provider config
    // TODO : ideally resourceMetatda should create and give back a streamProviderConfig
    this.kafkaStreamProviderConfig = new KafkaHighLevelStreamProviderConfig();
    this.kafkaStreamProviderConfig.init(tableConfig, instanceMetadata, schema);
    segmentLogger = LoggerFactory.getLogger(RealtimeSegmentDataManager.class.getName() +
        "_" + segmentName + "_" + kafkaStreamProviderConfig.getStreamName());
    segmentLogger.info("Created segment data manager with Sorted column:{}, invertedIndexColumns:{}",
        sortedColumn, invertedIndexColumns);

    segmentEndTimeThreshold = start + kafkaStreamProviderConfig.getTimeThresholdToFlushSegment();

    this.resourceDir = new File(resourceDataDir);
    this.resourceTmpDir = new File(resourceDataDir, "_tmp");
    if (!resourceTmpDir.exists()) {
      resourceTmpDir.mkdirs();
    }
    // create and init stream provider
    final String tableName = tableConfig.getTableName();
    this.kafkaStreamProvider = StreamProviderFactory.buildStreamProvider();
    this.kafkaStreamProvider.init(kafkaStreamProviderConfig, tableName, serverMetrics);
    this.kafkaStreamProvider.start();
    this.tableStreamName = tableName + "_" + kafkaStreamProviderConfig.getStreamName();

    // lets create a new realtime segment
    segmentLogger.info("Started kafka stream provider");
    realtimeSegment = new RealtimeSegmentImpl(schema,
        kafkaStreamProviderConfig.getSizeThresholdToFlushSegment(), tableName,
        segmentMetadata.getSegmentName(), kafkaStreamProviderConfig.getStreamName(),
        serverMetrics);
    realtimeSegment.setSegmentMetadata(segmentMetadata, this.schema);
    notifier = realtimeResourceManager;

    // Periodic task that decides when to stop consuming (size/time thresholds).
    segmentStatusTask = new TimerTask() {
      @Override
      public void run() {
        computeKeepIndexing();
      }
    };

    // start the indexing thread
    indexingThread = new Thread(new Runnable() {
      @Override
      public void run() {
        // continue indexing until criteria is met
        boolean notFull = true;
        // Exponential backoff for indexing errors, capped at 60s below.
        long exceptionSleepMillis = 50L;
        segmentLogger.info("Starting to collect rows");

        do {
          GenericRow row = null;
          try {
            row = kafkaStreamProvider.next();
            row = extractor.transform(row);
            if (row != null) {
              // index() returns false once the segment is full.
              notFull = realtimeSegment.index(row);
              exceptionSleepMillis = 50L;
            }
          } catch (Exception e) {
            segmentLogger.warn("Caught exception while indexing row, sleeping for {} ms, row contents {}",
                exceptionSleepMillis, row, e);

            // Sleep for a short time as to avoid filling the logs with exceptions too quickly
            Uninterruptibles.sleepUninterruptibly(exceptionSleepMillis, TimeUnit.MILLISECONDS);
            exceptionSleepMillis = Math.min(60000L, exceptionSleepMillis * 2);
          } catch (Error e) {
            segmentLogger.error("Caught error in indexing thread", e);
            throw e;
          }
        } while (notFull && keepIndexing && (!isShuttingDown));

        if (isShuttingDown) {
          segmentLogger.info("Shutting down indexing thread!");
          return;
        }
        try {
          // Report extractor quality counters before converting the segment.
          int numErrors, numConversions, numNulls, numNullCols;
          if ((numErrors = extractor.getTotalErrors()) > 0) {
            serverMetrics.addMeteredTableValue(tableStreamName, ServerMeter.ROWS_WITH_ERRORS, (long) numErrors);
          }
          Map<String, Integer> errorCount = extractor.getError_count();
          for (String column : errorCount.keySet()) {
            if ((numErrors = errorCount.get(column)) > 0) {
              segmentLogger.warn("Column {} had {} rows with errors", column, numErrors);
            }
          }
          if ((numConversions = extractor.getTotalConversions()) > 0) {
            serverMetrics.addMeteredTableValue(tableStreamName, ServerMeter.ROWS_NEEDING_CONVERSIONS, (long) numConversions);
            segmentLogger.info("{} rows needed conversions ", numConversions);
          }
          if ((numNulls = extractor.getTotalNulls()) > 0) {
            serverMetrics.addMeteredTableValue(tableStreamName, ServerMeter.ROWS_WITH_NULL_VALUES, (long) numNulls);
            segmentLogger.info("{} rows had null columns", numNulls);
          }
          if ((numNullCols = extractor.getTotalNullCols()) > 0) {
            serverMetrics.addMeteredTableValue(tableStreamName, ServerMeter.COLUMNS_WITH_NULL_VALUES, (long) numNullCols);
            segmentLogger.info("{} columns had null values", numNullCols);
          }
          segmentLogger.info("Indexing threshold reached, proceeding with index conversion");
          // kill the timer first
          segmentStatusTask.cancel();
          updateCurrentDocumentCountMetrics();
          segmentLogger.info("Indexed {} raw events, current number of docs = {}",
              realtimeSegment.getRawDocumentCount(),
              realtimeSegment.getSegmentMetadata().getTotalDocs());
          File tempSegmentFolder = new File(resourceTmpDir, "tmp-" + String.valueOf(System.currentTimeMillis()));

          // lets convert the segment now
          RealtimeSegmentConverter converter =
              new RealtimeSegmentConverter(realtimeSegment, tempSegmentFolder.getAbsolutePath(), schema,
                  segmentMetadata.getTableName(), segmentMetadata.getSegmentName(), sortedColumn, invertedIndexColumns);

          segmentLogger.info("Trying to build segment");
          final long buildStartTime = System.nanoTime();
          converter.build();
          final long buildEndTime = System.nanoTime();
          segmentLogger.info("Built segment in {} ms",
              TimeUnit.MILLISECONDS.convert((buildEndTime - buildStartTime), TimeUnit.NANOSECONDS));
          // Atomically move the built segment into its final location.
          File destDir = new File(resourceDataDir, segmentMetadata.getSegmentName());
          FileUtils.deleteQuietly(destDir);
          FileUtils.moveDirectory(tempSegmentFolder.listFiles()[0], destDir);
          FileUtils.deleteQuietly(tempSegmentFolder);
          long segStartTime = realtimeSegment.getMinTime();
          long segEndTime = realtimeSegment.getMaxTime();

          TimeUnit timeUnit = schema.getTimeFieldSpec().getOutgoingGranularitySpec().getTimeType();
          Configuration configuration = new PropertyListConfiguration();
          configuration.setProperty(IndexLoadingConfigMetadata.KEY_OF_LOADING_INVERTED_INDEX, invertedIndexColumns);
          IndexLoadingConfigMetadata configMetadata = new IndexLoadingConfigMetadata(configuration);
          // Load the converted (offline-format) segment so it can be swapped in.
          IndexSegment segment =
              Loaders.IndexSegment.load(new File(resourceDir, segmentMetatdaZk.getSegmentName()), mode, configMetadata);

          segmentLogger.info("Committing Kafka offsets");
          boolean commitSuccessful = false;
          try {
            kafkaStreamProvider.commit();
            commitSuccessful = true;
            kafkaStreamProvider.shutdown();
            segmentLogger.info("Successfully committed Kafka offsets, consumer release requested.");
          } catch (Throwable e) {
            // If we got here, it means that either the commit or the shutdown failed. Considering that the
            // KafkaConsumerManager delays shutdown and only adds the consumer to be released in a deferred way, this
            // likely means that writing the Kafka offsets failed.
            //
            // The old logic (mark segment as done, then commit offsets and shutdown the consumer immediately) would die
            // in a terrible way, leaving the consumer open and causing us to only get half the records from that point
            // on. In this case, because we keep the consumer open for a little while, we should be okay if the
            // controller reassigns us a new segment before the consumer gets released. Hopefully by the next time that
            // we get to committing the offsets, the transient ZK failure that caused the write to fail will not
            // happen again and everything will be good.
            //
            // Several things can happen:
            // - The controller reassigns us a new segment before we release the consumer (KafkaConsumerManager will
            //   keep the consumer open for about a minute, which should be enough time for the controller to reassign
            //   us a new segment) and the next time we close the segment the offsets commit successfully; we're good.
            // - The controller reassigns us a new segment, but after we released the consumer (if the controller was
            //   down or there was a ZK failure on writing the Kafka offsets but not the Helix state). We lose whatever
            //   data was in this segment. Not good.
            // - The server crashes after this comment and before we mark the current segment as done; if the Kafka
            //   offsets didn't get written, then when the server restarts it'll start consuming the current segment
            //   from the previously committed offsets; we're good.
            // - The server crashes after this comment, the Kafka offsets were written but the segment wasn't marked as
            //   done in Helix, but we got a failure (or not) on the commit; we lose whatever data was in this segment
            //   if we restart the server (not good). If we manually mark the segment as done in Helix by editing the
            //   state in ZK, everything is good, we'll consume a new segment that starts from the correct offsets.
            //
            // This is still better than the previous logic, which would have these failure modes:
            // - Consumer was left open and the controller reassigned us a new segment; consume only half the events
            //   (because there are two consumers and Kafka will try to rebalance partitions between those two)
            // - We got a segment assigned to us before we got around to committing the offsets, reconsume the data that
            //   we got in this segment again, as we're starting consumption from the previously committed offset (eg.
            //   duplicate data).
            //
            // This is still not very satisfactory, which is why this part is due for a redesign.
            //
            // Assuming you got here because the realtime offset commit metric has fired, check the logs to determine
            // which of the above scenarios happened. If you're in one of the good scenarios, then there's nothing to
            // do. If you're not, then based on how critical it is to get those rows back, then your options are:
            // - Wipe the realtime table and reconsume everything (mark the replica as disabled so that clients don't
            //   see query results from partially consumed data, then re-enable it when this replica has caught up)
            // - Accept that those rows are gone in this replica and move on (they'll be replaced by good offline data
            //   soon anyway)
            // - If there's a replica that has consumed properly, you could shut it down, copy its segments onto this
            //   replica, assign a new consumer group id to this replica, rename the copied segments and edit their
            //   metadata to reflect the new consumer group id, copy the Kafka offsets from the shutdown replica onto
            //   the new consumer group id and then restart both replicas. This should get you the missing rows.
            segmentLogger.error("FATAL: Exception committing or shutting down consumer commitSuccessful={}",
                commitSuccessful, e);
            serverMetrics.addMeteredTableValue(tableName, ServerMeter.REALTIME_OFFSET_COMMIT_EXCEPTIONS, 1L);
            if (!commitSuccessful) {
              kafkaStreamProvider.shutdown();
            }
          }

          try {
            segmentLogger.info("Marking current segment as completed in Helix");
            RealtimeSegmentZKMetadata metadataToOverwrite = new RealtimeSegmentZKMetadata();
            metadataToOverwrite.setTableName(segmentMetadata.getTableName());
            metadataToOverwrite.setSegmentName(segmentMetadata.getSegmentName());
            metadataToOverwrite.setSegmentType(SegmentType.OFFLINE);
            metadataToOverwrite.setStatus(Status.DONE);
            metadataToOverwrite.setStartTime(segStartTime);
            metadataToOverwrite.setEndTime(segEndTime);
            metadataToOverwrite.setTotalRawDocs(realtimeSegment.getSegmentMetadata().getTotalDocs());
            metadataToOverwrite.setTimeUnit(timeUnit);
            notifier.notifySegmentCommitted(metadataToOverwrite, segment);
            segmentLogger.info("Completed write of segment completion to Helix, waiting for controller to assign a new segment");
          } catch (Exception e) {
            if (commitSuccessful) {
              segmentLogger.error("Offsets were committed to Kafka but we were unable to mark this segment as completed in Helix. Manually mark the segment as completed in Helix; restarting this instance will result in data loss.", e);
            } else {
              segmentLogger.warn("Caught exception while marking segment as completed in Helix. 
Offsets were not written, restarting the instance should be safe.", e); } } } catch (Exception e) { segmentLogger.error("Caught exception in the realtime indexing thread", e); } } }); indexingThread.start(); serverMetrics.addValueToTableGauge(tableName, ServerGauge.SEGMENT_COUNT, 1L); segmentLogger.debug("scheduling keepIndexing timer check"); // start a schedule timer to keep track of the segment TimerService.timer.schedule(segmentStatusTask, ONE_MINUTE_IN_MILLSEC, ONE_MINUTE_IN_MILLSEC); segmentLogger.info("finished scheduling keepIndexing timer check"); } @Override public IndexSegment getSegment() { return realtimeSegment; } @Override public String getSegmentName() { return segmentName; } private void computeKeepIndexing() { if (keepIndexing) { segmentLogger.debug( "Current indexed " + realtimeSegment.getRawDocumentCount() + " raw events, success = " + realtimeSegment .getSuccessIndexedCount() + " docs, total = " + realtimeSegment.getSegmentMetadata().getTotalDocs() + " docs in realtime segment"); if ((System.currentTimeMillis() >= segmentEndTimeThreshold) || realtimeSegment.getRawDocumentCount() >= kafkaStreamProviderConfig.getSizeThresholdToFlushSegment()) { if (realtimeSegment.getRawDocumentCount() == 0) { segmentLogger.info("no new events coming in, extending the end time by another hour"); segmentEndTimeThreshold = System.currentTimeMillis() + kafkaStreamProviderConfig.getTimeThresholdToFlushSegment(); return; } segmentLogger.info( "Stopped indexing due to reaching segment limit: {} raw documents indexed, segment is aged {} minutes" , realtimeSegment.getRawDocumentCount() , ((System.currentTimeMillis() - start) / (ONE_MINUTE_IN_MILLSEC))); keepIndexing = false; } } updateCurrentDocumentCountMetrics(); } private void updateCurrentDocumentCountMetrics() { int currentRawDocs = realtimeSegment.getRawDocumentCount(); serverMetrics.addValueToTableGauge(tableName, ServerGauge.DOCUMENT_COUNT, (currentRawDocs - lastUpdatedRawDocuments.get())); 
lastUpdatedRawDocuments.set(currentRawDocs); } @Override public void destroy() { LOGGER.info("Trying to shutdown RealtimeSegmentDataManager : {}!", this.segmentName); isShuttingDown = true; try { kafkaStreamProvider.shutdown(); } catch (Exception e) { LOGGER.error("Failed to shutdown kafka stream provider!", e); } keepIndexing = false; segmentStatusTask.cancel(); realtimeSegment.destroy(); } }
/* * This code is subject to the HIEOS License, Version 1.0 * * Copyright(c) 2008-2009 Vangent, Inc. All rights reserved. * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * limitations under the License. */ package com.vangent.hieos.services.xds.registry.transactions; import com.vangent.hieos.xutil.atna.XATNALogger; import com.vangent.hieos.xutil.exception.XDSNonIdenticalHashException; import com.vangent.hieos.xutil.metadata.structure.MetadataTypes; import com.vangent.hieos.services.xds.registry.storedquery.RegistryObjectValidator; import com.vangent.hieos.xutil.exception.MetadataException; import com.vangent.hieos.xutil.exception.MetadataValidationException; import com.vangent.hieos.xutil.exception.XdsDeprecatedException; import com.vangent.hieos.xutil.exception.XdsException; import com.vangent.hieos.xutil.exception.XdsInternalException; import com.vangent.hieos.xutil.exception.XdsPatientIdDoesNotMatchException; import com.vangent.hieos.xutil.exception.XdsUnknownPatientIdException; import com.vangent.hieos.services.xds.registry.backend.BackendRegistry; import com.vangent.hieos.services.xds.registry.storedquery.GetFoldersForDocument; import com.vangent.hieos.services.xds.registry.storedquery.SubmitObjectsRequestStoredQuerySupport; import com.vangent.hieos.xutil.metadata.structure.IdParser; import com.vangent.hieos.xutil.metadata.structure.Metadata; import com.vangent.hieos.xutil.metadata.structure.MetadataParser; import com.vangent.hieos.xutil.metadata.structure.MetadataSupport; import com.vangent.hieos.xutil.response.RegistryResponse; import com.vangent.hieos.xutil.registry.RegistryUtility; import com.vangent.hieos.xutil.services.framework.XBaseTransaction; import com.vangent.hieos.xutil.atna.ATNAAuditEvent; import 
com.vangent.hieos.xutil.atna.ATNAAuditEvent.ActorType;
import com.vangent.hieos.xutil.atna.ATNAAuditEvent.IHETransaction;
import com.vangent.hieos.xutil.atna.ATNAAuditEventHelper;
import com.vangent.hieos.xutil.atna.ATNAAuditEventRegisterDocumentSet;
import com.vangent.hieos.xutil.metadata.structure.SqParams;
import com.vangent.hieos.xutil.xlog.client.XLogMessage;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.axiom.om.OMElement;
import org.apache.log4j.Logger;

/**
 * Handles the XDS.b Register Document Set-b (ITI-42) transaction for the document
 * registry: schema-validates the submission, runs registry-object validations,
 * resolves symbolic ids to UUIDs, maintains folder membership and folder
 * lastUpdateTime slots, submits the metadata to the backend registry and
 * deprecates replaced (RPLC/XFRM/APND) objects. Errors are reported through the
 * inherited {@code response} rather than propagated to the caller of {@link #run}.
 *
 * @author NIST, Bernie Thuman (overall cleanup).
 */
public class SubmitObjectsRequest extends XBaseTransaction {

    private final static Logger logger = Logger.getLogger(SubmitObjectsRequest.class);

    /**
     * Creates the transaction handler and initializes the registry response.
     *
     * @param logMessage transaction log sink (may be disabled; see {@link #logMetadata}).
     */
    public SubmitObjectsRequest(XLogMessage logMessage) {
        this.log_message = logMessage;
        try {
            init(new RegistryResponse());
        } catch (XdsInternalException e) {
            logger.fatal(logger_exception_details(e));
        }
    }

    /**
     * Runs the full SubmitObjectsRequest transaction and converts every failure
     * into a registry error on the response.
     *
     * @param sor SubmitObjectsRequest element.
     * @return the registry response element, or null if the response itself could not be built.
     */
    public OMElement run(OMElement sor) {
        try {
            sor.build();
            // AUDIT:POINT -- ATNA audit for ITI-42 (Register Document Set-b).
            // Audited BEFORE handleSubmitObjectsRequest() because that call mutates
            // the "sor" instance; auditing first avoids paying for a deep copy.
            this.auditSubmitObjectsRequest(sor);
            this.handleSubmitObjectsRequest(sor);
        } catch (XdsDeprecatedException e) {
            response.add_error("XDSRegistryDeprecatedDocumentError", e.getMessage(), this.getClass().getName(), log_message);
            logger.warn(logger_exception_details(e));
        } catch (XdsUnknownPatientIdException e) {
            response.add_error(MetadataSupport.XDSUnknownPatientId, e.getMessage(), this.getClass().getName(), log_message);
            logger.warn(logger_exception_details(e));
        } catch (XdsPatientIdDoesNotMatchException e) {
            response.add_error(MetadataSupport.XDSPatientIdDoesNotMatch, e.getMessage(), this.getClass().getName(), log_message);
            logger.warn(logger_exception_details(e));
        } catch (XDSNonIdenticalHashException e) {
            response.add_error(MetadataSupport.XDSNonIdenticalHash, e.getMessage(), this.getClass().getName(), log_message);
            logger.warn(logger_exception_details(e));
        } catch (MetadataException e) {
            response.add_error(MetadataSupport.XDSRegistryMetadataError, e.getMessage(), this.getClass().getName(), log_message);
            logger.warn(logger_exception_details(e));
        } catch (MetadataValidationException e) {
            response.add_error(MetadataSupport.XDSRegistryMetadataError, e.getMessage(), this.getClass().getName(), log_message);
            logger.warn(logger_exception_details(e));
        } catch (XdsInternalException e) {
            response.add_error(MetadataSupport.XDSRegistryError, e.getMessage(), this.getClass().getName(), log_message);
            logger.warn(logger_exception_details(e));
        } catch (XdsException e) {
            response.add_error(MetadataSupport.XDSRegistryError, e.getMessage(), this.getClass().getName(), log_message);
            logger.warn(logger_exception_details(e));
        } catch (Exception e) {
            response.add_error(MetadataSupport.XDSRegistryError, e.getMessage(), this.getClass().getName(), log_message);
            logger.warn(logger_exception_details(e));
        }
        OMElement res = null;
        try {
            res = response.getResponse();
            this.log_response();
        } catch (XdsInternalException e) {
            // FIX: was an empty catch that silently swallowed the failure; at
            // least record it so an unexplained null response can be diagnosed.
            logger.error(logger_exception_details(e));
        }
        return res;
    }

    /**
     * Validates and applies the submission against the backend registry inside a
     * single transaction; rolls back if the commit was not reached.
     *
     * @param sor SubmitObjectsRequest element.
     * @throws XdsPatientIdDoesNotMatchException
     * @throws XdsDeprecatedException
     * @throws XdsUnknownPatientIdException
     * @throws MetadataException
     * @throws MetadataValidationException
     * @throws XDSNonIdenticalHashException
     * @throws XdsInternalException
     * @throws XdsException
     */
    void handleSubmitObjectsRequest(OMElement sor) throws XdsPatientIdDoesNotMatchException, XDSNonIdenticalHashException, XdsDeprecatedException, MetadataValidationException, MetadataException, XdsInternalException, XdsException {
        // Run XML schema validation first; nothing below can be trusted otherwise.
        RegistryUtility.schema_validate_local(sor, MetadataTypes.METADATA_TYPE_Rb);
        boolean commitCompleted = false;
        // Get backend registry instance.
        BackendRegistry backendRegistry = new BackendRegistry(log_message);
        try {
            // Create metadata instance from SOR.
            Metadata m = new Metadata(sor);
            this.logMetadata(m);
            SubmitObjectsRequestStoredQuerySupport sqSupport =
                    new SubmitObjectsRequestStoredQuerySupport(response, log_message, backendRegistry);
            // Run validations; they report into response.registryErrorList.
            RegistryObjectValidator rov = new RegistryObjectValidator(response, log_message, backendRegistry);
            rov.validate(m, true /* isSubmit */, response.registryErrorList, this.getConfigActor());
            if (!response.has_errors()) {
                // Only continue if response does not have any errors (a bit ugly).
                // Change symbolic names to UUIDs.
                IdParser idParser = new IdParser(m);
                idParser.compileSymbolicNamesIntoUuids();
                // If this submission includes a DocumentEntry replace and the original
                // DocumentEntry is in a folder, the replacement document must be put into
                // the folder as well. This must happen here so the following logic to
                // update folder lastUpdateTime can be triggered.
                this.updateFolderContentsOnDocumentReplace(m, backendRegistry);
                // If this submission adds a document to a folder then update that
                // folder's lastUpdateTime slot.
                this.updateFoldersLastUpdateTimeSlot(m, sqSupport, backendRegistry);
                // Approve step: mark approvable objects before submission.
                m.setStatusOnApprovableObjects();
                // Finally, make the actual submission:
                this.submitRegistryRequest(m, backendRegistry, "SubmitObjectsRequest");
                // Deprecate replaced objects (and their XFRM/APND descendants).
                this.deprecateObjects(m, sqSupport, backendRegistry);
            }
            backendRegistry.commit();
            commitCompleted = true;
        } finally {
            if (!commitCompleted) {
                backendRegistry.rollback();
            }
        }
    }

    /**
     * Submits the metadata to the backend registry, tagging the request with a
     * human-readable reason (cleared again afterwards).
     *
     * @param m metadata to submit.
     * @param backendRegistry backend registry connection.
     * @param reason short description recorded with the backend request.
     * @throws XdsInternalException
     */
    private void submitRegistryRequest(Metadata m, BackendRegistry backendRegistry, String reason) throws XdsInternalException {
        backendRegistry.setReason(reason);
        backendRegistry.submit(m);
        backendRegistry.setReason("");
    }

    /**
     * Deprecates all deprecatable objects in this submission, plus any XFRM or
     * APND documents hanging off documents in that list.
     *
     * @param m submission metadata.
     * @param sqSupport stored-query helper.
     * @param backendRegistry backend registry connection.
     * @throws MetadataValidationException
     * @throws MetadataException
     * @throws XdsException if a referenced document is not present in the registry.
     */
    private void deprecateObjects(Metadata m, SubmitObjectsRequestStoredQuerySupport sqSupport, BackendRegistry backendRegistry) throws MetadataValidationException, MetadataException, XdsException {
        List<String> deprecatableObjectIds = m.getDeprecatableObjectIds();
        // Add to the list of things to deprecate any XFRM or APND documents
        // hanging off documents in the deprecatable list.
        List<String> XFRMandAPNDDocuments = sqSupport.getXFRMandAPNDDocuments(deprecatableObjectIds);
        deprecatableObjectIds.addAll(XFRMandAPNDDocuments);
        if (deprecatableObjectIds.size() > 0) {
            // Validate that these are documents first.
            List<String> missing = sqSupport.getMissingDocuments(deprecatableObjectIds);
            if (missing != null) {
                throw new XdsException("The following documents were referenced by this submission but are not present in the registry: " + missing);
            }
            this.submitDeprecateObjectsRequest(backendRegistry, deprecatableObjectIds);
        }
    }

    /**
     * For each RPLC association in the submission, places the replacement
     * document into every folder that contained the original document (and links
     * the new membership association to the submission set).
     *
     * @param m submission metadata (mutated: associations are added).
     * @param backendRegistry backend registry connection.
     * @throws XdsException
     */
    private void updateFolderContentsOnDocumentReplace(Metadata m, BackendRegistry backendRegistry) throws XdsException {
        // Collect replacement-document-id -> original-document-id pairs.
        Map<String, String> rplcToOrigIds = new HashMap<String, String>();
        for (OMElement assoc : m.getAssociations()) {
            if (MetadataSupport.xdsB_ihe_assoc_type_rplc.equals(m.getAssocType(assoc))) {
                rplcToOrigIds.put(m.getAssocSource(assoc), m.getAssocTarget(assoc));
            }
        }
        for (Map.Entry<String, String> entry : rplcToOrigIds.entrySet()) {
            String replacementDocumentId = entry.getKey();
            String originalDocumentId = entry.getValue();
            // For each original document, find the collection of folders it belongs to.
            Metadata me = this.findFoldersForDocumentByUuid(originalDocumentId, backendRegistry);
            List<String> folderIds = me.getObjectIds(me.getObjectRefs());
            for (String fid : folderIds) {
                // For each folder, add an association placing the replacement in that
                // folder ...
                OMElement assoc = m.addAssociation(
                        m.makeAssociation(MetadataSupport.xdsB_eb_assoc_type_has_member, fid, replacementDocumentId));
                // ... and also generate the association between the submission set and
                // that new association (required by the metadata model).
                m.addAssociation(m.makeAssociation(MetadataSupport.xdsB_eb_assoc_type_has_member,
                        m.getSubmissionSetId(), assoc.getAttributeValue(MetadataSupport.id_qname)));
            }
        }
    }

    /**
     * Finds the folders containing the given document via the GetFoldersForDocument
     * stored query (ObjectRef form).
     *
     * @param uuid DocumentEntry entryUUID.
     * @param backendRegistry backend registry connection.
     * @return stored-query result metadata (object refs are the folder ids).
     * @throws XdsException
     */
    private Metadata findFoldersForDocumentByUuid(String uuid, BackendRegistry backendRegistry) throws XdsException {
        SqParams parms = new SqParams();
        parms.addStringParm("$XDSDocumentEntryEntryUUID", uuid);
        GetFoldersForDocument sffd = new GetFoldersForDocument(parms, false /* LeafClass */, this.response, this.log_message, backendRegistry);
        return sffd.runInternal();
    }

    /**
     * Updates the "lastUpdateTime" slot on folders in this submission, and on any
     * pre-existing registry folder that this submission adds a document to.
     *
     * @param m submission metadata.
     * @param sqSupport stored-query helper (used to confirm the target is a folder).
     * @param backendRegistry backend registry connection.
     * @throws XdsException
     */
    private void updateFoldersLastUpdateTimeSlot(Metadata m, SubmitObjectsRequestStoredQuerySupport sqSupport, BackendRegistry backendRegistry) throws XdsException {
        // Update any folders "lastUpdateTime" slot with the current time:
        m.updateFoldersLastUpdateTimeSlot();
        // If this submission adds a document to a folder that already exists in the
        // registry, refresh that folder's lastUpdateTime slot too.
        for (OMElement assoc : m.getAssociations()) {
            if (MetadataSupport.xdsB_eb_assoc_type_has_member.equals(m.getAssocType(assoc))) {
                String sourceId = m.getAssocSource(assoc);
                if (!m.getSubmissionSetId().equals(sourceId) && !m.getFolderIds().contains(sourceId)) {
                    // Assoc source is not part of the submission.
                    logger.info("Adding to Folder (1)" + sourceId);
                    if (this.isFolder(sourceId, sqSupport)) {
                        logger.info("Adding to Folder (2)" + sourceId);
                        // NOTE(review): query is built by string concatenation; sourceId
                        // is constrained to a "urn:uuid:" value by isFolder() above,
                        // which limits injection exposure, but a parameterized backend
                        // query API would be preferable -- confirm with backend owner.
                        OMElement res = backendRegistry.basicQuery("SELECT * from RegistryPackage rp WHERE rp.id='" + sourceId + "'", true);
                        // Update the pre-existing folder's "lastUpdateTime" slot and
                        // resubmit it so the change is persisted.
                        Metadata fm = MetadataParser.parseNonSubmission(res);
                        fm.updateFoldersLastUpdateTimeSlot();
                        this.submitRegistryRequest(fm, backendRegistry, "Update Folder LastUpdateTime Slot");
                    }
                }
            }
        }
    }

    /**
     * Submits a deprecate request for the given object ids.
     *
     * @param backendRegistry backend registry connection.
     * @param objectIds ids of the objects to deprecate.
     * @throws XdsInternalException
     */
    private void submitDeprecateObjectsRequest(BackendRegistry backendRegistry, List<String> objectIds) throws XdsInternalException {
        backendRegistry.submitDeprecateObjectsRequest(objectIds);
    }

    /**
     * Returns true if the given id names a folder already present in the registry.
     *
     * @param id candidate object id (must be a "urn:uuid:" value to qualify).
     * @param sqSupport stored-query helper.
     * @return true if the registry knows this id as a folder.
     * @throws XdsException
     */
    public boolean isFolder(String id, SubmitObjectsRequestStoredQuerySupport sqSupport) throws XdsException {
        if (!id.startsWith("urn:uuid:")) {
            return false;
        }
        ArrayList<String> ids = new ArrayList<String>();
        ids.add(id);
        List<String> missing = sqSupport.getMissingFolders(ids);
        // The folder exists unless the query reports this id as missing.
        return !(missing != null && missing.contains(id));
    }

    /**
     * Logs submission-set, document and folder unique ids plus the metadata
     * structure -- only when transaction logging is enabled.
     *
     * @param m submission metadata.
     * @throws MetadataException
     */
    private void logMetadata(Metadata m) throws MetadataException {
        // Log relevant data (if logger is turned on of course).
        if (log_message.isLogEnabled()) {
            log_message.addOtherParam("SSuid", m.getSubmissionSetUniqueId());
            ArrayList<String> doc_uids = new ArrayList<String>();
            for (String id : m.getExtrinsicObjectIds()) {
                String uid = m.getUniqueIdValue(id);
                if (uid != null && !uid.equals("")) {
                    doc_uids.add(uid);
                }
            }
            log_message.addOtherParam("DOCuids", doc_uids);
            ArrayList<String> fol_uids = new ArrayList<String>();
            for (String id : m.getFolderIds()) {
                String uid = m.getUniqueIdValue(id);
                if (uid != null && !uid.equals("")) {
                    fol_uids.add(uid);
                }
            }
            log_message.addOtherParam("FOLuids", fol_uids);
            log_message.addOtherParam("Structure", m.structure());
        }
    }

    /**
     * Creates and logs the ATNA audit event for this ITI-42 transaction.
     * Best-effort: audit failure must not fail the registry transaction.
     *
     * @param rootNode SubmitObjectsRequest element.
     */
    private void auditSubmitObjectsRequest(OMElement rootNode) {
        try {
            XATNALogger xATNALogger = new XATNALogger();
            if (xATNALogger.isPerformAudit()) {
                // Create and log audit event.
                ATNAAuditEventRegisterDocumentSet auditEvent = ATNAAuditEventHelper.getATNAAuditEventRegisterDocumentSet(rootNode);
                auditEvent.setActorType(ActorType.REGISTRY);
                auditEvent.setTransaction(IHETransaction.ITI42);
                auditEvent.setAuditEventType(ATNAAuditEvent.AuditEventType.IMPORT);
                xATNALogger.audit(auditEvent);
            }
        } catch (Exception ex) {
            // FIX: was an empty catch ("FIXME?"). Auditing stays best-effort, but
            // the failure is now recorded instead of silently dropped.
            logger.error("Unable to ATNA audit SubmitObjectsRequest", ex);
        }
    }
}
/** * */ package com.linkedin.databus.bootstrap.producer; /* * * Copyright 2013 LinkedIn Corp. All rights reserved * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.util.Formatter; import java.util.Random; import org.apache.log4j.Logger; import com.linkedin.databus.bootstrap.api.BootstrapProducerStatus; import com.linkedin.databus.bootstrap.common.BootstrapConn; import com.linkedin.databus.bootstrap.common.BootstrapDBMetaDataDAO; import com.linkedin.databus.bootstrap.common.BootstrapProducerStatsCollector; import com.linkedin.databus.bootstrap.common.BootstrapReadOnlyConfig; import com.linkedin.databus.bootstrap.monitoring.producer.mbean.DbusBootstrapProducerStatsMBean; import com.linkedin.databus.core.DatabusThreadBase; import com.linkedin.databus.core.util.RateMonitor; import com.linkedin.databus.core.util.RngUtils; import com.linkedin.databus2.core.BackoffTimer; import com.linkedin.databus2.core.container.request.BootstrapDatabaseTooOldException; import com.linkedin.databus2.util.DBHelper; public class BootstrapApplierThread extends DatabusThreadBase { public static final String MODULE = BootstrapApplierThread.class.getName(); public static final Logger LOG = Logger.getLogger(MODULE); private static final int MAX_EVENT_WAIT_TIME = 1000; private static final int INITIAL_EVENT_WAIT_TIME = 5; private static final int 
DEFAULT_LOG_SAMPLING_PERCENTAGE = 2; private static final int DEFAULT_MINSCN_TIMEOUT_SEC = 10; private static Random _sSampler = new Random(); private BootstrapDBMetaDataDAO _bootstrapDao; private final String _source; private PreparedStatement _tabScnStmt; private PreparedStatement _getScnStmt; private final BootstrapReadOnlyConfig _config; private sourcePositions _sourcePositions = null; // Log Specific private String _lastLogLine = ""; private int _lastLogLineRepeatCount = 0; private static final String APPLIER_STATE_LINE_FORMAT = "Applier state : %d %d %d %d"; private static final int MAX_SKIPPED_LOG_LINES = 1000; /* Stats Specific */ private BootstrapProducerStatsCollector _statsCollector = null; private final RateMonitor _srcRm; private final RateMonitor _totalRm; private final BackoffTimer _retryTimer; private long _minScn = -1L; private boolean _isRunning = false; /** * @param config * */ public BootstrapApplierThread(String name, String source, BootstrapReadOnlyConfig config) { this(name, source, config, null); } public BootstrapApplierThread(String name, String source, BootstrapReadOnlyConfig config, BootstrapProducerStatsCollector statsCollector) { super(name); _source = source; _bootstrapDao = null; _config = config; _retryTimer = new BackoffTimer(name + "RetryTimer", config.getRetryConfig()); _statsCollector = statsCollector; _srcRm = new RateMonitor(name + "ProducerSourceRateMonitor"); _totalRm = new RateMonitor(name + "ProducerTotalRateMonitor"); } @Override public String toString() { return "BootstrapApplierThread [_source=" + _source + ", _config=" + _config + ", _sourcePositions=" + _sourcePositions + "]"; } @Override public synchronized void start() { super.start(); } @Override public void run() { _isRunning = true; try { _sourcePositions = new sourcePositions(_source); _sourcePositions.init(); if (null != _statsCollector) { DbusBootstrapProducerStatsMBean stats = _statsCollector .getSourceStats(_source); stats.registerBatch(0, 0, -1, 
_sourcePositions.getApplyId(), _sourcePositions.getLogPos()); } } catch (Exception e) { if (null != _statsCollector) _statsCollector.getTotalStats().registerSQLException(); LOG.error("Error occurred in initializing source position", e); return; } int sleepTime = INITIAL_EVENT_WAIT_TIME; int totalRowsApplied = 0; while (_isRunning && !isShutdownRequested()) { try { if (isPauseRequested()) { LOG.info("Pause requested for applier. Pausing !!"); signalPause(); LOG.info("Pausing. Waiting for resume command"); awaitUnPauseRequest(); LOG.info("Resume requested for applier. Resuming !!"); signalResumed(); LOG.info("Applier resumed !!"); } _totalRm.start(); Connection conn = getConnection(); try { totalRowsApplied += applyLog(_source); if (LOG.isDebugEnabled()) { LOG.debug("Number of rows applier so far = " + totalRowsApplied + " for source = " + _source); } } catch (Exception e) { if (null != _statsCollector) { _statsCollector.getTotalStats().registerSQLException(); _statsCollector.getSourceStats(_source).registerSQLException(); } LOG.error("apply error:", e); throw e; } try { DBHelper.commit(conn); } catch (SQLException s) { DBHelper.rollback(conn); throw s; } _totalRm.stop(); if (null != _statsCollector) _statsCollector.getTotalStats().registerBatch( _totalRm.getDuration() / 1000000L, totalRowsApplied, -1, -1, -1); if (0 == totalRowsApplied) { // sleep for sometime when no events found Thread.sleep(sleepTime); // increase sleep time for next round if no events are found sleepTime = Math.min(sleepTime * 10, MAX_EVENT_WAIT_TIME); } else { // reset to initial sleep time sleepTime = INITIAL_EVENT_WAIT_TIME; totalRowsApplied = 0; } } catch (Exception e) { LOG.error("Error occured in bootstrap applier", e); if (null != _statsCollector) { _statsCollector.getTotalStats().registerSQLException(); } if (e instanceof SQLException) { if (!reset(true)) { LOG.fatal( "Unable to reset Bootstrap DB connections. 
Stopping Applier Thread !!", e); _isRunning = false; } } } } reset(false); doShutdownNotify(); } public boolean isRunning() { return _isRunning; } private void closeApplyStatements() throws SQLException { _sourcePositions.close(); } private int applyLog(String source) throws BootstrapDatabaseTooOldException, SQLException { PreparedStatement stmt = null; sourcePositions pos = _sourcePositions; applyBatch batch = pos.getNextApplyBatch(); int rowsToApply = batch.getTorid() - batch.getFromrid(); try { _srcRm.start(); if (rowsToApply > 0) { // Apply log to the table to move it up to logScn stmt = pos.getApplyStmt(); stmt.setInt(1, batch.getFromrid()); stmt.setInt(2, batch.getTorid()); stmt.executeUpdate(); boolean log = (RngUtils.randomPositiveInt(_sSampler) % 100) < DEFAULT_LOG_SAMPLING_PERCENTAGE; if (log) LOG.info("Applied Log " + batch + " for " + source); } // we need to save state regardless if any rows are returned because // we could be switching logs. pos.save(); } catch (SQLException e) { LOG.error("Error occured during apply log", e); throw e; } finally { _srcRm.stop(); if (null != _statsCollector) { DbusBootstrapProducerStatsMBean stats = _statsCollector .getSourceStats(source); stats.registerBatch(_srcRm.getDuration() / 1000000L, rowsToApply, pos.getApplyWindowSCN(), pos.getApplyId(), pos.getLogPos()); } } return rowsToApply; } private long getWindowScnforSource(int srcid, int applyLogId, int tabRid) throws SQLException { ResultSet rs = null; long windowScn = 0; PreparedStatement stmt = null; try { if (0 == tabRid) { // get the maxscn of the prior table stmt = getMaxWindowScnStatement(); stmt.setInt(1, srcid); stmt.setInt(2, Math.max(0, applyLogId - 1)); } else { stmt = getWindowScnStatement(applyLogId, srcid); stmt.setInt(1, tabRid); } rs = stmt.executeQuery(); if (rs.next()) { windowScn = rs.getLong(1); } } catch (SQLException e) { LOG.error("Error ocurred during getWindowScnforSource", e); throw e; } finally { if (null != rs) { rs.close(); rs = null; } 
if (null != stmt) { stmt.close(); stmt = null; } } return windowScn; } private void setTabPosition(int srcid, int logid, int tabRid, long windowScn) throws SQLException { PreparedStatement stmt = getTabPositionUpdateStmt(); stmt.setInt(1, logid); stmt.setInt(2, tabRid); stmt.setLong(3, windowScn); stmt.setInt(4, srcid); stmt.executeUpdate(); StringBuilder logLineBuilder = new StringBuilder(1024); Formatter logFormatter = new Formatter(logLineBuilder); logFormatter.format(APPLIER_STATE_LINE_FORMAT, srcid, logid, tabRid, windowScn); log(srcid, logFormatter); } private PreparedStatement getMaxWindowScnStatement() throws SQLException { Connection conn = null; PreparedStatement windowScnStmt = null; try { conn = getConnection(); StringBuilder sql = new StringBuilder(); sql.append("select maxwindowscn from bootstrap_loginfo where srcid = ? and logid = ?"); windowScnStmt = conn.prepareStatement(sql.toString()); } catch (SQLException e) { DBHelper.close(windowScnStmt); LOG.error("error occurred during getWindowScnStatement", e); throw e; } return windowScnStmt; } private PreparedStatement getWindowScnStatement(int applyLogId, int srcid) throws SQLException { Connection conn = null; PreparedStatement windowScnStmt = null; try { conn = getConnection(); StringBuilder sql = new StringBuilder(); sql.append("select windowscn from "); sql.append(getLogTableName(applyLogId, srcid)); sql.append(" where id = ?"); windowScnStmt = conn.prepareStatement(sql.toString()); } catch (SQLException e) { DBHelper.close(windowScnStmt); LOG.error("error occurred during getWindowScnStatement", e); throw e; } return windowScnStmt; } private PreparedStatement getTabPositionUpdateStmt() throws SQLException { if (_tabScnStmt != null) { return _tabScnStmt; } Connection conn = null; try { conn = getConnection(); StringBuilder sql = new StringBuilder(); sql.append("update bootstrap_applier_state set logid = ?, rid = ? , windowscn = ? 
where srcid = ?"); _tabScnStmt = conn.prepareStatement(sql.toString()); } catch (SQLException e) { LOG.error("Error ocurred in getTabPositionUpdateStmt", e); _tabScnStmt.close(); conn.close(); return null; } return _tabScnStmt; } private PreparedStatement getPositionsStmt() throws SQLException { if (_getScnStmt != null) { return _getScnStmt; } Connection conn = null; try { conn = getConnection(); StringBuilder sql = new StringBuilder(); sql.append("select p.logid, p.rid, a.logid, a.rid, l.maxrid, l.maxwindowscn "); sql.append("from bootstrap_sources s, bootstrap_producer_state p, bootstrap_applier_state a, bootstrap_loginfo l "); sql.append("where p.srcid = s.id and a.srcid = s.id and l.srcid = s.id and l.logid = a.logid and s.src = ?"); _getScnStmt = conn.prepareStatement(sql.toString()); } catch (Exception e) { LOG.error("Error occured in getPositionsstatement", e); _getScnStmt.close(); conn.close(); return null; } return _getScnStmt; } /* * Reset the Bootstrap Connection and in memory state of Applier */ private boolean reset(boolean recreate) { boolean success = false; _retryTimer.reset(); while (!success) { try { _bootstrapDao.getBootstrapConn().close(); DBHelper.close(_getScnStmt); _getScnStmt = null; DBHelper.close(_tabScnStmt); _tabScnStmt = null; closeApplyStatements(); // Close the Source Positions _sourcePositions.close(); if (recreate) { getConnection(); _bootstrapDao.getBootstrapConn().executeDummyBootstrapDBQuery(); // Init the SourcePositions from DB _sourcePositions.init(); } success = true; } catch (SQLException sqlEx) { LOG.error("Unable to reset DBConnections in Applier", sqlEx); success = false; if (null != _statsCollector) { _statsCollector.getTotalStats().registerSQLException(); } if (_retryTimer.getRemainingRetriesNum() <= 0) { LOG.fatal("Applier Thread reached max retries trying to reset the MySQL Connections. 
Stopping !!"); break; } _retryTimer.backoffAndSleep(); } } return success; } private Connection getConnection() throws SQLException { Connection conn = null; BootstrapConn bsConn = null; if (_bootstrapDao == null) { bsConn = new BootstrapConn(); try { final boolean autoCommit = false; bsConn.initBootstrapConn(autoCommit, java.sql.Connection.TRANSACTION_READ_COMMITTED, _config.getBootstrapDBUsername(), _config.getBootstrapDBPassword(), _config.getBootstrapDBHostname(), _config.getBootstrapDBName()); _bootstrapDao = new BootstrapDBMetaDataDAO(bsConn, _config.getBootstrapDBHostname(), _config.getBootstrapDBUsername(), _config.getBootstrapDBPassword(), _config.getBootstrapDBName(), autoCommit); } catch (SQLException e) { LOG.fatal("Unable to get Bootstrap DB Connection", e); throw e; } catch (Exception ex) { LOG.fatal("Unable to get Bootstrap DB Connection", ex); return null; } } try { conn = _bootstrapDao.getBootstrapConn().getDBConn(); } catch (SQLException sqlEx) { LOG.fatal("NOT able to get Bootstrap DB Connection", sqlEx); throw sqlEx; } return conn; } private String getLogTableName(int applyLogId, int srcid) throws SQLException { return getBootstrapConn().getLogTableName(applyLogId, srcid); } private String getSrcTableName(int srcid) throws SQLException, BootstrapDatabaseTooOldException { return getBootstrapConn().getSrcTableName(srcid); } private BootstrapConn getBootstrapConn() { return _bootstrapDao.getBootstrapConn(); } static class applyBatch { private final int _logid; private final int _fromrid; private final int _torid; applyBatch(int logid, int fromrid, int torid) { _logid = logid; _fromrid = fromrid; _torid = torid; } public int getLogId() { return _logid; } public int getFromrid() { return _fromrid; } public int getTorid() { return _torid; } @Override public String toString() { return "LogId: " + _logid + " From: " + _fromrid + " To: " + _torid; } } class sourcePositions { private int _srcid; private int _tabrid; private int _logrid; private int 
_applylogid; private int _producelogid; private int _logmaxrid; private long _logwindowscn; private PreparedStatement _applyStmt; private long _lastlogmaxscn; private final String _source; sourcePositions(String source) throws Exception { _source = source; init(); } public void init() throws SQLException { try { getConnection(); BootstrapDBMetaDataDAO.SourceStatusInfo srcIdStatus = _bootstrapDao .getSrcIdStatusFromDB(_source, false); _srcid = srcIdStatus.getSrcId(); if (_config.isBootstrapDBStateCheck()) { // TO allow test framework to listen to relay directly,DBStateCheck // flag is used if (!BootstrapProducerStatus.isReadyForConsumption(srcIdStatus .getStatus())) throw new BootstrapDatabaseTooOldException( "Bootstrap DB not ready to read events from relay, Status :" + srcIdStatus); } refresh(); _applyStmt = createApplyStatement(); // initialize minScnmap with current minScn value // bootstrap applier is started after bootstrap producer initialization; // the minScn of sources will be initialized appropriately _minScn = _bootstrapDao .getMinScnOfSnapshots(srcIdStatus.getSrcId()); } catch (BootstrapDatabaseTooOldException e) { throw new RuntimeException(e); } } public void close() { DBHelper.close(_applyStmt); _applyStmt = null; } void refresh() throws SQLException { ResultSet rs = null; try { PreparedStatement stmt = getPositionsStmt(); stmt.setString(1, _source); rs = stmt.executeQuery(); if (rs.next()) { _producelogid = rs.getInt(1); _logrid = rs.getInt(2); _applylogid = rs.getInt(3); _tabrid = rs.getInt(4); _logmaxrid = rs.getInt(5); long currentLogMaxScn = rs.getLong(6); if (currentLogMaxScn > 0) { if (_lastlogmaxscn > currentLogMaxScn) { throw new RuntimeException("_lastlogmaxscn=" + _lastlogmaxscn + " currentLogMaxScn=" + currentLogMaxScn + " applylogid=" + _applylogid + " producerlogid=" + _producelogid + " tabrid=" + _tabrid + " logrid=" + _logrid + " logmaxrid=" + _logmaxrid); } _lastlogmaxscn = currentLogMaxScn; } } rs.close(); } catch (SQLException e) { 
LOG.error("Error occured during refresh of sourcePositions", e); if (null != rs) { rs.close(); rs = null; } throw e; } } @Override public String toString() { return "SrcId: " + _srcid + " ProducerLogId: " + _producelogid + " Logrid: " + _logrid + " ApplyLogId: " + _applylogid + " Tabrid: " + _tabrid + " LogMaxrid: " + _logmaxrid; } void save() throws SQLException { LOG.debug("Saving state " + this); // if tabrid is 0, we need to get max window scn from the last log table; // otherwise, we get the window scn of the row corresponding to _tabrid; // except for the initial case where nothing is inserted, (applylogid - 1) // will // be -1 and 0 shall be used in this case. _logwindowscn = getWindowScnforSource(_srcid, _applylogid, _tabrid); setTabPosition(_srcid, _applylogid, _tabrid, _logwindowscn); // check if minscn has been set already // this happens once! or when minScn table is reinitialized; if (_minScn == BootstrapDBMetaDataDAO.DEFAULT_WINDOWSCN) { // WARN: could be a long running query; if _tabrid is high; long newMinScn = _bootstrapDao.isSeeded(_srcid) ? 0 : _bootstrapDao .getMinWindowScnFromSnapshot(_srcid, _tabrid, DEFAULT_MINSCN_TIMEOUT_SEC); // Avoid unnecessary updates and safety check: never re-initialize it // due to return value of negative scn if ((_minScn != newMinScn) && (newMinScn != BootstrapDBMetaDataDAO.DEFAULT_WINDOWSCN)) { LOG.info("Applier setting minScn=" + newMinScn); // Note: Assume that rows never actually have scn=0. Since this table // has rows with newMinScn, the minScn is updated to 1 less. // The bootstrap snapshot query will serve all rows with scn greater // than minScn. 
if (newMinScn != 0) { newMinScn--; } _bootstrapDao.updateMinScnOfSnapshot(_srcid, newMinScn); _minScn = newMinScn; } } } public int getTabPos() { return _tabrid; } public int getLogPos() { return _logrid; } public int getSrcId() { return _srcid; } public int getApplyId() { return _applylogid; } public long getApplyWindowSCN() { return _logwindowscn; } public PreparedStatement getApplyStmt() throws BootstrapDatabaseTooOldException, SQLException { if (null != _applyStmt) return _applyStmt; _applyStmt = createApplyStatement(); return _applyStmt; } public applyBatch getNextApplyBatch() throws SQLException { int _torid = _tabrid; // If we are applying the same log file that is currently being produced, // read upto // next 1000 rows if (_applylogid == _producelogid) { // If we have caught up for this source, refresh the state if (_tabrid == _logrid) { refresh(); } else { _torid = Math.min(_logrid, _tabrid + 1000); } } else { if (_logmaxrid == 0) { refresh(); } // If we have finished reading this log, we can move to the next if (_tabrid == _logmaxrid) { _applylogid++; _torid = 0; _tabrid = 0; _logmaxrid = 0; // remove the apply statement so a new statement can be created to // moves rows from new log table to tab table. DBHelper.close(_applyStmt); _applyStmt = null; } else { _torid = Math.min(_logmaxrid, _tabrid + 1000); } } applyBatch nextBatch = new applyBatch(_applylogid, _tabrid, _torid); _tabrid = _torid; return nextBatch; } private PreparedStatement createApplyStatement() throws SQLException, BootstrapDatabaseTooOldException { Connection conn = getConnection(); PreparedStatement applyStmt = null; StringBuilder sql = new StringBuilder(); sql.append("insert into "); sql.append(getSrcTableName(_srcid)); sql.append(" (scn, srckey, val) "); sql.append("select windowscn, srckey, val from "); sql.append(getLogTableName(_applylogid, _srcid) + " B "); sql.append(" where B.id > ? 
and B.id <= ?"); sql.append(" on duplicate key update scn = B.windowscn, srckey=B.srckey, val=B.val"); applyStmt = conn.prepareStatement(sql.toString()); LOG.info("Created apply statement: " + sql.toString()); return applyStmt; } } private void log(int srcid, Formatter logLine) { logLine.flush(); String newLogLine = logLine.toString(); boolean skipLog = true; final int saveLastLogLineRepeat = _lastLogLineRepeatCount; String lastLogLine = _lastLogLine; if (newLogLine.equals(lastLogLine) && _lastLogLineRepeatCount < MAX_SKIPPED_LOG_LINES) { ++_lastLogLineRepeatCount; } else { skipLog = false; lastLogLine = newLogLine; _lastLogLineRepeatCount = 0; } if (!skipLog) { _lastLogLine = lastLogLine; LOG.info("skipLog = false, last line repeated: " + saveLastLogLineRepeat); LOG.info("newLogLine = " + newLogLine); } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.runtime.executiongraph;

import org.apache.flink.api.common.JobStatus;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.runtime.concurrent.ComponentMainThreadExecutorServiceAdapter;
import org.apache.flink.runtime.execution.ExecutionState;
import org.apache.flink.runtime.executiongraph.utils.SimpleAckingTaskManagerGateway;
import org.apache.flink.runtime.jobgraph.JobGraphTestUtils;
import org.apache.flink.runtime.jobmaster.LogicalSlot;
import org.apache.flink.runtime.jobmaster.TestingLogicalSlotBuilder;
import org.apache.flink.runtime.messages.Acknowledge;
import org.apache.flink.runtime.scheduler.SchedulerBase;
import org.apache.flink.runtime.scheduler.SchedulerTestingUtils;
import org.apache.flink.util.TestLogger;
import org.apache.flink.util.concurrent.FutureUtils;

import org.junit.Test;

import java.io.IOException;
import java.util.concurrent.CompletableFuture;

import static org.apache.flink.runtime.executiongraph.ExecutionGraphTestUtils.createNoOpVertex;
import static org.apache.flink.runtime.executiongraph.ExecutionGraphTestUtils.getExecutionVertex;
import static org.apache.flink.runtime.executiongraph.ExecutionGraphTestUtils.setVertexResource;
import static org.apache.flink.runtime.executiongraph.ExecutionGraphTestUtils.setVertexState;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

/** Tests for cancelling {@link ExecutionVertex ExecutionVertices}. */
public class ExecutionVertexCancelTest extends TestLogger {

    // --------------------------------------------------------------------------------------------
    //  Canceling in different states
    // --------------------------------------------------------------------------------------------

    // NOTE: the tests below declare "throws Exception" instead of the previous
    // try { ... } catch (Exception e) { fail(e.getMessage()); } pattern, so that
    // unexpected failures surface with their full stack trace via JUnit.

    /** Cancelling a vertex in CREATED must move it straight to CANCELED. */
    @Test
    public void testCancelFromCreated() throws Exception {
        final ExecutionVertex vertex = getExecutionVertex();

        assertEquals(ExecutionState.CREATED, vertex.getExecutionState());

        vertex.cancel();

        assertEquals(ExecutionState.CANCELED, vertex.getExecutionState());
        assertFalse(vertex.getFailureInfo().isPresent());

        // all intermediate state timestamps must have been recorded
        assertTrue(vertex.getStateTimestamp(ExecutionState.CREATED) > 0);
        assertTrue(vertex.getStateTimestamp(ExecutionState.CANCELING) > 0);
        assertTrue(vertex.getStateTimestamp(ExecutionState.CANCELED) > 0);
    }

    /** Cancelling a vertex in SCHEDULED must move it straight to CANCELED. */
    @Test
    public void testCancelFromScheduled() throws Exception {
        final ExecutionVertex vertex = getExecutionVertex();

        setVertexState(vertex, ExecutionState.SCHEDULED);
        assertEquals(ExecutionState.SCHEDULED, vertex.getExecutionState());

        vertex.cancel();

        assertEquals(ExecutionState.CANCELED, vertex.getExecutionState());
        assertFalse(vertex.getFailureInfo().isPresent());

        assertTrue(vertex.getStateTimestamp(ExecutionState.CREATED) > 0);
        assertTrue(vertex.getStateTimestamp(ExecutionState.CANCELING) > 0);
        assertTrue(vertex.getStateTimestamp(ExecutionState.CANCELED) > 0);
    }

    /**
     * Cancelling a RUNNING vertex goes through CANCELING and reaches CANCELED once the task
     * manager acknowledges the cancellation; the assigned slot must be released.
     */
    @Test
    public void testCancelFromRunning() throws Exception {
        final ExecutionVertex vertex = getExecutionVertex();

        LogicalSlot slot =
                new TestingLogicalSlotBuilder()
                        .setTaskManagerGateway(new CancelSequenceSimpleAckingTaskManagerGateway(1))
                        .createTestingLogicalSlot();

        setVertexResource(vertex, slot);
        setVertexState(vertex, ExecutionState.RUNNING);

        assertEquals(ExecutionState.RUNNING, vertex.getExecutionState());

        vertex.cancel();
        vertex.getCurrentExecutionAttempt()
                .completeCancelling(); // response by task manager once actually canceled

        assertEquals(ExecutionState.CANCELED, vertex.getExecutionState());

        assertFalse(slot.isAlive());
        assertFalse(vertex.getFailureInfo().isPresent());

        assertTrue(vertex.getStateTimestamp(ExecutionState.CREATED) > 0);
        assertTrue(vertex.getStateTimestamp(ExecutionState.CANCELING) > 0);
        assertTrue(vertex.getStateTimestamp(ExecutionState.CANCELED) > 0);
    }

    /** A second cancel() while already CANCELING must be a no-op. */
    @Test
    public void testRepeatedCancelFromRunning() throws Exception {
        final ExecutionVertex vertex = getExecutionVertex();

        LogicalSlot slot =
                new TestingLogicalSlotBuilder()
                        .setTaskManagerGateway(new CancelSequenceSimpleAckingTaskManagerGateway(1))
                        .createTestingLogicalSlot();

        setVertexResource(vertex, slot);
        setVertexState(vertex, ExecutionState.RUNNING);

        assertEquals(ExecutionState.RUNNING, vertex.getExecutionState());

        vertex.cancel();

        assertEquals(ExecutionState.CANCELING, vertex.getExecutionState());

        vertex.cancel();

        assertEquals(ExecutionState.CANCELING, vertex.getExecutionState());

        // callback by TaskManager after canceling completes
        vertex.getCurrentExecutionAttempt().completeCancelling();

        assertEquals(ExecutionState.CANCELED, vertex.getExecutionState());

        assertFalse(slot.isAlive());
        assertFalse(vertex.getFailureInfo().isPresent());

        assertTrue(vertex.getStateTimestamp(ExecutionState.CREATED) > 0);
        assertTrue(vertex.getStateTimestamp(ExecutionState.CANCELING) > 0);
        assertTrue(vertex.getStateTimestamp(ExecutionState.CANCELED) > 0);
    }

    /**
     * Cancelling a vertex whose task is gone on the task manager side: the vertex stays in
     * CANCELING until the (never-arriving) confirmation; no failure info must be recorded.
     */
    @Test
    public void testCancelFromRunningDidNotFindTask() throws Exception {
        // this may happen when the task finished or failed while the call was in progress
        final ExecutionVertex vertex = getExecutionVertex();

        LogicalSlot slot =
                new TestingLogicalSlotBuilder()
                        .setTaskManagerGateway(new CancelSequenceSimpleAckingTaskManagerGateway(1))
                        .createTestingLogicalSlot();

        setVertexResource(vertex, slot);
        setVertexState(vertex, ExecutionState.RUNNING);

        assertEquals(ExecutionState.RUNNING, vertex.getExecutionState());

        vertex.cancel();

        assertEquals(ExecutionState.CANCELING, vertex.getExecutionState());
        assertFalse(vertex.getFailureInfo().isPresent());

        assertTrue(vertex.getStateTimestamp(ExecutionState.CREATED) > 0);
        assertTrue(vertex.getStateTimestamp(ExecutionState.CANCELING) > 0);
    }

    /** A failing cancel RPC (gateway permits 0 successful calls) must still end in CANCELED. */
    @Test
    public void testCancelCallFails() throws Exception {
        final ExecutionVertex vertex = getExecutionVertex();

        LogicalSlot slot =
                new TestingLogicalSlotBuilder()
                        .setTaskManagerGateway(new CancelSequenceSimpleAckingTaskManagerGateway(0))
                        .createTestingLogicalSlot();

        setVertexResource(vertex, slot);
        setVertexState(vertex, ExecutionState.RUNNING);

        assertEquals(ExecutionState.RUNNING, vertex.getExecutionState());

        vertex.cancel();

        // Callback fails, leading to CANCELED
        assertEquals(ExecutionState.CANCELED, vertex.getExecutionState());

        assertFalse(slot.isAlive());

        assertTrue(vertex.getStateTimestamp(ExecutionState.CREATED) > 0);
        assertTrue(vertex.getStateTimestamp(ExecutionState.CANCELING) > 0);
    }

    /**
     * A markFailed() arriving while an execution is CANCELING must end in either FAILED or
     * CANCELED, release the slot, and deregister the execution.
     */
    @Test
    public void testSendCancelAndReceiveFail() throws Exception {
        final SchedulerBase scheduler =
                SchedulerTestingUtils.createScheduler(
                        JobGraphTestUtils.streamingJobGraph(createNoOpVertex(10)),
                        ComponentMainThreadExecutorServiceAdapter.forMainThread());
        final ExecutionGraph graph = scheduler.getExecutionGraph();

        scheduler.startScheduling();
        ExecutionGraphTestUtils.switchAllVerticesToRunning(graph);
        assertEquals(JobStatus.RUNNING, graph.getState());

        final ExecutionVertex[] vertices =
                graph.getVerticesTopologically().iterator().next().getTaskVertices();
        assertEquals(vertices.length, graph.getRegisteredExecutions().size());

        final Execution exec = vertices[3].getCurrentExecutionAttempt();
        exec.cancel();
        assertEquals(ExecutionState.CANCELING, exec.getState());

        exec.markFailed(new Exception("test"));
        assertTrue(
                exec.getState() == ExecutionState.FAILED
                        || exec.getState() == ExecutionState.CANCELED);

        assertFalse(exec.getAssignedResource().isAlive());
        assertEquals(vertices.length - 1, graph.getRegisteredExecutions().size());
    }

    /**
     * Task manager gateway that acknowledges only the first {@code successfulOperations} cancel
     * calls and fails every subsequent one with an {@link IOException}.
     */
    private static class CancelSequenceSimpleAckingTaskManagerGateway
            extends SimpleAckingTaskManagerGateway {
        private final int successfulOperations;
        private int index = -1;

        public CancelSequenceSimpleAckingTaskManagerGateway(int successfulOperations) {
            super();
            this.successfulOperations = successfulOperations;
        }

        @Override
        public CompletableFuture<Acknowledge> cancelTask(
                ExecutionAttemptID executionAttemptID, Time timeout) {
            index++;

            if (index >= successfulOperations) {
                return FutureUtils.completedExceptionally(new IOException("Rpc call fails"));
            } else {
                return CompletableFuture.completedFuture(Acknowledge.get());
            }
        }
    }
}
package org.nybatis.core.db.sql.sqlNode.element;

import org.nybatis.core.conf.Const;
import org.nybatis.core.db.session.executor.util.QueryParameter;
import org.nybatis.core.db.sql.sqlMaker.QueryResolver;
import org.nybatis.core.db.sql.sqlNode.element.abstracts.SqlElement;
import org.nybatis.core.exception.unchecked.SqlParseException;
import org.nybatis.core.model.NMap;
import org.nybatis.core.util.StringUtil;
import org.nybatis.core.util.Types;
import org.nybatis.core.validation.Validator;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;

/**
 * Dynamic-SQL "foreach" element.
 *
 * <p>Renders its inner SQL once per element of the collection bound to
 * {@code paramKey}, rewriting each {@code #{paramKey...}} placeholder to an
 * indexed JSON-path form ({@code #{paramKey[i]...}}), joining the pieces with
 * {@code concater} and wrapping the whole result with {@code open}/{@code close}.
 */
public class ForEachSqlElement extends SqlElement {

    // child SQL nodes; rendered via super.toString(param) and dumped by toString()
    private List<SqlElement> children = new ArrayList<>();

    // parameter key whose value is iterated; "#{...}" wrapper stripped in ctor
    private String paramKey;
    // SQL fragment emitted before the joined body (may contain #{bind} params)
    private String open;
    // SQL fragment emitted after the joined body
    private String close;
    // delimiter placed between consecutive non-blank inner fragments
    private String concater;
    // optional key exposing the current loop index inside the body; "#{...}" stripped
    private String indexKey;

    /**
     * @param key      foreach target parameter, with or without "#{...}" wrapping
     * @param open     prefix fragment (trimmed)
     * @param close    suffix fragment (trimmed)
     * @param concater delimiter between iterations (trimmed)
     * @param indexKey loop-index parameter name, with or without "#{...}" wrapping
     */
    public ForEachSqlElement( String key, String open, String close, String concater, String indexKey ) {
        this.paramKey  = StringUtil.trim( key ).replaceFirst( "^#\\{", "" ).replaceFirst( "\\}$", "" );
        this.open      = StringUtil.trim( open );
        this.close     = StringUtil.trim( close );
        this.concater  = StringUtil.trim( concater );
        this.indexKey  = StringUtil.trim( indexKey ).replaceFirst( "^#\\{", "" ).replaceFirst( "\\}$", "" );
    }

    /**
     * Renders the foreach body once per element of the bound collection.
     *
     * @param inputParam query parameters for this execution
     * @return the expanded SQL, or "" when the bound collection is null/empty
     * @throws SqlParseException when the inner SQL template cannot be parsed
     */
    @Override
    public String toString( QueryParameter inputParam ) throws SqlParseException {

        boolean delimiterOn    = StringUtil.isNotEmpty( getConcater( inputParam ) );
        boolean indexKeyOn     = StringUtil.isNotEmpty( indexKey );
        boolean hasSingleParam = hasSingleParameter( inputParam );

        List params = getParams( inputParam, hasSingleParam );

        if( Validator.isEmpty(params) ) return "";

        StringBuilder sql = new StringBuilder();

        for( int i = 0, iCnt = params.size() - 1; i <= iCnt; i++ ) {

            // each iteration works on a shallow copy so the caller's map is not polluted
            QueryParameter param = clone( inputParam ).setForEachInnerParam( paramKey, params.get(i) );

            if( indexKeyOn ) {
                param.put( indexKey, i );
            }

            String innerSql  = getInnerSql( param );

            // rewrite #{paramKey...} -> #{paramKey[i]...} for positional binding
            String targetKey = String.format( "%s[%d]", paramKey, i );

            innerSql = convertKeyToJsonPath( innerSql, paramKey, targetKey );
            innerSql = bindSingleParamKey( innerSql, hasSingleParam );

            if( indexKeyOn ) {
                innerSql = setIndexKey( innerSql, i, inputParam );
            }

            sql.append( innerSql );

            // no trailing delimiter; blank fragments are not followed by one either
            if( delimiterOn && i != iCnt && ! StringUtil.isBlank(innerSql) ) {
                sql.append( ' ' ).append( getConcater( inputParam ) ).append( ' ' );
            }

        }

        if( StringUtil.isBlank(sql) ) {
            return sql.toString();
        } else {
            return String.format( "%s %s %s", getOpen( inputParam ), sql, getClose( inputParam ) );
        }

    }

    /**
     * Rewrites #{indexKey...} occurrences in the fragment to the indexed form
     * "#{paramKey[i].indexKey...}" and, when at least one occurrence was
     * rewritten (detected via length change), binds the index value into
     * {@code inputParam} under that target key.
     */
    private String setIndexKey( String sql, int index, QueryParameter inputParam ) {

        if( StringUtil.isEmpty( indexKey ) ) return sql;

        String targetKey = String.format( "%s[%d].%s", paramKey, index, indexKey );

        int beforeSize = sql.length();

        sql = convertKeyToJsonPath( sql, indexKey, targetKey );

        int afterSize = sql.length();

        // length differs only if a replacement actually happened
        if( beforeSize != afterSize ) {
            inputParam.put( targetKey, index );
        }

        return sql;

    }

    // replaces "#{sourceKey}" or "#{sourceKey.path}" with "#{targetKey}" / "#{targetKey.path}";
    // the "(\..+?)?" group preserves any trailing property path
    private String convertKeyToJsonPath( String sql, String sourceKey, String targetKey ) {
        return sql.replaceAll( String.format( "#\\{%s(\\..+?)?\\}", sourceKey ), String.format( "#{%s$1}", targetKey ) );
    }

    // recursive pretty-printer used by toString(); IfSqlElement nodes are
    // flattened into their children (the if node itself is not printed)
    private void toString( StringBuilder buffer, SqlElement node, int depth ) {

        String tab = StringUtil.lpad( "", depth * 2, ' ' );

        if( node instanceof IfSqlElement ) {

            IfSqlElement ifNode = (IfSqlElement) node;

            for( SqlElement child : ifNode.children() ) {
                toString( buffer, child, depth + 1 );
            }

        } else {
            buffer.append( String.format( "%s%s", tab, node.toString() ) );
        }

    }

    /** Debug dump of the child element tree. */
    public String toString() {

        StringBuilder sb = new StringBuilder();

        for( SqlElement node : children ) {
            toString( sb, node, 0 );
        }

        return sb.toString();

    }

    // the open/close/concater fragments may themselves contain #{bind} params
    private String getConcater( Map param ) {
        return StringUtil.bindParam( concater, param );
    }

    private String getClose( Map param ) {
        return StringUtil.bindParam( close, param );
    }

    private String getOpen( Map param ) {
        return StringUtil.bindParam( open, param );
    }

    // shallow copy: iteration-local bindings must not leak into the caller's map
    private QueryParameter clone( Map param ) {
        QueryParameter newMap = new QueryParameter();
        newMap.putAll( param );
        return newMap;
    }

    // renders child elements, then resolves dynamic-SQL constructs in the result
    private String getInnerSql( QueryParameter param ) throws SqlParseException {
        String sqlTemplate = super.toString( param );
        return QueryResolver.makeDynamicSql( sqlTemplate, param );
    }

    // true when the statement was invoked with a single (non-map) parameter,
    // stored under the reserved key Const.db.PARAMETER_SINGLE
    private boolean hasSingleParameter( NMap param ) {
        return param.containsKey( Const.db.PARAMETER_SINGLE );
    }

    /**
     * Resolves the foreach target into a list: arrays/collections are returned
     * as-is, a scalar becomes a one-element list, and a missing value yields an
     * empty list (so the element renders to "").
     */
    private List getParams( QueryParameter inputParam, boolean hasSingleParam ) {

        Object value = getValue( inputParam, hasSingleParam );

        if( value == null ) return new ArrayList();

        if( Types.isArrayOrList(value) ) {
            return Types.toList( value );
        } else {
            return Arrays.asList( value );
        }

    }

    // looks up paramKey directly; in single-parameter mode falls back to the
    // same path re-rooted at the reserved single-parameter key
    private Object getValue( QueryParameter param, boolean hasSingleParam ) {

        Object val = param.get( paramKey );

        if( val == null && hasSingleParam ) {
            String modifiedParamKey = paramKey.replaceFirst( "^.+?(\\..+?)?$", String.format( "%s$1", Const.db.PARAMETER_SINGLE ) );
            val = param.get( modifiedParamKey );
        }

        return val;

    }

    // in single-parameter mode, re-roots every #{key[...]...} placeholder at the
    // reserved single-parameter key, keeping index and property-path suffixes
    private String bindSingleParamKey( String sql, boolean hasSingleParam ) {
        if( hasSingleParam ) {
            return sql.replaceAll( "#\\{.+?(\\[.+?\\])?(\\..+?)?\\}", String.format( "#{%s$1$2}", Const.db.PARAMETER_SINGLE) );
        } else {
            return sql;
        }
    }

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/* $Id$ */

package org.apache.fop.fonts.autodetect;

import java.io.InputStream;
import java.net.URI;
import java.util.Collection;
import java.util.List;
import java.util.Set;
import java.util.regex.Pattern;

import org.apache.commons.io.IOUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import org.apache.fop.apps.io.InternalResourceResolver;
import org.apache.fop.fonts.CustomFont;
import org.apache.fop.fonts.EmbedFontInfo;
import org.apache.fop.fonts.EmbeddingMode;
import org.apache.fop.fonts.EncodingMode;
import org.apache.fop.fonts.Font;
import org.apache.fop.fonts.FontCache;
import org.apache.fop.fonts.FontEventListener;
import org.apache.fop.fonts.FontLoader;
import org.apache.fop.fonts.FontTriplet;
import org.apache.fop.fonts.FontUris;
import org.apache.fop.fonts.FontUtil;
import org.apache.fop.fonts.MultiByteFont;
import org.apache.fop.fonts.truetype.FontFileReader;
import org.apache.fop.fonts.truetype.OFFontLoader;
import org.apache.fop.fonts.truetype.TTFFile;

/**
 * Attempts to determine correct FontInfo
 */
public class FontInfoFinder {

    /** logging instance */
    private final Log log = LogFactory.getLog(FontInfoFinder.class);

    // optional listener notified of font-loading errors during auto-detection
    private FontEventListener eventListener;

    /**
     * Sets the font event listener that can be used to receive events about particular events
     * in this class.
     * @param listener the font event listener
     */
    public void setEventListener(FontEventListener listener) {
        this.eventListener = listener;
    }

    /**
     * Attempts to determine FontTriplets from a given CustomFont.
     * It seems to be fairly accurate but will probably require some tweaking over time
     *
     * @param customFont CustomFont
     * @param triplets Collection that will take the generated triplets
     */
    private void generateTripletsFromFont(CustomFont customFont, Collection<FontTriplet> triplets) {
        if (log.isTraceEnabled()) {
            log.trace("Font: " + customFont.getFullName()
                    + ", family: " + customFont.getFamilyNames()
                    + ", PS: " + customFont.getFontName()
                    + ", EmbedName: " + customFont.getEmbedFontName());
        }

        // default style and weight triplet values (fallback)
        String strippedName = stripQuotes(customFont.getStrippedFontName());
        //String subName = customFont.getFontSubName();
        String fullName = stripQuotes(customFont.getFullName());
        // lower-cased name used for style/weight keyword matching
        String searchName = fullName.toLowerCase();

        String style = guessStyle(customFont, searchName);
        int weight; //= customFont.getWeight();
        int guessedWeight = FontUtil.guessWeight(searchName);
        //We always take the guessed weight for now since it yields much better results.
        //OpenType's OS/2 usWeightClass value proves to be unreliable.
        weight = guessedWeight;

        //Full Name usually includes style/weight info so don't use these traits
        //If we still want to use these traits, we have to make FontInfo.fontLookup() smarter
        triplets.add(new FontTriplet(fullName, Font.STYLE_NORMAL, Font.WEIGHT_NORMAL));
        if (!fullName.equals(strippedName)) {
            triplets.add(new FontTriplet(strippedName, Font.STYLE_NORMAL, Font.WEIGHT_NORMAL));
        }
        Set<String> familyNames = customFont.getFamilyNames();
        for (String familyName : familyNames) {
            familyName = stripQuotes(familyName);
            if (!fullName.equals(familyName)) {
                /* Heuristic:
                 *   The more similar the family name to the full font name,
                 *   the higher the priority of its triplet.
                 * (Lower values indicate higher priorities.)
                 */
                int priority = fullName.startsWith(familyName)
                        ? fullName.length() - familyName.length()
                        : fullName.length();
                triplets.add(new FontTriplet(familyName, style, weight, priority));
            }
        }
    }

    // compiled once; used to strip single quotes from font names
    private final Pattern quotePattern = Pattern.compile("'");

    // removes all single-quote characters from a font name
    private String stripQuotes(String name) {
        return quotePattern.matcher(name).replaceAll("");
    }

    // italic angle > 0 wins; otherwise fall back to keyword matching on the name
    private String guessStyle(CustomFont customFont, String fontName) {
        // style
        String style = Font.STYLE_NORMAL;
        if (customFont.getItalicAngle() > 0) {
            style = Font.STYLE_ITALIC;
        } else {
            style = FontUtil.guessStyle(fontName);
        }
        return style;
    }

    /**
     * Attempts to determine FontInfo from a given custom font
     * @param fontUri the font URI
     * @param customFont the custom font
     * @param fontCache font cache (may be null)
     * @return FontInfo from the given custom font
     */
    private EmbedFontInfo getFontInfoFromCustomFont(URI fontUri, CustomFont customFont,
            FontCache fontCache, InternalResourceResolver resourceResolver) {
        FontUris fontUris = new FontUris(fontUri, null);
        List<FontTriplet> fontTripletList = new java.util.ArrayList<FontTriplet>();
        generateTripletsFromFont(customFont, fontTripletList);
        String subFontName = null;
        // for a font inside a TrueType Collection, record which sub-font it is
        if (customFont instanceof MultiByteFont) {
            subFontName = ((MultiByteFont) customFont).getTTCName();
        }
        EmbedFontInfo fontInfo = new EmbedFontInfo(fontUris, customFont.isKerningEnabled(),
                customFont.isAdvancedEnabled(), fontTripletList, subFontName, EncodingMode.AUTO,
                EmbeddingMode.AUTO);
        fontInfo.setPostScriptName(customFont.getFontName());
        if (fontCache != null) {
            fontCache.addFont(fontInfo, resourceResolver);
        }
        return fontInfo;
    }

    /**
     * Attempts to determine EmbedFontInfo from a given font file.
     *
     * @param fontURI the URI of the font resource
     * @param resourceResolver font resolver used to resolve font
     * @param fontCache font cache (may be null)
     * @return an array of newly created embed font info. Generally, this array
     *         will have only one entry, unless the fontUrl is a TrueType Collection
     *         (returns null when the font cannot be loaded or previously failed)
     */
    public EmbedFontInfo[] find(URI fontURI, InternalResourceResolver resourceResolver,
            FontCache fontCache) {
        URI embedUri = resourceResolver.resolveFromBase(fontURI);
        String embedStr = embedUri.toASCIIString();
        boolean useKerning = true;
        boolean useAdvanced = true;

        long fileLastModified = -1;
        if (fontCache != null) {
            fileLastModified = FontCache.getLastModified(fontURI);
            // firstly try and fetch it from cache before loading/parsing the font file
            if (fontCache.containsFont(embedStr)) {
                EmbedFontInfo[] fontInfos = fontCache.getFontInfos(embedStr, fileLastModified);
                if (fontInfos != null) {
                    return fontInfos;
                }
                // is this a previously failed parsed font?
            } else if (fontCache.isFailedFont(embedStr, fileLastModified)) {
                if (log.isDebugEnabled()) {
                    log.debug("Skipping font file that failed to load previously: " + embedUri);
                }
                return null;
            }
        }

        // try to determine triplet information from font file
        CustomFont customFont = null;
        if (fontURI.toASCIIString().toLowerCase().endsWith(".ttc")) {
            // TrueType Collection: one EmbedFontInfo per contained font name.
            // Get a list of the TTC Font names
            List<String> ttcNames = null;
            InputStream in = null;
            try {
                in = resourceResolver.getResource(fontURI);
                TTFFile ttf = new TTFFile(false, false);
                FontFileReader reader = new FontFileReader(in);
                ttcNames = ttf.getTTCnames(reader);
            } catch (Exception e) {
                if (this.eventListener != null) {
                    this.eventListener.fontLoadingErrorAtAutoDetection(this,
                            fontURI.toASCIIString(), e);
                }
                return null;
            } finally {
                IOUtils.closeQuietly(in);
            }

            List<EmbedFontInfo> embedFontInfoList = new java.util.ArrayList<EmbedFontInfo>();

            // For each font name ...
            for (String fontName : ttcNames) {
                if (log.isDebugEnabled()) {
                    log.debug("Loading " + fontName);
                }
                try {
                    OFFontLoader ttfLoader = new OFFontLoader(fontURI, fontName, true,
                            EmbeddingMode.AUTO, EncodingMode.AUTO, useKerning, useAdvanced,
                            resourceResolver);
                    customFont = ttfLoader.getFont();
                    if (this.eventListener != null) {
                        customFont.setEventListener(this.eventListener);
                    }
                } catch (Exception e) {
                    // a single failing sub-font is recorded and skipped; the rest still load
                    if (fontCache != null) {
                        fontCache.registerFailedFont(embedUri.toASCIIString(), fileLastModified);
                    }
                    if (this.eventListener != null) {
                        this.eventListener.fontLoadingErrorAtAutoDetection(this,
                                embedUri.toASCIIString(), e);
                    }
                    continue;
                }
                EmbedFontInfo fi = getFontInfoFromCustomFont(fontURI, customFont, fontCache,
                        resourceResolver);
                if (fi != null) {
                    embedFontInfoList.add(fi);
                }
            }

            return embedFontInfoList.toArray(new EmbedFontInfo[embedFontInfoList.size()]);
        } else {
            // The normal case
            try {
                FontUris fontUris = new FontUris(fontURI, null);
                customFont = FontLoader.loadFont(fontUris, null, true, EmbeddingMode.AUTO,
                        EncodingMode.AUTO, useKerning, useAdvanced, resourceResolver);
                if (this.eventListener != null) {
                    customFont.setEventListener(this.eventListener);
                }
            } catch (Exception e) {
                // record the failure so the next run can skip this file quickly
                if (fontCache != null) {
                    fontCache.registerFailedFont(embedUri.toASCIIString(), fileLastModified);
                }
                if (this.eventListener != null) {
                    this.eventListener.fontLoadingErrorAtAutoDetection(this,
                            embedUri.toASCIIString(), e);
                }
                return null;
            }
            EmbedFontInfo fi = getFontInfoFromCustomFont(fontURI, customFont, fontCache,
                    resourceResolver);
            if (fi != null) {
                return new EmbedFontInfo[] {fi};
            } else {
                return null;
            }
        }
    }
}
/*
 * InformationMachineAPILib
 */
package co.iamdata.api.models;

import java.util.*;

import com.fasterxml.jackson.annotation.JsonGetter;
import com.fasterxml.jackson.annotation.JsonSetter;

/**
 * Serializable data-transfer object for a single product record.
 * <p>
 * Each camelCase Java property is bound to its snake_case JSON key through the
 * Jackson {@link JsonGetter}/{@link JsonSetter} annotations on the accessors.
 */
public class ProductData implements java.io.Serializable {

    private static final long serialVersionUID = 4995132929366813213L;

    private String amazonLink;
    private String brand;
    private String category;
    private Integer categoryId;
    private String description;
    private Integer id;
    private String ingredients;
    private Integer ingredientsCount;
    private String largeImage;
    private String manufacturer;
    private String name;
    private List<NutrientData> nutrients;
    private List<String> plus;
    private List<String> recipes;
    private Double score;
    private String servingSize;
    private Double servingSizeInGrams;
    private String servingSizeUnit;
    private String servingsPerContainer;
    private String smallImage;
    private List<String> tags;
    private String upc;
    private Integer visibilityCount;
    private String weight;

    /** Getter for the {@code amazon_link} JSON property. */
    @JsonGetter("amazon_link")
    public String getAmazonLink() {
        return this.amazonLink;
    }

    /** Setter for the {@code amazon_link} JSON property. */
    @JsonSetter("amazon_link")
    public void setAmazonLink(String value) {
        this.amazonLink = value;
    }

    /** Getter for the {@code brand} JSON property. */
    @JsonGetter("brand")
    public String getBrand() {
        return this.brand;
    }

    /** Setter for the {@code brand} JSON property. */
    @JsonSetter("brand")
    public void setBrand(String value) {
        this.brand = value;
    }

    /** Getter for the {@code category} JSON property. */
    @JsonGetter("category")
    public String getCategory() {
        return this.category;
    }

    /** Setter for the {@code category} JSON property. */
    @JsonSetter("category")
    public void setCategory(String value) {
        this.category = value;
    }

    /** Getter for the {@code category_id} JSON property. */
    @JsonGetter("category_id")
    public Integer getCategoryId() {
        return this.categoryId;
    }

    /** Setter for the {@code category_id} JSON property. */
    @JsonSetter("category_id")
    public void setCategoryId(Integer value) {
        this.categoryId = value;
    }

    /** Getter for the {@code description} JSON property. */
    @JsonGetter("description")
    public String getDescription() {
        return this.description;
    }

    /** Setter for the {@code description} JSON property. */
    @JsonSetter("description")
    public void setDescription(String value) {
        this.description = value;
    }

    /** Getter for the {@code id} JSON property. */
    @JsonGetter("id")
    public Integer getId() {
        return this.id;
    }

    /** Setter for the {@code id} JSON property. */
    @JsonSetter("id")
    public void setId(Integer value) {
        this.id = value;
    }

    /** Getter for the {@code ingredients} JSON property. */
    @JsonGetter("ingredients")
    public String getIngredients() {
        return this.ingredients;
    }

    /** Setter for the {@code ingredients} JSON property. */
    @JsonSetter("ingredients")
    public void setIngredients(String value) {
        this.ingredients = value;
    }

    /** Getter for the {@code ingredients_count} JSON property. */
    @JsonGetter("ingredients_count")
    public Integer getIngredientsCount() {
        return this.ingredientsCount;
    }

    /** Setter for the {@code ingredients_count} JSON property. */
    @JsonSetter("ingredients_count")
    public void setIngredientsCount(Integer value) {
        this.ingredientsCount = value;
    }

    /** Getter for the {@code large_image} JSON property. */
    @JsonGetter("large_image")
    public String getLargeImage() {
        return this.largeImage;
    }

    /** Setter for the {@code large_image} JSON property. */
    @JsonSetter("large_image")
    public void setLargeImage(String value) {
        this.largeImage = value;
    }

    /** Getter for the {@code manufacturer} JSON property. */
    @JsonGetter("manufacturer")
    public String getManufacturer() {
        return this.manufacturer;
    }

    /** Setter for the {@code manufacturer} JSON property. */
    @JsonSetter("manufacturer")
    public void setManufacturer(String value) {
        this.manufacturer = value;
    }

    /** Getter for the {@code name} JSON property. */
    @JsonGetter("name")
    public String getName() {
        return this.name;
    }

    /** Setter for the {@code name} JSON property. */
    @JsonSetter("name")
    public void setName(String value) {
        this.name = value;
    }

    /** Getter for the {@code nutrients} JSON property. */
    @JsonGetter("nutrients")
    public List<NutrientData> getNutrients() {
        return this.nutrients;
    }

    /** Setter for the {@code nutrients} JSON property. */
    @JsonSetter("nutrients")
    public void setNutrients(List<NutrientData> value) {
        this.nutrients = value;
    }

    /** Getter for the {@code plus} JSON property. */
    @JsonGetter("plus")
    public List<String> getPlus() {
        return this.plus;
    }

    /** Setter for the {@code plus} JSON property. */
    @JsonSetter("plus")
    public void setPlus(List<String> value) {
        this.plus = value;
    }

    /** Getter for the {@code recipes} JSON property. */
    @JsonGetter("recipes")
    public List<String> getRecipes() {
        return this.recipes;
    }

    /** Setter for the {@code recipes} JSON property. */
    @JsonSetter("recipes")
    public void setRecipes(List<String> value) {
        this.recipes = value;
    }

    /** Getter for the {@code score} JSON property. */
    @JsonGetter("score")
    public Double getScore() {
        return this.score;
    }

    /** Setter for the {@code score} JSON property. */
    @JsonSetter("score")
    public void setScore(Double value) {
        this.score = value;
    }

    /** Getter for the {@code serving_size} JSON property. */
    @JsonGetter("serving_size")
    public String getServingSize() {
        return this.servingSize;
    }

    /** Setter for the {@code serving_size} JSON property. */
    @JsonSetter("serving_size")
    public void setServingSize(String value) {
        this.servingSize = value;
    }

    /** Getter for the {@code serving_size_in_grams} JSON property. */
    @JsonGetter("serving_size_in_grams")
    public Double getServingSizeInGrams() {
        return this.servingSizeInGrams;
    }

    /** Setter for the {@code serving_size_in_grams} JSON property. */
    @JsonSetter("serving_size_in_grams")
    public void setServingSizeInGrams(Double value) {
        this.servingSizeInGrams = value;
    }

    /** Getter for the {@code serving_size_unit} JSON property. */
    @JsonGetter("serving_size_unit")
    public String getServingSizeUnit() {
        return this.servingSizeUnit;
    }

    /** Setter for the {@code serving_size_unit} JSON property. */
    @JsonSetter("serving_size_unit")
    public void setServingSizeUnit(String value) {
        this.servingSizeUnit = value;
    }

    /** Getter for the {@code servings_per_container} JSON property. */
    @JsonGetter("servings_per_container")
    public String getServingsPerContainer() {
        return this.servingsPerContainer;
    }

    /** Setter for the {@code servings_per_container} JSON property. */
    @JsonSetter("servings_per_container")
    public void setServingsPerContainer(String value) {
        this.servingsPerContainer = value;
    }

    /** Getter for the {@code small_image} JSON property. */
    @JsonGetter("small_image")
    public String getSmallImage() {
        return this.smallImage;
    }

    /** Setter for the {@code small_image} JSON property. */
    @JsonSetter("small_image")
    public void setSmallImage(String value) {
        this.smallImage = value;
    }

    /** Getter for the {@code tags} JSON property. */
    @JsonGetter("tags")
    public List<String> getTags() {
        return this.tags;
    }

    /** Setter for the {@code tags} JSON property. */
    @JsonSetter("tags")
    public void setTags(List<String> value) {
        this.tags = value;
    }

    /** Getter for the {@code upc} JSON property. */
    @JsonGetter("upc")
    public String getUpc() {
        return this.upc;
    }

    /** Setter for the {@code upc} JSON property. */
    @JsonSetter("upc")
    public void setUpc(String value) {
        this.upc = value;
    }

    /** Getter for the {@code visibility_count} JSON property. */
    @JsonGetter("visibility_count")
    public Integer getVisibilityCount() {
        return this.visibilityCount;
    }

    /** Setter for the {@code visibility_count} JSON property. */
    @JsonSetter("visibility_count")
    public void setVisibilityCount(Integer value) {
        this.visibilityCount = value;
    }

    /** Getter for the {@code weight} JSON property. */
    @JsonGetter("weight")
    public String getWeight() {
        return this.weight;
    }

    /** Setter for the {@code weight} JSON property. */
    @JsonSetter("weight")
    public void setWeight(String value) {
        this.weight = value;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.ivyde.internal.eclipse.cpcontainer; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; import java.util.Properties; import org.apache.ivyde.eclipse.cp.IvyClasspathContainerHelper; import org.apache.ivyde.internal.eclipse.IvyDEMessage; import org.apache.ivyde.internal.eclipse.IvyPlugin; import org.eclipse.core.runtime.IPath; import org.eclipse.core.runtime.Path; import org.eclipse.jdt.core.IClasspathAttribute; import org.eclipse.jdt.core.IClasspathContainer; import org.eclipse.jdt.core.IClasspathEntry; import org.eclipse.jdt.core.IJavaProject; public class IvyAttachementManager { private static final String SRC_SUFFIX = "-src"; private static final String SRCROOT_SUFFIX = "-srcroot"; private static final String DOC_SUFFIX = "-doc"; private Properties prop = new Properties(); private File containersAttachementFile; public IvyAttachementManager(File containersAttachementFile) { this.containersAttachementFile = containersAttachementFile; if (!containersAttachementFile.exists()) { IvyDEMessage.verbose("Attachement properties file not found: nothing to load"); 
return; } IvyDEMessage.verbose("Reading attachement properties"); try { FileInputStream in = new FileInputStream(containersAttachementFile); try { prop.load(in); } finally { try { in.close(); } catch (IOException e) { // don't care } } } catch (IOException ioe) { IvyPlugin.logWarn("IvyDE attachement properties could not be loaded", ioe); } } public void updateAttchements(IJavaProject project, IPath containerPath, IClasspathContainer containerSuggestion) { IvyDEMessage.verbose("Updating attachements on the container " + containerPath); Properties newProps = new Properties(); IClasspathEntry[] newEntries = containerSuggestion.getClasspathEntries(); for (int i = 0; i < newEntries.length; i++) { IClasspathEntry entry = newEntries[i]; if (IClasspathEntry.CPE_LIBRARY == entry.getEntryKind()) { String path = entry.getPath().toPortableString(); if (entry.getSourceAttachmentPath() != null) { newProps.put(path + SRC_SUFFIX, entry.getSourceAttachmentPath() .toPortableString()); } if (entry.getSourceAttachmentRootPath() != null) { newProps.put(path + SRCROOT_SUFFIX, entry.getSourceAttachmentRootPath() .toPortableString()); } String javadocUrl = getJavadocLocation(entry); if (javadocUrl != null) { newProps.put(path + DOC_SUFFIX, javadocUrl); } } } IvyClasspathContainerImpl ivycp = (IvyClasspathContainerImpl) IvyClasspathContainerHelper .getContainer(containerPath, project); if (ivycp == null) { IvyDEMessage .error("The IvyDE container could not be found. 
Aborting updating attachements."); // something wrong happened, give up return; } IClasspathEntry[] existingEntries = ivycp.getClasspathEntries(); for (int i = 0; i < existingEntries.length; i++) { IClasspathEntry entry = existingEntries[i]; if (IClasspathEntry.CPE_LIBRARY == entry.getEntryKind()) { String path = entry.getPath().toPortableString(); String value = (String) prop.get(path + SRC_SUFFIX); if (value != null && entry.getSourceAttachmentPath() != null && value.equals(entry.getSourceAttachmentPath().toPortableString())) { newProps.remove(path + SRC_SUFFIX); } value = (String) prop.get(path + SRCROOT_SUFFIX); if (value != null && entry.getSourceAttachmentRootPath() != null && value.equals(entry.getSourceAttachmentRootPath().toPortableString())) { newProps.remove(path + SRCROOT_SUFFIX); } } } // copy the actually new overrided properties prop.putAll(newProps); // now update the ivyde container for real ivycp.updateClasspathEntries(newEntries); // store the global result IvyDEMessage.verbose("Saving attachement properties"); try { FileOutputStream out = new FileOutputStream(containersAttachementFile); try { prop.store(out, ""); } finally { try { out.close(); } catch (IOException e) { // don't care } } } catch (IOException ioe) { IvyPlugin.logWarn("IvyDE attachement properties could not be saved", ioe); } } public IPath getSourceAttachment(IPath path) { String srcPath = prop.getProperty(path.toPortableString() + SRC_SUFFIX); if (srcPath != null && srcPath.length() != 0) { return new Path(srcPath); } return null; } public IPath getSourceAttachmentRoot(IPath path) { String srcPath = prop.getProperty(path.toPortableString() + SRCROOT_SUFFIX); if (srcPath != null && srcPath.length() != 0) { return new Path(srcPath); } return null; } public URL getDocAttachment(IPath path) { String srcPath = prop.getProperty(path.toPortableString() + DOC_SUFFIX); if (srcPath != null && srcPath.length() != 0) { try { return new URL(srcPath); } catch (MalformedURLException e) { 
IvyPlugin.logWarn("The path for the doc attachement is not a valid URL", e); return null; } } return null; } public IPath getSourceAttachment(IPath classpathArtifact, IPath sourcesArtifact) { IPath sourceAttachment = getSourceAttachment(classpathArtifact); if (sourceAttachment == null) { sourceAttachment = sourcesArtifact; } return sourceAttachment; } public IPath getSourceAttachmentRoot(IPath classpathArtifact, IPath sourcesArtifact) { IPath sourceAttachment = getSourceAttachmentRoot(classpathArtifact); if (sourceAttachment == null && sourcesArtifact != null) { sourceAttachment = sourcesArtifact; } return sourceAttachment; } public String getJavadocLocation(IClasspathEntry entry) { IClasspathAttribute[] attributes = entry.getExtraAttributes(); for (int j = 0; j < attributes.length; j++) { IClasspathAttribute attribute = attributes[j]; if (IClasspathAttribute.JAVADOC_LOCATION_ATTRIBUTE_NAME.equals(attribute.getName())) { return attribute.getValue(); } } return null; } }
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.sql.planner;

import com.facebook.presto.Session;
import com.facebook.presto.metadata.Metadata;
import com.facebook.presto.spi.predicate.DiscreteValues;
import com.facebook.presto.spi.predicate.Domain;
import com.facebook.presto.spi.predicate.Marker;
import com.facebook.presto.spi.predicate.NullableValue;
import com.facebook.presto.spi.predicate.Range;
import com.facebook.presto.spi.predicate.Ranges;
import com.facebook.presto.spi.predicate.TupleDomain;
import com.facebook.presto.spi.predicate.ValueSet;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.sql.analyzer.ExpressionAnalyzer;
import com.facebook.presto.sql.parser.SqlParser;
import com.facebook.presto.sql.tree.AstVisitor;
import com.facebook.presto.sql.tree.BetweenPredicate;
import com.facebook.presto.sql.tree.BooleanLiteral;
import com.facebook.presto.sql.tree.ComparisonExpression;
import com.facebook.presto.sql.tree.Expression;
import com.facebook.presto.sql.tree.InListExpression;
import com.facebook.presto.sql.tree.InPredicate;
import com.facebook.presto.sql.tree.IsNotNullPredicate;
import com.facebook.presto.sql.tree.IsNullPredicate;
import com.facebook.presto.sql.tree.LogicalBinaryExpression;
import com.facebook.presto.sql.tree.LongLiteral;
import com.facebook.presto.sql.tree.NotExpression;
import com.facebook.presto.sql.tree.NullLiteral;
import com.facebook.presto.sql.tree.QualifiedNameReference;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.math.DoubleMath;

import javax.annotation.Nullable;

import java.util.ArrayList;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;

import static com.facebook.presto.spi.type.BigintType.BIGINT;
import static com.facebook.presto.spi.type.DoubleType.DOUBLE;
import static com.facebook.presto.sql.ExpressionUtils.and;
import static com.facebook.presto.sql.ExpressionUtils.combineConjuncts;
import static com.facebook.presto.sql.ExpressionUtils.combineDisjunctsWithDefault;
import static com.facebook.presto.sql.ExpressionUtils.flipComparison;
import static com.facebook.presto.sql.ExpressionUtils.or;
import static com.facebook.presto.sql.planner.LiteralInterpreter.toExpression;
import static com.facebook.presto.sql.tree.BooleanLiteral.FALSE_LITERAL;
import static com.facebook.presto.sql.tree.BooleanLiteral.TRUE_LITERAL;
import static com.facebook.presto.sql.tree.ComparisonExpression.Type.EQUAL;
import static com.facebook.presto.sql.tree.ComparisonExpression.Type.GREATER_THAN;
import static com.facebook.presto.sql.tree.ComparisonExpression.Type.GREATER_THAN_OR_EQUAL;
import static com.facebook.presto.sql.tree.ComparisonExpression.Type.LESS_THAN;
import static com.facebook.presto.sql.tree.ComparisonExpression.Type.LESS_THAN_OR_EQUAL;
import static com.facebook.presto.sql.tree.ComparisonExpression.Type.NOT_EQUAL;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.collect.Iterables.getOnlyElement;
import static java.math.RoundingMode.CEILING;
import static java.math.RoundingMode.FLOOR;
import static java.util.Objects.requireNonNull;
import static java.util.stream.Collectors.toList;

/**
 * Translates between {@link TupleDomain}s over {@link Symbol}s and equivalent
 * {@link Expression} predicates:
 * <ul>
 * <li>{@link #toPredicate(TupleDomain)} turns a TupleDomain into a predicate
 * Expression (conjunction over per-symbol domains);</li>
 * <li>{@link #fromPredicate(Metadata, Session, Expression, Map)} extracts as
 * much of an Expression as possible into a TupleDomain plus a remaining
 * Expression that must still be evaluated at execution time.</li>
 * </ul>
 */
public final class DomainTranslator
{
    // Static utility class: not instantiable.
    private DomainTranslator()
    {
    }

    /**
     * Convert a TupleDomain into an equivalent predicate Expression: the
     * conjunction of one disjunctive predicate per symbol domain. A "none"
     * TupleDomain becomes FALSE.
     */
    public static Expression toPredicate(TupleDomain<Symbol> tupleDomain)
    {
        if (tupleDomain.isNone()) {
            return FALSE_LITERAL;
        }
        ImmutableList.Builder<Expression> conjunctBuilder = ImmutableList.builder();
        for (Map.Entry<Symbol, Domain> entry : tupleDomain.getDomains().get().entrySet()) {
            Symbol symbol = entry.getKey();
            QualifiedNameReference reference = new QualifiedNameReference(symbol.toQualifiedName());
            conjunctBuilder.add(toPredicate(entry.getValue(), reference));
        }
        return combineConjuncts(conjunctBuilder.build());
    }

    /**
     * Convert a single symbol Domain into a predicate on that symbol's
     * reference, handling the degenerate none/all value sets and the
     * null-allowed flag explicitly.
     */
    private static Expression toPredicate(Domain domain, QualifiedNameReference reference)
    {
        // No values allowed: only NULL may satisfy the domain (if allowed at all)
        if (domain.getValues().isNone()) {
            return domain.isNullAllowed() ? new IsNullPredicate(reference) : FALSE_LITERAL;
        }
        // All values allowed: only the null-allowed flag constrains anything
        if (domain.getValues().isAll()) {
            return domain.isNullAllowed() ? TRUE_LITERAL : new NotExpression(new IsNullPredicate(reference));
        }
        List<Expression> disjuncts = new ArrayList<>();
        disjuncts.addAll(domain.getValues().getValuesProcessor().transform(
                ranges -> extractDisjuncts(domain.getType(), ranges, reference),
                discreteValues -> extractDisjuncts(domain.getType(), discreteValues, reference),
                allOrNone -> {
                    // all/none were handled above, so this branch is dead
                    throw new IllegalStateException("Case should not be reachable");
                }));
        // Add nullability disjuncts
        if (domain.isNullAllowed()) {
            disjuncts.add(new IsNullPredicate(reference));
        }
        return combineDisjunctsWithDefault(disjuncts, TRUE_LITERAL);
    }

    /**
     * Turn an ordered set of ranges into disjuncts on the reference. Single
     * values are batched into one equality or IN predicate; other ranges
     * become BETWEEN or comparison conjunctions.
     */
    private static List<Expression> extractDisjuncts(Type type, Ranges ranges, QualifiedNameReference reference)
    {
        List<Expression> disjuncts = new ArrayList<>();
        List<Expression> singleValues = new ArrayList<>();
        for (Range range : ranges.getOrderedRanges()) {
            checkState(!range.isAll()); // Already checked
            if (range.isSingleValue()) {
                singleValues.add(toExpression(range.getSingleValue(), type));
            }
            else if (isBetween(range)) {
                // Specialize the range with BETWEEN expression if possible b/c it is currently more efficient
                disjuncts.add(new BetweenPredicate(reference, toExpression(range.getLow().getValue(), type), toExpression(range.getHigh().getValue(), type)));
            }
            else {
                List<Expression> rangeConjuncts = new ArrayList<>();
                if (!range.getLow().isLowerUnbounded()) {
                    switch (range.getLow().getBound()) {
                        case ABOVE:
                            rangeConjuncts.add(new ComparisonExpression(GREATER_THAN, reference, toExpression(range.getLow().getValue(), type)));
                            break;
                        case EXACTLY:
                            rangeConjuncts.add(new ComparisonExpression(GREATER_THAN_OR_EQUAL, reference, toExpression(range.getLow().getValue(), type)));
                            break;
                        case BELOW:
                            throw new IllegalStateException("Low Marker should never use BELOW bound: " + range);
                        default:
                            throw new AssertionError("Unhandled bound: " + range.getLow().getBound());
                    }
                }
                if (!range.getHigh().isUpperUnbounded()) {
                    switch (range.getHigh().getBound()) {
                        case ABOVE:
                            throw new IllegalStateException("High Marker should never use ABOVE bound: " + range);
                        case EXACTLY:
                            rangeConjuncts.add(new ComparisonExpression(LESS_THAN_OR_EQUAL, reference, toExpression(range.getHigh().getValue(), type)));
                            break;
                        case BELOW:
                            rangeConjuncts.add(new ComparisonExpression(LESS_THAN, reference, toExpression(range.getHigh().getValue(), type)));
                            break;
                        default:
                            throw new AssertionError("Unhandled bound: " + range.getHigh().getBound());
                    }
                }
                // If rangeConjuncts is null, then the range was ALL, which should already have been checked for
                checkState(!rangeConjuncts.isEmpty());
                disjuncts.add(combineConjuncts(rangeConjuncts));
            }
        }
        // Add back all of the possible single values either as an equality or an IN predicate
        if (singleValues.size() == 1) {
            disjuncts.add(new ComparisonExpression(EQUAL, reference, getOnlyElement(singleValues)));
        }
        else if (singleValues.size() > 1) {
            disjuncts.add(new InPredicate(reference, new InListExpression(singleValues)));
        }
        return disjuncts;
    }

    /**
     * Turn a discrete value set into a single predicate: an equality or IN
     * predicate, negated when the set is a blacklist (non-whitelist).
     */
    private static List<Expression> extractDisjuncts(Type type, DiscreteValues discreteValues, QualifiedNameReference reference)
    {
        List<Expression> values = discreteValues.getValues().stream()
                .map(object -> toExpression(object, type))
                .collect(toList());

        // If values is empty, then the equatableValues was either ALL or NONE, both of which should already have been checked for
        checkState(!values.isEmpty());

        Expression predicate;
        if (values.size() == 1) {
            predicate = new ComparisonExpression(EQUAL, reference, getOnlyElement(values));
        }
        else {
            predicate = new InPredicate(reference, new InListExpression(values));
        }

        if (!discreteValues.isWhiteList()) {
            predicate = new NotExpression(predicate);
        }
        return ImmutableList.of(predicate);
    }

    // True when the range is closed on both ends, i.e. expressible as BETWEEN
    private static boolean isBetween(Range range)
    {
        return !range.getLow().isLowerUnbounded() && range.getLow().getBound() == Marker.Bound.EXACTLY
                && !range.getHigh().isUpperUnbounded() && range.getHigh().getBound() == Marker.Bound.EXACTLY;
    }

    /**
     * Convert an Expression predicate into an ExtractionResult consisting of:
     * 1) A successfully extracted TupleDomain
     * 2) An Expression fragment which represents the part of the original Expression that will need to be re-evaluated
     * after filtering with the TupleDomain.
     */
    public static ExtractionResult fromPredicate(
            Metadata metadata,
            Session session,
            Expression predicate,
            Map<Symbol, Type> types)
    {
        // The Boolean visitor context tracks whether the current subtree is logically negated
        return new Visitor(metadata, session, types).process(predicate, false);
    }

    /**
     * AST visitor that does the actual extraction. The Boolean context flag
     * means "the enclosing expression is complemented (negated)"; each visit
     * method must honor it when building domains.
     */
    private static class Visitor
            extends AstVisitor<ExtractionResult, Boolean>
    {
        private final Metadata metadata;
        private final Session session;
        private final Map<Symbol, Type> types;

        private Visitor(Metadata metadata, Session session, Map<Symbol, Type> types)
        {
            this.metadata = requireNonNull(metadata, "metadata is null");
            this.session = requireNonNull(session, "session is null");
            this.types = ImmutableMap.copyOf(requireNonNull(types, "types is null"));
        }

        // Look up a symbol's type, failing loudly if the type map is incomplete
        private Type checkedTypeLookup(Symbol symbol)
        {
            Type type = types.get(symbol);
            checkArgument(type != null, "Types is missing info for symbol: %s", symbol);
            return type;
        }

        private static ValueSet complementIfNecessary(ValueSet valueSet, boolean complement)
        {
            return complement ? valueSet.complement() : valueSet;
        }

        private static Domain complementIfNecessary(Domain domain, boolean complement)
        {
            return complement ? domain.complement() : domain;
        }

        private static Expression complementIfNecessary(Expression expression, boolean complement)
        {
            return complement ? new NotExpression(expression) : expression;
        }

        @Override
        protected ExtractionResult visitExpression(Expression node, Boolean complement)
        {
            // If we don't know how to process this node, the default response is to say that the TupleDomain is "all"
            return new ExtractionResult(TupleDomain.all(), complementIfNecessary(node, complement));
        }

        @Override
        protected ExtractionResult visitLogicalBinaryExpression(LogicalBinaryExpression node, Boolean complement)
        {
            ExtractionResult leftResult = process(node.getLeft(), complement);
            ExtractionResult rightResult = process(node.getRight(), complement);

            TupleDomain<Symbol> leftTupleDomain = leftResult.getTupleDomain();
            TupleDomain<Symbol> rightTupleDomain = rightResult.getTupleDomain();

            // Under complement, De Morgan: NOT(a AND b) == NOT a OR NOT b, and vice versa
            LogicalBinaryExpression.Type type = complement ? flipLogicalBinaryType(node.getType()) : node.getType();
            switch (type) {
                case AND:
                    return new ExtractionResult(
                            leftTupleDomain.intersect(rightTupleDomain),
                            combineConjuncts(leftResult.getRemainingExpression(), rightResult.getRemainingExpression()));

                case OR:
                    TupleDomain<Symbol> columnUnionedTupleDomain = TupleDomain.columnWiseUnion(leftTupleDomain, rightTupleDomain);

                    // In most cases, the columnUnionedTupleDomain is only a superset of the actual strict union
                    // and so we can return the current node as the remainingExpression so that all bounds will be double checked again at execution time.
                    Expression remainingExpression = complementIfNecessary(node, complement);

                    // However, there are a few cases where the column-wise union is actually equivalent to the strict union, so we if can detect
                    // some of these cases, we won't have to double check the bounds unnecessarily at execution time.

                    // We can only make inferences if the remaining expressions on both side are equal and deterministic
                    if (leftResult.getRemainingExpression().equals(rightResult.getRemainingExpression()) &&
                            DeterminismEvaluator.isDeterministic(leftResult.getRemainingExpression())) {
                        // The column-wise union is equivalent to the strict union if
                        // 1) If both TupleDomains consist of the same exact single column (e.g. left TupleDomain => (a > 0), right TupleDomain => (a < 10))
                        // 2) If one TupleDomain is a superset of the other (e.g. left TupleDomain => (a > 0, b > 0 && b < 10), right TupleDomain => (a > 5, b = 5))
                        boolean matchingSingleSymbolDomains = !leftTupleDomain.isNone()
                                && !rightTupleDomain.isNone()
                                && leftTupleDomain.getDomains().get().size() == 1
                                && rightTupleDomain.getDomains().get().size() == 1
                                && leftTupleDomain.getDomains().get().keySet().equals(rightTupleDomain.getDomains().get().keySet());
                        boolean oneSideIsSuperSet = leftTupleDomain.contains(rightTupleDomain) || rightTupleDomain.contains(leftTupleDomain);

                        if (matchingSingleSymbolDomains || oneSideIsSuperSet) {
                            remainingExpression = leftResult.getRemainingExpression();
                        }
                    }

                    return new ExtractionResult(columnUnionedTupleDomain, remainingExpression);

                default:
                    throw new AssertionError("Unknown type: " + node.getType());
            }
        }

        private static LogicalBinaryExpression.Type flipLogicalBinaryType(LogicalBinaryExpression.Type type)
        {
            switch (type) {
                case AND:
                    return LogicalBinaryExpression.Type.OR;
                case OR:
                    return LogicalBinaryExpression.Type.AND;
                default:
                    throw new AssertionError("Unknown type: " + type);
            }
        }

        @Override
        protected ExtractionResult visitNotExpression(NotExpression node, Boolean complement)
        {
            // NOT flips the complement flag for the whole subtree
            return process(node.getValue(), !complement);
        }

        @Override
        protected ExtractionResult visitComparisonExpression(ComparisonExpression node, Boolean complement)
        {
            Optional<NormalizedSimpleComparison> optionalNormalized = toNormalizedSimpleComparison(session, metadata, types, node);
            if (!optionalNormalized.isPresent()) {
                // Not a simple (symbol OP constant) comparison; fall back to "all"
                return super.visitComparisonExpression(node, complement);
            }
            NormalizedSimpleComparison normalized = optionalNormalized.get();

            Symbol symbol = Symbol.fromQualifiedName(normalized.getNameReference().getName());
            Type type = checkedTypeLookup(symbol);
            NullableValue value = normalized.getValue();

            // Handle the cases where implicit coercions can happen in comparisons
            // TODO: how to abstract this out
            if (value.getType().equals(DOUBLE) && type.equals(BIGINT)) {
                return process(coerceDoubleToLongComparison(normalized), complement);
            }
            if (value.getType().equals(BIGINT) && type.equals(DOUBLE)) {
                value = NullableValue.of(DOUBLE, ((Long) value.getValue()).doubleValue());
            }
            checkState(value.isNull() || value.getType().equals(type), "INVARIANT: comparison should be working on the same types");
            return createComparisonExtractionResult(normalized.getComparisonType(), symbol, type, value.getValue(), complement);
        }

        /**
         * Build the extraction result for a normalized (symbol OP constant)
         * comparison. A null constant makes every ordinary comparison vacuous
         * (TupleDomain "none"); only IS DISTINCT FROM is null-aware.
         */
        private ExtractionResult createComparisonExtractionResult(ComparisonExpression.Type comparisonType, Symbol column, Type type, @Nullable Object value, boolean complement)
        {
            if (value == null) {
                switch (comparisonType) {
                    case EQUAL:
                    case GREATER_THAN:
                    case GREATER_THAN_OR_EQUAL:
                    case LESS_THAN:
                    case LESS_THAN_OR_EQUAL:
                    case NOT_EQUAL:
                        return new ExtractionResult(TupleDomain.none(), TRUE_LITERAL);

                    case IS_DISTINCT_FROM:
                        Domain domain = complementIfNecessary(Domain.notNull(type), complement);
                        return new ExtractionResult(
                                TupleDomain.withColumnDomains(ImmutableMap.of(column, domain)),
                                TRUE_LITERAL);

                    default:
                        throw new AssertionError("Unhandled type: " + comparisonType);
                }
            }

            Domain domain;
            if (type.isOrderable()) {
                domain = extractOrderableDomain(comparisonType, type, value, complement);
            }
            else if (type.isComparable()) {
                domain = extractEquatableDomain(comparisonType, type, value, complement);
            }
            else {
                throw new AssertionError("Type cannot be used in a comparison expression (should have been caught in analysis): " + type);
            }

            return new ExtractionResult(
                    TupleDomain.withColumnDomains(ImmutableMap.of(column, domain)),
                    TRUE_LITERAL);
        }

        // Domain for comparisons on orderable types, expressed as ranges
        private static Domain extractOrderableDomain(ComparisonExpression.Type comparisonType, Type type, Object value, boolean complement)
        {
            checkArgument(value != null);
            switch (comparisonType) {
                case EQUAL:
                    return Domain.create(complementIfNecessary(ValueSet.ofRanges(Range.equal(type, value)), complement), false);
                case GREATER_THAN:
                    return Domain.create(complementIfNecessary(ValueSet.ofRanges(Range.greaterThan(type, value)), complement), false);
                case GREATER_THAN_OR_EQUAL:
                    return Domain.create(complementIfNecessary(ValueSet.ofRanges(Range.greaterThanOrEqual(type, value)), complement), false);
                case LESS_THAN:
                    return Domain.create(complementIfNecessary(ValueSet.ofRanges(Range.lessThan(type, value)), complement), false);
                case LESS_THAN_OR_EQUAL:
                    return Domain.create(complementIfNecessary(ValueSet.ofRanges(Range.lessThanOrEqual(type, value)), complement), false);
                case NOT_EQUAL:
                    return Domain.create(complementIfNecessary(ValueSet.ofRanges(Range.lessThan(type, value), Range.greaterThan(type, value)), complement), false);
                case IS_DISTINCT_FROM:
                    // Need to potential complement the whole domain for IS_DISTINCT_FROM since it is null-aware
                    return complementIfNecessary(Domain.create(ValueSet.ofRanges(Range.lessThan(type, value), Range.greaterThan(type, value)), true), complement);
                default:
                    throw new AssertionError("Unhandled type: " + comparisonType);
            }
        }

        // Domain for comparisons on comparable-but-not-orderable types (equality only)
        private static Domain extractEquatableDomain(ComparisonExpression.Type comparisonType, Type type, Object value, boolean complement)
        {
            checkArgument(value != null);
            switch (comparisonType) {
                case EQUAL:
                    return Domain.create(complementIfNecessary(ValueSet.of(type, value), complement), false);
                case NOT_EQUAL:
                    return Domain.create(complementIfNecessary(ValueSet.of(type, value).complement(), complement), false);
                case IS_DISTINCT_FROM:
                    // Need to potential complement the whole domain for IS_DISTINCT_FROM since it is null-aware
                    return complementIfNecessary(Domain.create(ValueSet.of(type, value).complement(), true), complement);
                default:
                    throw new AssertionError("Unhandled type: " + comparisonType);
            }
        }

        @Override
        protected ExtractionResult visitInPredicate(InPredicate node, Boolean complement)
        {
            if (!(node.getValue() instanceof QualifiedNameReference) || !(node.getValueList() instanceof InListExpression)) {
                return super.visitInPredicate(node, complement);
            }

            InListExpression valueList = (InListExpression) node.getValueList();
            checkState(!valueList.getValues().isEmpty(), "InListExpression should never be empty");

            // Rewrite "x IN (a, b, ...)" as "x = a OR x = b OR ..." and recurse
            ImmutableList.Builder<Expression> disjuncts = ImmutableList.builder();
            for (Expression expression : valueList.getValues()) {
                disjuncts.add(new ComparisonExpression(EQUAL, node.getValue(), expression));
            }
            return process(or(disjuncts.build()), complement);
        }

        @Override
        protected ExtractionResult visitBetweenPredicate(BetweenPredicate node, Boolean complement)
        {
            // Re-write as two comparison expressions
            return process(and(
                    new ComparisonExpression(GREATER_THAN_OR_EQUAL, node.getValue(), node.getMin()),
                    new ComparisonExpression(LESS_THAN_OR_EQUAL, node.getValue(), node.getMax())), complement);
        }

        @Override
        protected ExtractionResult visitIsNullPredicate(IsNullPredicate node, Boolean complement)
        {
            if (!(node.getValue() instanceof QualifiedNameReference)) {
                return super.visitIsNullPredicate(node, complement);
            }

            Symbol symbol = Symbol.fromQualifiedName(((QualifiedNameReference) node.getValue()).getName());
            Type columnType = checkedTypeLookup(symbol);
            Domain domain = complementIfNecessary(Domain.onlyNull(columnType), complement);
            return new ExtractionResult(
                    TupleDomain.withColumnDomains(ImmutableMap.of(symbol, domain)),
                    TRUE_LITERAL);
        }

        @Override
        protected ExtractionResult visitIsNotNullPredicate(IsNotNullPredicate node, Boolean complement)
        {
            if (!(node.getValue() instanceof QualifiedNameReference)) {
                return super.visitIsNotNullPredicate(node, complement);
            }

            Symbol symbol = Symbol.fromQualifiedName(((QualifiedNameReference) node.getValue()).getName());
            Type columnType = checkedTypeLookup(symbol);

            Domain domain = complementIfNecessary(Domain.notNull(columnType), complement);
            return new ExtractionResult(
                    TupleDomain.withColumnDomains(ImmutableMap.of(symbol, domain)),
                    TRUE_LITERAL);
        }

        @Override
        protected ExtractionResult visitBooleanLiteral(BooleanLiteral node, Boolean complement)
        {
            boolean value = complement ? !node.getValue() : node.getValue();
            return new ExtractionResult(value ? TupleDomain.all() : TupleDomain.none(), TRUE_LITERAL);
        }

        @Override
        protected ExtractionResult visitNullLiteral(NullLiteral node, Boolean complement)
        {
            // A bare NULL predicate never evaluates to true, complemented or not
            return new ExtractionResult(TupleDomain.none(), TRUE_LITERAL);
        }
    }

    /**
     * Extract a normalized simple comparison between a QualifiedNameReference and a native value if possible.
     */
    private static Optional<NormalizedSimpleComparison> toNormalizedSimpleComparison(Session session, Metadata metadata, Map<Symbol, Type> types, ComparisonExpression comparison)
    {
        IdentityHashMap<Expression, Type> expressionTypes = ExpressionAnalyzer.getExpressionTypes(session, metadata, new SqlParser(), types, comparison);
        // Constant-fold both sides; a fully folded side is a non-Expression value
        Object left = ExpressionInterpreter.expressionOptimizer(comparison.getLeft(), metadata, session, expressionTypes).optimize(NoOpSymbolResolver.INSTANCE);
        Object right = ExpressionInterpreter.expressionOptimizer(comparison.getRight(), metadata, session, expressionTypes).optimize(NoOpSymbolResolver.INSTANCE);

        if (left instanceof QualifiedNameReference && !(right instanceof Expression)) {
            return Optional.of(new NormalizedSimpleComparison((QualifiedNameReference) left, comparison.getType(), new NullableValue(expressionTypes.get(comparison.getRight()), right)));
        }
        if (right instanceof QualifiedNameReference && !(left instanceof Expression)) {
            // Symbol on the right: flip the comparison so the symbol is canonical (left)
            return Optional.of(new NormalizedSimpleComparison((QualifiedNameReference) right, flipComparison(comparison.getType()), new NullableValue(expressionTypes.get(comparison.getLeft()), left)));
        }
        return Optional.empty();
    }

    /**
     * A comparison canonicalized as (symbol reference) OP (constant value).
     */
    private static class NormalizedSimpleComparison
    {
        private final QualifiedNameReference nameReference;
        private final ComparisonExpression.Type comparisonType;
        private final NullableValue value;

        public NormalizedSimpleComparison(QualifiedNameReference nameReference, ComparisonExpression.Type comparisonType, NullableValue value)
        {
            this.nameReference = requireNonNull(nameReference, "nameReference is null");
            this.comparisonType = requireNonNull(comparisonType, "comparisonType is null");
            this.value = requireNonNull(value, "value is null");
        }

        public QualifiedNameReference getNameReference()
        {
            return nameReference;
        }

        public ComparisonExpression.Type getComparisonType()
        {
            return comparisonType;
        }

        public NullableValue getValue()
        {
            return value;
        }
    }

    /**
     * Rewrite a comparison between a BIGINT symbol and a DOUBLE constant into
     * an equivalent comparison against a rounded BIGINT constant, choosing the
     * rounding direction so the predicate's truth value is preserved.
     */
    private static Expression coerceDoubleToLongComparison(NormalizedSimpleComparison normalized)
    {
        checkArgument(normalized.getValue().getType().equals(DOUBLE), "Value should be of DOUBLE type");
        checkArgument(!normalized.getValue().isNull(), "Value should not be null");
        QualifiedNameReference reference = normalized.getNameReference();
        Double value = (Double) normalized.getValue().getValue();

        switch (normalized.getComparisonType()) {
            case GREATER_THAN_OR_EQUAL:
            case LESS_THAN:
                return new ComparisonExpression(normalized.getComparisonType(), reference, toExpression(DoubleMath.roundToLong(value, CEILING), BIGINT));

            case GREATER_THAN:
            case LESS_THAN_OR_EQUAL:
                return new ComparisonExpression(normalized.getComparisonType(), reference, toExpression(DoubleMath.roundToLong(value, FLOOR), BIGINT));

            case EQUAL:
                Long equalValue = DoubleMath.roundToLong(value, FLOOR);
                if (equalValue.doubleValue() != value) {
                    // Return something that is false for all non-null values
                    return and(new ComparisonExpression(EQUAL, reference, new LongLiteral("0")),
                            new ComparisonExpression(NOT_EQUAL, reference, new LongLiteral("0")));
                }
                return new ComparisonExpression(normalized.getComparisonType(), reference, toExpression(equalValue, BIGINT));

            case NOT_EQUAL:
                Long notEqualValue = DoubleMath.roundToLong(value, FLOOR);
                if (notEqualValue.doubleValue() != value) {
                    // Return something that is true for all non-null values
                    return or(new ComparisonExpression(EQUAL, reference, new LongLiteral("0")),
                            new ComparisonExpression(NOT_EQUAL, reference, new LongLiteral("0")));
                }
                return new ComparisonExpression(normalized.getComparisonType(), reference, toExpression(notEqualValue, BIGINT));

            case IS_DISTINCT_FROM:
                Long distinctValue = DoubleMath.roundToLong(value, FLOOR);
                if (distinctValue.doubleValue() != value) {
                    // Non-integral constant can never equal a BIGINT, so always distinct
                    return TRUE_LITERAL;
                }
                return new ComparisonExpression(normalized.getComparisonType(), reference, toExpression(distinctValue, BIGINT));

            default:
                throw new AssertionError("Unhandled type: " + normalized.getComparisonType());
        }
    }

    /**
     * Result of {@link #fromPredicate}: the extracted TupleDomain plus the
     * residual Expression that must still be evaluated after filtering.
     */
    public static class ExtractionResult
    {
        private final TupleDomain<Symbol> tupleDomain;
        private final Expression remainingExpression;

        public ExtractionResult(TupleDomain<Symbol> tupleDomain, Expression remainingExpression)
        {
            this.tupleDomain = requireNonNull(tupleDomain, "tupleDomain is null");
            this.remainingExpression = requireNonNull(remainingExpression, "remainingExpression is null");
        }

        public TupleDomain<Symbol> getTupleDomain()
        {
            return tupleDomain;
        }

        public Expression getRemainingExpression()
        {
            return remainingExpression;
        }
    }
}
/*
 * Copyright (c) 2005-2010 Grameen Foundation USA
 * All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 * See also http://www.apache.org/licenses/LICENSE-2.0.html for an
 * explanation of the license and how it is applied.
 */

package org.mifos.framework.components.customTableTag;

import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Locale;

import javax.servlet.jsp.JspException;
import javax.servlet.jsp.PageContext;

import org.mifos.application.master.MessageLookup;
import org.mifos.config.business.MifosConfiguration;
import org.mifos.framework.exceptions.TableTagParseException;
import org.mifos.framework.util.helpers.Constants;
import org.mifos.framework.util.helpers.LabelTagUtils;
import org.mifos.security.util.UserContext;

/**
 * Describes one column of the custom table tag: its header label, the value
 * rendered in each row, and whether that value is obtained by reflection
 * (valueType {@code METHOD}) and/or wrapped in a link (columnType other than
 * {@code TEXT}). Instances are populated by the table-tag XML parser via the
 * setters and then asked to emit HTML fragments into a {@link StringBuilder}.
 */
public class Column {

    private String label = null;

    private String value = null;

    private String valueType = null;

    private String columnType = null;

    private ColumnDetails columnDetails = null;

    private LinkDetails linkDetails = null;

    // NOTE(review): never read in this class; retained because
    // MifosConfiguration.getInstance() may have initialization side effects
    // relied on elsewhere — confirm before removing.
    private MifosConfiguration labelConfig = MifosConfiguration.getInstance();

    public void setLinkDetails(LinkDetails linkDetails) {
        this.linkDetails = linkDetails;
    }

    public LinkDetails getLinkDetails() {
        return linkDetails;
    }

    /**
     * Misspelled setter kept for backward compatibility with existing callers
     * (including any reflective/tag-library lookups of this exact name).
     *
     * @deprecated use {@link #setColumnDetails(ColumnDetails)} instead.
     */
    @Deprecated
    public void setColumnDetials(ColumnDetails columnDetails) {
        this.columnDetails = columnDetails;
    }

    /** Correctly spelled equivalent of {@link #setColumnDetials(ColumnDetails)}. */
    public void setColumnDetails(ColumnDetails columnDetails) {
        this.columnDetails = columnDetails;
    }

    public ColumnDetails getColumnDetails() {
        return columnDetails;
    }

    public void setLabel(String label) {
        this.label = label;
    }

    public void setValue(String value) {
        this.value = value;
    }

    public void setValueType(String valueType) {
        this.valueType = valueType;
    }

    public void setColumnType(String columnType) {
        this.columnType = columnType;
    }

    public String getLabel() {
        return label;
    }

    public String getValue() {
        return value;
    }

    public String getValueType() {
        return valueType;
    }

    public String getColumnType() {
        return columnType;
    }

    /**
     * Appends the header cell (&lt;td&gt;) for this column. An absent or empty
     * label renders as a non-breaking space; otherwise the localized label is
     * rendered in bold.
     *
     * @param tableInfo   buffer receiving the generated HTML
     * @param pageContext JSP page context used to resolve the user's locale
     * @param bundle      resource bundle name for label lookup
     * @throws JspException if label resolution fails irrecoverably
     */
    public void getColumnHeader(StringBuilder tableInfo, PageContext pageContext, String bundle) throws JspException {
        tableInfo.append("<td ");
        tableInfo.append(" width=\"" + getColumnDetails().getColWidth() + "%\"");
        tableInfo.append(" align=\"" + getColumnDetails().getAlign() + "\" ");
        tableInfo.append(">");
        // Bug fix: the null check must come first — the previous code
        // dereferenced getLabel() before testing it for null, so a null label
        // always threw a NullPointerException. (The old replaceAll("", "")
        // call was a no-op and is dropped.)
        String columnLabel = getLabel();
        if (columnLabel == null || columnLabel.equals("")) {
            tableInfo.append("&nbsp;");
        } else {
            tableInfo.append("<b>" + getLabelText(pageContext, columnLabel, bundle) + "</b>");
        }
        tableInfo.append("</td>");
    }

    /**
     * Appends one data cell (&lt;td&gt;) for this column. The cell content is
     * either the reflective getter result (valueType {@code METHOD}) — plain or
     * wrapped in a link depending on columnType — or, for non-method values,
     * a link whose visible text is the raw configured value.
     *
     * @throws TableTagParseException if the column configuration is
     *         inconsistent (non-method value with columnType TEXT) or
     *         reflection fails
     */
    public void generateTableColumn(StringBuilder tableInfo, Object obj, Locale locale, Locale prefferedLocale,
            Locale mfiLocale) throws TableTagParseException {
        tableInfo.append("<td class=\"" + getColumnDetails().getRowStyle() + "\" ");
        tableInfo.append(" align=\"" + getColumnDetails().getAlign() + "\" ");
        tableInfo.append("> ");
        if (getValueType().equalsIgnoreCase(TableTagConstants.METHOD)) {
            if (getColumnType().equalsIgnoreCase(TableTagConstants.TEXT)) {
                getTableColumn(tableInfo, obj, locale, prefferedLocale, mfiLocale);
            } else {
                // Generate Link On Column
                getTableColumnWithLink(tableInfo, obj, locale, prefferedLocale, mfiLocale);
            }
        } else {
            if (getColumnType().equalsIgnoreCase(TableTagConstants.TEXT)) {
                // ColumnType should be link
                throw new TableTagParseException(getColumnType());
            }
            getLinkWithoutName(tableInfo, obj);
        }
        tableInfo.append("</td>");
    }

    /**
     * Appends the value of this column for one row, obtained by invoking the
     * bean's {@code get<Value>()} method reflectively. Also pushes the request
     * and MFI locales into the bean via {@code setLocale}/{@code setMfiLocale}
     * when such setters exist, so locale-sensitive getters format correctly.
     * Methods are scanned in declaration order, preserving the original
     * interleaving of getter invocation and locale injection.
     *
     * @throws TableTagParseException wrapping any reflection failure
     */
    public void getTableColumn(StringBuilder tableInfo, Object obj, Locale locale, Locale prefferedLocale,
            Locale mfiLocale) throws TableTagParseException {
        Method[] methods = obj.getClass().getMethods();
        for (Method method : methods) {
            if (method.getName().equalsIgnoreCase("get".concat(getValue()))) {
                tableInfo.append(invokeOrWrap(method, obj));
            }
            if (method.getName().equalsIgnoreCase("setLocale") && locale != null) {
                invokeOrWrap(method, obj, locale);
            }
            if (method.getName().equalsIgnoreCase("setMfiLocale") && mfiLocale != null) {
                invokeOrWrap(method, obj, mfiLocale);
            }
        }
    }

    /** Invokes {@code method} on {@code target}, translating the two checked
     *  reflection exceptions into {@link TableTagParseException}. */
    private Object invokeOrWrap(Method method, Object target, Object... args) throws TableTagParseException {
        try {
            return method.invoke(target, args);
        } catch (IllegalAccessException e) {
            throw new TableTagParseException(e);
        } catch (InvocationTargetException ex) {
            throw new TableTagParseException(ex);
        }
    }

    /** Appends the column value wrapped in an anchor generated from {@link #linkDetails}. */
    public void getTableColumnWithLink(StringBuilder tableInfo, Object obj, Locale locale, Locale prefferedLocale,
            Locale mfiLocale) throws TableTagParseException {
        tableInfo.append("<a ");
        linkDetails.generateLink(tableInfo, obj);
        tableInfo.append(" >");
        getTableColumn(tableInfo, obj, locale, prefferedLocale, mfiLocale);
        tableInfo.append("</a>");
    }

    /** Appends an anchor whose visible text is the raw configured value. */
    public void getLinkWithoutName(StringBuilder tableInfo, Object obj) throws TableTagParseException {
        tableInfo.append("<a ");
        linkDetails.generateLink(tableInfo, obj);
        tableInfo.append(" >");
        tableInfo.append(getValue());
        tableInfo.append("</a>");
    }

    /**
     * Resolves the display text for {@code key}, trying in order:
     * <ol>
     *   <li>the given bundle under the user's preferred locale,</li>
     *   <li>the {@link MessageLookup} fallback,</li>
     *   <li>the bundle again with its first letter capitalized.</li>
     * </ol>
     * Falls back to the key itself so a missing resource never renders as
     * the string "null".
     */
    private String getLabelText(PageContext pageContext, String key, String bundle) throws JspException {
        UserContext userContext = (UserContext) pageContext.getSession().getAttribute(Constants.USER_CONTEXT_KEY);
        LabelTagUtils labelTagUtils = LabelTagUtils.getInstance();
        String labelText = null;
        try {
            labelText = labelTagUtils.getLabel(pageContext, bundle, userContext.getPreferredLocale(), key, null);
        } catch (Exception ignored) {
            // Best-effort lookup: fall through to the next resolution strategy.
        }
        if (labelText == null) {
            labelText = MessageLookup.getInstance().lookup(key);
        }
        if (labelText == null) {
            try {
                char[] charArray = bundle.toCharArray();
                charArray[0] = Character.toUpperCase(charArray[0]);
                bundle = new String(charArray);
                labelText = labelTagUtils.getLabel(pageContext, bundle, userContext.getPreferredLocale(), key, null);
            } catch (Exception ignored) {
                labelText = key;
            }
        }
        if (labelText == null) {
            // Robustness fix: previously a lookup that returned null (rather
            // than throwing) leaked null to the caller, rendering "<b>null</b>".
            labelText = key;
        }
        return labelText;
    }
}
/*
 * Copyright (c) 1998, 2013, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.  Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */
package javax.swing.text;

import java.io.Serializable;

/**
 * A TabSet is comprised of many TabStops. It offers methods for locating the
 * closest TabStop to a given position and finding all the potential TabStops.
 * It is also immutable.
 * <p>
 * <strong>Warning:</strong>
 * Serialized objects of this class will not be compatible with
 * future Swing releases. The current serialization support is
 * appropriate for short term storage or RMI between applications running
 * the same version of Swing. As of 1.4, support for long term storage
 * of all JavaBeans&trade;
 * has been added to the <code>java.beans</code> package.
 * Please see {@link java.beans.XMLEncoder}.
 *
 * @author  Scott Violet
 */
public class TabSet implements Serializable
{
    /** TabStops this TabSet contains. Defensively copied in the constructor,
     *  never exposed directly, so the set is effectively immutable. */
    private TabStop[] tabs;

    /**
     * Since this class is immutable the hash code could be
     * calculated once. MAX_VALUE means that it was not initialized
     * yet. Hash code shouldn't have the MAX_VALUE value (the computation
     * below decrements it if it collides with the sentinel).
     */
    private int hashCode = Integer.MAX_VALUE;

    /**
     * Creates and returns an instance of TabSet. The array of Tabs
     * passed in must be sorted in ascending order.
     */
    public TabSet(TabStop[] tabs) {
        // PENDING(sky): If this becomes a problem, make it sort.
        if(tabs != null) {
            int          tabCount = tabs.length;

            // Defensive copy: callers keep their array, we keep ours.
            this.tabs = new TabStop[tabCount];
            System.arraycopy(tabs, 0, this.tabs, 0, tabCount);
        }
        else
            this.tabs = null;
    }

    /**
     * Returns the number of Tab instances the receiver contains.
     */
    public int getTabCount() {
        return (tabs == null) ? 0 : tabs.length;
    }

    /**
     * Returns the TabStop at index <code>index</code>. This will throw an
     * IllegalArgumentException if <code>index</code> is outside the range
     * of tabs.
     */
    public TabStop getTab(int index) {
        int          numTabs = getTabCount();

        if(index < 0 || index >= numTabs)
            throw new IllegalArgumentException(index +
                                              " is outside the range of tabs");
        return tabs[index];
    }

    /**
     * Returns the Tab instance after <code>location</code>. This will
     * return null if there are no tabs after <code>location</code>.
     */
    public TabStop getTabAfter(float location) {
        int     index = getTabIndexAfter(location);

        return (index == -1) ? null : tabs[index];
    }

    /**
     * Returns the index of the TabStop <code>tab</code>, or -1 if
     * <code>tab</code> is not contained in the receiver.
     * Note: this is an identity (==) scan, not an equals() scan — two
     * distinct but equal TabStop instances will NOT be found.
     */
    public int getTabIndex(TabStop tab) {
        for(int counter = getTabCount() - 1; counter >= 0; counter--)
            // should this use .equals?
            if(getTab(counter) == tab)
                return counter;
        return -1;
    }

    /**
     * Returns the index of the Tab to be used after <code>location</code>.
     * This will return -1 if there are no tabs after <code>location</code>.
     * Implementation: binary search over the (ascending) positions for the
     * first tab whose position is strictly greater than location; relies on
     * the sorted-order precondition documented on the constructor.
     */
    public int getTabIndexAfter(float location) {
        int     current, min, max;

        min = 0;
        max = getTabCount();
        while(min != max) {
            current = (max - min) / 2 + min;
            if(location > tabs[current].getPosition()) {
                // Tab at 'current' is at or before location; search right.
                if(min == current)
                    min = max;
                else
                    min = current;
            }
            else {
                // Tab at 'current' is after location; it is the answer iff
                // its predecessor (if any) is at or before location.
                if(current == 0 || location > tabs[current - 1].getPosition())
                    return current;
                max = current;
            }
        }
        // no tabs after the passed in location.
        return -1;
    }

    /**
     * Indicates whether this <code>TabSet</code> is equal to another one.
     * @param o the <code>TabSet</code> instance which this instance
     *  should be compared to.
     * @return <code>true</code> if <code>o</code> is the instance of
     * <code>TabSet</code>, has the same number of <code>TabStop</code>s
     * and they are all equal, <code>false</code> otherwise.
     *
     * @since 1.5
     */
    public boolean equals(Object o) {
        if (o == this) {
            return true;
        }
        if (o instanceof TabSet) {
            TabSet ts = (TabSet) o;
            int count = getTabCount();
            if (ts.getTabCount() != count) {
                return false;
            }
            for (int i=0; i < count; i++) {
                TabStop ts1 = getTab(i);
                TabStop ts2 = ts.getTab(i);
                // Element-wise comparison; null elements only match null.
                if ((ts1 == null && ts2 != null) ||
                        (ts1 != null && !getTab(i).equals(ts.getTab(i)))) {
                    return false;
                }
            }
            return true;
        }
        return false;
    }

    /**
     * Returns a hashcode for this set of TabStops.
     * @return  a hashcode value for this set of TabStops.
     *
     * @since 1.5
     */
    public int hashCode() {
        // Lazily computed and cached; MAX_VALUE is the "not yet computed"
        // sentinel, so a computed value equal to it is nudged down by one.
        if (hashCode == Integer.MAX_VALUE) {
            hashCode = 0;
            int len = getTabCount();
            for (int i = 0; i < len; i++) {
                TabStop ts = getTab(i);
                hashCode ^= ts != null ? getTab(i).hashCode() : 0;
            }
            if (hashCode == Integer.MAX_VALUE) {
                hashCode -= 1;
            }
        }
        return hashCode;
    }

    /**
     * Returns the string representation of the set of tabs.
     */
    public String toString() {
        int            tabCount = getTabCount();
        StringBuilder  buffer = new StringBuilder("[ ");

        for(int counter = 0; counter < tabCount; counter++) {
            if(counter > 0)
                buffer.append(" - ");
            buffer.append(getTab(counter).toString());
        }
        buffer.append(" ]");
        return buffer.toString();
    }
}
/*
 * #%L
 * ImageJ software for multidimensional image processing and analysis.
 * %%
 * Copyright (C) 2014 - 2016 Board of Regents of the University of
 * Wisconsin-Madison, University of Konstanz and Brian Northan.
 * %%
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice,
 *    this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 * #L%
 */

package net.imagej.ops.image;

import net.imagej.ops.AbstractNamespace;
import net.imagej.ops.Namespace;
import net.imagej.ops.OpMethod;
import net.imagej.ops.Ops;
import net.imagej.ops.image.cooccurrenceMatrix.MatrixOrientation;
import net.imglib2.IterableInterval;
import net.imglib2.RandomAccessibleInterval;
import net.imglib2.histogram.Histogram1d;
import net.imglib2.type.BooleanType;
import net.imglib2.type.Type;
import net.imglib2.type.numeric.RealType;

import org.scijava.plugin.Plugin;

/**
 * The image namespace contains operations relating to images.
 * <p>
 * Every method in this class is type-safe sugar over a dynamic
 * {@code ops().run(...)} dispatch: the {@code @OpMethod} annotation names the
 * op implementation(s) the call may resolve to, and the cast on the result
 * narrows the framework's untyped return value back to the declared type.
 * The casts are safe by construction of the named ops.
 *
 * @author Curtis Rueden
 */
@Plugin(type = Namespace.class)
public class ImageNamespace extends AbstractNamespace {

    // -- ascii --

    /** Executes the "ascii" operation on the given arguments. */
    @OpMethod(op = net.imagej.ops.image.ascii.DefaultASCII.class)
    public <T extends RealType<T>> String ascii(final IterableInterval<T> image) {
        final String result = (String) ops().run(
            net.imagej.ops.Ops.Image.ASCII.class, image);
        return result;
    }

    /** Executes the "ascii" operation on the given arguments. */
    @OpMethod(op = net.imagej.ops.image.ascii.DefaultASCII.class)
    public <T extends RealType<T>> String ascii(final IterableInterval<T> image,
        final T min)
    {
        final String result = (String) ops().run(
            net.imagej.ops.Ops.Image.ASCII.class, image, min);
        return result;
    }

    /** Executes the "ascii" operation on the given arguments. */
    @OpMethod(op = net.imagej.ops.image.ascii.DefaultASCII.class)
    public <T extends RealType<T>> String ascii(final IterableInterval<T> image,
        final T min, final T max)
    {
        final String result = (String) ops().run(
            net.imagej.ops.Ops.Image.ASCII.class, image, min, max);
        return result;
    }

    // -- cooccurrence matrix --

    /**
     * Executes the "cooccurrenceMatrix" operation on the given arguments,
     * resolving to the 2D or 3D implementation based on the input.
     */
    @OpMethod(ops = {
        net.imagej.ops.image.cooccurrenceMatrix.CooccurrenceMatrix3D.class,
        net.imagej.ops.image.cooccurrenceMatrix.CooccurrenceMatrix2D.class })
    public <T extends RealType<T>> double[][] cooccurrenceMatrix(
        final IterableInterval<T> in, final int nrGreyLevels, final int distance,
        final MatrixOrientation orientation)
    {
        final double[][] result = (double[][]) ops().run(
            Ops.Image.CooccurrenceMatrix.class, in, nrGreyLevels, distance,
            orientation);
        return result;
    }

    // -- distance transform --

    /** Executes the "distancetransform" operation on the given arguments. */
    @OpMethod(ops = {
        net.imagej.ops.image.distancetransform.DefaultDistanceTransform.class,
        net.imagej.ops.image.distancetransform.DistanceTransform2D.class,
        net.imagej.ops.image.distancetransform.DistanceTransform3D.class })
    public <B extends BooleanType<B>, T extends RealType<T>> RandomAccessibleInterval<T> distancetransform(
        final RandomAccessibleInterval<B> in, final RandomAccessibleInterval<T> out)
    {
        @SuppressWarnings("unchecked")
        final RandomAccessibleInterval<T> result = (RandomAccessibleInterval<T>) ops()
            .run(Ops.Image.DistanceTransform.class, in, out);
        return result;
    }

    /** Executes the "distancetransform" operation on the given arguments. */
    @OpMethod(ops = {
        net.imagej.ops.image.distancetransform.DefaultDistanceTransform.class,
        net.imagej.ops.image.distancetransform.DistanceTransform2D.class,
        net.imagej.ops.image.distancetransform.DistanceTransform3D.class })
    public <B extends BooleanType<B>, T extends RealType<T>> RandomAccessibleInterval<T> distancetransform(
        final RandomAccessibleInterval<B> in)
    {
        @SuppressWarnings("unchecked")
        final RandomAccessibleInterval<T> result = (RandomAccessibleInterval<T>) ops()
            .run(Ops.Image.DistanceTransform.class, in);
        return result;
    }

    // -- equation --

    /** Executes the "equation" operation on the given arguments. */
    @OpMethod(op = net.imagej.ops.image.equation.DefaultEquation.class)
    public <T extends RealType<T>> IterableInterval<T> equation(final String in) {
        @SuppressWarnings("unchecked")
        final IterableInterval<T> result = (IterableInterval<T>) ops().run(
            net.imagej.ops.Ops.Image.Equation.class, in);
        return result;
    }

    /** Executes the "equation" operation on the given arguments. */
    @OpMethod(op = net.imagej.ops.image.equation.DefaultEquation.class)
    public <T extends RealType<T>> IterableInterval<T> equation(
        final IterableInterval<T> out, final String in)
    {
        @SuppressWarnings("unchecked")
        final IterableInterval<T> result = (IterableInterval<T>) ops().run(
            net.imagej.ops.Ops.Image.Equation.class, out, in);
        return result;
    }

    // -- fill --

    /** Executes the "fill" operation on the given arguments. */
    @OpMethod(op = net.imagej.ops.image.fill.DefaultFill.class)
    public <T extends Type<T>> Iterable<T> fill(final Iterable<T> out, final T in)
    {
        @SuppressWarnings("unchecked")
        final Iterable<T> result = (Iterable<T>) ops().run(
            net.imagej.ops.Ops.Image.Fill.class, out, in);
        return result;
    }

    // -- histogram --

    /** Executes the "histogram" operation on the given arguments. */
    @OpMethod(op = net.imagej.ops.image.histogram.HistogramCreate.class)
    public <T extends RealType<T>> Histogram1d<T> histogram(final Iterable<T> in)
    {
        @SuppressWarnings("unchecked")
        final Histogram1d<T> result = (Histogram1d<T>) ops().run(
            net.imagej.ops.Ops.Image.Histogram.class, in);
        return result;
    }

    /** Executes the "histogram" operation on the given arguments. */
    @OpMethod(op = net.imagej.ops.image.histogram.HistogramCreate.class)
    public <T extends RealType<T>> Histogram1d<T> histogram(
        final Iterable<T> in, final int numBins)
    {
        @SuppressWarnings("unchecked")
        final Histogram1d<T> result = (Histogram1d<T>) ops().run(
            net.imagej.ops.Ops.Image.Histogram.class, in, numBins);
        return result;
    }

    //-- integral --

    /** Executes the "integral" operation on the given arguments. */
    @SuppressWarnings({ "unchecked", "rawtypes" })
    @OpMethod(op = net.imagej.ops.image.integral.DefaultIntegralImg.class)
    public <T extends RealType<T>> RandomAccessibleInterval<RealType> integral(
        final RandomAccessibleInterval<RealType> out,
        final RandomAccessibleInterval<T> in)
    {
        final RandomAccessibleInterval<RealType> result =
            (RandomAccessibleInterval) ops().run(
                net.imagej.ops.image.integral.DefaultIntegralImg.class, out, in);
        return result;
    }

    /** Executes the "integral" operation on the given arguments. */
    @SuppressWarnings({ "unchecked", "rawtypes" })
    @OpMethod(ops = { net.imagej.ops.image.integral.DefaultIntegralImg.class,
        net.imagej.ops.image.integral.WrappedIntegralImg.class })
    public <T extends RealType<T>> RandomAccessibleInterval<RealType> integral(
        final RandomAccessibleInterval<T> in)
    {
        final RandomAccessibleInterval<RealType> result =
            (RandomAccessibleInterval) ops().run(Ops.Image.Integral.class, in);
        return result;
    }

    /** Executes the "squareIntegral" operation on the given arguments. */
    @SuppressWarnings({ "unchecked", "rawtypes" })
    @OpMethod(op = net.imagej.ops.image.integral.SquareIntegralImg.class)
    public <T extends RealType<T>> RandomAccessibleInterval<RealType>
        squareIntegral(final RandomAccessibleInterval<RealType> out,
            final RandomAccessibleInterval<T> in)
    {
        final RandomAccessibleInterval<RealType> result =
            (RandomAccessibleInterval) ops().run(Ops.Image.SquareIntegral.class,
                out, in);
        return result;
    }

    /** Executes the "squareIntegral" operation on the given arguments. */
    @SuppressWarnings({ "unchecked", "rawtypes" })
    @OpMethod(op = net.imagej.ops.image.integral.SquareIntegralImg.class)
    public <T extends RealType<T>> RandomAccessibleInterval<RealType>
        squareIntegral(final RandomAccessibleInterval<T> in)
    {
        final RandomAccessibleInterval<RealType> result =
            (RandomAccessibleInterval) ops().run(Ops.Image.SquareIntegral.class,
                in);
        return result;
    }

    // -- invert --

    /** Executes the "invert" operation on the given arguments. */
    @OpMethod(op = net.imagej.ops.image.invert.InvertII.class)
    public <I extends RealType<I>, O extends RealType<O>> IterableInterval<O>
        invert(final IterableInterval<O> out, final IterableInterval<I> in)
    {
        @SuppressWarnings("unchecked")
        final IterableInterval<O> result = (IterableInterval<O>) ops().run(
            net.imagej.ops.Ops.Image.Invert.class, out, in);
        return result;
    }

    // -- normalize --

    /** Executes the "normalize" operation on the given arguments. */
    @OpMethod(op = net.imagej.ops.image.normalize.NormalizeIIComputer.class)
    public <T extends RealType<T>> IterableInterval<T> normalize(
        final IterableInterval<T> out, final IterableInterval<T> in)
    {
        @SuppressWarnings("unchecked")
        final IterableInterval<T> result = (IterableInterval<T>) ops()
            .run(net.imagej.ops.Ops.Image.Normalize.class, out, in);
        return result;
    }

    /** Executes the "normalize" operation on the given arguments. */
    @OpMethod(op = net.imagej.ops.image.normalize.NormalizeIIComputer.class)
    public <T extends RealType<T>> IterableInterval<T> normalize(
        final IterableInterval<T> out, final IterableInterval<T> in,
        final T sourceMin)
    {
        @SuppressWarnings("unchecked")
        final IterableInterval<T> result = (IterableInterval<T>) ops().run(
            net.imagej.ops.Ops.Image.Normalize.class, out, in, sourceMin);
        return result;
    }

    /** Executes the "normalize" operation on the given arguments. */
    @OpMethod(op = net.imagej.ops.image.normalize.NormalizeIIComputer.class)
    public <T extends RealType<T>> IterableInterval<T> normalize(
        final IterableInterval<T> out, final IterableInterval<T> in,
        final T sourceMin, final T sourceMax)
    {
        @SuppressWarnings("unchecked")
        final IterableInterval<T> result = (IterableInterval<T>) ops().run(
            net.imagej.ops.Ops.Image.Normalize.class, out, in, sourceMin,
            sourceMax);
        return result;
    }

    /** Executes the "normalize" operation on the given arguments. */
    @OpMethod(op = net.imagej.ops.image.normalize.NormalizeIIComputer.class)
    public <T extends RealType<T>> IterableInterval<T> normalize(
        final IterableInterval<T> out, final IterableInterval<T> in,
        final T sourceMin, final T sourceMax, final T targetMin)
    {
        @SuppressWarnings("unchecked")
        final IterableInterval<T> result = (IterableInterval<T>) ops().run(
            net.imagej.ops.Ops.Image.Normalize.class, out, in, sourceMin,
            sourceMax, targetMin);
        return result;
    }

    /** Executes the "normalize" operation on the given arguments. */
    @OpMethod(op = net.imagej.ops.image.normalize.NormalizeIIComputer.class)
    public <T extends RealType<T>> IterableInterval<T> normalize(
        final IterableInterval<T> out, final IterableInterval<T> in,
        final T sourceMin, final T sourceMax, final T targetMin,
        final T targetMax)
    {
        @SuppressWarnings("unchecked")
        final IterableInterval<T> result = (IterableInterval<T>) ops().run(
            net.imagej.ops.Ops.Image.Normalize.class, out, in, sourceMin,
            sourceMax, targetMin, targetMax);
        return result;
    }

    /** Executes the "normalize" operation on the given arguments. */
    @OpMethod(
        op = net.imagej.ops.image.normalize.NormalizeIIFunction.class)
    public <T extends RealType<T>> IterableInterval<T> normalize(
        final IterableInterval<T> in)
    {
        @SuppressWarnings("unchecked")
        final IterableInterval<T> result = (IterableInterval<T>) ops().run(
            net.imagej.ops.Ops.Image.Normalize.class, in);
        return result;
    }

    /** Executes the "normalize" operation on the given arguments. */
    @OpMethod(
        op = net.imagej.ops.image.normalize.NormalizeIIFunction.class)
    public <T extends RealType<T>> IterableInterval<T> normalize(
        final IterableInterval<T> in, final T sourceMin)
    {
        @SuppressWarnings("unchecked")
        final IterableInterval<T> result = (IterableInterval<T>) ops().run(
            net.imagej.ops.Ops.Image.Normalize.class, in, sourceMin);
        return result;
    }

    /** Executes the "normalize" operation on the given arguments. */
    @OpMethod(
        op = net.imagej.ops.image.normalize.NormalizeIIFunction.class)
    public <T extends RealType<T>> IterableInterval<T> normalize(
        final IterableInterval<T> in, final T sourceMin, final T sourceMax)
    {
        @SuppressWarnings("unchecked")
        final IterableInterval<T> result = (IterableInterval<T>) ops().run(
            net.imagej.ops.Ops.Image.Normalize.class, in, sourceMin, sourceMax);
        return result;
    }

    /** Executes the "normalize" operation on the given arguments. */
    @OpMethod(
        op = net.imagej.ops.image.normalize.NormalizeIIFunction.class)
    public <T extends RealType<T>> IterableInterval<T> normalize(
        final IterableInterval<T> in, final T sourceMin, final T sourceMax,
        final T targetMin)
    {
        @SuppressWarnings("unchecked")
        final IterableInterval<T> result = (IterableInterval<T>) ops().run(
            net.imagej.ops.Ops.Image.Normalize.class, in, sourceMin, sourceMax,
            targetMin);
        return result;
    }

    /** Executes the "normalize" operation on the given arguments. */
    @OpMethod(
        op = net.imagej.ops.image.normalize.NormalizeIIFunction.class)
    public <T extends RealType<T>> IterableInterval<T> normalize(
        final IterableInterval<T> in, final T sourceMin, final T sourceMax,
        final T targetMin, final T targetMax)
    {
        @SuppressWarnings("unchecked")
        final IterableInterval<T> result = (IterableInterval<T>) ops().run(
            net.imagej.ops.Ops.Image.Normalize.class, in, sourceMin, sourceMax,
            targetMin, targetMax);
        return result;
    }

    /** Executes the "normalize" operation on the given arguments. */
    @OpMethod(
        op = net.imagej.ops.image.normalize.NormalizeIIFunction.class)
    public <T extends RealType<T>> IterableInterval<T> normalize(
        final IterableInterval<T> in, final T sourceMin, final T sourceMax,
        final T targetMin, final T targetMax, final boolean isLazy)
    {
        @SuppressWarnings("unchecked")
        final IterableInterval<T> result = (IterableInterval<T>) ops().run(
            net.imagej.ops.Ops.Image.Normalize.class, in, sourceMin, sourceMax,
            targetMin, targetMax, isLazy);
        return result;
    }

    @Override
    public String getName() {
        return "image";
    }

}
// ParserAdapter.java - adapt a SAX1 Parser to a SAX2 XMLReader. // http://www.saxproject.org // Written by David Megginson // NO WARRANTY! This class is in the public domain. // $Id$ package org.xml.sax.helpers; import java.io.IOException; import java.util.Enumeration; import java.util.Vector; import org.xml.sax.Parser; // deprecated import org.xml.sax.InputSource; import org.xml.sax.Locator; import org.xml.sax.AttributeList; // deprecated import org.xml.sax.EntityResolver; import org.xml.sax.DTDHandler; import org.xml.sax.DocumentHandler; // deprecated import org.xml.sax.ErrorHandler; import org.xml.sax.SAXException; import org.xml.sax.SAXParseException; import org.xml.sax.XMLReader; import org.xml.sax.Attributes; import org.xml.sax.ContentHandler; import org.xml.sax.SAXNotRecognizedException; import org.xml.sax.SAXNotSupportedException; /** * Adapt a SAX1 Parser as a SAX2 XMLReader. * * <blockquote> * <em>This module, both source code and documentation, is in the * Public Domain, and comes with <strong>NO WARRANTY</strong>.</em> * See <a href='http://www.saxproject.org'>http://www.saxproject.org</a> * for further information. * </blockquote> * * <p>This class wraps a SAX1 {@link org.xml.sax.Parser Parser} * and makes it act as a SAX2 {@link org.xml.sax.XMLReader XMLReader}, * with feature, property, and Namespace support. Note * that it is not possible to report {@link org.xml.sax.ContentHandler#skippedEntity * skippedEntity} events, since SAX1 does not make that information available.</p> * * <p>This adapter does not test for duplicate Namespace-qualified * attribute names.</p> * * @since SAX 2.0 * @author David Megginson * @version 2.0.1 (sax2r2) * @see org.xml.sax.helpers.XMLReaderAdapter * @see org.xml.sax.XMLReader * @see org.xml.sax.Parser */ public class ParserAdapter implements XMLReader, DocumentHandler { //////////////////////////////////////////////////////////////////// // Constructors. 
//////////////////////////////////////////////////////////////////// /** * Construct a new parser adapter. * * <p>Use the "org.xml.sax.parser" property to locate the * embedded SAX1 driver.</p> * * @exception SAXException If the embedded driver * cannot be instantiated or if the * org.xml.sax.parser property is not specified. */ public ParserAdapter () throws SAXException { super(); String driver = System.getProperty("org.xml.sax.parser"); try { setup(ParserFactory.makeParser()); } catch (ClassNotFoundException e1) { throw new SAXException("Cannot find SAX1 driver class " + driver, e1); } catch (IllegalAccessException e2) { throw new SAXException("SAX1 driver class " + driver + " found but cannot be loaded", e2); } catch (InstantiationException e3) { throw new SAXException("SAX1 driver class " + driver + " loaded but cannot be instantiated", e3); } catch (ClassCastException e4) { throw new SAXException("SAX1 driver class " + driver + " does not implement org.xml.sax.Parser"); } catch (NullPointerException e5) { throw new SAXException("System property org.xml.sax.parser not specified"); } } /** * Construct a new parser adapter. * * <p>Note that the embedded parser cannot be changed once the * adapter is created; to embed a different parser, allocate * a new ParserAdapter.</p> * * @param parser The SAX1 parser to embed. * @exception java.lang.NullPointerException If the parser parameter * is null. */ public ParserAdapter (Parser parser) { super(); setup(parser); } /** * Internal setup method. * * @param parser The embedded parser. * @exception java.lang.NullPointerException If the parser parameter * is null. 
*/ private void setup (Parser parser) { if (parser == null) { throw new NullPointerException("Parser argument must not be null"); } this.parser = parser; atts = new AttributesImpl(); nsSupport = new NamespaceSupport(); attAdapter = new AttributeListAdapter(); } //////////////////////////////////////////////////////////////////// // Implementation of org.xml.sax.XMLReader. //////////////////////////////////////////////////////////////////// // // Internal constants for the sake of convenience. // private final static String FEATURES = "http://xml.org/sax/features/"; private final static String NAMESPACES = FEATURES + "namespaces"; private final static String NAMESPACE_PREFIXES = FEATURES + "namespace-prefixes"; private final static String XMLNS_URIs = FEATURES + "xmlns-uris"; /** * Set a feature flag for the parser. * * <p>The only features recognized are namespaces and * namespace-prefixes.</p> * * @param name The feature name, as a complete URI. * @param value The requested feature value. * @exception SAXNotRecognizedException If the feature * can't be assigned or retrieved. * @exception SAXNotSupportedException If the feature * can't be assigned that value. * @see org.xml.sax.XMLReader#setFeature */ public void setFeature (String name, boolean value) throws SAXNotRecognizedException, SAXNotSupportedException { if (name.equals(NAMESPACES)) { checkNotParsing("feature", name); namespaces = value; if (!namespaces && !prefixes) { prefixes = true; } } else if (name.equals(NAMESPACE_PREFIXES)) { checkNotParsing("feature", name); prefixes = value; if (!prefixes && !namespaces) { namespaces = true; } } else if (name.equals(XMLNS_URIs)) { checkNotParsing("feature", name); uris = value; } else { throw new SAXNotRecognizedException("Feature: " + name); } } /** * Check a parser feature flag. * * <p>The only features recognized are namespaces and * namespace-prefixes.</p> * * @param name The feature name, as a complete URI. * @return The current feature value. 
* @exception SAXNotRecognizedException If the feature * value can't be assigned or retrieved. * @exception SAXNotSupportedException If the * feature is not currently readable. * @see org.xml.sax.XMLReader#setFeature */ public boolean getFeature (String name) throws SAXNotRecognizedException, SAXNotSupportedException { if (name.equals(NAMESPACES)) { return namespaces; } else if (name.equals(NAMESPACE_PREFIXES)) { return prefixes; } else if (name.equals(XMLNS_URIs)) { return uris; } else { throw new SAXNotRecognizedException("Feature: " + name); } } /** * Set a parser property. * * <p>No properties are currently recognized.</p> * * @param name The property name. * @param value The property value. * @exception SAXNotRecognizedException If the property * value can't be assigned or retrieved. * @exception SAXNotSupportedException If the property * can't be assigned that value. * @see org.xml.sax.XMLReader#setProperty */ public void setProperty (String name, Object value) throws SAXNotRecognizedException, SAXNotSupportedException { throw new SAXNotRecognizedException("Property: " + name); } /** * Get a parser property. * * <p>No properties are currently recognized.</p> * * @param name The property name. * @return The property value. * @exception SAXNotRecognizedException If the property * value can't be assigned or retrieved. * @exception SAXNotSupportedException If the property * value is not currently readable. * @see org.xml.sax.XMLReader#getProperty */ public Object getProperty (String name) throws SAXNotRecognizedException, SAXNotSupportedException { throw new SAXNotRecognizedException("Property: " + name); } /** * Set the entity resolver. * * @param resolver The new entity resolver. * @see org.xml.sax.XMLReader#setEntityResolver */ public void setEntityResolver (EntityResolver resolver) { entityResolver = resolver; } /** * Return the current entity resolver. * * @return The current entity resolver, or null if none was supplied. 
* @see org.xml.sax.XMLReader#getEntityResolver */ public EntityResolver getEntityResolver () { return entityResolver; } /** * Set the DTD handler. * * @param handler the new DTD handler * @see org.xml.sax.XMLReader#setEntityResolver */ public void setDTDHandler (DTDHandler handler) { dtdHandler = handler; } /** * Return the current DTD handler. * * @return the current DTD handler, or null if none was supplied * @see org.xml.sax.XMLReader#getEntityResolver */ public DTDHandler getDTDHandler () { return dtdHandler; } /** * Set the content handler. * * @param handler the new content handler * @see org.xml.sax.XMLReader#setEntityResolver */ public void setContentHandler (ContentHandler handler) { contentHandler = handler; } /** * Return the current content handler. * * @return The current content handler, or null if none was supplied. * @see org.xml.sax.XMLReader#getEntityResolver */ public ContentHandler getContentHandler () { return contentHandler; } /** * Set the error handler. * * @param handler The new error handler. * @see org.xml.sax.XMLReader#setEntityResolver */ public void setErrorHandler (ErrorHandler handler) { errorHandler = handler; } /** * Return the current error handler. * * @return The current error handler, or null if none was supplied. * @see org.xml.sax.XMLReader#getEntityResolver */ public ErrorHandler getErrorHandler () { return errorHandler; } /** * Parse an XML document. * * @param systemId The absolute URL of the document. * @exception java.io.IOException If there is a problem reading * the raw content of the document. * @exception SAXException If there is a problem * processing the document. * @see #parse(org.xml.sax.InputSource) * @see org.xml.sax.Parser#parse(java.lang.String) */ public void parse (String systemId) throws IOException, SAXException { parse(new InputSource(systemId)); } /** * Parse an XML document. * * @param input An input source for the document. 
* @exception java.io.IOException If there is a problem reading * the raw content of the document. * @exception SAXException If there is a problem * processing the document. * @see #parse(java.lang.String) * @see org.xml.sax.Parser#parse(org.xml.sax.InputSource) */ public void parse (InputSource input) throws IOException, SAXException { if (parsing) { throw new SAXException("Parser is already in use"); } setupParser(); parsing = true; try { parser.parse(input); } finally { parsing = false; } parsing = false; } //////////////////////////////////////////////////////////////////// // Implementation of org.xml.sax.DocumentHandler. //////////////////////////////////////////////////////////////////// /** * Adapter implementation method; do not call. * Adapt a SAX1 document locator event. * * @param locator A document locator. * @see org.xml.sax.ContentHandler#setDocumentLocator */ public void setDocumentLocator (Locator locator) { this.locator = locator; if (contentHandler != null) { contentHandler.setDocumentLocator(locator); } } /** * Adapter implementation method; do not call. * Adapt a SAX1 start document event. * * @exception SAXException The client may raise a * processing exception. * @see org.xml.sax.DocumentHandler#startDocument */ public void startDocument () throws SAXException { if (contentHandler != null) { contentHandler.startDocument(); } } /** * Adapter implementation method; do not call. * Adapt a SAX1 end document event. * * @exception SAXException The client may raise a * processing exception. * @see org.xml.sax.DocumentHandler#endDocument */ public void endDocument () throws SAXException { if (contentHandler != null) { contentHandler.endDocument(); } } /** * Adapter implementation method; do not call. * Adapt a SAX1 startElement event. * * <p>If necessary, perform Namespace processing.</p> * * @param qName The qualified (prefixed) name. * @param qAtts The XML attribute list (with qnames). 
* @exception SAXException The client may raise a * processing exception. */ public void startElement (String qName, AttributeList qAtts) throws SAXException { // These are exceptions from the // first pass; they should be // ignored if there's a second pass, // but reported otherwise. Vector exceptions = null; // If we're not doing Namespace // processing, dispatch this quickly. if (!namespaces) { if (contentHandler != null) { attAdapter.setAttributeList(qAtts); contentHandler.startElement("", "", qName.intern(), attAdapter); } return; } // OK, we're doing Namespace processing. nsSupport.pushContext(); int length = qAtts.getLength(); // First pass: handle NS decls for (int i = 0; i < length; i++) { String attQName = qAtts.getName(i); if (!attQName.startsWith("xmlns")) continue; // Could be a declaration... String prefix; int n = attQName.indexOf(':'); // xmlns=... if (n == -1 && attQName.length () == 5) { prefix = ""; } else if (n != 5) { // XML namespaces spec doesn't discuss "xmlnsf:oo" // (and similarly named) attributes ... at most, warn continue; } else // xmlns:foo=... prefix = attQName.substring(n+1); String value = qAtts.getValue(i); if (!nsSupport.declarePrefix(prefix, value)) { reportError("Illegal Namespace prefix: " + prefix); continue; } if (contentHandler != null) contentHandler.startPrefixMapping(prefix, value); } // Second pass: copy all relevant // attributes into the SAX2 AttributeList // using updated prefix bindings atts.clear(); for (int i = 0; i < length; i++) { String attQName = qAtts.getName(i); String type = qAtts.getType(i); String value = qAtts.getValue(i); // Declaration? if (attQName.startsWith("xmlns")) { String prefix; int n = attQName.indexOf(':'); if (n == -1 && attQName.length () == 5) { prefix = ""; } else if (n != 5) { // XML namespaces spec doesn't discuss "xmlnsf:oo" // (and similarly named) attributes ... 
ignore prefix = null; } else { prefix = attQName.substring(6); } // Yes, decl: report or prune if (prefix != null) { if (prefixes) { if (uris) // note funky case: localname can be null // when declaring the default prefix, and // yet the uri isn't null. atts.addAttribute (nsSupport.XMLNS, prefix, attQName.intern(), type, value); else atts.addAttribute ("", "", attQName.intern(), type, value); } continue; } } // Not a declaration -- report try { String attName[] = processName(attQName, true, true); atts.addAttribute(attName[0], attName[1], attName[2], type, value); } catch (SAXException e) { if (exceptions == null) exceptions = new Vector(); exceptions.addElement(e); atts.addAttribute("", attQName, attQName, type, value); } } // now handle the deferred exception reports if (exceptions != null && errorHandler != null) { for (int i = 0; i < exceptions.size(); i++) errorHandler.error((SAXParseException) (exceptions.elementAt(i))); } // OK, finally report the event. if (contentHandler != null) { String name[] = processName(qName, false, false); contentHandler.startElement(name[0], name[1], name[2], atts); } } /** * Adapter implementation method; do not call. * Adapt a SAX1 end element event. * * @param qName The qualified (prefixed) name. * @exception SAXException The client may raise a * processing exception. * @see org.xml.sax.DocumentHandler#endElement */ public void endElement (String qName) throws SAXException { // If we're not doing Namespace // processing, dispatch this quickly. if (!namespaces) { if (contentHandler != null) { contentHandler.endElement("", "", qName.intern()); } return; } // Split the name. 
String names[] = processName(qName, false, false); if (contentHandler != null) { contentHandler.endElement(names[0], names[1], names[2]); Enumeration prefixes = nsSupport.getDeclaredPrefixes(); while (prefixes.hasMoreElements()) { String prefix = (String)prefixes.nextElement(); contentHandler.endPrefixMapping(prefix); } } nsSupport.popContext(); } /** * Adapter implementation method; do not call. * Adapt a SAX1 characters event. * * @param ch An array of characters. * @param start The starting position in the array. * @param length The number of characters to use. * @exception SAXException The client may raise a * processing exception. * @see org.xml.sax.DocumentHandler#characters */ public void characters (char ch[], int start, int length) throws SAXException { if (contentHandler != null) { contentHandler.characters(ch, start, length); } } /** * Adapter implementation method; do not call. * Adapt a SAX1 ignorable whitespace event. * * @param ch An array of characters. * @param start The starting position in the array. * @param length The number of characters to use. * @exception SAXException The client may raise a * processing exception. * @see org.xml.sax.DocumentHandler#ignorableWhitespace */ public void ignorableWhitespace (char ch[], int start, int length) throws SAXException { if (contentHandler != null) { contentHandler.ignorableWhitespace(ch, start, length); } } /** * Adapter implementation method; do not call. * Adapt a SAX1 processing instruction event. * * @param target The processing instruction target. * @param data The remainder of the processing instruction * @exception SAXException The client may raise a * processing exception. 
* @see org.xml.sax.DocumentHandler#processingInstruction */ public void processingInstruction (String target, String data) throws SAXException { if (contentHandler != null) { contentHandler.processingInstruction(target, data); } } //////////////////////////////////////////////////////////////////// // Internal utility methods. //////////////////////////////////////////////////////////////////// /** * Initialize the parser before each run. */ private void setupParser () { // catch an illegal "nonsense" state. if (!prefixes && !namespaces) throw new IllegalStateException (); nsSupport.reset(); if (uris) nsSupport.setNamespaceDeclUris (true); if (entityResolver != null) { parser.setEntityResolver(entityResolver); } if (dtdHandler != null) { parser.setDTDHandler(dtdHandler); } if (errorHandler != null) { parser.setErrorHandler(errorHandler); } parser.setDocumentHandler(this); locator = null; } /** * Process a qualified (prefixed) name. * * <p>If the name has an undeclared prefix, use only the qname * and make an ErrorHandler.error callback in case the app is * interested.</p> * * @param qName The qualified (prefixed) name. * @param isAttribute true if this is an attribute name. * @return The name split into three parts. * @exception SAXException The client may throw * an exception if there is an error callback. */ private String [] processName (String qName, boolean isAttribute, boolean useException) throws SAXException { String parts[] = nsSupport.processName(qName, nameParts, isAttribute); if (parts == null) { if (useException) throw makeException("Undeclared prefix: " + qName); reportError("Undeclared prefix: " + qName); parts = new String[3]; parts[0] = parts[1] = ""; parts[2] = qName.intern(); } return parts; } /** * Report a non-fatal error. * * @param message The error message. * @exception SAXException The client may throw * an exception. 
*/ void reportError (String message) throws SAXException { if (errorHandler != null) errorHandler.error(makeException(message)); } /** * Construct an exception for the current context. * * @param message The error message. */ private SAXParseException makeException (String message) { if (locator != null) { return new SAXParseException(message, locator); } else { return new SAXParseException(message, null, null, -1, -1); } } /** * Throw an exception if we are parsing. * * <p>Use this method to detect illegal feature or * property changes.</p> * * @param type The type of thing (feature or property). * @param name The feature or property name. * @exception SAXNotSupportedException If a * document is currently being parsed. */ private void checkNotParsing (String type, String name) throws SAXNotSupportedException { if (parsing) { throw new SAXNotSupportedException("Cannot change " + type + ' ' + name + " while parsing"); } } //////////////////////////////////////////////////////////////////// // Internal state. //////////////////////////////////////////////////////////////////// private NamespaceSupport nsSupport; private AttributeListAdapter attAdapter; private boolean parsing = false; private String nameParts[] = new String[3]; private Parser parser = null; private AttributesImpl atts = null; // Features private boolean namespaces = true; private boolean prefixes = false; private boolean uris = false; // Properties // Handlers Locator locator; EntityResolver entityResolver = null; DTDHandler dtdHandler = null; ContentHandler contentHandler = null; ErrorHandler errorHandler = null; //////////////////////////////////////////////////////////////////// // Inner class to wrap an AttributeList when not doing NS proc. //////////////////////////////////////////////////////////////////// /** * Adapt a SAX1 AttributeList as a SAX2 Attributes object. 
* * <p>This class is in the Public Domain, and comes with NO * WARRANTY of any kind.</p> * * <p>This wrapper class is used only when Namespace support * is disabled -- it provides pretty much a direct mapping * from SAX1 to SAX2, except that names and types are * interned whenever requested.</p> */ final class AttributeListAdapter implements Attributes { /** * Construct a new adapter. */ AttributeListAdapter () { } /** * Set the embedded AttributeList. * * <p>This method must be invoked before any of the others * can be used.</p> * * @param The SAX1 attribute list (with qnames). */ void setAttributeList (AttributeList qAtts) { this.qAtts = qAtts; } /** * Return the length of the attribute list. * * @return The number of attributes in the list. * @see org.xml.sax.Attributes#getLength */ public int getLength () { return qAtts.getLength(); } /** * Return the Namespace URI of the specified attribute. * * @param The attribute's index. * @return Always the empty string. * @see org.xml.sax.Attributes#getURI */ public String getURI (int i) { return ""; } /** * Return the local name of the specified attribute. * * @param The attribute's index. * @return Always the empty string. * @see org.xml.sax.Attributes#getLocalName */ public String getLocalName (int i) { return ""; } /** * Return the qualified (prefixed) name of the specified attribute. * * @param The attribute's index. * @return The attribute's qualified name, internalized. */ public String getQName (int i) { return qAtts.getName(i).intern(); } /** * Return the type of the specified attribute. * * @param The attribute's index. * @return The attribute's type as an internalized string. */ public String getType (int i) { return qAtts.getType(i).intern(); } /** * Return the value of the specified attribute. * * @param The attribute's index. * @return The attribute's value. */ public String getValue (int i) { return qAtts.getValue(i); } /** * Look up an attribute index by Namespace name. 
* * @param uri The Namespace URI or the empty string. * @param localName The local name. * @return The attributes index, or -1 if none was found. * @see org.xml.sax.Attributes#getIndex(java.lang.String,java.lang.String) */ public int getIndex (String uri, String localName) { return -1; } /** * Look up an attribute index by qualified (prefixed) name. * * @param qName The qualified name. * @return The attributes index, or -1 if none was found. * @see org.xml.sax.Attributes#getIndex(java.lang.String) */ public int getIndex (String qName) { int max = atts.getLength(); for (int i = 0; i < max; i++) { if (qAtts.getName(i).equals(qName)) { return i; } } return -1; } /** * Look up the type of an attribute by Namespace name. * * @param uri The Namespace URI * @param localName The local name. * @return The attribute's type as an internalized string. */ public String getType (String uri, String localName) { return null; } /** * Look up the type of an attribute by qualified (prefixed) name. * * @param qName The qualified name. * @return The attribute's type as an internalized string. */ public String getType (String qName) { return qAtts.getType(qName).intern(); } /** * Look up the value of an attribute by Namespace name. * * @param uri The Namespace URI * @param localName The local name. * @return The attribute's value. */ public String getValue (String uri, String localName) { return null; } /** * Look up the value of an attribute by qualified (prefixed) name. * * @param qName The qualified name. * @return The attribute's value. */ public String getValue (String qName) { return qAtts.getValue(qName); } private AttributeList qAtts; } } // end of ParserAdapter.java
/* * Copyright (C) 2017 The Dagger Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package dagger.internal.codegen.binding; import static androidx.room.compiler.processing.XElementKt.isMethod; import static androidx.room.compiler.processing.XElementKt.isTypeElement; import static androidx.room.compiler.processing.XElementKt.isVariableElement; import static androidx.room.compiler.processing.compat.XConverters.toJavac; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; import static com.google.common.collect.Iterables.getOnlyElement; import static dagger.internal.codegen.binding.ComponentDescriptor.isComponentProductionMethod; import static dagger.internal.codegen.binding.ConfigurationAnnotations.getNullableType; import static dagger.internal.codegen.binding.MapKeys.getMapKey; import static dagger.internal.codegen.extension.DaggerStreams.toImmutableSet; import static dagger.internal.codegen.xprocessing.XElements.asMethod; import static dagger.internal.codegen.xprocessing.XElements.asTypeElement; import static dagger.internal.codegen.xprocessing.XElements.asVariable; import static dagger.internal.codegen.xprocessing.XTypes.isDeclared; import static dagger.spi.model.BindingKind.ASSISTED_FACTORY; import static dagger.spi.model.BindingKind.ASSISTED_INJECTION; import static dagger.spi.model.BindingKind.BOUND_INSTANCE; import static 
dagger.spi.model.BindingKind.COMPONENT; import static dagger.spi.model.BindingKind.COMPONENT_DEPENDENCY; import static dagger.spi.model.BindingKind.COMPONENT_PRODUCTION; import static dagger.spi.model.BindingKind.COMPONENT_PROVISION; import static dagger.spi.model.BindingKind.DELEGATE; import static dagger.spi.model.BindingKind.INJECTION; import static dagger.spi.model.BindingKind.MEMBERS_INJECTOR; import static dagger.spi.model.BindingKind.OPTIONAL; import static dagger.spi.model.BindingKind.PRODUCTION; import static dagger.spi.model.BindingKind.PROVISION; import static dagger.spi.model.BindingKind.SUBCOMPONENT_CREATOR; import androidx.room.compiler.processing.XConstructorElement; import androidx.room.compiler.processing.XConstructorType; import androidx.room.compiler.processing.XElement; import androidx.room.compiler.processing.XExecutableParameterElement; import androidx.room.compiler.processing.XMethodElement; import androidx.room.compiler.processing.XMethodType; import androidx.room.compiler.processing.XType; import androidx.room.compiler.processing.XTypeElement; import androidx.room.compiler.processing.XVariableElement; import com.google.common.collect.ImmutableCollection; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSortedSet; import com.google.common.collect.Iterables; import com.squareup.javapoet.ClassName; import dagger.Module; import dagger.internal.codegen.base.ContributionType; import dagger.internal.codegen.base.MapType; import dagger.internal.codegen.base.SetType; import dagger.internal.codegen.binding.MembersInjectionBinding.InjectionSite; import dagger.internal.codegen.binding.ProductionBinding.ProductionKind; import dagger.internal.codegen.javapoet.TypeNames; import dagger.internal.codegen.langmodel.DaggerTypes; import dagger.spi.model.BindingKind; import dagger.spi.model.DaggerAnnotation; import dagger.spi.model.DaggerType; import dagger.spi.model.DependencyRequest; import dagger.spi.model.Key; import 
dagger.spi.model.RequestKind; import java.util.Optional; import java.util.function.BiFunction; import javax.inject.Inject; /** A factory for {@link Binding} objects. */ public final class BindingFactory { private final DaggerTypes types; private final KeyFactory keyFactory; private final DependencyRequestFactory dependencyRequestFactory; private final InjectionSiteFactory injectionSiteFactory; private final InjectionAnnotations injectionAnnotations; @Inject BindingFactory( DaggerTypes types, KeyFactory keyFactory, DependencyRequestFactory dependencyRequestFactory, InjectionSiteFactory injectionSiteFactory, InjectionAnnotations injectionAnnotations) { this.types = types; this.keyFactory = keyFactory; this.dependencyRequestFactory = dependencyRequestFactory; this.injectionSiteFactory = injectionSiteFactory; this.injectionAnnotations = injectionAnnotations; } /** * Returns an {@link dagger.spi.model.BindingKind#INJECTION} binding. * * @param constructorElement the {@code @Inject}-annotated constructor * @param resolvedType the parameterized type if the constructor is for a generic class and the * binding should be for the parameterized type */ // TODO(dpb): See if we can just pass the parameterized type and not also the constructor. public ProvisionBinding injectionBinding( XConstructorElement constructorElement, Optional<XType> resolvedEnclosingType) { checkArgument(InjectionAnnotations.hasInjectOrAssistedInjectAnnotation(constructorElement)); XConstructorType constructorType = constructorElement.getExecutableType(); XType enclosingType = constructorElement.getEnclosingElement().getType(); // If the class this is constructing has some type arguments, resolve everything. 
if (!enclosingType.getTypeArguments().isEmpty() && resolvedEnclosingType.isPresent()) { checkIsSameErasedType(resolvedEnclosingType.get(), enclosingType); enclosingType = resolvedEnclosingType.get(); constructorType = constructorElement.asMemberOf(enclosingType); } // Collect all dependency requests within the provision method. // Note: we filter out @Assisted parameters since these aren't considered dependency requests. ImmutableSet.Builder<DependencyRequest> provisionDependencies = ImmutableSet.builder(); for (int i = 0; i < constructorElement.getParameters().size(); i++) { XExecutableParameterElement parameter = constructorElement.getParameters().get(i); XType parameterType = constructorType.getParameterTypes().get(i); if (!AssistedInjectionAnnotations.isAssistedParameter(parameter)) { provisionDependencies.add( dependencyRequestFactory.forRequiredResolvedVariable(parameter, parameterType)); } } ProvisionBinding.Builder builder = ProvisionBinding.builder() .contributionType(ContributionType.UNIQUE) .bindingElement(constructorElement) .key(keyFactory.forInjectConstructorWithResolvedType(enclosingType)) .provisionDependencies(provisionDependencies.build()) .injectionSites(injectionSiteFactory.getInjectionSites(enclosingType)) .kind( constructorElement.hasAnnotation(TypeNames.ASSISTED_INJECT) ? ASSISTED_INJECTION : INJECTION) .scope(injectionAnnotations.getScope(constructorElement.getEnclosingElement())); if (hasNonDefaultTypeParameters(enclosingType)) { builder.unresolved(injectionBinding(constructorElement, Optional.empty())); } return builder.build(); } public ProvisionBinding assistedFactoryBinding( XTypeElement factory, Optional<XType> resolvedFactoryType) { // If the class this is constructing has some type arguments, resolve everything. 
    // --- Tail of the assisted-factory binding method (its signature precedes this chunk) ---
    // If the factory type is generic and a resolved parameterization was supplied,
    // verify both share the same erasure and continue with the resolved type.
    XType factoryType = factory.getType();
    if (!factoryType.getTypeArguments().isEmpty() && resolvedFactoryType.isPresent()) {
      checkIsSameErasedType(resolvedFactoryType.get(), factoryType);
      factoryType = resolvedFactoryType.get();
    }
    XMethodElement factoryMethod = AssistedInjectionAnnotations.assistedFactoryMethod(factory);
    // Resolve the factory method as a member of the (possibly parameterized) factory type.
    XMethodType factoryMethodType = factoryMethod.asMemberOf(factoryType);
    return ProvisionBinding.builder()
        .contributionType(ContributionType.UNIQUE)
        .key(Key.builder(DaggerType.from(factoryType)).build())
        .bindingElement(factory)
        // The generated factory depends on a Provider of the factory method's return
        // type (the @AssistedInject-constructed type).
        .provisionDependencies(
            ImmutableSet.of(
                DependencyRequest.builder()
                    .key(Key.builder(DaggerType.from(factoryMethodType.getReturnType())).build())
                    .kind(RequestKind.PROVIDER)
                    .build()))
        .kind(ASSISTED_FACTORY)
        .build();
  }

  /**
   * Returns a {@link dagger.spi.model.BindingKind#PROVISION} binding for a
   * {@code @Provides}-annotated method.
   *
   * @param contributedBy the installed module that declares or inherits the method
   */
  public ProvisionBinding providesMethodBinding(
      XMethodElement providesMethod, XTypeElement contributedBy) {
    // Common method-binding properties are filled in by setMethodBindingProperties;
    // provision-specific ones (scope, nullability) are layered on afterwards.
    return setMethodBindingProperties(
            ProvisionBinding.builder(),
            providesMethod,
            contributedBy,
            keyFactory.forProvidesMethod(providesMethod, contributedBy),
            this::providesMethodBinding)
        .kind(PROVISION)
        .scope(injectionAnnotations.getScope(providesMethod))
        .nullableType(getNullableType(providesMethod))
        .build();
  }

  /**
   * Returns a {@link dagger.spi.model.BindingKind#PRODUCTION} binding for a
   * {@code @Produces}-annotated method.
   *
   * @param contributedBy the installed module that declares or inherits the method
   */
  public ProductionBinding producesMethodBinding(
      XMethodElement producesMethod, XTypeElement contributedBy) {
    // TODO(beder): Add nullability checking with Java 8.
    // Production bindings additionally carry the @Produces kind, declared thrown
    // types, and implicit requests for the production executor and monitor.
    ProductionBinding.Builder builder =
        setMethodBindingProperties(
                ProductionBinding.builder(),
                producesMethod,
                contributedBy,
                keyFactory.forProducesMethod(producesMethod, contributedBy),
                this::producesMethodBinding)
            .kind(PRODUCTION)
            .productionKind(ProductionKind.fromProducesMethod(producesMethod))
            .thrownTypes(producesMethod.getThrownTypes())
            .executorRequest(dependencyRequestFactory.forProductionImplementationExecutor())
            .monitorRequest(dependencyRequestFactory.forProductionComponentMonitor());
    return builder.build();
  }

  /**
   * Populates the properties shared by all method bindings (contribution type, binding
   * element, contributing module, key, resolved dependencies, and optional map key).
   *
   * <p>If the method's type as a member of {@code contributedBy} differs from its declared
   * executable type (i.e. type variables were substituted), an "unresolved" binding for the
   * declaring module is attached via {@code create}.
   */
  private <C extends ContributionBinding, B extends ContributionBinding.Builder<C, B>>
      B setMethodBindingProperties(
          B builder,
          XMethodElement method,
          XTypeElement contributedBy,
          Key key,
          BiFunction<XMethodElement, XTypeElement, C> create) {
    XMethodType methodType = method.asMemberOf(contributedBy.getType());
    if (!types.isSameType(toJavac(methodType), toJavac(method.getExecutableType()))) {
      checkState(isTypeElement(method.getEnclosingElement()));
      builder.unresolved(create.apply(method, asTypeElement(method.getEnclosingElement())));
    }
    return builder
        .contributionType(ContributionType.fromBindingElement(method))
        .bindingElement(method)
        .contributingModule(contributedBy)
        .key(key)
        .dependencies(
            dependencyRequestFactory.forRequiredResolvedVariables(
                method.getParameters(), methodType.getParameterTypes()))
        .mapKey(getMapKey(method).map(DaggerAnnotation::from));
  }

  /**
   * Returns a {@link dagger.spi.model.BindingKind#MULTIBOUND_MAP} or {@link
   * dagger.spi.model.BindingKind#MULTIBOUND_SET} binding given a set of multibinding contribution
   * bindings.
   *
   * @param key a key that may be satisfied by a multibinding
   */
  public ContributionBinding syntheticMultibinding(
      Key key, Iterable<ContributionBinding> multibindingContributions) {
    // A multibinding becomes a production binding if any of its pieces require production.
    ContributionBinding.Builder<?, ?> builder =
        multibindingRequiresProduction(key, multibindingContributions)
            ? ProductionBinding.builder()
            : ProvisionBinding.builder();
    return builder
        .contributionType(ContributionType.UNIQUE)
        .key(key)
        .dependencies(
            dependencyRequestFactory.forMultibindingContributions(key, multibindingContributions))
        .kind(bindingKindForMultibindingKey(key))
        .build();
  }

  /** Maps a set key to MULTIBOUND_SET and a map key to MULTIBOUND_MAP; rejects anything else. */
  private static BindingKind bindingKindForMultibindingKey(Key key) {
    if (SetType.isSet(key)) {
      return BindingKind.MULTIBOUND_SET;
    } else if (MapType.isMap(key)) {
      return BindingKind.MULTIBOUND_MAP;
    } else {
      throw new IllegalArgumentException(String.format("key is not for a set or map: %s", key));
    }
  }

  /**
   * Returns true when the synthetic multibinding must be a production binding: either the key's
   * element/value type is Producer/Produced, or any contribution is itself a production binding.
   */
  private boolean multibindingRequiresProduction(
      Key key, Iterable<ContributionBinding> multibindingContributions) {
    if (MapType.isMap(key)) {
      MapType mapType = MapType.from(key);
      if (mapType.valuesAreTypeOf(TypeNames.PRODUCER)
          || mapType.valuesAreTypeOf(TypeNames.PRODUCED)) {
        return true;
      }
    } else if (SetType.isSet(key) && SetType.from(key).elementsAreTypeOf(TypeNames.PRODUCED)) {
      return true;
    }
    return Iterables.any(
        multibindingContributions,
        binding -> binding.bindingType().equals(BindingType.PRODUCTION));
  }

  /** Returns a {@link dagger.spi.model.BindingKind#COMPONENT} binding for the component. */
  public ProvisionBinding componentBinding(XTypeElement componentDefinitionType) {
    checkNotNull(componentDefinitionType);
    return ProvisionBinding.builder()
        .contributionType(ContributionType.UNIQUE)
        .bindingElement(componentDefinitionType)
        .key(keyFactory.forType(componentDefinitionType.getType()))
        .kind(COMPONENT)
        .build();
  }

  /**
   * Returns a {@link dagger.spi.model.BindingKind#COMPONENT_DEPENDENCY} binding for a component's
   * dependency.
   */
  public ProvisionBinding componentDependencyBinding(ComponentRequirement dependency) {
    checkNotNull(dependency);
    return ProvisionBinding.builder()
        .contributionType(ContributionType.UNIQUE)
        .bindingElement(dependency.typeElement())
        .key(keyFactory.forType(dependency.type()))
        .kind(COMPONENT_DEPENDENCY)
        .build();
  }

  /**
   * Returns a {@link dagger.spi.model.BindingKind#COMPONENT_PROVISION} or {@link
   * dagger.spi.model.BindingKind#COMPONENT_PRODUCTION} binding for a method on a component's
   * dependency.
   *
   * @param componentDescriptor the component with the dependency, not the dependency that has the
   *     method
   */
  public ContributionBinding componentDependencyMethodBinding(
      ComponentDescriptor componentDescriptor, XMethodElement dependencyMethod) {
    checkArgument(dependencyMethod.getParameters().isEmpty());
    ContributionBinding.Builder<?, ?> builder;
    // Production-component methods on a production component become production bindings;
    // everything else is an ordinary (possibly scoped, possibly nullable) provision.
    if (componentDescriptor.isProduction() && isComponentProductionMethod(dependencyMethod)) {
      builder =
          ProductionBinding.builder()
              .key(keyFactory.forProductionComponentMethod(dependencyMethod))
              .kind(COMPONENT_PRODUCTION)
              .thrownTypes(dependencyMethod.getThrownTypes());
    } else {
      builder =
          ProvisionBinding.builder()
              .key(keyFactory.forComponentMethod(dependencyMethod))
              .nullableType(getNullableType(dependencyMethod))
              .kind(COMPONENT_PROVISION)
              .scope(injectionAnnotations.getScope(dependencyMethod));
    }
    return builder
        .contributionType(ContributionType.UNIQUE)
        .bindingElement(dependencyMethod)
        .build();
  }

  /**
   * Returns a {@link dagger.spi.model.BindingKind#BOUND_INSTANCE} binding for a
   * {@code @BindsInstance}-annotated builder setter method or factory method parameter.
   */
  ProvisionBinding boundInstanceBinding(ComponentRequirement requirement, XElement element) {
    checkArgument(isVariableElement(element) || isMethod(element));
    // The nullability annotation lives on the parameter: either the element itself (factory
    // method parameter) or the single parameter of the builder setter method.
    XVariableElement parameterElement =
        isVariableElement(element)
            ? asVariable(element)
            : getOnlyElement(asMethod(element).getParameters());
    return ProvisionBinding.builder()
        .contributionType(ContributionType.UNIQUE)
        .bindingElement(element)
        .key(requirement.key().get())
        .nullableType(getNullableType(parameterElement))
        .kind(BOUND_INSTANCE)
        .build();
  }

  /**
   * Returns a {@link dagger.spi.model.BindingKind#SUBCOMPONENT_CREATOR} binding declared by a
   * component method that returns a subcomponent builder. Use {{@link
   * #subcomponentCreatorBinding(ImmutableSet)}} for bindings declared using {@link
   * Module#subcomponents()}.
   *
   * @param component the component that declares or inherits the method
   */
  ProvisionBinding subcomponentCreatorBinding(
      XMethodElement subcomponentCreatorMethod, XTypeElement component) {
    checkArgument(subcomponentCreatorMethod.getParameters().isEmpty());
    Key key =
        keyFactory.forSubcomponentCreatorMethod(subcomponentCreatorMethod, component.getType());
    return ProvisionBinding.builder()
        .contributionType(ContributionType.UNIQUE)
        .bindingElement(subcomponentCreatorMethod)
        .key(key)
        .kind(SUBCOMPONENT_CREATOR)
        .build();
  }

  /**
   * Returns a {@link dagger.spi.model.BindingKind#SUBCOMPONENT_CREATOR} binding declared using
   * {@link Module#subcomponents()}.
   */
  ProvisionBinding subcomponentCreatorBinding(
      ImmutableSet<SubcomponentDeclaration> subcomponentDeclarations) {
    // Any one declaration suffices; they all share the same key.
    SubcomponentDeclaration subcomponentDeclaration = subcomponentDeclarations.iterator().next();
    return ProvisionBinding.builder()
        .contributionType(ContributionType.UNIQUE)
        .key(subcomponentDeclaration.key())
        .kind(SUBCOMPONENT_CREATOR)
        .build();
  }

  /**
   * Returns a {@link dagger.spi.model.BindingKind#DELEGATE} binding.
   *
   * @param delegateDeclaration the {@code @Binds}-annotated declaration
   * @param actualBinding the binding that satisfies the {@code @Binds} declaration
   */
  ContributionBinding delegateBinding(
      DelegateDeclaration delegateDeclaration, ContributionBinding actualBinding) {
    // The delegate inherits its production-ness, nullability, and (for provision) scope
    // from the binding it delegates to.
    switch (actualBinding.bindingType()) {
      case PRODUCTION:
        return buildDelegateBinding(
            ProductionBinding.builder().nullableType(actualBinding.nullableType()),
            delegateDeclaration,
            TypeNames.PRODUCER);

      case PROVISION:
        return buildDelegateBinding(
            ProvisionBinding.builder()
                .scope(injectionAnnotations.getScope(delegateDeclaration.bindingElement().get()))
                .nullableType(actualBinding.nullableType()),
            delegateDeclaration,
            TypeNames.PROVIDER);

      case MEMBERS_INJECTION: // fall-through to throw
    }
    throw new AssertionError("bindingType: " + actualBinding);
  }

  /**
   * Returns a {@link dagger.spi.model.BindingKind#DELEGATE} binding used when there is no binding
   * that satisfies the {@code @Binds} declaration.
   */
  public ContributionBinding unresolvedDelegateBinding(DelegateDeclaration delegateDeclaration) {
    // Defaults to a provision binding with PROVIDER framework type since the actual
    // binding (and thus its type) is unknown.
    return buildDelegateBinding(
        ProvisionBinding.builder()
            .scope(injectionAnnotations.getScope(delegateDeclaration.bindingElement().get())),
        delegateDeclaration,
        TypeNames.PROVIDER);
  }

  /** Fills in the properties common to all delegate bindings and builds the binding. */
  private ContributionBinding buildDelegateBinding(
      ContributionBinding.Builder<?, ?> builder,
      DelegateDeclaration delegateDeclaration,
      ClassName frameworkType) {
    return builder
        .contributionType(delegateDeclaration.contributionType())
        .bindingElement(delegateDeclaration.bindingElement().get())
        .contributingModule(delegateDeclaration.contributingModule().get())
        .key(keyFactory.forDelegateBinding(delegateDeclaration, frameworkType))
        .dependencies(delegateDeclaration.delegateRequest())
        .mapKey(delegateDeclaration.mapKey())
        .kind(DELEGATE)
        .build();
  }

  /**
   * Returns an {@link dagger.spi.model.BindingKind#OPTIONAL} binding for {@code key}.
   *
   * @param requestKind the kind of request for the optional binding
   * @param underlyingKeyBindings the possibly empty set of bindings that exist in the component for
   *     the underlying (non-optional) key
   */
  ContributionBinding syntheticOptionalBinding(
      Key key,
      RequestKind requestKind,
      ImmutableCollection<? extends Binding> underlyingKeyBindings) {
    // No underlying bindings: an "absent" optional with no dependencies.
    if (underlyingKeyBindings.isEmpty()) {
      return ProvisionBinding.builder()
          .contributionType(ContributionType.UNIQUE)
          .key(key)
          .kind(OPTIONAL)
          .build();
    }
    boolean requiresProduction =
        underlyingKeyBindings.stream()
                .anyMatch(binding -> binding.bindingType() == BindingType.PRODUCTION)
            || requestKind.equals(RequestKind.PRODUCER) // handles producerFromProvider cases
            || requestKind.equals(RequestKind.PRODUCED); // handles producerFromProvider cases
    return (requiresProduction ? ProductionBinding.builder() : ProvisionBinding.builder())
        .contributionType(ContributionType.UNIQUE)
        .key(key)
        .kind(OPTIONAL)
        .dependencies(dependencyRequestFactory.forSyntheticPresentOptionalBinding(key, requestKind))
        .build();
  }

  /** Returns a {@link dagger.spi.model.BindingKind#MEMBERS_INJECTOR} binding. */
  public ProvisionBinding membersInjectorBinding(
      Key key, MembersInjectionBinding membersInjectionBinding) {
    return ProvisionBinding.builder()
        .key(key)
        .contributionType(ContributionType.UNIQUE)
        .kind(MEMBERS_INJECTOR)
        .bindingElement(membersInjectionBinding.key().type().xprocessing().getTypeElement())
        .provisionDependencies(membersInjectionBinding.dependencies())
        .injectionSites(membersInjectionBinding.injectionSites())
        .build();
  }

  /**
   * Returns a {@link dagger.spi.model.BindingKind#MEMBERS_INJECTION} binding.
   *
   * @param resolvedType if {@code declaredType} is a generic class and {@code resolvedType} is a
   *     parameterization of that type, the returned binding will be for the resolved type
   */
  // TODO(dpb): See if we can just pass one nongeneric/parameterized type.
  public MembersInjectionBinding membersInjectionBinding(XType type, Optional<XType> resolvedType) {
    // If the class this is injecting has some type arguments, resolve everything.
    if (!type.getTypeArguments().isEmpty() && resolvedType.isPresent()) {
      checkIsSameErasedType(resolvedType.get(), type);
      type = resolvedType.get();
    }
    ImmutableSortedSet<InjectionSite> injectionSites =
        injectionSiteFactory.getInjectionSites(type);
    // Dependencies are the union of every injection site's requests.
    ImmutableSet<DependencyRequest> dependencies =
        injectionSites.stream()
            .flatMap(injectionSite -> injectionSite.dependencies().stream())
            .collect(toImmutableSet());
    return MembersInjectionBinding.create(
        keyFactory.forMembersInjectedType(type),
        dependencies,
        // For parameterized types, also carry an unresolved binding for the raw declaration.
        hasNonDefaultTypeParameters(type)
            ? Optional.of(
                membersInjectionBinding(type.getTypeElement().getType(), Optional.empty()))
            : Optional.empty(),
        injectionSites);
  }

  /** Asserts that two types have the same erasure; used to validate resolved-type arguments. */
  private void checkIsSameErasedType(XType type1, XType type2) {
    checkState(
        types.isSameType(types.erasure(toJavac(type1)), types.erasure(toJavac(type2))),
        "erased expected type: %s, erased actual type: %s",
        types.erasure(toJavac(type1)),
        types.erasure(toJavac(type2)));
  }

  /**
   * Returns true if {@code type} is a parameterization of its declaration that differs from the
   * declaration's own type arguments (i.e. not the "default" self-parameterization).
   */
  private static boolean hasNonDefaultTypeParameters(XType type) {
    // If the type is not declared, then it can't have type parameters.
    if (!isDeclared(type)) {
      return false;
    }

    // If the element has no type parameters, none can be non-default.
    XType defaultType = type.getTypeElement().getType();
    if (defaultType.getTypeArguments().isEmpty()) {
      return false;
    }

    // The actual type parameter size can be different if the user is using a raw type.
    if (defaultType.getTypeArguments().size() != type.getTypeArguments().size()) {
      return true;
    }

    for (int i = 0; i < defaultType.getTypeArguments().size(); i++) {
      if (!defaultType.getTypeArguments().get(i).isSameType(type.getTypeArguments().get(i))) {
        return true;
      }
    }
    return false;
  }
}
package com.genie.beans;

import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;

import javax.annotation.PostConstruct;
import javax.faces.bean.ManagedBean;
import javax.faces.bean.ViewScoped;

import org.primefaces.model.DefaultStreamedContent;
import org.primefaces.model.StreamedContent;

import com.genie.model.Achievement;
import com.genie.model.AchievementProgress;
import com.genie.model.Assignment;
import com.genie.model.Badge;
import com.genie.model.CoursePlan;
import com.genie.model.Document;
import com.genie.model.GamificationSettings;
import com.genie.model.GradeCriteria;
import com.genie.model.LeaderboardSettings;
import com.genie.model.SemesterCourse;
import com.genie.model.StudentBadge;
import com.genie.model.StudentGamificationSettings;
import com.genie.model.User;
import com.genie.pojo.AssignmentGradeWrapper;
import com.genie.pojo.AttendanceDetailsWrapper;
import com.genie.pojo.AttendanceWrapper;
import com.genie.pojo.GradeColumn;
import com.genie.pojo.GradeWrapper;
import com.genie.pojo.LeaderboardWrapper;
import com.genie.pojo.StudentGradeRowWrapper;
import com.genie.services.CoursePlanService;
import com.genie.services.CourseService;
import com.genie.services.GamificationService;
import com.genie.services.SessionService;
import com.genie.services.UserService;
import com.genie.utils.DataFormatter;
import com.genie.utils.JsfUtil;
import com.genie.utils.ResourceUtil;

/**
 * View-scoped JSF backing bean for the student-facing course details page: course plans,
 * gamification (achievements, badges, leaderboards), grades, attendance, and file downloads.
 */
@ManagedBean
@ViewScoped
public class CourseDetailsBean extends BaseBean {

	private static final long serialVersionUID = -7447236755286730413L;

	// NOTE(review): a static mutable field on a @ViewScoped bean is shared across ALL
	// users/sessions in the JVM — concurrent requests can see each other's course id.
	// Presumably set from a URL parameter via loadUrlParameters(); verify and consider
	// making this an instance field populated via f:viewParam.
	private static String semCourseId;
	private Long semesterCourseId;
	private List<CoursePlan> coursePlans;
	private SemesterCourse semesterCourse;
	private GamificationSettings generalSettings;
	private LeaderboardSettings leaderboardSettings;
	private StudentGamificationSettings settings;
	private List<Achievement> achievements;
	// Achievements the student has no progress on yet.
	private List<Achievement> filteredAchievements;
	private List<AchievementProgress> progressedAchievements;
	private List<Badge> badges;
	// Badges the student has not earned yet.
	private List<Badge> filteredBadges;
	private List<StudentBadge> studentBadges;
	private List<LeaderboardWrapper> leaderboardStudents;
	private LeaderboardWrapper currentStudent;
	private List<User> otherStudents;
	private List<StudentGradeRowWrapper> gradeStudents;
	private List<AssignmentGradeWrapper> assignmentGrades;
	private List<AttendanceDetailsWrapper> attendanceDetails;
	private List<GradeColumn> otherGradeColumns;
	private List<GradeColumn> examGradeColumns;
	private List<GradeColumn> assignmentColumns;
	private List<GradeColumn> attendanceColumns;

	/**
	 * Loads everything the page needs. The course id comes from the static semCourseId
	 * string; a parse failure silently falls back to id 0.
	 */
	@PostConstruct
	public void initBean() {
		try {
			semesterCourseId = Long.parseLong(semCourseId);
		} catch (Exception e) {
			// Invalid/missing URL parameter: fall back to 0 (downstream services
			// presumably return empty results for an unknown id — TODO confirm).
			semesterCourseId = 0l;
		}
		coursePlans = CoursePlanService.getAllPlansBySemesterCourseIdWithMaterialDocuments(semesterCourseId);
		semesterCourse = CourseService.getByIdWithEverything(semesterCourseId);
		generalSettings = GamificationService.getGeneralSettingsForSemesterCourse(semesterCourseId);
		settings = GamificationService.getUserSettings(semesterCourseId);
		leaderboardSettings = GamificationService.getLeaderboardSettingsForSemesterCourse(semesterCourseId);
		// Substitute defaults so the EL expressions below never hit null.
		if(settings == null) {
			settings = new StudentGamificationSettings();
		}
		if(generalSettings == null) {
			generalSettings = new GamificationSettings();
		}
		// Each gamification feature must be enabled both course-wide and per-student.
		if(generalSettings.isAchievementsEnabled() && settings.isAchievementsEnabled()) {
			refreshAchievements();
		}
		if(generalSettings.isBadgesEnabled() && settings.isBadgesEnabled()) {
			refreshBadges();
		}
		if(generalSettings.isLeaderboardsEnabled() && settings.isLeaderboardsEnabled()) {
			refreshLeaderboards();
		}
		refreshGrades();
		refreshOtherStudents();
	}

	/** Reloads the list of classmates (everyone in the course except the current user). */
	public void refreshOtherStudents() {
		otherStudents = UserService.getAllBySemesterCourseExceptUser(semesterCourse);
	}

	/**
	 * Recomputes the current student's grade rows, exam/other grade columns, attendance
	 * details/columns, and assignment grades/columns.
	 */
	public void refreshGrades() {
		List<GradeCriteria> criteria = CoursePlanService.getGradeCriteriaBySemesterCourse(semesterCourseId);
		// NOTE(review): this local User shadows the LeaderboardWrapper field of the
		// same name — rename one of them to avoid confusion.
		User currentStudent = UserService.getByUsernameWithEverything(SessionService.getUsername());
		List<User> students = new ArrayList<User>();
		students.add(currentStudent);
		// Two calculations appended together; the boolean flags' meanings are not
		// visible here — presumably projected vs. actual grades (TODO confirm).
		gradeStudents = CourseService.calculateStudentGrades(students, criteria, semesterCourseId, false, true);
		gradeStudents.addAll(CourseService.calculateStudentGrades(students, criteria, semesterCourseId, true, true));
		// Build the exam-grade column headers from the first row's labels.
		examGradeColumns = new ArrayList<GradeColumn>();
		if(gradeStudents != null && gradeStudents.size() > 0) {
			int i = 0;
			if(gradeStudents.get(0).getExamGrades() != null) {
				for (GradeWrapper gw : gradeStudents.get(0).getExamGrades()) {
					examGradeColumns.add(new GradeColumn(gw.getLabel(), i));
					i++;
				}
			}
		}
		// Same for the "other" grade columns.
		otherGradeColumns = new ArrayList<GradeColumn>();
		if(gradeStudents != null && gradeStudents.size() > 0) {
			int i = 0;
			if(gradeStudents.get(0).getOtherGrades() != null) {
				for (GradeWrapper gw : gradeStudents.get(0).getOtherGrades()) {
					otherGradeColumns.add(new GradeColumn(gw.getLabel(), i));
					i++;
				}
			}
		}
		attendanceDetails = CoursePlanService.getAttendanceDetails(currentStudent, semesterCourseId);
		attendanceColumns = new ArrayList<GradeColumn>();
		int i = 0;
		// NOTE(review): unguarded get(0) — throws IndexOutOfBoundsException if the
		// service returns an empty list (unlike the guarded loops above).
		for (AttendanceWrapper adw : attendanceDetails.get(0).getAttendances()) {
			attendanceColumns.add(new GradeColumn(Integer.toString(adw.getWeek()), i));
			i++;
		}
		assignmentGrades = CoursePlanService.getAssignmentGrades(currentStudent, semesterCourseId);
		assignmentColumns = new ArrayList<GradeColumn>();
		int k = 0;
		// NOTE(review): same unguarded get(0) hazard as attendanceDetails above.
		for (GradeWrapper gw : assignmentGrades.get(0).getGrades()) {
			assignmentColumns.add(new GradeColumn(gw.getLabel(), k));
			k++;
		}
	}

	/**
	 * Reloads all achievements plus the student's progress, and computes
	 * filteredAchievements = achievements with no progress record yet.
	 */
	public void refreshAchievements() {
		achievements = GamificationService.getAllAchievementsForSemesterCourseWithEverything(semesterCourseId);
		progressedAchievements = GamificationService.getAllAchievementProgressesForStudent(semesterCourseId);
		filteredAchievements = new ArrayList<Achievement>();
		// O(n*m) membership scan; fine for the small lists expected here.
		for (Achievement ach : achievements) {
			boolean found = false;
			if(progressedAchievements != null) {
				for (AchievementProgress ap : progressedAchievements) {
					if(ap.getAchievement().equals(ach)) found = true;
				}
			}
			if(!found) filteredAchievements.add(ach);
		}
	}

	/**
	 * Reloads all badges plus the student's earned badges, and computes
	 * filteredBadges = badges the student has not earned.
	 */
	public void refreshBadges() {
		badges = GamificationService.getAllBadgesForSemesterCourse(semesterCourseId);
		studentBadges = GamificationService.getAllBadgesForStudent(semesterCourseId);
		filteredBadges = new ArrayList<Badge>();
		for (Badge ach : badges) {
			boolean found = false;
			if(studentBadges != null) {
				for (StudentBadge ap : studentBadges) {
					if(ap.getBadge().equals(ach)) found = true;
				}
			}
			if(!found) filteredBadges.add(ach);
		}
	}

	/**
	 * Reloads leaderboard data. The service returns a 2-element tuple:
	 * [0] the current student's wrapper (may be null), [1] the full student list.
	 */
	public void refreshLeaderboards() {
		List<Object> tuple = GamificationService.getLeaderboardUnsorted(semesterCourse, leaderboardSettings);
		if(tuple.get(0) != null) currentStudent = (LeaderboardWrapper) tuple.get(0);
		// Unchecked cast — the tuple is untyped by design of the service API.
		leaderboardStudents = (List<LeaderboardWrapper>) tuple.get(1);
	}

	/**
	 * Streams a stored document's BLOB to the browser. Shows an error growl and
	 * returns null when the document is missing or the BLOB read fails.
	 */
	public StreamedContent downloadFile(Document doc) {
		StreamedContent file = null;
		if(doc != null && doc.getDocumentId() > 0) {
			String docType = doc.getDocumentType();
			String filename = doc.getFilename() + doc.getFiletype();
			try {
				// Materialize the whole BLOB in memory; acceptable for course material sizes.
				byte[] fileBytes = doc.getFileData().getBytes(1, (int)doc.getFileData().length());
				InputStream is = new ByteArrayInputStream(fileBytes);
				file = new DefaultStreamedContent(is, docType, filename);
			} catch (Exception e) {
				e.printStackTrace();
				JsfUtil.addErrorMessage("growl.assignment.noSubmittedFileForDownloading");
			}
		}
		if(file == null) {
			JsfUtil.addErrorMessage("growl.assignment.noSubmittedFileForDownloading");
		}
		return file;
	}

	/** Redirects to the assignment details page for the given assignment. */
	public void goToAssignment(Assignment assignment) {
		if(assignment != null && assignment.getId() != null) {
			JsfUtil.redirect("assignment-details?asgnid=" + assignment.getId());
		}
	}

	/** True when the course plan has at least one material file to show. */
	public boolean isFilesRendered(CoursePlan cp) {
		if(cp.getMaterials() != null && cp.getMaterials().size() > 0) {
			return true;
		}
		return false;
	}

	/** True when the course plan has at least one assignment to show. */
	public boolean isAssignmentsRendered(CoursePlan cp) {
		if(cp.getAssignments() != null && cp.getAssignments().size() > 0) {
			return true;
		}
		return false;
	}

	/** True when the student has earned at least one badge. */
	public boolean isStudentBadgesRendered() {
		return studentBadges != null && studentBadges.size() > 0;
	}

	/** Percentage (0-100, truncated) of an achievement's target the student has reached. */
	public int getProgress(AchievementProgress ap) {
		// Guard against null/zero target and null/zero progress to avoid NPE and
		// division by zero.
		if(ap.getAchievement().getTargetCount() == null || ap.getAchievement().getTargetCount() == 0) return 0;
		if(ap.getCurrentValue() == null || ap.getCurrentValue() == 0) return 0;
		Float result = ap.getCurrentValue().floatValue() / ap.getAchievement().getTargetCount().floatValue() * 100;
		return result.intValue();
	}

	/** Inline CSS for the progress bar; grey background when there is no progress. */
	public String getProgressStyle(AchievementProgress ap) {
		int progress = getProgress(ap);
		String style = "width: " + progress + "%;";
		if(progress == 0) {
			style += " background-color:#909090;";
		}
		return style;
	}

	/** Re-fetches the user's gamification settings before opening the settings dialog. */
	public void prepareGamificationSettings() {
		settings = GamificationService.getUserSettings(semesterCourse.getId());
		if(settings == null) {
			settings = new StudentGamificationSettings();
		}
	}

	/** CSS classes for an achievement image; greyed out while incomplete. */
	public String getImageClass(AchievementProgress ap) {
		String cls = "img-responsive wh-128";
		if(!ap.isComplete()) {
			cls += " incomplete-objective";
		}
		return cls;
	}

	/** Human-readable description of an achievement's objective. */
	public String stringifyObjective(Achievement ach) {
		return GamificationService.stringifyAchievementObjective(ach);
	}

	/** Progress formatted as a percent string, e.g. "42%". */
	public String getProgressString(AchievementProgress ap) {
		return DataFormatter.formatPercent((double)getProgress(ap) / 100.0);
	}

	/**
	 * Persists the user's gamification settings (insert vs. update decided by id),
	 * then refreshes whichever features are now enabled.
	 */
	public void addOrUpdateGamificationSettings() {
		try {
			if(settings.getId() != null) {
				GamificationService.updateUserSettings(settings, semesterCourse);
				updateSuccessful();
			}
			else {
				GamificationService.saveUserSettings(settings, semesterCourse);
				saveSuccessful();
			}
			if(generalSettings.isAchievementsEnabled() && settings.isAchievementsEnabled()) {
				refreshAchievements();
			}
			if(generalSettings.isBadgesEnabled() && settings.isBadgesEnabled()) {
				refreshBadges();
			}
			if(generalSettings.isLeaderboardsEnabled() && settings.isLeaderboardsEnabled()) {
				refreshLeaderboards();
			}
		} catch (Exception e) {
			e.printStackTrace();
			saveOrUpdateFailed();
		}
	}

	/** Label like "Points: 10", or "Points: -" when the achievement has no point reward. */
	public String getPointRewardString(Achievement ach) {
		return generalSettings.getPointsName() + ": " + (ach.getPointReward() == null ? "-" : ach.getPointReward() );
	}

	/** Label like "Badge: Gold", or "Badge: -" when there is no badge reward. */
	public String getBadgeRewardString(Achievement ach) {
		String badgeName = "-";
		if(ach.getBadgeReward() != null) badgeName = ach.getBadgeReward().getName();
		return ResourceUtil.getLabel("badge.label.badge") + ": " + badgeName;
	}

	/** Label describing the grade reward, preferring the assignment name when present. */
	public String getGradeRewardString(Achievement ach) {
		if(ach.getRewardGradeCriteria() == null || ach.getGradeReward() == null)
			return ResourceUtil.getLabel("gamificationSettings.label.noGradeReward");
		else {
			if(ach.getRewardAssignment() != null)
				return ach.getRewardAssignment().getName() + ": " + ach.getGradeReward();
			return ach.getRewardGradeCriteria().getName() + ": " + ach.getGradeReward();
		}
	}

	/**
	 * Receives the course id URL parameter before the bean is constructed.
	 * NOTE(review): writes the static field shared across sessions — see the field comment.
	 */
	public static void loadUrlParameters(String scid) {
		semCourseId = scid;
	}

	// --- JSF accessors (plain getters/setters, no logic) ---

	public static String getSemCourseId() { return semCourseId; }
	public static void setSemCourseId(String semCourseId) { CourseDetailsBean.semCourseId = semCourseId; }
	public Long getSemesterCourseId() { return semesterCourseId; }
	public void setSemesterCourseId(Long semesterCourseId) { this.semesterCourseId = semesterCourseId; }
	public List<CoursePlan> getCoursePlans() { return coursePlans; }
	public void setCoursePlans(List<CoursePlan> coursePlans) { this.coursePlans = coursePlans; }
	public SemesterCourse getSemesterCourse() { return semesterCourse; }
	public void setSemesterCourse(SemesterCourse semesterCourse) { this.semesterCourse = semesterCourse; }
	public StudentGamificationSettings getSettings() { return settings; }
	public void setSettings(StudentGamificationSettings settings) { this.settings = settings; }
	public List<Achievement> getAchievements() { return achievements; }
	public void setAchievements(List<Achievement> achievements) { this.achievements = achievements; }
	public GamificationSettings getGeneralSettings() { return generalSettings; }
	public void setGeneralSettings(GamificationSettings generalSettings) { this.generalSettings = generalSettings; }
	public List<AchievementProgress> getProgressedAchievements() { return progressedAchievements; }
	public void setProgressedAchievements(List<AchievementProgress> progressedAchievements) { this.progressedAchievements = progressedAchievements; }
	public List<Achievement> getFilteredAchievements() { return filteredAchievements; }
	public void setFilteredAchievements(List<Achievement> filteredAchievements) { this.filteredAchievements = filteredAchievements; }
	public List<Badge> getBadges() { return badges; }
	public void setBadges(List<Badge> badges) { this.badges = badges; }
	public List<Badge> getFilteredBadges() { return filteredBadges; }
	public void setFilteredBadges(List<Badge> filteredBadges) { this.filteredBadges = filteredBadges; }
	public List<StudentBadge> getStudentBadges() { return studentBadges; }
	public void setStudentBadges(List<StudentBadge> studentBadges) { this.studentBadges = studentBadges; }
	public static long getSerialversionuid() { return serialVersionUID; }
	public LeaderboardSettings getLeaderboardSettings() { return leaderboardSettings; }
	public void setLeaderboardSettings(LeaderboardSettings leaderboardSettings) { this.leaderboardSettings = leaderboardSettings; }
	public List<LeaderboardWrapper> getLeaderboardStudents() { return leaderboardStudents; }
	public void setLeaderboardStudents(List<LeaderboardWrapper> leaderboardStudents) { this.leaderboardStudents = leaderboardStudents; }
	public LeaderboardWrapper getCurrentStudent() { return currentStudent; }
	public void setCurrentStudent(LeaderboardWrapper currentStudent) { this.currentStudent = currentStudent; }
	public List<StudentGradeRowWrapper> getGradeStudents() { return gradeStudents; }
	public void setGradeStudents(List<StudentGradeRowWrapper> gradeStudents) { this.gradeStudents = gradeStudents; }
	public List<GradeColumn> getOtherGradeColumns() { return otherGradeColumns; }
	public void setOtherGradeColumns(List<GradeColumn> otherGradeColumns) { this.otherGradeColumns = otherGradeColumns; }
	public List<GradeColumn> getExamGradeColumns() { return examGradeColumns; }
	public void setExamGradeColumns(List<GradeColumn> examGradeColumns) { this.examGradeColumns = examGradeColumns; }
	public List<AssignmentGradeWrapper> getAssignmentGrades() { return assignmentGrades; }
	public void setAssignmentGrades(List<AssignmentGradeWrapper> assignmentGrades) { this.assignmentGrades = assignmentGrades; }
	public List<AttendanceDetailsWrapper> getAttendanceDetails() { return attendanceDetails; }
	public void setAttendanceDetails(List<AttendanceDetailsWrapper> attendanceDetails) { this.attendanceDetails = attendanceDetails; }
	public List<GradeColumn> getAssignmentColumns() { return assignmentColumns; }
	public void setAssignmentColumns(List<GradeColumn> assignmentColumns) { this.assignmentColumns = assignmentColumns; }
	public List<GradeColumn> getAttendanceColumns() { return attendanceColumns; }
	public void setAttendanceColumns(List<GradeColumn> attendanceColumns) { this.attendanceColumns = attendanceColumns; }
	public List<User> getOtherStudents() { return otherStudents; }
	public void setOtherStudents(List<User> otherStudents) { this.otherStudents = otherStudents; }
}
/* The MIT License (MIT) Copyright (c) 2014, Groupon, Inc. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/
package com.groupon.jenkins.dynamic.build;

import com.google.common.base.Objects;
import com.google.common.collect.Iterables;
import com.groupon.jenkins.buildtype.InvalidBuildConfigurationException;
import com.groupon.jenkins.dynamic.build.cause.BuildCause;
import com.groupon.jenkins.dynamic.build.execution.BuildEnvironment;
import com.groupon.jenkins.dynamic.build.execution.BuildExecutionContext;
import com.groupon.jenkins.dynamic.buildtype.BuildType;
import com.groupon.jenkins.github.services.GithubRepositoryService;
import hudson.EnvVars;
import hudson.Functions;
import hudson.matrix.AxisList;
import hudson.matrix.Combination;
import hudson.model.AbstractProject;
import hudson.model.Build;
import hudson.model.BuildListener;
import hudson.model.Cause;
import hudson.model.CauseAction;
import hudson.model.Executor;
import hudson.model.Result;
import hudson.model.TaskListener;
import hudson.tasks.BuildStep;
import hudson.util.HttpResponses;
import hudson.util.VersionNumber;
import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import java.util.Arrays;
import java.util.Map;
import jenkins.model.Jenkins;
import org.apache.commons.lang.exception.ExceptionUtils;
import org.kohsuke.github.GHRepository;
import org.kohsuke.stapler.HttpResponse;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import org.kohsuke.stapler.export.Exported;
import org.kohsuke.stapler.interceptor.RequirePOST;
import org.mongodb.morphia.annotations.Property;

import javax.servlet.ServletException;

/**
 * A Mongo-backed (Morphia) Jenkins build for a DotCi {@link DynamicProject}. Wraps the
 * matrix-style sub-builds, GitHub integration, and the DotCi build execution lifecycle.
 */
public class DynamicBuild extends DbBackedBuild<DynamicProject, DynamicBuild> {

    // Not persisted; reconstructed after every load/restore (see postMorphiaLoad,
    // restoreFromDb, and both constructors).
    private transient DynamicBuildModel model;

    @Property(concreteClass = AxisList.class)
    private AxisList axisList;

    public DynamicBuild(DynamicProject project) throws IOException {
        super(project);
        this.model = new DynamicBuildModel(this);
    }

    public DynamicBuild(DynamicProject project, File buildDir) throws IOException {
        super(project, buildDir);
        this.model = new DynamicBuildModel(this);
    }

    /** Morphia lifecycle hook: re-create the transient model after deserialization. */
    public void postMorphiaLoad() {
        super.postMorphiaLoad();
        this.model = new DynamicBuildModel(this);
    }

    @SuppressWarnings("unchecked")
    @Override
    public void run() {
        try {
            // Let the model do its pre-run work (side effects not visible here) before
            // handing control to the standard Jenkins build execution.
            this.model.run();
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        execute(new DynamicRunExecution());
    }

    /** True when running on Jenkins newer than 1.560 (the matrix break-out version). */
    public boolean isNewJenkins() {
        VersionNumber matrixBreakOutVersion = new VersionNumber("1.560");
        return Jenkins.getVersion().isNewerThan(matrixBreakOutVersion);
    }

    public DynamicBuildLayouter getLayouter() {
        return new DynamicBuildLayouter(axisList, this);
    }

    // This needs to be overridden here to override the @RequirePOST annotation,
    // which seems like a bug in the version we are using.
    @Override
    public synchronized HttpResponse doStop() throws IOException, ServletException {
        return super.doStop();
    }

    @Override
    public void restoreFromDb(AbstractProject project, Map<String, Object> input) {
        super.restoreFromDb(project, input);
        this.model = new DynamicBuildModel(this);
    }

    /** Deletes the build (and its DB record) then redirects back to the project page. */
    @Override
    @RequirePOST
    public void doDoDelete(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException {
        checkPermission(DELETE);
        model.deleteBuild();
        rsp.sendRedirect2(req.getContextPath() + '/' + getParent().getUrl());
    }

    /**
     * Stapler URL dispatch: if the token parses as an axis Combination, serve the matching
     * sub-build (redirecting to its canonical URL when build numbers differ); otherwise
     * fall back to the default dispatch.
     */
    @Override
    public Object getDynamic(String token, StaplerRequest req, StaplerResponse rsp) {
        try {
            Build item = getRun(Combination.fromString(token));
            if (item != null) {
                if (item.getNumber() == this.getNumber()) {
                    return item;
                } else {
                    // redirect the user to the correct URL
                    String url = Functions.joinPath(item.getUrl(), req.getRestOfPath());
                    String qs = req.getQueryString();
                    if (qs != null) {
                        url += '?' + qs;
                    }
                    throw HttpResponses.redirectViaContextPath(url);
                }
            }
        } catch (IllegalArgumentException e) {
            // failed to parse the token as Combination. Must be something else
        }
        return super.getDynamic(token, req, rsp);
    }

    /** Merges the model's DotCi environment variables over the standard Jenkins ones. */
    @Override
    public Map<String, String> getDotCiEnvVars(EnvVars jenkinsEnvVars) {
        Map<String, String> vars = super.getDotCiEnvVars(jenkinsEnvVars);
        Map<String, String> dotCiEnvVars = model.getDotCiEnvVars();
        vars.putAll(dotCiEnvVars);
        return vars;
    }

    public Iterable<DynamicSubProject> getAllSubProjects() {
        return getConductor().getItems();
    }

    /** Returns the single sub-project matching the given axis combination. */
    public DynamicSubProject getSubProject(Combination subBuildCombination) {
        return Iterables.getOnlyElement(getSubProjects(Arrays.asList(subBuildCombination)));
    }

    public GithubRepositoryService getGithubRepositoryService() {
        return new GithubRepositoryService(getGithubRepoUrl());
    }

    /** Sets the axis list and immediately persists the build. */
    public void setAxisList(AxisList axisList) {
        this.axisList = axisList;
        try {
            save();
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    public Map<String,Object> getEnvironmentWithChangeSet(TaskListener listener) throws IOException, InterruptedException {
        return model.getEnvironmentWithChangeSet(listener);
    }

    public GHRepository getGithubRepository() {
        return getGithubRepositoryService().getGithubRepository();
    }

    /** Runs the DotCi build: sets up the environment, delegates to the BuildType, tears down. */
    protected class DynamicRunExecution extends Build.BuildExecution implements BuildExecutionContext {
        @Override
        public boolean performStep(BuildStep execution, BuildListener listener) throws IOException, InterruptedException {
            return perform(execution, listener);
        }

        @Override
        public void setResult(Result r) {
            DynamicBuild.this.setResult(r);
        }

        @Override
        protected Result doRun(BuildListener listener) throws Exception, hudson.model.Run.RunnerAbortedException {
            BuildEnvironment buildEnvironment = new BuildEnvironment(DynamicBuild.this, launcher, listener);
            try {
                if (!buildEnvironment.initialize()) {
                    return Result.FAILURE;
                }
                exportDeployKeysIfPrivateRepo(listener, launcher);
                BuildType buildType = BuildType.getBuildType(getParent());
                Result buildRunResult = buildType.runBuild(DynamicBuild.this, this, launcher, listener);
                setResult(buildRunResult);
                return buildRunResult;
            } catch (InvalidBuildConfigurationException invalidBuildConfigurationException) {
                // Bad .ci.yml (or equivalent): surface each validation error to the log.
                for (String error : invalidBuildConfigurationException.getValidationErrors()) {
                    listener.error(error);
                }
                return Result.FAILURE;
            }catch (InterruptedException e) {
                // Build was aborted: record who/why and return the executor's abort result.
                Executor x = Executor.currentExecutor();
                x.recordCauseOfInterruption(DynamicBuild.this, listener);
                return x.abortResult();
            }catch (Exception e) {
                // Unexpected failure: log it and stop the executor.
                PrintStream logger = listener.getLogger();
                logger.println(e.getMessage());
                logger.println(ExceptionUtils.getStackTrace(e));
                Executor x = Executor.currentExecutor();
                x.recordCauseOfInterruption(DynamicBuild.this, listener);
                x.doStop();
                return Result.FAILURE;
            } finally {
                // NOTE(review): a `return` inside finally overrides any pending return value
                // AND swallows any in-flight exception; it also skips deleteDeployKeys when
                // teardown reports failure. Looks intentional-ish but worth confirming.
                if (buildEnvironment.tearDownBuildEnvironments(listener)) {
                    return Result.FAILURE;
                }
                deleteDeployKeys(listener, launcher);
            }
        }
    }

    /** Falls back to the one-off executor when no regular executor is attached. */
    @Override
    @Exported
    public Executor getExecutor() {
        return super.getExecutor() == null ? getOneOffExecutor() : super.getExecutor();
    }

    private DynamicProject getConductor() {
        return this.getParent();
    }

    public Iterable<DynamicSubProject> getSubProjects(Iterable<Combination> mainRunCombinations) {
        return getConductor().getSubProjects(mainRunCombinations);
    }

    /** Finds this build number's run for the given axis combination, or null. */
    public Build getRun(Combination combination) {
        for (DynamicSubProject subProject : getAllSubProjects()) {
            if (subProject.getCombination().equals(combination)) {
                return getRunForConfiguration(subProject);
            }
        }
        return null;
    }

    @Override
    public DynamicProject getParent() {
        return super.getParent();
    }

    private DynamicSubBuild getRunForConfiguration(DynamicSubProject c) {
        DynamicSubBuild r = c.getBuildByNumber(getNumber());
        // NOTE(review): `r != null ? r : null` is equivalent to just `return r;`.
        return r != null ? r : null;
    }

    /** Equal when the other build shares this build's name and number. */
    @Override
    public boolean equals(Object other) {
        if (other instanceof DynamicBuild) {
            DbBackedBuild<DynamicProject, DynamicBuild> otherBuild = (DbBackedBuild<DynamicProject, DynamicBuild>) other;
            if (otherBuild.getName().equals(this.getName()) && otherBuild.getNumber() == this.getNumber()) {
                return true;
            }
        }
        return false;
    }

    @Override
    public int hashCode() {
        return Objects.hashCode(getName(), getNumber());
    }

    /** The git SHA from the build cause, or "" when there is no cause. */
    @Override
    public String getSha() {
        return this.getCause() == null ? "" : getCause().getSha();
    }

    @Override
    public BuildCause getCause() {
        return model.getBuildCause();
    }

    public String getGithubRepoUrl() {
        return getProject().getGithubRepoUrl();
    }

    // NOTE(review): mutates the list returned by CauseAction.getCauses() in place —
    // newer Jenkins versions return an unmodifiable list; verify against the target
    // Jenkins version.
    public void addCause(Cause manualCause) {
        this.getAction(CauseAction.class).getCauses().add(manualCause);
    }

    /*
     * Jenkins method is final cannot be mocked. Work around to make this
     * mockable without powermock
     */
    public String getFullUrl() {
        return this.getAbsoluteUrl();
    }

    public Map<String, String> getDotCiEnvVars() {
        return model.getDotCiEnvVars();
    }

    /** Marks this build as skipped by attaching a SkippedBuildAction. */
    public void skip() {
        addAction(new SkippedBuildAction());
    }

    /** Defaults the description to the current branch when none was set explicitly. */
    @Override
    public String getDescription() {
        String description = super.getDescription();
        return description == null? getCurrentBranch().toString() : description;
    }
}
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package by.zuyeu.deyestracker.core.detection.tracker; import by.zuyeu.deyestracker.core.detection.learning.TeacherWithUser; import by.zuyeu.deyestracker.core.detection.model.DetectFaceSample; import by.zuyeu.deyestracker.core.detection.model.StudyResult; import by.zuyeu.deyestracker.core.eda.event.CoreEvent; import by.zuyeu.deyestracker.core.eda.event.MoveEvent; import by.zuyeu.deyestracker.core.eda.event.QualityEvent; import by.zuyeu.deyestracker.core.eda.router.IRouter; import by.zuyeu.deyestracker.core.eda.router.RouterFactory; import by.zuyeu.deyestracker.core.exception.DEyesTrackerException; import by.zuyeu.deyestracker.core.exception.DEyesTrackerExceptionCode; import by.zuyeu.deyestracker.core.util.ExceptionToEventConverter; import by.zuyeu.deyestracker.core.util.OpenCVLibraryLoader; import by.zuyeu.deyestracker.core.video.sampler.FaceInfoSampler; import by.zuyeu.deyestracker.core.video.sampler.ISampler; import by.zuyeu.deyestracker.core.video.util.ScreenUtils; import org.apache.commons.collections4.queue.CircularFifoQueue; import org.opencv.core.Point; import org.opencv.core.Size; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * * @author Fieryphoenix */ public class ScreenPointTracker { public static class ScreenPointTrackerBuilder { private ISampler sampler; private IRouter router; private StudyResult studyResult; public ScreenPointTrackerBuilder() { } public ScreenPointTrackerBuilder setSampler(ISampler sampler) { this.sampler = sampler; return this; } public ScreenPointTrackerBuilder setRouter(IRouter router) { this.router = router; return this; } public ScreenPointTrackerBuilder setStudyResult(StudyResult studyResult) { this.studyResult = studyResult; return this; } public ScreenPointTracker createScreenPointTracker() throws DEyesTrackerException { final 
ScreenPointTracker screenPointTracker = new ScreenPointTracker(true); if (router != null) { screenPointTracker.setRouter(router); } else { screenPointTracker.initRouter(); } if (sampler != null) { screenPointTracker.setSampler(sampler); } else { screenPointTracker.openSampler(); } if (studyResult != null) { screenPointTracker.setStudyResult(studyResult); } else { screenPointTracker.startStuding(); } return screenPointTracker; } } private static final Logger LOG = LoggerFactory.getLogger(ScreenPointTracker.class); private static final double TRESHOLD_CENTER_VALUE = 0.5;//FIXME private static final double TRESHOLD_Y_VALUE = 0.5;//FIXME private static final double TRESHOLD_X_VALUE = 0.05;//FIXME private IRouter router; private ISampler sampler; private final CircularFifoQueue<DetectFaceSample> samples; private StudyResult studyResult; private boolean isStopped; private final Size screenSize; private ScreenPointBorder pointBorder; protected ScreenPointTracker(final boolean skipInit) throws DEyesTrackerException { if (!skipInit) { OpenCVLibraryLoader.loadCoreIfNeed(); initRouter(); openSampler(); startStuding(); } samples = new CircularFifoQueue<>(10); this.screenSize = ScreenUtils.getScreenSize(); } public void start() throws DEyesTrackerException { isStopped = false; defineScreenBorder(); while (!isStopped) { final DetectFaceSample nextSample = sampler.makeSample(); trackPointChanges(nextSample); sendQualityReport(nextSample); } LOG.info("TEST START - OK"); } private void trackPointChanges(final DetectFaceSample nextSample) { if (nextSample.isComplete()) { final DetectFaceSample lastSample = samples.peek(); if (lastSample != null) { final Point nextMedian = defineMedian(nextSample); final Point lastMedian = defineMedian(lastSample); tresholdMove(nextMedian, lastMedian); } samples.add(nextSample); } } private void sendQualityReport(final DetectFaceSample nextSample) { if (nextSample.getFace() == null) { router.sendEvent(new QualityEvent(QualityEvent.QualityType.BAD)); 
} else if (nextSample.getLeftEye() == null && nextSample.getRightEye() == null) { router.sendEvent(new QualityEvent(QualityEvent.QualityType.NORMAL)); } else { router.sendEvent(new QualityEvent(QualityEvent.QualityType.GOOD)); } } public void stop() throws DEyesTrackerException { //TODO isStopped = false; sampler.close(); LOG.info("TEST STOP - OK"); } public IRouter getRouter() { return this.router; } private void startStuding() throws DEyesTrackerException { LOG.trace("startStuding - start;"); TeacherWithUser teacher = new TeacherWithUser(router, sampler); try { this.studyResult = teacher.call(); } catch (Exception ex) { throw new DEyesTrackerException(DEyesTrackerExceptionCode.STUDY_FAILURE, ex); } LOG.trace("startStuding - end;"); } private void openSampler() throws DEyesTrackerException { LOG.trace("openSampler - start;"); try { sampler = new FaceInfoSampler(); } catch (DEyesTrackerException e) { dispatchException(e); throw e; } LOG.trace("openSampler - end;"); } private void initRouter() { LOG.trace("initRouter - start;"); this.router = RouterFactory.getRouter(RouterFactory.RouterType.EVENT); LOG.trace("initRouter - end;"); } private void dispatchException(final DEyesTrackerException e) { final DEyesTrackerExceptionCode code = e.getCode(); final CoreEvent.EventType item = ExceptionToEventConverter.getEventFromException(code); publishCoreEvent(item); } protected void publishCoreEvent(CoreEvent.EventType item) { if (router != null && item != null) { router.sendEvent(new CoreEvent(item)); } } private void publishMoveEvent(MoveEvent moveEvent) { if (router != null && moveEvent != null) { router.sendEvent(moveEvent); } } private void defineScreenBorder() { this.pointBorder = new ScreenPointBorder(); pointBorder.setBottomY(defineMedian(studyResult.getBottomLeft(), studyResult.getBottomRight()).y); pointBorder.setTopY(defineMedian(studyResult.getTopLeft(), studyResult.getTopRight()).y); pointBorder.setLeftX(defineMedian(studyResult.getTopLeft(), 
studyResult.getBottomLeft()).x); pointBorder.setRightX(defineMedian(studyResult.getTopRight(), studyResult.getBottomRight()).x); LOG.debug("defineScreenBorder - end: pointBorder = {}", pointBorder); } private Point defineMedian(DetectFaceSample s1, DetectFaceSample s2) { final Point p1 = defineMedian(s1); final Point p2 = defineMedian(s2); LOG.debug("p1 = {}, p2 = {}", p1, p2); return new Point((p1.x + p2.x) / 2, (p1.y + p2.y) / 2); } private Point defineMedian(DetectFaceSample sample) { double pupilXMedian = (sample.getLeftPupil().x + sample.getRightPupil().x) / 2; double pupilYMedian = (sample.getLeftPupil().y + sample.getRightPupil().y) / 2; double eyesXMedian = (sample.getLeftEye().x + sample.getLeftEye().width / 2 + sample.getRightEye().x + sample.getRightEye().width / 2) / 2; double eyesYMedian = (sample.getLeftEye().y + sample.getLeftEye().height / 2 + sample.getRightEye().y + sample.getRightEye().height / 2) / 2; LOG.debug("pupilXMedian = {}, eyesXMedian = {}", pupilXMedian, eyesXMedian); LOG.debug("pupilYMedian = {}, eyesYMedian = {}", pupilYMedian, eyesYMedian); return new Point((pupilXMedian + eyesXMedian) / 2, (pupilYMedian + eyesYMedian) / 2); } private void tresholdMove(Point nextMedian, Point lastMedian) { LOG.debug("tresholdMove() - start: lastMedian = {}, nextMedian = {}", lastMedian, nextMedian); boolean moveX = Math.abs(nextMedian.x - lastMedian.x) > TRESHOLD_X_VALUE; boolean moveY = Math.abs(nextMedian.y - lastMedian.y) > TRESHOLD_Y_VALUE; Point center = pointBorder.getCenter(); boolean moveXFromCenter = Math.abs(nextMedian.x - center.x) > TRESHOLD_CENTER_VALUE; boolean moveYFromCenter = Math.abs(nextMedian.x - center.x) > TRESHOLD_CENTER_VALUE; if (moveX || moveY || moveXFromCenter || moveYFromCenter) { scaleToBorderAndPublicMove(nextMedian, lastMedian, center); } LOG.debug("tresholdMove() - end;"); } private void scaleToBorderAndPublicMove(Point nextMedian, Point lastMedian, Point center) { Point screenViewPoint = 
calculateViewPointBoundToBorders(nextMedian); Point oldScreenViewPoint = calculateViewPointBoundToBorders(lastMedian); publishMoveEvent(new MoveEvent(oldScreenViewPoint, screenViewPoint, center)); } private Point calculateViewPointBoundToBorders(Point nextMedian) { LOG.debug("calculateViewPointBoundToBorders() - start;"); // shift from left border double width = pointBorder.getWidth(); double xShift = nextMedian.x - pointBorder.getLeftX(); width = Math.max(width, xShift); double viewX = screenSize.width / width * xShift; //shift from top border double height = pointBorder.getHeight(); double yShift = nextMedian.y - pointBorder.getTopY(); height = Math.max(height, yShift); double viewY = screenSize.height / height * yShift; Point screenViewPoint = new Point(viewX, viewY); LOG.debug("calculateViewPointBoundToBorders() - end: point = {}", screenViewPoint); return screenViewPoint; } private void setRouter(IRouter router) { this.router = router; } private void setSampler(ISampler sampler) { this.sampler = sampler; } private void setStudyResult(StudyResult studyResult) { this.studyResult = studyResult; } public ISampler getSampler() { return sampler; } public StudyResult getStudyResult() { return studyResult; } }
/*
 * Copyright 2009-2013 by The Regents of the University of California
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * you may obtain a copy of the License from
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package edu.uci.ics.asterix.runtime.evaluators.constructors;

import java.io.DataOutput;
import java.io.IOException;

import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
import edu.uci.ics.asterix.om.base.AMutableUUID;
import edu.uci.ics.asterix.om.base.ANull;
import edu.uci.ics.asterix.om.base.AUUID;
import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
import edu.uci.ics.asterix.om.types.ATypeTag;
import edu.uci.ics.asterix.om.types.BuiltinType;
import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;

/**
 * Receives a canonical representation of UUID and construct a UUID value.
 * a UUID is represented by 32 lowercase hexadecimal digits (8-4-4-4-12). (E.g.
 * uuid("02a199ca-bf58-412e-bd9f-60a0c975a8ac"))
 */
public class AUUIDFromStringConstructorDescriptor extends AbstractScalarFunctionDynamicDescriptor {

    private static final long serialVersionUID = 1L;
    // Serialized type tags used to dispatch on the tagged input value.
    private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
    private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();

    public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
        public IFunctionDescriptor createFunctionDescriptor() {
            return new AUUIDFromStringConstructorDescriptor();
        }
    };

    @Override
    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) {
        return new ICopyEvaluatorFactory() {
            private static final long serialVersionUID = 1L;

            @Override
            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
                return new ICopyEvaluator() {

                    private DataOutput out = output.getDataOutput();
                    // Scratch storage for the evaluated (tagged, serialized) argument.
                    private ArrayBackedValueStorage outInput = new ArrayBackedValueStorage();
                    private ICopyEvaluator eval = args[0].createEvaluator(outInput);
                    private String errorMessage = "This can not be an instance of UUID";
                    // Reused across tuples to avoid per-call allocation.
                    private AMutableUUID aUUID = new AMutableUUID(0, 0);
                    @SuppressWarnings("unchecked")
                    private ISerializerDeserializer<AUUID> uuidSerde = AqlSerializerDeserializerProvider.INSTANCE
                            .getSerializerDeserializer(BuiltinType.AUUID);
                    @SuppressWarnings("unchecked")
                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
                            .getSerializerDeserializer(BuiltinType.ANULL);
                    private long msb = 0;
                    private long lsb = 0;
                    private long tmpLongValue = 0;

                    /**
                     * Parses the canonical 8-4-4-4-12 hex string into the UUID's
                     * most/least significant 64-bit halves and serializes the result.
                     * A NULL input yields NULL; any other type tag is rejected.
                     *
                     * Final layout built below:
                     *   msb = part1 << 32 | part2 << 16 | part3
                     *   lsb = part4 << 48 | part5
                     * (part1 is 32 bits; the two "msb <<= 16" shifts move it up
                     * by 32 in total while interleaving parts 2 and 3.)
                     *
                     * NOTE(review): the input's length is not validated before
                     * indexing; a too-short string would fail inside
                     * calculateLongFromHex / array access rather than with a
                     * targeted message — confirm upstream guarantees the format.
                     */
                    @Override
                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
                        try {
                            outInput.reset();
                            eval.evaluate(tuple);
                            byte[] serString = outInput.getByteArray();
                            if (serString[0] == SER_STRING_TYPE_TAG) {
                                msb = 0;
                                lsb = 0;
                                tmpLongValue = 0;
                                // Serialized string layout: byte 0 = type tag,
                                // bytes 1-2 = UTF length, so hex digits start at 3.
                                int offset = 3;
                                // First part - 8 hex digits (32 bits).
                                msb = calculateLongFromHex(serString, offset, 8);
                                msb <<= 16;
                                offset += 8;
                                // Skip the hyphen part
                                offset += 1;
                                // Second part - 4 hex digits (16 bits).
                                tmpLongValue = calculateLongFromHex(serString, offset, 4);
                                msb |= tmpLongValue;
                                msb <<= 16;
                                offset += 4;
                                // Skip the hyphen part
                                offset += 1;
                                // Third part - 4 hex digits (16 bits).
                                tmpLongValue = calculateLongFromHex(serString, offset, 4);
                                msb |= tmpLongValue;
                                offset += 4;
                                // Skip the hyphen part
                                offset += 1;
                                // Fourth part - 4 hex digits, placed in the top 16 bits of lsb.
                                lsb = calculateLongFromHex(serString, offset, 4);
                                lsb <<= 48;
                                offset += 4;
                                // Skip the hyphen part
                                offset += 1;
                                // The last part - 12 hex digits (48 bits).
                                tmpLongValue = calculateLongFromHex(serString, offset, 12);
                                lsb |= tmpLongValue;
                                aUUID.setValue(msb, lsb);
                                uuidSerde.serialize(aUUID, out);
                            } else if (serString[0] == SER_NULL_TYPE_TAG)
                                nullSerde.serialize(ANull.NULL, out);
                            else
                                throw new AlgebricksException(errorMessage);
                        } catch (IOException e1) {
                            throw new AlgebricksException(errorMessage);
                        }
                    }

                    // Calculate a long value from `length` hex digits starting at
                    // `offset`. Throws if any byte is not a hex digit (hyphens are
                    // skipped by the caller via explicit offset bumps, so the -1
                    // sentinel from transformHexCharToInt is treated as an error here).
                    private long calculateLongFromHex(byte[] hexArray, int offset, int length)
                            throws AlgebricksException {
                        int tmpIntVal = 0;
                        long tmpLongVal = 0;
                        for (int i = offset; i < offset + length; i++) {
                            tmpIntVal = transformHexCharToInt(hexArray[i]);
                            if (tmpIntVal != -1) {
                                tmpLongVal = tmpLongVal * 16 + tmpIntVal;
                            } else {
                                throw new AlgebricksException("This is not a correct UUID value.");
                            }
                        }
                        return tmpLongVal;
                    }

                    // Interpret one ASCII byte as a hex digit value (case-insensitive);
                    // returns -1 for '-' (hyphen sentinel), throws on anything else.
                    private int transformHexCharToInt(byte val) throws AlgebricksException {
                        switch (val) {
                            case '0':
                                return 0;
                            case '1':
                                return 1;
                            case '2':
                                return 2;
                            case '3':
                                return 3;
                            case '4':
                                return 4;
                            case '5':
                                return 5;
                            case '6':
                                return 6;
                            case '7':
                                return 7;
                            case '8':
                                return 8;
                            case '9':
                                return 9;
                            case 'a':
                            case 'A':
                                return 10;
                            case 'b':
                            case 'B':
                                return 11;
                            case 'c':
                            case 'C':
                                return 12;
                            case 'd':
                            case 'D':
                                return 13;
                            case 'e':
                            case 'E':
                                return 14;
                            case 'f':
                            case 'F':
                                return 15;
                            case '-':
                                // We need to skip this hyphen part.
                                return -1;
                            default:
                                throw new AlgebricksException("This is not a correct UUID value.");
                        }
                    }
                };
            }
        };
    }

    @Override
    public FunctionIdentifier getIdentifier() {
        return AsterixBuiltinFunctions.UUID_CONSTRUCTOR;
    }
}
package com.musical.fragments;

import java.util.ArrayList;

import android.app.Activity;
import android.app.LoaderManager.LoaderCallbacks;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.Loader;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.app.Fragment;
import android.util.Log;
import android.view.ActionMode;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.AdapterView.OnItemLongClickListener;
import android.widget.GridView;
import android.widget.TextView;
import android.widget.Toast;

import com.musical.R;
import com.musical.adapters.ArtistAdapter;
import com.musical.loaders.ArtistsLoader;
import com.musical.loaders.TagsLoader;
import com.musical.model.Artist;
import com.musical.service.CoverService;
import com.musical.utils.KeyConsts;
import com.musical.utils.NetworkState;
import com.musical.utils.TagSetter;

/**
 * Fragment showing all artists in a grid. Loads data via {@link ArtistsLoader},
 * lets the user open an artist (click), and offers a contextual action mode
 * (long-click) with "refresh cover" and "edit tags" actions. Cover updates are
 * delivered asynchronously through a broadcast from {@link CoverService}.
 */
public class ArtistsFragment extends Fragment implements OnItemLongClickListener, OnItemClickListener,
        LoaderCallbacks<ArrayList<Artist>> {

    private static final String TAG = ArtistsFragment.class.getSimpleName();
    // Loader id for the artist list.
    private static final int LOAD_ARTIST = 1;
    private Callbacks mCallbacks = sDummyCallbacks;

    // Receives results from TagSetter: msg.what == 0 -> tag edit succeeded
    // (payload carries the updated Artist), msg.what == 1 -> failure toast.
    // NOTE(review): anonymous non-static Handler keeps an implicit reference to
    // this fragment; messages posted after teardown could leak it — confirm
    // TagSetter's lifetime.
    private Handler mHandler = new Handler() {
        public void handleMessage(Message msg) {
            super.handleMessage(msg);
            switch (msg.what) {
                case 0:
                    Log.i(TAG, "Success");
                    artistForAction = msg.getData().getParcelable(KeyConsts.args.ARTIST);
                    mAdapter.update(artistForAction);
                    break;
                case 1:
                    Log.i(TAG, "Fail");
                    Toast.makeText(getActivity(), getResources().getString(R.string.epic_fail),
                            Toast.LENGTH_SHORT).show();
                    break;
            }
        }
    };

    // Active contextual ActionMode, or null when none is showing.
    protected Object mActionMode;
    // Artist currently targeted by the action mode / handler updates.
    Artist artistForAction;
    ArtistAdapter mAdapter;
    private TagSetter modifier;
    // Registered in onResume, unregistered in onPause.
    CoverBroadCastReceiver coverReceiver;

    public static Fragment newInstance() {
        return new ArtistsFragment();
    }

    /**
     * A callback interface that all activities containing this fragment must
     * implement. This mechanism allows activities to be notified of item
     * selections.
     */
    public interface Callbacks {
        public void onArtistSelected(Artist art);

        public void findArtistCover(Artist art);
    }

    // No-op stand-in used while the fragment is detached, so callback calls
    // never hit a null reference.
    private static Callbacks sDummyCallbacks = new Callbacks() {
        @Override
        public void onArtistSelected(Artist art) {
        }

        @Override
        public void findArtistCover(Artist art) {
        }
    };

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        modifier = new TagSetter(getActivity(), mHandler);
        mAdapter = new ArtistAdapter(getActivity(), new ArrayList<Artist>());
    }

    // NOTE(review): onAttach(Activity) is deprecated on newer API levels in
    // favor of onAttach(Context) — fine if minSdk predates API 23.
    @Override
    public void onAttach(Activity activity) {
        super.onAttach(activity);
        if (!(activity instanceof Callbacks)) {
            throw new IllegalStateException("Activity must implement fragment's callbacks.");
        }
        mCallbacks = (Callbacks) activity;
    }

    @Override
    public void onDetach() {
        super.onDetach();
        // Swap back to the no-op callbacks so late events are harmless.
        mCallbacks = sDummyCallbacks;
    }

    @Override
    public void onResume() {
        super.onResume();
        // Update the custom action bar title and start listening for cover refreshes.
        ((TextView) getActivity().getActionBar().getCustomView().findViewById(R.id.action_bar_title)).setText("Artists");
        IntentFilter filter = new IntentFilter(CoverService.REFRESH_ARTIST);
        coverReceiver = new CoverBroadCastReceiver();
        getActivity().registerReceiver(coverReceiver, filter);
    }

    @Override
    public void onPause() {
        super.onPause();
        getActivity().unregisterReceiver(coverReceiver);
        coverReceiver = null;
    }

    @Override
    public void onSaveInstanceState(Bundle outState) {
        // Persist the loaded artist list so a rebuild can skip the loader.
        outState.putParcelableArrayList("artists", mAdapter.getAll());
        super.onSaveInstanceState(outState);
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        super.onCreateView(inflater, container, savedInstanceState);
        Log.i(TAG, "On create ");
        // Restore from saved state when possible, otherwise kick off the loader.
        if (savedInstanceState != null && savedInstanceState.getParcelableArrayList("artists") != null) {
            ArrayList<Artist> savedArtists = savedInstanceState.getParcelableArrayList("artists");
            mAdapter.addAll(savedArtists);
        } else
            getActivity().getLoaderManager().initLoader(LOAD_ARTIST, null, this);
        View rootView = inflater.inflate(R.layout.frag_artists, null);
        GridView gv = (GridView) rootView.findViewById(R.id.artists_gridView);
        gv.setAdapter(mAdapter);
        gv.setOnItemClickListener(this);
        gv.setOnItemLongClickListener(this);
        return rootView;
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
    }

    @Override
    public ArtistsLoader onCreateLoader(int id, Bundle arg1) {
        Log.i(TAG, "On create Lodaer AHA");
        ArtistsLoader l = new ArtistsLoader(getActivity());
        // Force an immediate load; the loader framework otherwise defers it.
        l.forceLoad();
        return l;
    }

    @Override
    public void onLoadFinished(Loader<ArrayList<Artist>> loader, ArrayList<Artist> art) {
        // Only populate when empty — avoids clobbering state restored in
        // onCreateView. NOTE(review): this also means redeliveries with fresh
        // data are ignored once the adapter has content; confirm intended.
        if (mAdapter.isEmpty()) {
            mAdapter.removeAll();
            mAdapter.addAll(art);
            mAdapter.notifyDataSetChanged();
        }
    }

    @Override
    public void onLoaderReset(Loader<ArrayList<Artist>> arg0) {
        // TODO Auto-generated method stub
    }

    @Override
    public void onItemClick(AdapterView<?> arg0, View arg1, int pos, long arg3) {
        // A plain click while an action mode is open dismisses it first.
        if (mActionMode != null) {
            ((ActionMode) mActionMode).finish();
        }
        mCallbacks.onArtistSelected(mAdapter.getItem(pos));
    }

    @Override
    public boolean onItemLongClick(AdapterView<?> arg0, View view, int pos, long arg3) {
        // Only one action mode at a time.
        if (mActionMode != null) {
            return false;
        }
        artistForAction = mAdapter.getItem(pos);
        mActionMode = getActivity().startActionMode(mActionModeCallback);
        view.setSelected(true);
        return true;
    }

    // Contextual action mode for a long-pressed artist: refresh cover / edit tags.
    private ActionMode.Callback mActionModeCallback = new ActionMode.Callback() {

        // Called when the action mode is created; startActionMode() was called
        public boolean onCreateActionMode(ActionMode mode, Menu menu) {
            // Inflate a menu resource providing context menu items
            MenuInflater inflater = mode.getMenuInflater();
            // Assumes that you have "contexual.xml" menu resources
            inflater.inflate(R.menu.contextual, menu);
            mode.setTitle(artistForAction.getName());
            return true;
        }

        public boolean onPrepareActionMode(ActionMode mode, Menu menu) {
            return false; // Return false if nothing is done
        }

        // Called when the user selects a contextual menu item
        public boolean onActionItemClicked(ActionMode mode, MenuItem item) {
            switch (item.getItemId()) {
                case R.id.refresh:
                    // Cover refresh needs network; otherwise show a toast.
                    if (NetworkState.isOnline(getActivity())) {
                        mCallbacks.findArtistCover(artistForAction);
                    } else {
                        Toast.makeText(getActivity(), getResources().getString(R.string.no_network),
                                Toast.LENGTH_SHORT).show();
                    }
                    mode.finish();
                    return true;
                case R.id.edit:
                    // Open the tag editor dialog; results come back via mHandler.
                    Bundle a = new Bundle();
                    a.putParcelable(KeyConsts.args.ARTIST, artistForAction);
                    a.putInt(KeyConsts.args.MODE, TagsLoader.mode.artist);
                    EditTagsDFragment f = EditTagsDFragment.newInstance(a, modifier);
                    f.show(getFragmentManager(), "");
                    mode.finish();
                    return true;
                default:
                    return false;
            }
        }

        // Called when the user exits the action mode
        public void onDestroyActionMode(ActionMode mode) {
            mActionMode = null;
        }
    };

    // Handles CoverService.REFRESH_ARTIST broadcasts: on success the updated
    // Artist is applied to the adapter, otherwise a failure toast is shown.
    private class CoverBroadCastReceiver extends BroadcastReceiver {

        @Override
        public void onReceive(Context context, Intent intent) {
            Log.i(TAG, "Received: " + intent.getAction());
            if (intent.getBooleanExtra("success", false)) {
                artistForAction = intent.getParcelableExtra(CoverService.REFRESH_ARTIST);
                mAdapter.update(artistForAction);
            } else {
                Toast.makeText(getActivity(), getResources().getString(R.string.epic_fail),
                        Toast.LENGTH_SHORT).show();
            }
        }
    }
}
/* * Copyright (c) 2017, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.wso2.carbon.identity.application.authentication.framework.handler.sequence.impl; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.collections.MapUtils; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.wso2.carbon.identity.application.authentication.framework.config.model.ApplicationConfig; import org.wso2.carbon.identity.application.authentication.framework.config.model.SequenceConfig; import org.wso2.carbon.identity.application.authentication.framework.context.AuthenticationContext; import org.wso2.carbon.identity.application.authentication.framework.exception.FrameworkException; import org.wso2.carbon.identity.application.authentication.framework.model.AuthenticatedUser; import org.wso2.carbon.identity.application.authentication.framework.util.FrameworkConstants; import org.wso2.carbon.identity.application.authentication.framework.util.FrameworkUtils; import org.wso2.carbon.identity.application.common.model.ClaimConfig; import org.wso2.carbon.identity.claim.metadata.mgt.ClaimMetadataHandler; import org.wso2.carbon.identity.claim.metadata.mgt.exception.ClaimMetadataException; import org.wso2.carbon.identity.core.util.IdentityUtil; import org.wso2.carbon.user.core.UserCoreConstants; import 
org.wso2.carbon.user.core.util.UserCoreUtil; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.regex.Pattern; import static org.wso2.carbon.identity.application.authentication.framework.util.FrameworkConstants.InternalRoleDomains.APPLICATION_DOMAIN; import static org.wso2.carbon.identity.application.authentication.framework.util.FrameworkConstants.InternalRoleDomains.WORKFLOW_DOMAIN; import static org.wso2.carbon.identity.core.util.IdentityUtil.getLocalGroupsClaimURI; /** * Common utility used by Default Sequence Handlers. */ public class DefaultSequenceHandlerUtils { private static Log log = LogFactory.getLog(DefaultSequenceHandlerUtils.class); private static final String SEND_ONLY_SP_MAPPED_ROLES = "SPRoleManagement.ReturnOnlyMappedLocalRoles"; private DefaultSequenceHandlerUtils() { } public static String getServiceProviderMappedUserRoles(SequenceConfig sequenceConfig, List<String> locallyMappedUserRoles) { if (log.isDebugEnabled()) { AuthenticatedUser authenticatedUser = sequenceConfig.getAuthenticatedUser(); String serviceProvider = sequenceConfig.getApplicationConfig().getApplicationName(); log.debug("Getting Service Provider mapped roles of application: " + serviceProvider + " of user: " + authenticatedUser); } // SP role mapped role values joined by Multi Attribute Separator. 
boolean returnOnlyMappedLocalRoles = Boolean.parseBoolean(IdentityUtil.getProperty(SEND_ONLY_SP_MAPPED_ROLES)); String spMappedRoles = null; if (CollectionUtils.isNotEmpty(locallyMappedUserRoles)) { // Get SP Role mappings Map<String, String> localToSpRoleMapping = sequenceConfig.getApplicationConfig().getRoleMappings(); List<String> spMappedRoleList = new ArrayList<>(); List<String> domainRemovedRoleList = new ArrayList<>(); // Check whether there are any SpRoleMappings if (localToSpRoleMapping != null && !localToSpRoleMapping.isEmpty()) { for (String locallyMappedRole : locallyMappedUserRoles) { if (localToSpRoleMapping.containsKey(locallyMappedRole)) { // add the SP mapped role String spMappedRole = localToSpRoleMapping.get(locallyMappedRole); spMappedRoleList.add(spMappedRole); if (log.isDebugEnabled()) { log.debug("Mapping local role: " + locallyMappedRole + " to service provider role: " + spMappedRole); } } else { // If ReturnOnlyMappedLocalRoles is false, add local role to the list. if (!returnOnlyMappedLocalRoles) { if (isRemoveUserDomainInRole(sequenceConfig)) { //if 'Use user store domain in roles' is false add the list to remove domain name. domainRemovedRoleList.add(locallyMappedRole); } else { spMappedRoleList.add(locallyMappedRole); } } } } } else { if (log.isDebugEnabled()) { log.debug("No local roles to map to Service Provider role mappings. Sending back all local roles " + "as service provider mapped roles."); } // We don't have any sp role mappings if (isRemoveUserDomainInRole(sequenceConfig)) { domainRemovedRoleList = locallyMappedUserRoles; } else { spMappedRoleList = locallyMappedUserRoles; } } //if 'Use user store domain in roles' is false remove the domain from roles. 
if (isRemoveUserDomainInRole(sequenceConfig)) { List<String> domainRemovedRoles = removeDomainFromNamesExcludeHybrid(domainRemovedRoleList); if (!domainRemovedRoles.isEmpty()) { spMappedRoleList.addAll(domainRemovedRoles); } } spMappedRoles = StringUtils.join(spMappedRoleList.toArray(), FrameworkUtils.getMultiAttributeSeparator()); } if (log.isDebugEnabled()) { log.debug("Service Provider Mapped Roles: " + spMappedRoles); } return spMappedRoles; } /** * Remove domain name from roles except the hybrid roles (Internal,Application & Workflow) * * @param names list of roles assigned to a user * @return list of roles assigned to a user with domain name removed from roles */ private static List<String> removeDomainFromNamesExcludeHybrid(List<String> names) { List<String> nameList = new ArrayList<String>(); for (String name : names) { String userStoreDomain = IdentityUtil.extractDomainFromName(name); if (UserCoreConstants.INTERNAL_DOMAIN.equalsIgnoreCase(userStoreDomain) || APPLICATION_DOMAIN .equalsIgnoreCase(userStoreDomain) || WORKFLOW_DOMAIN.equalsIgnoreCase(userStoreDomain)) { nameList.add(name); } else { nameList.add(UserCoreUtil.removeDomainFromName(name)); } } return nameList; } // Execute only if it has allowed removing userstore domain from the sp level configurations. private static boolean isRemoveUserDomainInRole(SequenceConfig sequenceConfig) { return !sequenceConfig.getApplicationConfig().getServiceProvider().getLocalAndOutBoundAuthenticationConfig(). isUseUserstoreDomainInRoles(); } /** * Get the roles from service provider requested claims. * After the service provider claims mappings the role claim URI could * change from the local role claim uri. This method will find the * roles based on the given role claim URI or the proper role claim URI. * * @param context AuthenticationContext. * @param sequenceConfig SequenceConfig. * @param mappedAttrs Service Provider mapped claims. * @param spRoleUri Service Provider role claim URiI. * @return Roles. 
* @throws FrameworkException */ public static String[] getRolesFromSPMappedClaims(AuthenticationContext context, SequenceConfig sequenceConfig, Map<String, String> mappedAttrs, String spRoleUri) throws FrameworkException { String spStandardDialect = DefaultSequenceHandlerUtils.getSPStandardDialect(context); String roleAttr = null; if (spStandardDialect != null && DefaultSequenceHandlerUtils.isLocalClaimDialect(context)) { spRoleUri = DefaultSequenceHandlerUtils.getStandardRoleClaimURI(spStandardDialect, context.getTenantDomain()); roleAttr = mappedAttrs.get(spRoleUri); } else if (spStandardDialect != null && !DefaultSequenceHandlerUtils.isLocalClaimDialect(context)) { String localClaim = DefaultSequenceHandlerUtils.getSPMappedLocalRoleClaimURI(sequenceConfig.getApplicationConfig()); spRoleUri = DefaultSequenceHandlerUtils.getStandardClaimURIFromLocal(spStandardDialect, context.getTenantDomain(), localClaim); roleAttr = mappedAttrs.get(spRoleUri); } else if (spStandardDialect == null && DefaultSequenceHandlerUtils.isLocalClaimDialect(context)) { roleAttr = mappedAttrs.get(spRoleUri); } else if (spStandardDialect == null && !DefaultSequenceHandlerUtils.isLocalClaimDialect(context)) { roleAttr = mappedAttrs.get(spRoleUri); } if (StringUtils.isNotBlank(roleAttr)) { // Need to convert multiAttributeSeparator value into a regex literal before calling // split function. Otherwise split can produce misleading results in case // multiAttributeSeparator contains regex special meaning characters like .* return roleAttr.split(Pattern.quote(FrameworkUtils.getMultiAttributeSeparator())); } return null; } /** * Get the standard claim dialect of the service provider in the * authentication context. * * @param context AuthenticationContext. * @return The claim dialect of the service provider. 
     */
    private static String getSPStandardDialect(AuthenticationContext context) {

        ApplicationConfig appConfig = context.getSequenceConfig().getApplicationConfig();
        String spStandardDialect;
        // Prefer a dialect already cached on the context; otherwise derive it from the request type.
        if (context.getProperties().containsKey(FrameworkConstants.SP_STANDARD_DIALECT)) {
            spStandardDialect = (String) context.getProperty(FrameworkConstants.SP_STANDARD_DIALECT);
        } else {
            spStandardDialect = FrameworkUtils.getStandardDialect(context.getRequestType(), appConfig);
        }
        return spStandardDialect;
    }

    /**
     * Checks if the service provider is using the local claim dialect.
     * Extracts the service provider details from the authentication context.
     *
     * @param context AuthenticationContext.
     * @return True if the used dialect is the local dialect.
     */
    private static boolean isLocalClaimDialect(AuthenticationContext context) {

        ApplicationConfig appConfig = context.getSequenceConfig().getApplicationConfig();
        ClaimConfig claimConfig = appConfig.getServiceProvider().getClaimConfig();
        return claimConfig.isLocalClaimDialect();
    }

    /**
     * Get the standard role claim URI used for the given dialect.
     *
     * @param standardDialect Dialect URI.
     * @param tenantDomain Tenant domain.
     * @return Matching role claim uri of the given dialect.
     * @throws FrameworkException
     */
    private static String getStandardRoleClaimURI(String standardDialect, String tenantDomain)
            throws FrameworkException {

        // Map the local groups claim into the given dialect; fall back to the local
        // groups claim URI itself when no mapping exists.
        String roleClaim = getStandardClaimURIFromLocal(standardDialect, tenantDomain, getLocalGroupsClaimURI());
        if (StringUtils.isBlank(roleClaim)) {
            return getLocalGroupsClaimURI();
        }
        return roleClaim;
    }

    /**
     * Get the standard claim URI used for the given dialect.
     *
     * @param standardDialect Dialect URI.
     * @param tenantDomain Tenant domain.
     * @param claimURI Local claim URI.
     * @return Matching claim uri of the given dialect.
     * @throws FrameworkException
     */
    private static String getStandardClaimURIFromLocal(String standardDialect, String tenantDomain, String claimURI)
            throws FrameworkException {

        try {
            // Mapping table is keyed by the local (Carbon) claim URI; values are the
            // corresponding URIs in the requested standard dialect.
            Map<String, String> claimMapping = ClaimMetadataHandler.getInstance()
                    .getMappingsMapFromOtherDialectToCarbon(standardDialect, null, tenantDomain, true);
            if (claimMapping.containsKey(claimURI)) {
                return claimMapping.get(claimURI);
            }
        } catch (ClaimMetadataException e) {
            throw new FrameworkException("Error while loading mappings.", e);
        }
        // No mapping found for the given local claim URI in this dialect.
        return null;
    }

    /**
     * Used to get the service provider mapped local role claim URI.
     *
     * @param appConfig ApplicationConfig.
     * @return Service Provider mapped role claim URI.
     */
    private static String getSPMappedLocalRoleClaimURI(ApplicationConfig appConfig) {

        String spRoleClaimUri = appConfig.getRoleClaim();
        if (StringUtils.isNotBlank(spRoleClaimUri)) {
            // Claim mappings are SP claim URI -> local claim URI; find the local claim
            // corresponding to the SP's configured role claim.
            Map<String, String> spToLocalClaimMapping = appConfig.getClaimMappings();
            if (MapUtils.isNotEmpty(spToLocalClaimMapping)) {
                for (Map.Entry<String, String> entry : spToLocalClaimMapping.entrySet()) {
                    if (spRoleClaimUri.equals(entry.getKey())) {
                        return entry.getValue();
                    }
                }
            }
        }
        // Default to the local groups claim when the SP has no role claim configured or mapped.
        return getLocalGroupsClaimURI();
    }

    /**
     * Get the role claim URI of the service provider form application config.
     *
     * @param appConfig ApplicationConfig.
     * @return Role claim URI of the service provider.
     */
    public static String getSpRoleClaimUri(ApplicationConfig appConfig) {

        // Get external identity provider role claim uri.
        String spRoleClaimUri = appConfig.getRoleClaim();
        if (StringUtils.isEmpty(spRoleClaimUri)) {
            // No role claim uri defined
            // we can still try to find it out - lets have a look at the claim
            // mapping.
            // Reverse lookup: find the SP claim URI whose local mapping is the groups claim.
            Map<String, String> spToLocalClaimMapping = appConfig.getClaimMappings();
            if (MapUtils.isNotEmpty(spToLocalClaimMapping)) {
                for (Map.Entry<String, String> entry : spToLocalClaimMapping.entrySet()) {
                    if (getLocalGroupsClaimURI().equals(entry.getValue())) {
                        spRoleClaimUri = entry.getKey();
                        break;
                    }
                }
            }
        }
        if (StringUtils.isEmpty(spRoleClaimUri)) {
            // Still nothing configured or mapped: fall back to the local groups claim URI.
            spRoleClaimUri = getLocalGroupsClaimURI();
            if (log.isDebugEnabled()) {
                String serviceProvider = appConfig.getApplicationName();
                log.debug("Service Provider Role Claim URI not configured for SP: " + serviceProvider +
                        ". Defaulting to " + spRoleClaimUri);
            }
        }
        return spRoleClaimUri;
    }

    /**
     * Get standard role claim uri for the given service provider role uri.
     *
     * @param context AuthenticationContext.
     * @param spRoleUri Service Provider role claim URI.
     * @param sequenceConfig SequenceConfig.
     * @return Standard role claim uri for the given spRoleUri.
     * @throws FrameworkException If an error occurred while reading standard role uri.
     */
    public static String getStandardRoleClaimUri(AuthenticationContext context, String spRoleUri,
                                                 SequenceConfig sequenceConfig) throws FrameworkException {

        String spStandardDialect = DefaultSequenceHandlerUtils.getSPStandardDialect(context);
        if (spStandardDialect != null && DefaultSequenceHandlerUtils.isLocalClaimDialect(context)) {
            // Local dialect: translate the local role claim into the standard dialect.
            spRoleUri = DefaultSequenceHandlerUtils.getStandardRoleClaimURI(spStandardDialect,
                    context.getTenantDomain());
        } else if (spStandardDialect != null && !DefaultSequenceHandlerUtils.isLocalClaimDialect(context)) {
            // Custom SP mappings: resolve the SP-mapped local role claim, then translate it.
            String localClaim =
                    DefaultSequenceHandlerUtils.getSPMappedLocalRoleClaimURI(sequenceConfig.getApplicationConfig());
            spRoleUri = DefaultSequenceHandlerUtils.getStandardClaimURIFromLocal(spStandardDialect,
                    context.getTenantDomain(), localClaim);
        }
        // When no standard dialect applies, the given spRoleUri is returned unchanged.
        return spRoleUri;
    }
}
/** * #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=# * This file is part of the Smart Developer Hub Project: * http://www.smartdeveloperhub.org/ * * Center for Open Middleware * http://www.centeropenmiddleware.com/ * #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=# * Copyright (C) 2015-2016 Center for Open Middleware. * #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=# * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=# * Artifact : org.smartdeveloperhub.harvesters.scm:scm-harvester-backend:0.3.0 * Bundle : scm-harvester-backend-0.3.0.jar * #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=# */ package org.smartdeveloperhub.harvesters.scm.backend.notification; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.sameInstance; import static org.junit.Assert.fail; import java.io.IOException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import mockit.Expectations; import mockit.Mock; import mockit.MockUp; import mockit.Mocked; import mockit.integration.junit4.JMockit; import 
org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;

import org.ldp4j.commons.testing.Utils;

import com.rabbitmq.client.Channel;
import com.rabbitmq.client.Connection;
import com.rabbitmq.client.ConnectionFactory;

/**
 * Unit tests for {@code ConnectionManager}: connect/disconnect behaviour,
 * default-channel and thread-local-channel lifecycle, and error reporting,
 * using JMockit mock-ups of the RabbitMQ client classes.
 */
@RunWith(JMockit.class)
public class ConnectionManagerTest {

    // Mocked broker connection returned by the mocked ConnectionFactory in most tests.
    @Mocked private Connection connection;

    // Mocked default channel produced by the mocked connection.
    @Mocked private Channel channel;

    // System under test, recreated before each test.
    private ConnectionManager sut;

    @Before
    public void setUp() {
        this.sut=new ConnectionManager("name", "localhost", 5726, "/");
    }

    // connect() must wrap an IOException from ConnectionFactory.newConnection()
    // in a ControllerException carrying the broker coordinates.
    // NOTE(review): unlike the TimeoutException variant below, this test has no fail()
    // after connect(), so it would pass silently if no exception were thrown — confirm intent.
    @Test
    public void testConnect$failConnection$couldNotCreateConnection$IOException() throws Exception {
        new MockUp<ConnectionFactory>() {
            @Mock public void setHost(final String host) { }
            @Mock public void setPort(final int port) { }
            @Mock public void setVirtualHost(final String virtualHost) { }
            @Mock public void setThreadFactory(final ThreadFactory threadFactory) { }
            @Mock public Connection newConnection() throws IOException, TimeoutException {
                throw new IOException("Could not connect");
            }
        };
        try {
            this.sut.connect();
        } catch (final ControllerException e) {
            assertThat(e.getMessage(),equalTo("Could not connect to broker at localhost:5726 using virtual host /"));
            assertThat(e.getCause(),instanceOf(IOException.class));
            assertThat(e.getCause().getMessage(),equalTo("Could not connect"));
        }
    }

    // connect() must also wrap a TimeoutException, preserving host/port/virtual-host details.
    @Test
    public void testConnect$failConnection$couldNotCreateConnection$TimeoutException() throws Exception {
        new MockUp<ConnectionFactory>() {
            @Mock public void setHost(final String host) { }
            @Mock public void setPort(final int port) { }
            @Mock public void setVirtualHost(final String virtualHost) { }
            @Mock public void setThreadFactory(final ThreadFactory threadFactory) { }
            @Mock public Connection newConnection() throws IOException, TimeoutException {
                throw new TimeoutException("Could not connect");
            }
        };
        try {
            this.sut.connect();
            fail("Should have failed to connect");
        } catch (final ControllerException e) {
            assertThat(e.getMessage(),equalTo("Could not connect to broker at localhost:5726 using virtual host /"));
            assertThat(e.getCause(),instanceOf(TimeoutException.class));
            assertThat(e.getCause().getMessage(),equalTo("Could not connect"));
            assertThat(e.getBrokerHost(),equalTo("localhost"));
            assertThat(e.getBrokerPort(),equalTo(5726));
            assertThat(e.getVirtualHost(),equalTo("/"));
        }
    }

    // Calling connect() twice must only create one physical connection
    // (enforced by @Mock(invocations=1)).
    @Test
    public void testConnectIsSafe() throws Exception {
        new MockUp<ConnectionFactory>() {
            @Mock public void setHost(final String host) { }
            @Mock public void setPort(final int port) { }
            @Mock public void setVirtualHost(final String virtualHost) { }
            @Mock public void setThreadFactory(final ThreadFactory threadFactory) { }
            @Mock(invocations=1) public Connection newConnection() throws IOException, TimeoutException {
                return ConnectionManagerTest.this.connection;
            }
        };
        new Expectations() {{
            ConnectionManagerTest.this.connection.createChannel();this.result=ConnectionManagerTest.this.channel;
            ConnectionManagerTest.this.connection.isOpen();this.result=true;
        }};
        this.sut.connect();
        this.sut.connect();
    }

    // disconnect() on a never-connected manager must be a no-op.
    @Test
    public void testDisconnectIsSafe() throws Exception {
        this.sut.disconnect();
    }

    // A RuntimeException thrown while checking the connection during disconnect()
    // must propagate (and, per the test name, must not leave the manager locked).
    @Test
    public void testDisconnectUnlocksOnUnexpectedFailure() throws Exception {
        new MockUp<ConnectionFactory>() {
            @Mock public void setHost(final String host) { }
            @Mock public void setPort(final int port) { }
            @Mock public void setVirtualHost(final String virtualHost) { }
            @Mock public void setThreadFactory(final ThreadFactory threadFactory) { }
            @Mock(invocations=1) public Connection newConnection() throws IOException, TimeoutException {
                return ConnectionManagerTest.this.connection;
            }
        };
        new Expectations() {{
            ConnectionManagerTest.this.connection.createChannel();this.result=ConnectionManagerTest.this.channel;
            ConnectionManagerTest.this.connection.isOpen();returns(true);this.result=new RuntimeException("Failure");
        }};
        this.sut.connect();
        try {
            this.sut.disconnect();
            fail("Should have failed to disconnect");
        } catch (final RuntimeException e) {
            assertThat(e.getMessage(),equalTo("Failure"));
        }
    }

    // channel() must transparently replace a closed default channel with an open one.
    @Test
    public void testDefaultChannelIsAlwaysOpen() throws Exception {
        new MockUp<ConnectionFactory>() {
            @Mock public void setHost(final String host) { }
            @Mock public void setPort(final int port) { }
            @Mock public void setVirtualHost(final String virtualHost) { }
            @Mock public void setThreadFactory(final ThreadFactory threadFactory) { }
            @Mock public Connection newConnection() throws IOException, TimeoutException {
                return ConnectionManagerTest.this.connection;
            }
        };
        new Expectations() {{
            ConnectionManagerTest.this.connection.createChannel();this.result=ConnectionManagerTest.this.channel;
            ConnectionManagerTest.this.connection.isOpen();this.result=true;
            ConnectionManagerTest.this.channel.isOpen();returns(false,true);
        }};
        this.sut.connect();
        assertThat(this.sut.channel().isOpen(),equalTo(true));
    }

    // A channel-creation failure while refreshing the default channel must surface
    // as a ControllerException with the connection id and broker coordinates.
    @Test
    public void testRetrievingTheDefaultChannelMayBreak() throws Exception {
        new MockUp<ConnectionFactory>() {
            @Mock public void setHost(final String host) { }
            @Mock public void setPort(final int port) { }
            @Mock public void setVirtualHost(final String virtualHost) { }
            @Mock public void setThreadFactory(final ThreadFactory threadFactory) { }
            @Mock public Connection newConnection() throws IOException, TimeoutException {
                return ConnectionManagerTest.this.connection;
            }
        };
        new Expectations() {{
            ConnectionManagerTest.this.connection.createChannel();returns(ConnectionManagerTest.this.channel);this.result=new IOException("Failure");
            ConnectionManagerTest.this.connection.isOpen();this.result=true;
            ConnectionManagerTest.this.channel.isOpen();returns(false,true);
        }};
        this.sut.connect();
        try {
            this.sut.channel();
            fail("Should have failed to create a channel");
        } catch(final ControllerException e) {
            assertThat(e.getMessage(),equalTo("Could not create channel using connection "+String.format("%08X",this.connection.hashCode())+" to broker at localhost:5726 using virtual host /"));
            assertThat(e.getCause(),instanceOf(IOException.class));
            assertThat(e.getCause().getMessage(),equalTo("Failure"));
            assertThat(e.getBrokerHost(),equalTo("localhost"));
            assertThat(e.getBrokerPort(),equalTo(5726));
            assertThat(e.getVirtualHost(),equalTo("/"));
        }
    }

    // connect() must reject a null channel from Connection.createChannel() with a
    // ControllerException caused by a NullPointerException.
    @Test
    public void testChannelCreationRequiresAChannelFromTheConnection() throws Exception {
        new MockUp<ConnectionFactory>() {
            @Mock public void setHost(final String host) { }
            @Mock public void setPort(final int port) { }
            @Mock public void setVirtualHost(final String virtualHost) { }
            @Mock public void setThreadFactory(final ThreadFactory threadFactory) { }
            @Mock public Connection newConnection() throws IOException, TimeoutException {
                return ConnectionManagerTest.this.connection;
            }
        };
        new Expectations() {{
            ConnectionManagerTest.this.connection.createChannel();this.result=null;
            ConnectionManagerTest.this.connection.isOpen();this.result=true;
        }};
        try {
            this.sut.connect();
            fail("Should have failed to connect");
        } catch(final ControllerException e) {
            assertThat(e.getMessage(),equalTo("Could not create channel using connection "+String.format("%08X",this.connection.hashCode())+" to broker at localhost:5726 using virtual host /"));
            assertThat(e.getCause(),instanceOf(NullPointerException.class));
            assertThat(e.getCause().getMessage(),equalTo("No channel available"));
            assertThat(e.getBrokerHost(),equalTo("localhost"));
            assertThat(e.getBrokerPort(),equalTo(5726));
            assertThat(e.getVirtualHost(),equalTo("/"));
        }
    }

    // discardChannel() must close the thread-local channel and a subsequent
    // currentChannel() must hand out a freshly created one.
    @Test
    public void testThreadLocalChannelLifecycle(@Mocked final Channel currentChannel) throws Exception {
        new MockUp<ConnectionFactory>() {
            @Mock public void setHost(final String host) { }
            @Mock public void setPort(final int port) { }
            @Mock public void setVirtualHost(final String virtualHost) { }
            @Mock public void setThreadFactory(final ThreadFactory threadFactory) { }
            @Mock public Connection newConnection() throws IOException, TimeoutException {
                return ConnectionManagerTest.this.connection;
            }
        };
        new Expectations() {{
            ConnectionManagerTest.this.connection.createChannel();returns(ConnectionManagerTest.this.channel,currentChannel,ConnectionManagerTest.this.channel);
            ConnectionManagerTest.this.connection.isOpen();this.result=true;
            currentChannel.isOpen();this.times=1;this.result=true;
            currentChannel.close();this.times=1;
        }};
        this.sut.connect();
        final Channel aChannel=this.sut.currentChannel();
        assertThat(aChannel,sameInstance(currentChannel));
        this.sut.discardChannel();
        assertThat(this.sut.currentChannel(),sameInstance(this.channel));
    }

    // Hammers currentChannel()/discardChannel() from 100 worker threads; the
    // @Mock(invocations=250) / times=251 constraints pin the exact number of
    // channel creations and closes, proving the thread-local bookkeeping is safe.
    @Test
    public void testThreadLocalChannelLifecycleIsThreadSafe() throws Exception {
        new MockUp<ConnectionFactory>() {
            @Mock public void setHost(final String host) { }
            @Mock public void setPort(final int port) { }
            @Mock public void setVirtualHost(final String virtualHost) { }
            @Mock public void setThreadFactory(final ThreadFactory threadFactory) { }
            @Mock public Connection newConnection() throws IOException, TimeoutException {
                return ConnectionManagerTest.this.connection;
            }
        };
        final Channel currentChannel=new MockUp<Channel>() {
            @Mock(invocations=250) boolean isOpen() {
                return true;
            }
            @Mock(invocations=250) void close() {
                try {
                    // Widens the race window between close and re-create.
                    TimeUnit.MICROSECONDS.sleep(2500);
                } catch (final InterruptedException e) {
                }
            }
        }.getMockInstance();
        new Expectations() {{
            ConnectionManagerTest.this.connection.createChannel();returns(ConnectionManagerTest.this.channel,currentChannel);this.times=251;
            ConnectionManagerTest.this.connection.isOpen();this.result=true;
        }};
        this.sut.connect();
        final ExecutorService executor = Executors.newFixedThreadPool(100);
        for(int i=0;i<500;i++) {
            final int times=i;
            executor.execute(new Runnable(){
                @Override
                public void run() {
                    try {
                        // Every other task touches the channel before discarding it.
                        if(times%2==0) {
                            ConnectionManagerTest.this.sut.currentChannel();
                        }
                        ConnectionManagerTest.this.sut.discardChannel();
                    } catch (final ControllerException e) {
                        e.printStackTrace();
                    }
                }});
        }
        executor.shutdown();
        while(!executor.isTerminated()) {
            executor.awaitTermination(5,TimeUnit.SECONDS);
        }
        this.sut.disconnect();
    }
    // Repeated currentChannel() calls on the same thread must return the same
    // channel instance (exactly 2 channel creations: default + thread-local).
    @Test
    public void testCurrentChannelKeepsThreadLocalChannels(@Mocked final Channel currentChannel) throws Exception {
        new MockUp<ConnectionFactory>() {
            @Mock public void setHost(final String host) { }
            @Mock public void setPort(final int port) { }
            @Mock public void setVirtualHost(final String virtualHost) { }
            @Mock public void setThreadFactory(final ThreadFactory threadFactory) { }
            @Mock public Connection newConnection() throws IOException, TimeoutException {
                return ConnectionManagerTest.this.connection;
            }
        };
        new Expectations() {{
            ConnectionManagerTest.this.connection.createChannel();returns(ConnectionManagerTest.this.channel,currentChannel);this.times=2;
            ConnectionManagerTest.this.connection.isOpen();this.result=true;
        }};
        this.sut.connect();
        assertThat(this.sut.currentChannel(),sameInstance(currentChannel));
        assertThat(this.sut.currentChannel(),sameInstance(currentChannel));
    }

    // currentChannel() before connect() must fail fast with IllegalStateException.
    @Test
    public void testCurrentChannelRequiresBeingConnected() throws Exception {
        try {
            this.sut.currentChannel();
            fail("Should not return a thread-local channel if not connected");
        } catch (final IllegalStateException e) {
            assertThat(e.getMessage(),equalTo("No connection available"));
        }
    }

    // discardChannel() with no thread-local channel must be a no-op.
    @Test
    public void testDiscardChannelDoesNothingIfNoCurrentChannelIsAvailable() throws Exception {
        this.sut.discardChannel();
    }

    // An already-closed thread-local channel must not be closed again on discard
    // (close() is pinned to 0 invocations).
    @Test
    public void testCloseQuietlyObservesChannelStatus(@Mocked final Channel currentChannel) throws Exception {
        new MockUp<ConnectionFactory>() {
            @Mock public void setHost(final String host) { }
            @Mock public void setPort(final int port) { }
            @Mock public void setVirtualHost(final String virtualHost) { }
            @Mock public void setThreadFactory(final ThreadFactory threadFactory) { }
            @Mock public Connection newConnection() throws IOException, TimeoutException {
                return ConnectionManagerTest.this.connection;
            }
        };
        new Expectations() {{
            ConnectionManagerTest.this.connection.createChannel();returns(ConnectionManagerTest.this.channel,currentChannel);
            ConnectionManagerTest.this.connection.isOpen();this.result=true;
            currentChannel.isOpen();this.times=1;this.result=false;
            currentChannel.close();this.times=0;
        }};
        this.sut.connect();
        final Channel aChannel=this.sut.currentChannel();
        assertThat(aChannel,sameInstance(currentChannel));
        this.sut.discardChannel();
    }

    // An IOException raised while closing the thread-local channel must be swallowed.
    @Test
    public void testCloseQuietlySwallowsRegularExceptions(@Mocked final Channel currentChannel) throws Exception {
        new MockUp<ConnectionFactory>() {
            @Mock public void setHost(final String host) { }
            @Mock public void setPort(final int port) { }
            @Mock public void setVirtualHost(final String virtualHost) { }
            @Mock public void setThreadFactory(final ThreadFactory threadFactory) { }
            @Mock public Connection newConnection() throws IOException, TimeoutException {
                return ConnectionManagerTest.this.connection;
            }
        };
        new Expectations() {{
            ConnectionManagerTest.this.connection.createChannel();returns(ConnectionManagerTest.this.channel,currentChannel);
            ConnectionManagerTest.this.connection.isOpen();this.result=true;
            currentChannel.isOpen();this.times=1;this.result=true;
            currentChannel.close();this.times=1;this.result=new IOException("Failure");
        }};
        this.sut.connect();
        final Channel aChannel=this.sut.currentChannel();
        assertThat(aChannel,sameInstance(currentChannel));
        this.sut.discardChannel();
    }

    // An Error raised while closing the thread-local channel must NOT be swallowed.
    @Test
    public void testCloseQuietlyFailsOnRuntimeExceptions(@Mocked final Channel currentChannel) throws Exception {
        new MockUp<ConnectionFactory>() {
            @Mock public void setHost(final String host) { }
            @Mock public void setPort(final int port) { }
            @Mock public void setVirtualHost(final String virtualHost) { }
            @Mock public void setThreadFactory(final ThreadFactory threadFactory) { }
            @Mock public Connection newConnection() throws IOException, TimeoutException {
                return ConnectionManagerTest.this.connection;
            }
        };
        new Expectations() {{
            ConnectionManagerTest.this.connection.createChannel();returns(ConnectionManagerTest.this.channel,currentChannel);
            ConnectionManagerTest.this.connection.isOpen();this.result=true;
            currentChannel.isOpen();this.times=1;this.result=true;
            currentChannel.close();this.times=1;this.result=new Error("Failure");
        }};
        this.sut.connect();
        final Channel aChannel=this.sut.currentChannel();
        assertThat(aChannel,sameInstance(currentChannel));
        try {
            this.sut.discardChannel();
            fail("Should fail on runtime exception");
        } catch(final AssertionError e) {
            // JMockit may surface a mock-constraint violation here instead; tolerated.
        } catch (final Error e) {
            assertThat(e.getMessage(),equalTo("Failure"));
        }
    }

    // An IOException from Connection.close() during disconnect() must be swallowed.
    @Test
    public void testCloseConnectionQuietlySwallowsRegularExceptions() throws Exception {
        new MockUp<ConnectionFactory>() {
            @Mock public void setHost(final String host) { }
            @Mock public void setPort(final int port) { }
            @Mock public void setVirtualHost(final String virtualHost) { }
            @Mock public void setThreadFactory(final ThreadFactory threadFactory) { }
            @Mock public Connection newConnection() throws IOException, TimeoutException {
                return ConnectionManagerTest.this.connection;
            }
        };
        new Expectations() {{
            ConnectionManagerTest.this.connection.createChannel();returns(ConnectionManagerTest.this.channel);
            ConnectionManagerTest.this.connection.isOpen();this.result=true;
            ConnectionManagerTest.this.channel.isOpen();this.times=1;this.result=true;
            ConnectionManagerTest.this.channel.close();this.times=1;
            ConnectionManagerTest.this.connection.close();this.result=new IOException("Failure");
        }};
        this.sut.connect();
        this.sut.disconnect();
    }

    // isConnected() is false before any connect().
    @Test
    public void testIsConnectedIsFalseIfDisconnected() {
        assertThat(this.sut.isConnected(),equalTo(false));
    }

    // isConnected() must reflect the underlying Connection.isOpen() state.
    @Test
    public void testIsConnectedIsFalseIfConnectionIsNotOpen() throws Exception {
        new MockUp<ConnectionFactory>() {
            @Mock public void setHost(final String host) { }
            @Mock public void setPort(final int port) { }
            @Mock public void setVirtualHost(final String virtualHost) { }
            @Mock public void setThreadFactory(final ThreadFactory threadFactory) { }
            @Mock public Connection newConnection() throws IOException, TimeoutException {
                return ConnectionManagerTest.this.connection;
            }
        };
        new Expectations() {{
            ConnectionManagerTest.this.connection.createChannel();returns(ConnectionManagerTest.this.channel);
            ConnectionManagerTest.this.connection.isOpen();returns(true,false);
        }};
        this.sut.connect();
        assertThat(this.sut.isConnected(),equalTo(false));
    }

    // A RuntimeException from Connection.isOpen() must propagate out of isConnected()
    // (and, per the test name, must not leave the manager locked).
    @Test
    public void testIsConnectedUnlocksOnConnectionFailure() throws Exception {
        new MockUp<ConnectionFactory>() {
            @Mock public void setHost(final String host) { }
            @Mock public void setPort(final int port) { }
            @Mock public void setVirtualHost(final String virtualHost) { }
            @Mock public void setThreadFactory(final ThreadFactory threadFactory) { }
            @Mock public Connection newConnection() throws IOException, TimeoutException {
                return ConnectionManagerTest.this.connection;
            }
        };
        new Expectations() {{
            ConnectionManagerTest.this.connection.createChannel();returns(ConnectionManagerTest.this.channel);
            ConnectionManagerTest.this.connection.isOpen();returns(true);this.result=new RuntimeException("Failure");
        }};
        this.sut.connect();
        try {
            this.sut.isConnected();
        } catch (final RuntimeException e) {
            assertThat(e.getMessage(),equalTo("Failure"));
        }
    }

    // toString() must be overridden, not the Object default.
    @Test
    public void testHasCustomToString() {
        assertThat(this.sut.toString(),not(equalTo(Utils.defaultToString(this.sut))));
    }

}
/* * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER. * * Copyright (c) 2008-2009, The KiWi Project (http://www.kiwi-project.eu) * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * - Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * - Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * - Neither the name of the KiWi Project nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
 *
 * Contributor(s):
 *
 *
 */
package kiwi.service.interaction;

import static kiwi.model.kbase.KiWiQueryLanguage.SPARQL;

import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;

import javax.ejb.Stateless;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.PersistenceException;
import javax.persistence.Query;

import kiwi.api.entity.KiWiEntityManager;
import kiwi.api.interaction.InteractionServiceLocal;
import kiwi.api.interaction.InteractionServiceRemote;
import kiwi.api.rating.RatingDataFacade;
import kiwi.api.rating.RatingFacade;
import kiwi.api.user.UserService;
import kiwi.model.Constants;
import kiwi.model.activity.CommentActivity;
import kiwi.model.content.ContentItem;
import kiwi.model.interaction.UserInteraction;
import kiwi.model.user.User;

import org.jboss.seam.ScopeType;
import org.jboss.seam.annotations.AutoCreate;
import org.jboss.seam.annotations.In;
import org.jboss.seam.annotations.Logger;
import org.jboss.seam.annotations.Name;
import org.jboss.seam.annotations.Scope;
import org.jboss.seam.log.Log;

/**
 * Computes and persists pairwise user interactivity scores derived from four
 * interaction signals between two users: comments, tags, edits and ratings.
 *
 * @author Fred Durao
 *
 */
@Stateless
@Scope(ScopeType.STATELESS)
@AutoCreate
@Name("interactionService")
public class InteractionServiceImpl implements InteractionServiceLocal, InteractionServiceRemote {

    @Logger
    private Log log;

    // Plain JPA entity manager for JPQL queries.
    @In
    private EntityManager entityManager;

    // KiWi entity manager, used for SPARQL queries, facades and persisting results.
    @In(create = true)
    private KiWiEntityManager kiwiEntityManager;

    @In
    private UserService userService;

    /*
     * (non-Javadoc)
     *
     * @see kiwi.api.interaction.InteractionService#getUserInteractivity(kiwi.model.user.User,
     * kiwi.model.user.User)
     */
    public float computeUserInteractivity(User user1, User user2) {
        float userInteractivityLevel = 0f;
        // NOTE(review): user1!=user2 is a reference comparison; relies on both refs coming
        // from the same persistence context — consider equals() instead. TODO confirm.
        if (user1 != null && user2 != null && user1!=user2) {
            // Average of the four interaction counts (comments, tags, edits, ratings).
            userInteractivityLevel = getUserCommentInteractivity(user1, user2).size()+ getUserTagInteractivity(user1,user2).size()+ getUserEditingInteractivity(user1,user2).size() + getUserRatingInteractivity(user1,user2).size();
            userInteractivityLevel = userInteractivityLevel/4;
        }
        return userInteractivityLevel;
    }

    /**
     * Deletes all persisted {@link UserInteraction} rows.
     *
     * @return true when no interactions remain after deletion.
     */
    public boolean deleteUserInteractivity(){
        List<UserInteraction> uss = this.listUserInteractions();
        for (UserInteraction us : uss) {
            entityManager.remove(us);
            // entityManager.flush();
        }
        return listUserInteractions().isEmpty();
    }

    /**
     * Recomputes interactivity for every ordered user pair and persists non-zero scores.
     * Runs only after the previous results were successfully wiped.
     */
    public void computeUserInteractivity(){
        if (deleteUserInteractivity()) {
            // Tracks already-processed (user1,user2) pairs by concatenated logins.
            // NOTE(review): login concatenation can collide (e.g. "ab"+"c" vs "a"+"bc") — TODO confirm.
            List<String> comparisions = new ArrayList<String>();
            List<User> newWsers = new ArrayList<User>();
            List<User> users = userService.getAllCreatedUsers();
            // NOTE(review): this assignment aliases 'users'; the ArrayList above is discarded.
            newWsers = users;
            for (User user : newWsers) {
                for (User user2 : newWsers) {
                    if (user!=user2 && !comparisions.contains(user.getLogin()+user2.getLogin())) {
                        //System.out.println("computeUserInteractivity...USER 1 "+user.getFirstName()+" USER 2 "+ user2.getFirstName());
                        float userInterctivityValue = computeUserInteractivity(user,user2);
                        if (userInterctivityValue>0f) {
                            UserInteraction userInteraction = new UserInteraction(user,user2,userInterctivityValue);
                            kiwiEntityManager.persist(userInteraction);
                            // kiwiEntityManager.flush();
                            comparisions.add(user.getLogin()+user2.getLogin());
                        }
                    }
                }
            }
        }
    }

    /* (non-Javadoc)
     * @see kiwi.api.interaction.InteractionService#getUserInteractionByUser(kiwi.model.user.User)
     */
    @SuppressWarnings("unchecked")
    public List<UserInteraction> getUserInteractionByUser(User user){
        List<UserInteraction> result = null;
        String s = "select s from kiwi.model.interaction.UserInteraction s where s.user = :user";
        javax.persistence.Query q = entityManager.createQuery(s);
        q.setParameter("user", user);
        try {
            result = (List<UserInteraction>)q.getResultList();
        } catch (PersistenceException ex) {
            // NOTE(review): printStackTrace + warn-and-continue swallows the failure; callers
            // cannot distinguish "no interactions" from "query failed".
            ex.printStackTrace();
            log.warn("error while listing user skills: query failed");
        }
        if (result == null) {
            return Collections.EMPTY_LIST;
        } else {
            return result;
        }
    }

    /**
     * Looks up the persisted interaction record for an ordered user pair.
     *
     * @param user first user of the pair
     * @param user2 second user of the pair
     * @return the matching record, or null when none exists or the query fails
     */
    public UserInteraction getUserInteractionByUsers(User user, User user2){
        UserInteraction result = null;
        String s = "select s from kiwi.model.interaction.UserInteraction s where s.user = :user and s.user2 =:user2";
        javax.persistence.Query q = entityManager.createQuery(s);
        q.setParameter("user", user);
        q.setParameter("user2", user2);
        try {
            result = (UserInteraction)q.getSingleResult();
        } catch (NoResultException ex) {
            // Absence of a row is an expected outcome here.
            log.warn("error while listing user interactions by two users");
        } catch (PersistenceException ex) {
            ex.printStackTrace();
            log.warn("error while listing user skills: query failed");
        }
        return result;
    }

    /* (non-Javadoc)
     * @see kiwi.api.interaction.InteractionService#listUserInteractions()
     */
    @SuppressWarnings("unchecked")
    public List<UserInteraction> listUserInteractions(){
        List<UserInteraction> result = null;
        String s = "from kiwi.model.interaction.UserInteraction u";
        javax.persistence.Query q = entityManager.createQuery(s);
        try {
            result = (List<UserInteraction>)q.getResultList();
        } catch (PersistenceException ex) {
            ex.printStackTrace();
            log.warn("error while listing user skills: query failed");
        }
        if(result == null ) {
            return Collections.EMPTY_LIST;
        } else {
            return result;
        }
    }

    /**
     * Content items authored by user1 that user2 has tagged.
     *
     * @param user1 author of the tagged items
     * @param user2 author of the tagging resources
     * @return matching content items (empty on failure)
     */
    @SuppressWarnings({ "unchecked", "unchecked", "unchecked", "unused" })
    private List<ContentItem> getUserTagInteractivity(User user1, User user2) {
        List<ContentItem> result = new LinkedList<ContentItem>();
        String s = "select distinct ci " + "from kiwi.model.content.ContentItem ci left join fetch ci.resource, kiwi.model.tagging.Tag tag" + " where tag.taggedResource.id = ci.id and ci.author=:user1 and tag.taggingResource.author =:user2";
        javax.persistence.Query q = entityManager.createQuery(s);
        q.setParameter("user1", user1);
        q.setParameter("user2", user2);
        q.setHint("org.hibernate.cacheable", true);
        try {
            result = (List<ContentItem>) q.getResultList();
        } catch (PersistenceException ex) {
            ex.printStackTrace();
            log .warn("error while listing user tag interactivity: query failed");
        }
        if (result == null) {
            return Collections.EMPTY_LIST;
        } else {
            return result;
        }
    }

    /**
     * @param user1
     * @param user2
     * @return content items authored by user1 rated by user2
     */
    @SuppressWarnings({ "unused", "unchecked", "unchecked" })
    private List<ContentItem> getUserRatingInteractivity(User user1, User user2) {
        List<ContentItem> result = new LinkedList<ContentItem>();
        // SPARQL: find RatingData authored by user2 whose rated item was authored by user1.
        String s = "SELECT ?ci WHERE { " +
                " ?s <" + Constants.NS_RDF + "type> <"+ Constants.NS_KIWI_CORE + "RatingData> . " +
                " ?s <" + Constants.NS_KIWI_CORE + "author> <"+ user2.getResource() + "> . " +
                " ?s <" + Constants.NS_KIWI_CORE + "hasRatingFacade> ?ci . "+
                " ?ci <" + Constants.NS_KIWI_CORE + "author> <"+ user1.getResource() +"> . }";
        javax.persistence.Query q = kiwiEntityManager.createQuery(s, SPARQL, ContentItem.class);
        q.setHint("org.hibernate.cacheable", true);
        try {
            result = (List<ContentItem>) q.getResultList();
        } catch (PersistenceException ex) {
            ex.printStackTrace();
            log .warn("error while listing user Rating Interactivity: query failed");
        }
        if (result == null) {
            return Collections.EMPTY_LIST;
        } else {
            return result;
        }
    }

    /**
     * Whether the given user has rated the given content item.
     * NOTE(review): appears unused within this class (kept for API/backward use) — confirm.
     *
     * @param item content item to inspect
     * @param user candidate rater
     * @return true when a rating by the user exists on the item
     */
    private boolean isRatedByUser(ContentItem item, User user) {
        boolean isRated = false;
        final RatingFacade ratingF = kiwiEntityManager.createFacade( item, RatingFacade.class);
        for(RatingDataFacade ratingData:ratingF.getRatingDataFacades()){
            if(user.equals(ratingData.getAuthor())){
                isRated = true;
            }
        }
        return isRated;
    }

    /**
     * Comment activities where user2 commented on content authored by user1.
     *
     * @param user1 author of the commented content
     * @param user2 author of the comments
     * @return matching comment activities (empty on failure)
     */
    @SuppressWarnings({ "unused", "unchecked", "unchecked" })
    private List<CommentActivity> getUserCommentInteractivity(User user1, User user2) {
        List<CommentActivity> result = new LinkedList<CommentActivity>();
        String s = "select a " + "from CommentActivity a inner join fetch a.comment left outer join a.contentItem as cia " + "where cia.author = :user1 and a.comment.author = :user2";
        Query q = entityManager .createQuery(s);
        q.setParameter("user1", user1);
        q.setParameter("user2", user2);
        q.setHint("org.hibernate.cacheable", true);
        try {
            result = (List<CommentActivity>) q.getResultList();
        } catch (PersistenceException ex) {
            ex.printStackTrace();
            log.warn("error while listing user: query failed");
        }
        if (result == null) {
            return Collections.EMPTY_LIST;
        } else {
            return result;
        }
    }

    /**
     * Content items authored by user1 whose text content was edited by user2.
     *
     * @param user1 author of the content items
     * @param user2 author of the text content revisions
     * @return matching content items (empty on failure)
     */
    @SuppressWarnings({ "unused", "unchecked", "unchecked" })
    private List<ContentItem> getUserEditingInteractivity(User user1, User user2) {
        List<ContentItem> result = new LinkedList<ContentItem>();
        String s = " select ci from ContentItem ci join fetch ci.resource left outer join fetch ci.textContent tc "+ " where ci.author.login =:user1 and tc.contentItem.author.login =:user2";
        javax.persistence.Query q = entityManager.createQuery(s);
        q.setParameter("user1", user1.getLogin());
        q.setParameter("user2", user2.getLogin());
        q.setHint("org.hibernate.cacheable", true);
        try {
            result = (List<ContentItem>) q.getResultList();
        } catch (PersistenceException ex) {
            ex.printStackTrace();
            log.warn("error while listing user: query failed");
        }
        if (result == null) {
            return Collections.EMPTY_LIST;
        } else {
            return result;
        }
    }

}
package experiment.logAnalyzer;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;

import logging.ObjectLogger;
import logging.logObjects.AvailabilityLog;
import logging.logObjects.AvailabilityLog.State;
import logging.logObjects.BandwidthLog;
import logging.logObjects.ChunkGenerationLog;
import logging.logObjects.ChunkPlayLog;
import logging.logObjects.ChunkReceiveLog;
import logging.logObjects.ChunkSendLog;
import logging.logObjects.ChurnLog;
import logging.logObjects.DegreeLog;
import logging.logObjects.OptionLog;
import logging.logObjects.SendLog;
import modules.player.VideoStream;
import utils.Common;
import utils.Statistics;
import utils.Utils;

/**
 * stores logs of experiment runs and analyzes them
 *
 * @author Alexander Libov
 *
 */
public class LogAnalyzer implements Serializable {
    private static final long serialVersionUID = -3227406414017675340L;
    // Directory all result files are written to.
    public final String dirName;
    public int descriptors = 1;
    // Per-node lifetime, traffic and playback statistics.
    private final Map<String /* node */, NodeInfo> nodeInfo = new TreeMap<String, NodeInfo>();
    // Per-protocol traffic and degree statistics.
    private final Map<String /* protocol */, ProtocolInfo> protocolInfo = new TreeMap<String, ProtocolInfo>();
    // NOTE(review): transient final fields initialized inline are NOT
    // re-initialized on deserialization and will be null afterwards - verify
    // they are only accessed while analyzing a live (non-deserialized) run.
    private transient final Map<Long/* chunk */, Long/* time */> chunkGenerationTime = new TreeMap<Long, Long>();
    // chunk -> descriptor -> source node -> set of destination nodes.
    private transient final Map<Long/* chunk */, Map<Integer/* descriptor */, Map<String/* source node */, Set<String>/* dest nodes */>>> chunkReceiveMap =
            new TreeMap<Long, Map<Integer, Map<String, Set<String>>>>();
    // Total upload bandwidth per ordinary node (sorted descending when written).
    private final List<Long> totalUploadBandwidth = new LinkedList<Long>();
    // Rank-ordered bandwidth sums and contribution counts, used when averaging
    // several analyzers (see the Collection-based constructor).
    private final ArrayList<Long> avgUploadBandwidth = new ArrayList<Long>();
    private final ArrayList<Integer> avgUploadBandwidthCount = new ArrayList<Integer>();
    private final List<Long> totalServerUploadBandwidth = new LinkedList<Long>();
    // Average latency and chunk count per hop count (order).
    private final Map<Integer/* order */, OrderInfo> orderInfo = new TreeMap<Integer, OrderInfo>();
    private String serverId = null;
    public int maxOrder = 0;
    // Column separator and extension for the written result files.
    private transient String separator = "\t";
    private transient String fileExtension;
    private long averageLag;
    private long averageStartupDelay;
    private double averageHopcount;
    private final Statistics averageNodeLatency = new Statistics();
    private final Statistics averageNodeLatencySD = new Statistics();
    private long averageLastDelay;
    private double averageQuality;
    private final Statistics continuityIndex = new Statistics();
    private final Statistics simpleContinuityIndex = new Statistics();
    private int maxGroup = 0;
    private double perfectCIpercentage;
    private final Statistics ZeroCI = new Statistics();
    private final Statistics ZeroCITime = new Statistics();
    private final Statistics continuityIndexSD = new Statistics();
    // Per-movie-second queue/utilization/state counters.
    private final Map<Long, SecondInfo> secondInfo = new TreeMap<Long, SecondInfo>();
    // Per-period option-choice counters (pull-option logs).
    private final Map<Integer, PeriodInfo> periodInfo = new TreeMap<Integer, PeriodInfo>();
    private int maxOption = 0;
    // Per-algorithm, per-uptime-second degree statistics.
    private final Map<Integer/* alg */, Map<Long, UptimeInfo>> uptimeInfo = new TreeMap<Integer, Map<Long, UptimeInfo>>();
    private final Map<Long, Long> chunkIdPlayed = new TreeMap<Long, Long>();
    // Number of analyzers merged into this one (1 for a single run).
    private int analyzerCount = 1;
    // When set, only log records of this algorithm group are processed.
    private transient Integer algFilter = null;
    // TODO each chunk creates a tree. create a visualization of that tree.
/**
     * Creates an analyzer that writes its result files into the given directory.
     *
     * @param dirName output directory for result files
     */
    public LogAnalyzer(final String dirName) {
        this.dirName = dirName;
    }

    /**
     * Creates an analyzer restricted to log entries of one algorithm group.
     *
     * @param dirName output directory for result files
     * @param algFilter only log entries with this group id are processed
     */
    public LogAnalyzer(final String dirName, final int algFilter) {
        this(dirName);
        this.algFilter = algFilter;
    }

    /**
     * Builds an "average" analyzer by merging the serialized analyzers found
     * at the given paths: values are accumulated per key and divided by the
     * number of contributing analyzers afterwards.
     *
     * @param analyzers paths of serialized LogAnalyzer instances
     * @param group optional group suffix appended to each path; may be null
     */
    public LogAnalyzer(final Collection<String> analyzers, final Integer group) {
        String addition = "";
        if (group != null) {
            addition += group;
        }
        dirName = Common.currentConfiguration.name + File.separator + Common.currentConfiguration.toString()
                + "averageOf" + analyzers.size();
        final File dir = new File(dirName);
        dir.mkdir();
        // How many analyzers contributed to each order / protocol, for averaging.
        final Map<Integer/* order */, Integer/* count */> orderCount = new HashMap<Integer, Integer>();
        final Map<String/* protocol */, Integer/* count */> protocolCount = new HashMap<String, Integer>();
        long averageLagSum = 0;
        long averageStartupDelaySum = 0;
        double averageHopcountSum = 0;
        long averageLastDelaySum = 0;
        double averageQualitySum = 0;
        for (final String analyzerPath : analyzers) {
            LogAnalyzer analyzer = null;
            analyzer = LogAnalyzer.retrieve(analyzerPath + addition);
            maxOrder = Math.max(maxOrder, analyzer.maxOrder);
            maxOption = Math.max(maxOption, analyzer.maxOption);
            // accumulate per-order latency and chunk counts
            for (final Integer i : analyzer.orderInfo.keySet()) {
                Utils.checkExistence(orderInfo, i, new OrderInfo());
                Utils.checkExistence(orderCount, i, 0);
                orderInfo.get(i).averageLatency += analyzer.orderInfo.get(i).averageLatency;
                orderInfo.get(i).count += analyzer.orderInfo.get(i).count;
                orderCount.put(i, orderCount.get(i) + 1);
            }
            // accumulate per-protocol traffic and degree statistics
            for (final String i : analyzer.protocolInfo.keySet()) {
                Utils.checkExistence(protocolInfo, i, new ProtocolInfo());
                Utils.checkExistence(protocolCount, i, 0);
                final ProtocolInfo p = protocolInfo.get(i);
                p.bytesSent += analyzer.protocolInfo.get(i).bytesSent;
                p.messagesSent += analyzer.protocolInfo.get(i).messagesSent;
                p.overheadBytesSent += analyzer.protocolInfo.get(i).overheadBytesSent;
                p.overheadMessagesSent += analyzer.protocolInfo.get(i).overheadMessagesSent;
                p.duplicateChunkBytesSent += analyzer.protocolInfo.get(i).duplicateChunkBytesSent;
                p.duplicateChunkMessagesSent += analyzer.protocolInfo.get(i).duplicateChunkMessagesSent;
                p.averageAverageDegree += analyzer.protocolInfo.get(i).averageAverageDegree;
                p.averageNodeDegreeVariance += analyzer.protocolInfo.get(i).averageNodeDegreeVariance;
                p.degreeVariance += analyzer.protocolInfo.get(i).degreeVariance;
                p.serverAverageDegree += analyzer.protocolInfo.get(i).serverAverageDegree;
                p.serverDegreeVariance += analyzer.protocolInfo.get(i).serverDegreeVariance;
                protocolCount.put(i, protocolCount.get(i) + 1);
            }
            // accumulate per-second queue/utilization/state counters
            for (final Long i : analyzer.secondInfo.keySet()) {
                Utils.checkExistence(secondInfo, i, new SecondInfo());
                final SecondInfo si = secondInfo.get(i);
                si.allBitsInQueue += analyzer.secondInfo.get(i).allBitsInQueue;
                si.allUploadUtilization += analyzer.secondInfo.get(i).allUploadUtilization;
                si.serverBitsInQueue += analyzer.secondInfo.get(i).serverBitsInQueue;
                si.serverUploadUtilization += analyzer.secondInfo.get(i).serverUploadUtilization;
                si.availablenodes += analyzer.secondInfo.get(i).availablenodes;
                for (final State s : AvailabilityLog.State.values()) {
                    Utils.checkExistence(si.nodesInState, s, 0);
                    if (analyzer.secondInfo.get(i).nodesInState.containsKey(s)) {
                        si.nodesInState.put(s, si.nodesInState.get(s)
                                + analyzer.secondInfo.get(i).nodesInState.get(s));
                    }
                }
                si.latencyAvg += analyzer.secondInfo.get(i).latencyAvg;
                // si.chunksPlayed += analyzer.secondInfo.get(i).chunksPlayed;
                for (final int opt : analyzer.secondInfo.get(i).optionChosen.keySet()) {
                    Utils.checkExistence(si.optionChosen, opt, 0);
                    si.optionChosen.put(opt, si.optionChosen.get(opt)
                            + analyzer.secondInfo.get(i).optionChosen.get(opt));
                }
                si.optionChange += analyzer.secondInfo.get(i).optionChange;
            }
            // accumulate per-period option choices
            for (final Integer i : analyzer.periodInfo.keySet()) {
                Utils.checkExistence(periodInfo, i, new PeriodInfo());
                final PeriodInfo pi = periodInfo.get(i);
                for (final int opt : analyzer.periodInfo.get(i).optionChosen.keySet()) {
                    Utils.checkExistence(pi.optionChosen, opt, 0);
                    pi.optionChosen.put(opt, pi.optionChosen.get(opt)
                            + analyzer.periodInfo.get(i).optionChosen.get(opt));
                }
                pi.optionChange += analyzer.periodInfo.get(i).optionChange;
                pi.nodesStartingPeriod += analyzer.periodInfo.get(i).nodesStartingPeriod;
            }
            // merge per-uptime degree statistics, weighted by sample size
            for (final Integer alg : analyzer.uptimeInfo.keySet()) {
                Utils.checkExistence(uptimeInfo, alg, new TreeMap<Long, UptimeInfo>());
                for (final Long i : analyzer.uptimeInfo.get(alg).keySet()) {
                    Utils.checkExistence(uptimeInfo.get(alg), i, new UptimeInfo());
                    final UptimeInfo ui = uptimeInfo.get(alg).get(i);
                    final UptimeInfo otherUI = analyzer.uptimeInfo.get(alg).get(i);
                    for (final String protocol : otherUI.protocolDegree.keySet()) {
                        Utils.checkExistence(ui.protocolDegree, protocol, new Statistics());
                        ui.protocolDegree.get(protocol).addWeightedDatum(
                                otherUI.protocolDegree.get(protocol).dataSize(),
                                otherUI.protocolDegree.get(protocol).getMean());
                    }
                }
            }
            averageLagSum += analyzer.averageLag;
            averageStartupDelaySum += analyzer.averageStartupDelay;
            averageHopcountSum += analyzer.averageHopcount;
            averageNodeLatency.addDatum(analyzer.averageNodeLatency.getMean());
            averageNodeLatencySD.addDatum(analyzer.averageNodeLatency.getStdDev());
            averageQualitySum += analyzer.averageQuality;
            perfectCIpercentage += analyzer.perfectCIpercentage;
            ZeroCI.addWeightedDatum(analyzer.ZeroCI.dataSize(), analyzer.ZeroCI.getMean());
            ZeroCITime.addDatum(analyzer.ZeroCITime.getMean());
            continuityIndex.addDatum(analyzer.continuityIndex.getMean());
            simpleContinuityIndex.addDatum(analyzer.simpleContinuityIndex.getMean());
            continuityIndexSD.addDatum(analyzer.continuityIndex.getStdDev());
            averageLastDelaySum += analyzer.averageLastDelay;
            for (final Entry<Long, Long> entry : analyzer.chunkIdPlayed.entrySet()) {
                Utils.checkExistence(chunkIdPlayed, entry.getKey(), 0L);
                chunkIdPlayed.put(entry.getKey(), chunkIdPlayed.get(entry.getKey()) + entry.getValue());
            }
            // split upload bandwidth into server and client lists
            for (final Entry<String, NodeInfo> ni : analyzer.nodeInfo.entrySet()) {
                if (analyzer.serverId.equals(ni.getKey())) {
                    totalServerUploadBandwidth.add(ni.getValue().totalUsedUploadBandwidth);
                } else {
                    totalUploadBandwidth.add(ni.getValue().totalUsedUploadBandwidth);
                    // NOTE(review): this mutates the merged analyzer's own
                    // list, which is then sorted and folded in below.
                    analyzer.avgUploadBandwidth.add(ni.getValue().totalUsedUploadBandwidth);
                }
            }
            Collections.sort(analyzer.avgUploadBandwidth, Collections.reverseOrder());
            // element-wise sum of the rank-ordered bandwidth lists
            if (avgUploadBandwidth.isEmpty()) {
                avgUploadBandwidth.addAll(analyzer.avgUploadBandwidth);
                for (int i = 0; i < analyzer.avgUploadBandwidth.size(); ++i) {
                    avgUploadBandwidthCount.add(1);
                }
            } else {
                int i = 0;
                for (; i < avgUploadBandwidth.size(); i++) {
                    if (i >= analyzer.avgUploadBandwidth.size()) {
                        break;
                    }
                    avgUploadBandwidth.set(i, avgUploadBandwidth.get(i) + analyzer.avgUploadBandwidth.get(i));
                    avgUploadBandwidthCount.set(i, avgUploadBandwidthCount.get(i) + 1);
                }
                for (; i < analyzer.avgUploadBandwidth.size(); ++i) {
                    avgUploadBandwidth.add(analyzer.avgUploadBandwidth.get(i));
                    avgUploadBandwidthCount.add(1);
                }
            }
        }
        Collections.sort(totalUploadBandwidth, Collections.reverseOrder());
        Collections.sort(totalServerUploadBandwidth, Collections.reverseOrder());
        for (int i = 0; i < avgUploadBandwidth.size(); i++) {
            avgUploadBandwidth.set(i, avgUploadBandwidth.get(i) / avgUploadBandwidthCount.get(i));
        }
        // turn the accumulated sums into averages
        perfectCIpercentage /= analyzers.size();
        averageLag = averageLagSum / analyzers.size();
        averageStartupDelay = averageStartupDelaySum / analyzers.size();
        averageHopcount = averageHopcountSum / analyzers.size();
        averageLastDelay = averageLastDelaySum / analyzers.size();
        averageQuality = averageQualitySum / analyzers.size();
        for (final Integer i : orderInfo.keySet()) {
            orderInfo.get(i).averageLatency /= orderCount.get(i);
            orderInfo.get(i).count /= orderCount.get(i);
        }
        for (final String i : protocolInfo.keySet()) {
            final int count = protocolCount.get(i);
            final ProtocolInfo pc = protocolInfo.get(i);
            pc.bytesSent /= count;
            pc.messagesSent /= count;
            pc.overheadBytesSent /= count;
            pc.overheadMessagesSent /= count;
            pc.duplicateChunkBytesSent /= count;
            pc.duplicateChunkMessagesSent /= count;
            pc.averageAverageDegree /= count;
            pc.averageNodeDegreeVariance /= count;
            pc.degreeVariance /= count;
            pc.serverAverageDegree /= count;
            pc.serverDegreeVariance /= count;
        }
        for (final Long i : secondInfo.keySet()) {
            final SecondInfo si = secondInfo.get(i);
            si.serverBitsInQueue /= analyzers.size();
            si.serverUploadUtilization /= analyzers.size();
            // si.chunksPlayed /= analyzers.size();
            si.availablenodes /= analyzers.size();
            si.latencyAvg /= analyzers.size();
            si.allBitsInQueue /= analyzers.size();
            si.allUploadUtilization /= analyzers.size();
            for (final int opt : si.optionChosen.keySet()) {
                si.optionChosen.put(opt, si.optionChosen.get(opt) / analyzers.size());
            }
            si.optionChange /= analyzers.size();
        }
        for (final Integer i : periodInfo.keySet()) {
            final PeriodInfo pi = periodInfo.get(i);
            for (final int opt : pi.optionChosen.keySet()) {
                pi.optionChosen.put(opt, pi.optionChosen.get(opt) / analyzers.size());
            }
            pi.optionChange /= analyzers.size();
            pi.nodesStartingPeriod /= analyzers.size();
        }
        for (final Entry<Long, Long> entry : chunkIdPlayed.entrySet()) {
            chunkIdPlayed.put(entry.getKey(), entry.getValue() / analyzers.size());
        }
        analyzerCount = analyzers.size();
    }

    /**
     * Dispatches a single deserialized log object to its type-specific
     * handler.
     *
     * @param m the log record
     * @param logName name of the log the record came from; used to tell apart
     *            the DegreeLog/OptionLog based logs that share a class
     */
    public void handleLogObject(final Object m, final String logName) {
        if (m instanceof AvailabilityLog) {
            handleAvailabilityLog((AvailabilityLog) m);
        } else if (m instanceof ChurnLog) {
            handleChurnLog((ChurnLog) m);
        } else if (m instanceof ChunkGenerationLog) {
            handleChunkGenerationLog((ChunkGenerationLog) m);
        } else if (m instanceof ChunkPlayLog) {
            handleChunkPlayLog((ChunkPlayLog) m);
        } else if (m instanceof ChunkReceiveLog) {
            handleChunkReceiveLog((ChunkReceiveLog) m);
        } else if (m instanceof BandwidthLog) {
            handleBandwidthLog((BandwidthLog) m);
        } else if (m instanceof SendLog) {
            handleSendLog((SendLog) m);
        } else if (logName.equals("degreeLog")) {
            handleDegreeLog((DegreeLog) m);
        } else if (logName.equals("secDegreeLog")) {
            handleSecDegreeLog((DegreeLog) m);
        } else if (logName.equals("puloptlog")) {
            handlePullOptionLog((OptionLog) m);
        } else {
            throw new IllegalStateException("unrecognised object " + m + "in log: " + logName);
        }
    }

    /**
     * Processes all in-memory logs collected by ObjectLogger, then computes
     * the derived statistics. Churn logs are handled before the degree logs,
     * which read the node lifetimes recorded by handleChurnLog.
     */
    public void analyze() {
        if (ObjectLogger.objLists.containsKey("availLog")) {
            for (final Object sid : ObjectLogger.objLists.get("availLog")) {
                handleAvailabilityLog((AvailabilityLog) sid);
            }
        }
        if (ObjectLogger.objLists.containsKey("churnLog")) {
            for (final Object sid : ObjectLogger.objLists.get("churnLog")) {
                handleChurnLog((ChurnLog) sid);
            }
        }
        if (ObjectLogger.objLists.containsKey("chunkGen")) {
            for (final Object sid : ObjectLogger.objLists.get("chunkGen")) {
                handleChunkGenerationLog((ChunkGenerationLog) sid);
            }
        }
        if (ObjectLogger.objLists.containsKey("chunkPlay")) {
            for (final Object sid : ObjectLogger.objLists.get("chunkPlay")) {
                handleChunkPlayLog((ChunkPlayLog) sid);
            }
        }
        if (ObjectLogger.objLists.containsKey("chunkRec")) {
            for (final Object sid : ObjectLogger.objLists.get("chunkRec")) {
                handleChunkReceiveLog((ChunkReceiveLog) sid);
            }
        }
        if (ObjectLogger.objLists.containsKey("bandLog")) {
            for (final Object sid : ObjectLogger.objLists.get("bandLog")) {
                handleBandwidthLog((BandwidthLog) sid);
            }
        }
        if (ObjectLogger.objLists.containsKey("sendlog")) {
            for (final Object sid : ObjectLogger.objLists.get("sendlog")) {
                handleSendLog((SendLog) sid);
            }
        }
        if (ObjectLogger.objLists.containsKey("degreeLog")) {
            for (final Object sid : ObjectLogger.objLists.get("degreeLog")) {
                handleDegreeLog((DegreeLog) sid);
            }
        }
        if (ObjectLogger.objLists.containsKey("secDegreeLog")) {
            for (final Object sid : ObjectLogger.objLists.get("secDegreeLog")) {
                handleSecDegreeLog((DegreeLog) sid);
            }
        }
        if (ObjectLogger.objLists.containsKey("puloptlog")) {
            for (final Object sid : ObjectLogger.objLists.get("puloptlog")) {
                handlePullOptionLog((OptionLog) sid);
            }
        }
        calculateAverageOrder();
        calculateAverageLatency();
        calculateAverageLatencyPerOrder();
        calculateStartupDelay();
        calculateDegreeVariance();
    }

    // Last option chosen per node, used to detect option changes.
    private transient Map<String, Integer> lastOption = new HashMap<String,
Integer>();
    // Nodes already counted as having started a given period.
    private transient Map<Integer, Set<String>> nodesInPeriod = new HashMap<Integer, Set<String>>();

    /**
     * Records a pull-option choice: counts the chosen option per second and
     * per period, and counts option changes per node.
     *
     * @param sid the option log record
     */
    private void handlePullOptionLog(final OptionLog sid) {
        if (algFilter != null && !algFilter.equals(sid.group)) {
            return;
        }
        final long second = sid.time / 1000;
        // Fix: ensure the per-second entry exists before dereferencing it (the
        // sibling handlers, e.g. handleBandwidthLog, guard the same way).
        // Previously an option record for a second with no bandwidth record
        // caused a NullPointerException here.
        Utils.checkExistence(secondInfo, second, new SecondInfo());
        Utils.checkExistence(secondInfo.get(second).optionChosen, sid.chosenOption, 0);
        secondInfo.get(second).optionChosen.put(sid.chosenOption,
                secondInfo.get(second).optionChosen.get(sid.chosenOption) + 1);
        Utils.checkExistence(periodInfo, sid.currentPeriod, new PeriodInfo());
        Utils.checkExistence(nodesInPeriod, sid.currentPeriod, new HashSet<String>());
        Utils.checkExistence(periodInfo.get(sid.currentPeriod).optionChosen, sid.chosenOption, 0);
        if (!nodesInPeriod.get(sid.currentPeriod).contains(sid.node)) {
            // first record of this node in this period: count its starting option
            periodInfo.get(sid.currentPeriod).optionChosen.put(sid.chosenOption,
                    periodInfo.get(sid.currentPeriod).optionChosen.get(sid.chosenOption) + 1);
            periodInfo.get(sid.currentPeriod).nodesStartingPeriod++;
            nodesInPeriod.get(sid.currentPeriod).add(sid.node);
        }
        if (sid.chosenOption > maxOption) {
            maxOption = sid.chosenOption;
        }
        Utils.checkExistence(lastOption, sid.node, sid.chosenOption);
        if (!lastOption.get(sid.node).equals(sid.chosenOption)) {
            periodInfo.get(sid.currentPeriod).optionChange++;
            secondInfo.get(second).optionChange++;
            lastOption.put(sid.node, sid.chosenOption);
        }
    }

    /**
     * Adds one degree sample to the per-uptime-second statistics of the
     * node's algorithm group.
     *
     * @param sid the degree log record
     */
    private void handleSecDegreeLog(final DegreeLog sid) {
        if (algFilter != null && !algFilter.equals(sid.group)) {
            return;
        }
        // assumes the node's churn (join) log was processed first - TODO confirm
        final int alg = nodeInfo.get(sid.node).alg;
        Utils.checkExistence(uptimeInfo, alg, new TreeMap<Long, UptimeInfo>());
        Utils.checkExistence(uptimeInfo.get(alg), sid.duration, new UptimeInfo());
        final UptimeInfo ui = uptimeInfo.get(alg).get(sid.duration);
        Utils.checkExistence(ui.protocolDegree, sid.protocol, new Statistics());
        ui.protocolDegree.get(sid.protocol).addDatum(sid.degree);
    }

    /**
     * Derives per-protocol degree averages/variances from the per-node degree
     * statistics collected by handleDegreeLog.
     */
    private void calculateDegreeVariance() {
        for (final ProtocolInfo protocol : protocolInfo.values()) {
            if (protocol.nodeDegree.isEmpty()) {
                continue;
            }
            final Statistics averageDegree = new Statistics();
            final Statistics averageVariance = new Statistics();
            for (final Statistics stat : protocol.nodeDegree.values()) {
                averageDegree.addDatum(stat.getMean());
                averageVariance.addDatum(stat.getVariance());
            }
            protocol.averageAverageDegree = averageDegree.getMean();
            protocol.averageNodeDegreeVariance = averageVariance.getMean();
            protocol.degreeVariance = averageDegree.getVariance();
            protocol.serverAverageDegree = protocol.serverStats.getMean();
            protocol.serverDegreeVariance = protocol.serverStats.getVariance();
        }
    }

    /**
     * Adds one degree sample, weighted by the fraction of the node's lifetime
     * it was observed for. Server samples go to a separate statistic.
     *
     * @param sid the degree log record
     */
    private void handleDegreeLog(final DegreeLog sid) {
        if (algFilter != null && !algFilter.equals(sid.group)) {
            return;
        }
        Utils.checkExistence(protocolInfo, sid.protocol, new ProtocolInfo());
        final ProtocolInfo info = protocolInfo.get(sid.protocol);
        final long lifeTime = nodeInfo.get(sid.node.toString()).leaveTime
                - nodeInfo.get(sid.node.toString()).joinTime;
        if (sid.node.toString().equals(serverId)) {
            info.serverStats.addWeightedDatum(((double) sid.duration / lifeTime), sid.degree);
            return;
        }
        try {
            Utils.checkExistence(info.nodeDegree, sid.node.toString(), new Statistics());
        } catch (final Throwable t) {
            // NOTE(review): broad Throwable catch kept as-is; the reason for it
            // is not apparent from this file - verify whether it can be removed.
            t.printStackTrace();
        }
        info.nodeDegree.get(sid.node.toString()).addWeightedDatum(((double) sid.duration / lifeTime), sid.degree);
    }

    /**
     * Accumulates per-node upload bandwidth and per-second queue/utilization
     * counters, separating the server from ordinary nodes.
     *
     * @param sid the bandwidth log record
     */
    private void handleBandwidthLog(final BandwidthLog sid) {
        if (algFilter != null && !algFilter.equals(sid.group)) {
            return;
        }
        nodeInfo.get(sid.node).totalUsedUploadBandwidth += sid.usedBandwidth;
        final long second = sid.time / 1000;
        Utils.checkExistence(secondInfo, second, new SecondInfo());
        if (sid.node.toString().equals(serverId)) {
            secondInfo.get(second).serverBitsInQueue = sid.bitsInWaitQueue;
            secondInfo.get(second).serverUploadUtilization = sid.usedBandwidth;
        } else {
            secondInfo.get(second).allBitsInQueue += sid.bitsInWaitQueue;
            secondInfo.get(second).allUploadUtilization += sid.usedBandwidth;
            secondInfo.get(second).availablenodes++;
        }
    }

    private void
calculateStartupDelay() {
        // Per-node statistics; Statistics also remembers which item produced the max.
        final Statistics lagStats = new Statistics();
        final Statistics delayStats = new Statistics();
        final Statistics lastDelayStats = new Statistics();
        final Statistics avgHopcountStats = new Statistics();
        for (final String node : nodeInfo.keySet()) {
            final NodeInfo info = nodeInfo.get(node);
            if (info.chunkInfo.isEmpty()) {
                continue;
            }
            final long firstChunkPlayed = info.chunkInfo.keySet().iterator().next();
            // startup delay: from (join or first-chunk generation, whichever
            // is later, relative to movie start) until the first chunk played
            info.startupDelay = info.chunkInfo.get(firstChunkPlayed).playTime
                    - Math.max(info.joinTime - Utils.movieStartTime,
                            chunkGenerationTime.get(VideoStream.startingFrame));
            // lag: growth of play latency between first and last played chunk
            info.lag = info.chunkInfo.get(info.lastChunkPlayed).playLatency
                    - info.chunkInfo.get(firstChunkPlayed).playLatency;
            info.lastDelay = info.leaveTime - Utils.movieStartTime
                    - info.chunkInfo.get(info.lastChunkPlayed).playTime;
            if (info.startupDelay < 0) {
                // throw new RuntimeException("illegal results! for node " + node +
                // ": startupDelay = " + info.startupDelay);
                System.out.println("illegal results! for node " + node + ": startupDelay = " + info.startupDelay);
            }
            lagStats.addDatum(info.lag, node);
            delayStats.addDatum(info.startupDelay, node);
            lastDelayStats.addDatum(info.lastDelay, node);
            if (info.chunkOrderAvg != -1) {
                avgHopcountStats.addDatum(info.chunkOrderAvg, node);
            }
            if (info.lastDelay > 10000) {
                // suspiciously large last delay: dump the node for inspection
                printNodeInfo(node);
            }
        }
        averageHopcount = avgHopcountStats.getMean();
        averageLag = (long) lagStats.getMean();
        averageStartupDelay = (long) delayStats.getMean();
        averageLastDelay = (long) lastDelayStats.getMean();
        if (avgHopcountStats.dataSize() == 1.0 || avgHopcountStats.getMaxItem() == null) {
            throw new RuntimeException("no chunks were delivered!!");
        }
        System.err.print("max hopcount: ");
        printNodeInfo(avgHopcountStats.getMaxItem());
        System.err.print("max lag: ");
        printNodeInfo(lagStats.getMaxItem());
        System.err.print("max delay: ");
        printNodeInfo(delayStats.getMaxItem());
        System.err.print("max lastDelay: ");
        printNodeInfo(lastDelayStats.getMaxItem());
    }

    /** Prints the per-node summary for every known node. */
    public void printFullnodeInfo() {
        for (final String node : nodeInfo.keySet()) {
            printNodeInfo(node);
        }
    }

    // Prints a one-node summary (delays, lag, hopcount, lifetime) to stderr.
    private void printNodeInfo(final String node) {
        final NodeInfo info = nodeInfo.get(node);
        if (!info.chunkInfo.isEmpty()) {
            final long firstChunkPlayed = info.chunkInfo.keySet().iterator().next();
            System.err.print("node " + node + " startup delay: " + info.startupDelay + ", startup latency: "
                    + info.chunkInfo.get(firstChunkPlayed).playLatency + " , lag: " + info.lag);
            System.err.println(" last delay: " + info.lastDelay + " avg hopcount: " + info.chunkOrderAvg
                    + " up from " + info.joinTime + " to " + info.leaveTime + " ("
                    + (info.leaveTime - info.joinTime) + " ms)");
        } else {
            System.err.println("node " + node + " didn't play any chunks! and was up from " + info.joinTime
                    + " to " + info.leaveTime + " (" + (info.leaveTime - info.joinTime) + " ms)");
        }
    }

    // Records node join/leave times and static per-node attributes.
    private void handleChurnLog(final ChurnLog sid) {
        if (algFilter != null && !algFilter.equals(sid.group)) {
            return;
        }
        if (sid.joined) {
            Utils.checkExistence(nodeInfo, sid.node, new NodeInfo());
            nodeInfo.get(sid.node).joinTime = sid.time;
            nodeInfo.get(sid.node).leaveTime = Long.MAX_VALUE;
            nodeInfo.get(sid.node).alg = sid.alg;
            if (sid.alg > maxGroup) {
                maxGroup = sid.alg;
            }
            nodeInfo.get(sid.node).startupBuffer = sid.startupBuffer;
            nodeInfo.get(sid.node).cycle = sid.cycle;
            nodeInfo.get(sid.node).bufferFromFirstChunk = sid.bufferFromFirstChunk;
        } else {
            nodeInfo.get(sid.node.toString()).leaveTime = sid.time;
        }
    }

    /**
     * Writes average latency and chunk count per hop count.
     * NOTE(review): assumes orderInfo has an entry for every order in
     * [0, maxOrder] - verify, otherwise info is null here.
     */
    public void writeLatencyPerOrder(final String filename) {
        try {
            final FileWriter fw = new FileWriter(new File(dirName + File.separator + filename + "." + fileExtension));
            fw.write("#per order\n");
            fw.write("#hopCount" + separator + "average latency" + separator + "number of chunks\n");
            for (int i = 0; i < maxOrder + 1; i++) {
                final OrderInfo info = orderInfo.get(i);
                fw.write(i + separator + info.averageLatency + separator + info.count + "\n");
            }
            fw.close();
        } catch (final IOException e) {
            e.printStackTrace();
        }
    }

    /** Writes per-protocol traffic and degree statistics. */
    public void writeDataUsage(final String filename) {
        try {
            final FileWriter fw = new FileWriter(new File(dirName + File.separator + filename + "."
+ fileExtension)); fw.write("#data usage\n"); fw.write("#protocol" + separator + "overhead messages" + separator + "overall messages" + separator + "overhead bytes" + separator + "overall bytes" + separator); fw.write("averageAverageDegree" + separator + "averageNodeDegreeVariance" + separator + "degreeVariance" + separator + "serverAverageDegree" + separator + "serverDegreeVariance\n"); for (final String prot : protocolInfo.keySet()) { final ProtocolInfo info = protocolInfo.get(prot); fw.write(prot + separator + info.overheadMessagesSent + separator + info.duplicateChunkMessagesSent + separator + info.messagesSent); fw.write(separator + info.overheadBytesSent + separator + info.duplicateChunkBytesSent + separator + info.bytesSent); fw.write(separator + info.averageAverageDegree + separator + info.averageNodeDegreeVariance); fw.write(separator + info.degreeVariance + separator + info.serverAverageDegree + separator + info.serverDegreeVariance + "\n"); } fw.close(); } catch (final IOException e) { e.printStackTrace(); } } public void writeSecondInfo(final String filename) { try { final FileWriter fw = new FileWriter(new File(dirName + File.separator + filename + "." 
+ fileExtension)); fw.write("#second info\n"); fw.write("#movie second" + separator + "server Upload Utilization" + separator + "server Bits In Queue" + separator + "all Upload Utilization" + separator + "all Bits In Queue" + separator + "available nodes" + separator + "latency"); for (final State s : State.values()) { fw.write(separator + s.name()); } for (int i = 0; i <= maxOption; ++i) { fw.write(separator + "option" + i); } fw.write(separator + "option change"); fw.write("\n"); for (final Long i : secondInfo.keySet()) { final SecondInfo info = secondInfo.get(i); if (info.availablenodes == 0) { continue; } double latency = -1; if (info.nodesInState.get(State.PLAYING) > 0) { latency = info.latencyAvg / info.nodesInState.get(State.PLAYING); } fw.write(i + separator + info.serverUploadUtilization + separator + info.serverBitsInQueue); fw.write(separator + info.allUploadUtilization / info.availablenodes + separator + info.allBitsInQueue / info.availablenodes); fw.write(separator + info.availablenodes); fw.write(separator + latency); for (final State s : State.values()) { fw.write(separator + info.nodesInState.get(s)); } for (int j = 0; j <= maxOption; ++j) { Integer num = info.optionChosen.get(j); if (num == null) { num = 0; } fw.write(separator + ((double) num) / info.availablenodes); } fw.write(separator + info.optionChange); fw.write("\n"); } fw.close(); } catch (final IOException e) { e.printStackTrace(); } } public void writePeriodInfo(final String filename) { try { final FileWriter fw = new FileWriter(new File(dirName + File.separator + filename + "." 
+ fileExtension)); fw.write("#period info\n"); fw.write("#period" + separator + "available nodes"); for (int i = 0; i <= maxOption; ++i) { fw.write(separator + "option" + i); } fw.write(separator + "option change"); fw.write("\n"); for (final Integer i : periodInfo.keySet()) { final PeriodInfo info = periodInfo.get(i); if (info.nodesStartingPeriod == 0) { continue; } fw.write(i + separator + info.nodesStartingPeriod); for (int j = 0; j <= maxOption; ++j) { Integer num = info.optionChosen.get(j); if (num == null) { num = 0; } fw.write(separator + ((double) num) / info.nodesStartingPeriod); } fw.write(separator + info.optionChange); fw.write("\n"); } fw.close(); } catch (final IOException e) { e.printStackTrace(); } } public void writeUploadBandwidth(final String filename) { try { final FileWriter fw = new FileWriter(new File(dirName + File.separator + filename + ".dat")); fw.write("#UploadBandwidth\n"); for (final Long i : totalUploadBandwidth) { fw.write(i + "\n"); } fw.close(); } catch (final IOException e) { e.printStackTrace(); } } public void writeAvgUploadBandwidth(final String filename) { try { final FileWriter fw = new FileWriter(new File(dirName + File.separator + filename + ".dat")); fw.write("#AvgUploadBandwidth\n"); for (final Long i : avgUploadBandwidth) { fw.write(i + "\n"); } fw.close(); } catch (final IOException e) { e.printStackTrace(); } } public void writeServerUploadBandwidth(final String filename) { try { final FileWriter fw = new FileWriter(new File(dirName + File.separator + filename + ".dat")); fw.write("#ServerUploadBandwidth\n"); for (final Long i : totalServerUploadBandwidth) { fw.write(i + "\n"); } fw.close(); } catch (final IOException e) { e.printStackTrace(); } } public void writeUptimeInfo(final String filename) { for (final Integer alg : uptimeInfo.keySet()) { try { final FileWriter fw = new FileWriter(new File(dirName + File.separator + filename + alg + "." 
+ fileExtension)); fw.write("#uptime info\n"); fw.write("#uptime second"); for (final String prot : uptimeInfo.get(alg).get(1L).protocolDegree.keySet()) { fw.write(separator + prot + "degree"); } fw.write(separator + "count\n"); for (final Long i : uptimeInfo.get(alg).keySet()) { final UptimeInfo info = uptimeInfo.get(alg).get(i); fw.write(i + separator); double count = 0; for (final Statistics degree : info.protocolDegree.values()) { fw.write(degree.getMean() + separator); count = degree.dataSize(); } fw.write(count / analyzerCount + "\n"); } fw.close(); } catch (final IOException e) { e.printStackTrace(); } } } public void writechunkIdInfo(final String filename) { try { final FileWriter fw = new FileWriter(new File(dirName + File.separator + filename + "." + fileExtension)); fw.write("#chunkID info\n"); fw.write("#chunk ID" + separator + "total played\n"); for (final Long i : chunkIdPlayed.keySet()) { final Long info = chunkIdPlayed.get(i); fw.write(i + separator + info + "\n"); } fw.close(); } catch (final IOException e) { e.printStackTrace(); } } /* public void printSecondInfo() { for (final Long i : secondInfo.keySet()) { * final SecondInfo info = secondInfo.get(i); System.out.print("second: " + i * + " server sent: " + info.serverUploadUtilization + " server delayed: " + * info.serverBitsInQueue); System.out.print(" all on average sent: " + * info.allUploadUtilization / info.availablenodes + " on average delayed: " + * info.allBitsInQueue / info.availablenodes); * System.out.println(" chunks played: " + info.chunksPlayed + * " available nodes: " + info.availablenodes); } } */ public void printAverageDataSent() { for (final String prot : protocolInfo.keySet()) { final ProtocolInfo info = protocolInfo.get(prot); System.out.println("Protocol " + prot + " sent " + info.overheadMessagesSent + " overhead messages and " + info.duplicateChunkMessagesSent + " duplicate chunk messages out of " + info.messagesSent); System.out.println("Protocol " + prot + " sent " + 
info.overheadBytesSent + " overhead bytes and " + info.duplicateChunkBytesSent + " duplicate chunk bytes out of " + info.bytesSent); } } public void printOverall() { System.out.println("averageLag: " + averageLag); System.out.println("averageStartupDelay: " + averageStartupDelay); System.out.println("averageHopcount: " + averageHopcount); System.out.println("averageLatency: " + averageNodeLatency.getMean()); System.out.println("averageLatencySD: " + averageNodeLatencySD.getMean()); System.out.println("averageLastDelay: " + averageLastDelay); System.out.println("averageQuality: " + averageQuality); System.out.println("perferctCIpercentage: " + perfectCIpercentage); System.out.println("zeroCI: " + ZeroCI.getMean()); System.out.println("zeroCItime: " + ZeroCITime.getMean()); System.out.println("continuityIndex: " + continuityIndex.getMean()); System.out.println("simpleContinuityIndex: " + simpleContinuityIndex.getMean()); System.out.println("continuityIndexSD: " + continuityIndexSD.getMean()); if (protocolInfo.containsKey("_overall")) { final long overhead = protocolInfo.get("_overall").overheadBytesSent; final long duplicate = protocolInfo.get("_overall").duplicateChunkBytesSent; final long all = protocolInfo.get("_overall").bytesSent; System.out.println("overheadBitsSent: " + overhead); System.out.println("totalBitsSent: " + all); System.out.println("overhead/vital: " + ((double) overhead) / all); System.out.println("duplicate/vital: " + ((double) duplicate) / all); } } public void writeOverall(final String filename) { try { final FileWriter fw = new FileWriter(new File(dirName + File.separator + filename + "." 
+ fileExtension)); fw.write("#overall\n"); fw.write("averageLag" + separator + averageLag + "\n"); fw.write("averageStartupDelay" + separator + averageStartupDelay + "\n"); fw.write("averageHopcount" + separator + averageHopcount + "\n"); fw.write("averageLatency" + separator + averageNodeLatency.getMean() + "\n"); fw.write("averageLatencySD" + separator + averageNodeLatencySD.getMean() + "\n"); fw.write("averageLastDelay" + separator + averageLastDelay + "\n"); fw.write("averageQuality" + separator + averageQuality + "\n"); fw.write("perferctCIpercentage" + separator + perfectCIpercentage + "\n"); fw.write("zeroCI" + separator + ZeroCI.getMean() + "\n"); fw.write("zeroCITime" + separator + ZeroCITime.getMean() + "\n"); fw.write("continuityIndex" + separator + continuityIndex.getMean() + "\n"); fw.write("simpleContinuityIndex" + separator + simpleContinuityIndex.getMean() + "\n"); fw.write("continuityIndexSD" + separator + continuityIndexSD.getMean() + "\n"); if (protocolInfo.containsKey("_overall")) { final long overhead = protocolInfo.get("_overall").overheadBytesSent; final long duplicate = protocolInfo.get("_overall").duplicateChunkBytesSent; final long all = protocolInfo.get("_overall").bytesSent; fw.write("overheadBitsSent" + separator + overhead + "\n"); fw.write("totalBitsSent" + separator + all + "\n"); fw.write("overhead/all" + separator + ((double) overhead) / all + "\n"); fw.write("duplicate/all" + separator + ((double) duplicate) / all + "\n"); fw.close(); } } catch (final IOException e) { e.printStackTrace(); } } // TODO not works for MDC private transient HashMap<String, Set<Long>> receivedChunks = new HashMap<String, Set<Long>>(); private void handleSendLog(final SendLog sid) { if (algFilter != null && !algFilter.equals(sid.group)) { return; } Utils.checkExistence(nodeInfo, sid.node, new NodeInfo()); final NodeInfo sendingNodeInfo = nodeInfo.get(sid.node); sendingNodeInfo.messagesSent++; sendingNodeInfo.bytesSent += sid.messageSize; final String 
messageFullID = sid.messageTag + "-" + sid.messageType; Utils.checkExistence(protocolInfo, messageFullID, new ProtocolInfo()); Utils.checkExistence(protocolInfo, sid.messageTag, new ProtocolInfo()); Utils.checkExistence(protocolInfo, "_overall", new ProtocolInfo()); protocolInfo.get(messageFullID).messagesSent++; protocolInfo.get(messageFullID).bytesSent += sid.messageSize; protocolInfo.get(sid.messageTag).messagesSent++; protocolInfo.get(sid.messageTag).bytesSent += sid.messageSize; protocolInfo.get("_overall").messagesSent++; protocolInfo.get("_overall").bytesSent += sid.messageSize; if (sid.isOverhead) { sendingNodeInfo.overheadMessagesSent++; sendingNodeInfo.overheadBytesSent += sid.messageSize; protocolInfo.get("_overall").overheadMessagesSent++; protocolInfo.get("_overall").overheadBytesSent += sid.messageSize; protocolInfo.get(messageFullID).overheadMessagesSent++; protocolInfo.get(messageFullID).overheadBytesSent += sid.messageSize; protocolInfo.get(sid.messageTag).overheadMessagesSent++; protocolInfo.get(sid.messageTag).overheadBytesSent += sid.messageSize; } if (sid instanceof ChunkSendLog) { Utils.checkExistence(receivedChunks, sid.receiveingNode, new HashSet<Long>()); final long id = ((ChunkSendLog) sid).chunkId; if (receivedChunks.get(sid.receiveingNode).contains(id)) { sendingNodeInfo.duplicateChunkMessagesSent++; sendingNodeInfo.duplicateChunkBytesSent += sid.messageSize; protocolInfo.get("_overall").duplicateChunkMessagesSent++; protocolInfo.get("_overall").duplicateChunkBytesSent += sid.messageSize; protocolInfo.get(messageFullID).duplicateChunkMessagesSent++; protocolInfo.get(messageFullID).duplicateChunkBytesSent += sid.messageSize; protocolInfo.get(sid.messageTag).duplicateChunkMessagesSent++; protocolInfo.get(sid.messageTag).duplicateChunkBytesSent += sid.messageSize; } receivedChunks.get(sid.receiveingNode).add(id); } } private void calculateAverageLatencyPerOrder() { final double latencySum[] = new double[maxOrder + 1]; final int 
latencyCount[] = new int[maxOrder + 1]; for (int i = 0; i < maxOrder + 1; i++) { latencyCount[i] = 0; latencySum[i] = 0; } for (final String node : nodeInfo.keySet()) { final NodeInfo info = nodeInfo.get(node); for (final long chunk : info.chunkInfo.keySet()) { final NodeChunkInfo nci = info.chunkInfo.get(chunk); final double latency = nci.playLatency; int order = 0; if (node.equals(serverId)) { order = 0; } else { for (final int ord : nci.descToOrder.values()) { order += ord; } if (nci.descToOrder.size() > 0) { order /= nci.descToOrder.size(); } } if (order == -1) { System.err.println("node " + node + " has no order for chunk " + chunk); } else { latencySum[order] += latency; latencyCount[order]++; } } } for (int i = 0; i < maxOrder + 1; i++) { final OrderInfo info = new OrderInfo(); if (latencyCount[i] > 0) { info.averageLatency = latencySum[i] / latencyCount[i]; info.count = latencyCount[i]; } else { info.averageLatency = 0; info.count = 0; } orderInfo.put(i, info); } } public void printAverageLatencyPerOrder() { for (int i = 0; i < maxOrder + 1; i++) { final OrderInfo info = orderInfo.get(i); System.out.println("average latency per order " + i + ": " + info.averageLatency + ", for " + info.count + " chunks"); } } private void calculateAverageOrder() { for (final long chunk : chunkReceiveMap.keySet()) { for (int i = 0; i < descriptors; ++i) { final Map<String/* source node */, Set<String>/* dest nodes */> chunkMap = chunkReceiveMap.get(chunk).get(i); if (chunkMap == null) { System.err.println("chunk " + chunk + ", descriptor " + i + " has no chunkReceiveMap!"); continue; } populateChunkOrderMap(chunk, i, serverId, chunkMap, 0, new HashSet<String>()); } } for (final String node : nodeInfo.keySet()) { final NodeInfo info = nodeInfo.get(node); Double avg = 0.0; int count = 0; for (final NodeChunkInfo chunkInfo : info.chunkInfo.values()) { for (final Integer order : chunkInfo.descToOrder.values()) { count++; avg += order; maxOrder = Math.max(maxOrder, order); } } 
avg /= count; if (count == 0) { info.chunkOrderAvg = -1; System.err.println("node " + node + " has no chunkInfo!"); } else { info.chunkOrderAvg = avg; } } } private void populateChunkOrderMap(final long chunk, final int desc, final String nodeId, final Map<String, Set<String>> chunkMap, final int order, final HashSet<String> visited) { if (nodeInfo.get(nodeId) != null) { final NodeChunkInfo ci = nodeInfo.get(nodeId).chunkInfo.get(chunk); if (ci != null) { Utils.checkExistence(ci.descToOrder, desc, order); ci.descToOrder.put(desc, Math.min(order, ci.descToOrder.get(desc))); } } if (!chunkMap.containsKey(nodeId)) { return; } final Set<String> children = chunkMap.get(nodeId); visited.add(nodeId); for (final String child : children) { if (!visited.contains(child)) { populateChunkOrderMap(chunk, desc, child, chunkMap, order + 1, visited); } } visited.remove(nodeId); } private void handleAvailabilityLog(final AvailabilityLog sid) { if (serverId == null && sid.state == State.SERVER) { serverId = sid.node; System.out.println("server id is: " + serverId); } if (algFilter != null && !algFilter.equals(sid.group)) { return; } final long second = sid.movieTime / 1000; Utils.checkExistence(secondInfo, second, new SecondInfo()); Utils.checkExistence(secondInfo.get(second).nodesInState, sid.state, 0); secondInfo.get(second).nodesInState.put(sid.state, secondInfo.get(second).nodesInState.get(sid.state) + 1); Utils.checkExistence(nodeInfo, sid.node, new NodeInfo()); final NodeInfo ni = nodeInfo.get(sid.node); if (sid.state == State.PLAYING) { secondInfo.get(second).latencyAvg += sid.latency; } if (ni.playSeconds == 0 && sid.played == 0.0) { return; } ni.playSeconds++; ni.ci += sid.played / sid.playbackSpeed; } private void handleChunkReceiveLog(final ChunkReceiveLog sid) { final long index = sid.index; Utils.checkExistence(chunkReceiveMap, index, new TreeMap<Integer, Map<String, Set<String>>>()); final Map<Integer, Map<String, Set<String>>> descMap = chunkReceiveMap.get(index); for 
(final Integer desc : sid.descriptors) { Utils.checkExistence(descMap, desc, new TreeMap<String, Set<String>>()); final Map<String, Set<String>> recMap = descMap.get(desc); Utils.checkExistence(recMap, sid.sourceNode, new TreeSet<String>()); recMap.get(sid.sourceNode).add(sid.destinationNode); } if (descriptors - 1 < sid.descriptors.last()) { descriptors = sid.descriptors.last() + 1; } } private void calculateAverageLatency() { double overallAverageQuality = 0; int overallCount = 0; perfectCIpercentage = 0.0; for (final String node : nodeInfo.keySet()) { final NodeInfo info = nodeInfo.get(node); long firstChunk = -1; long count = 0; long delay = 0; double quality = 0; for (final long chunk : info.chunkInfo.keySet()) { final NodeChunkInfo chunkInfo = info.chunkInfo.get(chunk); if (firstChunk == -1) { firstChunk = chunk; } count++; delay += chunkInfo.playLatency; quality += chunkInfo.quality; } final long availablePlayTime = info.leaveTime - Math.max(info.joinTime, chunkGenerationTime.get(VideoStream.startingFrame)) - info.startupBuffer; simpleContinuityIndex.addWeightedDatum(availablePlayTime, (double) info.playSeconds * info.cycle / (availablePlayTime)); if (info.playSeconds > 0L) { info.ci /= info.playSeconds; if (node.equals(serverId)) { continue; } continuityIndex.addWeightedDatum(info.playSeconds, info.ci); if (info.ci < 1.0) { System.err.println("node " + node + " has <1 continuity index: " + info.ci + " play seconds: " + info.playSeconds); } else { perfectCIpercentage++; } if (info.ci > 1) { System.err.println("more chunks than possible for " + node + ": " + info.ci + " playSeconds: " + info.playSeconds); } } if (count > 0) { ZeroCI.addDatum(0); averageNodeLatency.addDatum(delay / count); overallAverageQuality += quality / count; overallCount++; // System.out.println("node " + node + " played "+count // +" chunks, from chunk: " + firstChunk+". 
average latency: " + // delay/count +" order avg: " + info.chunkOrderAvg); } else { ZeroCI.addDatum(1); ZeroCITime.addDatum(info.leaveTime - info.joinTime); System.err.println("node " + node + " didn't play any chunks! and was up from " + info.joinTime + " to " + info.leaveTime + " (" + (info.leaveTime - info.joinTime) + " ms)"); } } perfectCIpercentage /= continuityIndex.getCount(); continuityIndexSD.addDatum(continuityIndex.getStdDev()); averageNodeLatencySD.addDatum(averageNodeLatency.getStdDev()); averageQuality = overallAverageQuality / overallCount; } private void handleChunkPlayLog(final ChunkPlayLog sid) { if (algFilter != null && !algFilter.equals(sid.group)) { return; } final NodeInfo info = nodeInfo.get(sid.node); final NodeChunkInfo chunkInfo = new NodeChunkInfo(); chunkInfo.playTime = sid.time; chunkInfo.quality = sid.quality; if (!chunkGenerationTime.containsKey(sid.chunkIndex)) { System.err.println("no generation time for " + sid.chunkIndex); return; } final long generationTime = chunkGenerationTime.get(sid.chunkIndex); chunkInfo.playLatency = sid.time - generationTime; info.chunkInfo.put(sid.chunkIndex, chunkInfo); info.lastChunkPlayed = sid.chunkIndex; Utils.checkExistence(chunkIdPlayed, sid.chunkIndex, 0L); chunkIdPlayed.put(sid.chunkIndex, chunkIdPlayed.get(sid.chunkIndex) + 1); } private void handleChunkGenerationLog(final ChunkGenerationLog sid) { chunkGenerationTime.put(sid.index, sid.generationTime); } public void setCSV() { separator = ","; fileExtension = "csv"; } public void setTSV() { separator = "\t"; fileExtension = "tsv"; } public void store() { FileOutputStream fstream = null; ObjectOutputStream out = null; try { fstream = new FileOutputStream(getStoredName()); out = new ObjectOutputStream(fstream); out.writeObject(this); } catch (final IOException e) { e.printStackTrace(); } finally { if (fstream != null) { try { fstream.close(); } catch (final IOException e) { e.printStackTrace(); } } if (out != null) { try { out.close(); } catch 
(final IOException e) { e.printStackTrace(); } } } } public String getStoredName() { if (algFilter == null) { return dirName + File.separator + "LA"; } return dirName + File.separator + "LA" + algFilter; } static public LogAnalyzer retrieve(final String path) { FileInputStream fis = null; ObjectInputStream ois = null; try { fis = new FileInputStream(path); ois = new ObjectInputStream(fis); return (LogAnalyzer) ois.readObject(); } catch (final Throwable e) { e.printStackTrace(); throw new RuntimeException(e); } finally { try { if (fis != null) { fis.close(); } if (ois != null) { ois.close(); } } catch (final IOException e) { e.printStackTrace(); } } } public void writeConfigfile() { try { final FileWriter fw = new FileWriter(new File(dirName + File.separator + "config.xml")); fw.write(Common.currentConfiguration.toXml()); fw.close(); } catch (final IOException e) { e.printStackTrace(); } } public Map<String, NodeInfo> getNodeInfo() { return nodeInfo; } public static void main(final String[] args) { final LogAnalyzer LA = new LogAnalyzer("collection"); for (final String dir : args) { ObjectLogger.addGatheredDir(new File(dir)); } // ObjectLogger.useFiles = true; ObjectLogger.readFromGatheredDirs(); LA.analyze(); LA.printAverageLatencyPerOrder(); LA.printOverall(); final int groups = LA.getGroupsNum(); if (groups < 1) { return; } if (groups > 1) { for (int i = 0; i < groups; i++) { final LogAnalyzer groupLA = new LogAnalyzer("collection", i); groupLA.analyze(); groupLA.printAverageLatencyPerOrder(); groupLA.printOverall(); } } } private int getGroupsNum() { return maxGroup + 1; } public double getCI() { return continuityIndex.getMean(); } }
/* Copyright 2013, 2020 Nationale-Nederlanden, 2020, 2021 WeAreFrank! Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package nl.nn.adapterframework.pipes; import org.apache.commons.lang3.StringUtils; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import nl.nn.adapterframework.configuration.ConfigurationException; import nl.nn.adapterframework.configuration.ConfigurationWarning; import nl.nn.adapterframework.core.ParameterException; import nl.nn.adapterframework.core.PipeLineSession; import nl.nn.adapterframework.core.PipeRunException; import nl.nn.adapterframework.core.PipeRunResult; import nl.nn.adapterframework.doc.IbisDoc; import nl.nn.adapterframework.parameters.ParameterList; import nl.nn.adapterframework.parameters.ParameterValue; import nl.nn.adapterframework.parameters.ParameterValueList; import nl.nn.adapterframework.stream.Message; import nl.nn.adapterframework.util.XmlUtils; /** * Pipe that compares lexicographically two strings. * * @ff.parameter operand1 The first operand, holds v1. Defaults to input message * @ff.parameter operand2 The second operand, holds v2. Defaults to input message * @ff.parameter ignorepatterns (optional) contains a xml table with references to substrings which have to be ignored during the comparison. 
This xml table has the following layout: * <br/><code><pre> * &lt;ignores&gt; * &lt;ignore&gt; * &lt;after&gt;...&lt;/after&gt; * &lt;before&gt;...&lt;/before&gt; * &lt;/ignore&gt; * &lt;ignore&gt; * &lt;after&gt;...&lt;/after&gt; * &lt;before&gt;...&lt;/before&gt; * &lt;/ignore&gt; * &lt;/ignores&gt; * </pre></code><br/>Substrings between "after" and "before" are ignored * * @ff.forward lessthan operand1 &lt; operand2 * @ff.forward greaterthan operand1 &gt; operand2 * @ff.forward equals operand1 = operand2 * * @author Peter Leeuwenburgh */ public class CompareStringPipe extends AbstractPipe { private static final String LESSTHANFORWARD = "lessthan"; private static final String GREATERTHANFORWARD = "greaterthan"; private static final String EQUALSFORWARD = "equals"; private static final String OPERAND1 = "operand1"; private static final String OPERAND2 = "operand2"; private static final String IGNOREPATTERNS = "ignorepatterns"; private String sessionKey1 = null; private String sessionKey2 = null; private boolean xml = false; @Override public void configure() throws ConfigurationException { super.configure(); if (null == findForward(LESSTHANFORWARD)) throw new ConfigurationException("forward [" + LESSTHANFORWARD + "] is not defined"); if (null == findForward(GREATERTHANFORWARD)) throw new ConfigurationException("forward [" + GREATERTHANFORWARD + "] is not defined"); if (null == findForward(EQUALSFORWARD)) throw new ConfigurationException("forward [" + EQUALSFORWARD + "] is not defined"); if (StringUtils.isEmpty(sessionKey1) && StringUtils.isEmpty(sessionKey2)) { ParameterList parameterList = getParameterList(); if (parameterList.findParameter(OPERAND1) == null && parameterList.findParameter(OPERAND2) == null) { throw new ConfigurationException("has neither parameter [" + OPERAND1 + "] nor parameter [" + OPERAND2 + "] specified"); } } } @Override public PipeRunResult doPipe(Message message, PipeLineSession session) throws PipeRunException { ParameterValueList pvl = 
null; if (getParameterList() != null) { try { pvl = getParameterList().getValues(message, session); } catch (ParameterException e) { throw new PipeRunException(this, getLogPrefix(session) + "exception extracting parameters", e); } } String operand1 = getParameterValue(pvl, OPERAND1); try { if (operand1 == null) { if (StringUtils.isNotEmpty(getSessionKey1())) { operand1 = session.getMessage(getSessionKey1()).asString(); } if (operand1 == null) { operand1 = message.asString(); } } } catch (Exception e) { throw new PipeRunException(this, getLogPrefix(session) + " Exception on getting operand1 from input message", e); } String operand2 = getParameterValue(pvl, OPERAND2); try { if (operand2 == null) { if (StringUtils.isNotEmpty(getSessionKey2())) { operand2 = session.getMessage(getSessionKey2()).asString(); } if (operand2 == null) { operand2 = message.asString(); } } } catch (Exception e) { throw new PipeRunException(this, getLogPrefix(session) + " Exception on getting operand2 from input message", e); } if (isXml()) { try { operand1 = XmlUtils.canonicalize(operand1); operand2 = XmlUtils.canonicalize(operand2); } catch (Exception e) { throw new PipeRunException(this, getLogPrefix(session) + " Exception on pretty printing input", e); } } String ip = getParameterValue(pvl, IGNOREPATTERNS); if (ip != null) { try { Node n = XmlUtils.buildNode(ip); if (n.getNodeName().equals("ignores")) { NodeList nList = n.getChildNodes(); for (int i = 0; i <= nList.getLength() - 1; i++) { Node cn = nList.item(i); if (cn.getNodeName().equals("ignore")) { NodeList cnList = cn.getChildNodes(); String after = null; String before = null; for (int j = 0; j <= cnList.getLength() - 1; j++) { Node ccn = cnList.item(j); if (ccn.getNodeName().equals("after")) { after = ccn.getFirstChild().getNodeValue(); } else { if (ccn.getNodeName().equals("before")) { before = ccn.getFirstChild().getNodeValue(); } } } operand1 = ignoreBetween(operand1, after, before); operand2 = ignoreBetween(operand2, after, 
before); } } } } catch (Exception e) { throw new PipeRunException(this, getLogPrefix(session) + " Exception on ignoring parts of input", e); } } if (log.isDebugEnabled()) { log.debug("operand1 [" + operand1 + "]"); log.debug("operand2 [" + operand2 + "]"); } int comparison = operand1.compareTo(operand2); if (comparison == 0) return new PipeRunResult(findForward(EQUALSFORWARD), message); else if (comparison < 0) return new PipeRunResult(findForward(LESSTHANFORWARD), message); else return new PipeRunResult(findForward(GREATERTHANFORWARD), message); } private String ignoreBetween(String source, String after, String before) { int afterLength = after.length(); int beforeLength = before.length(); int start = source.indexOf(after); if (start == -1) { return source; } int stop = source.indexOf(before, start + afterLength); if (stop == -1) { return source; } char[] sourceArray = source.toCharArray(); StringBuffer buffer = new StringBuffer(); int srcPos = 0; while (start != -1 && stop != -1) { buffer.append(sourceArray, srcPos, start + afterLength); if (isXml()) { buffer.append("<!-- ignored text -->"); } else { buffer.append("{ignored text}"); } buffer.append(sourceArray, stop, beforeLength); srcPos = stop + beforeLength; start = source.indexOf(after, srcPos); stop = source.indexOf(before, start + afterLength); } buffer.append(sourceArray, srcPos, sourceArray.length - srcPos); return buffer.toString(); } private String getParameterValue(ParameterValueList pvl, String parameterName) { ParameterList parameterList = getParameterList(); if (pvl != null && parameterList != null) { ParameterValue pv = pvl.findParameterValue(parameterName); if(pv != null) { return pv.asStringValue(null); } } return null; } @Override public boolean consumesSessionVariable(String sessionKey) { return super.consumesSessionVariable(sessionKey) || sessionKey.equals(getSessionKey1()) || sessionKey.equals(getSessionKey2()); } @IbisDoc({"reference to one of the session variables to be compared. 
Do not use, but use Parameter operand1 instead", ""}) @Deprecated @ConfigurationWarning("Please use the parameter operand1") public void setSessionKey1(String string) { sessionKey1 = string; } public String getSessionKey1() { return sessionKey1; } @IbisDoc({"reference to the other session variables to be compared. Do not use, but use Parameter operand2 instead", ""}) @Deprecated @ConfigurationWarning("Please use the parameter operand2") public void setSessionKey2(String string) { sessionKey2 = string; } public String getSessionKey2() { return sessionKey2; } public boolean isXml() { return xml; } @IbisDoc({"when set <code>true</code> the string values to compare are considered to be xml strings and before the actual compare both xml strings are transformed to a canonical form", "false"}) public void setXml(boolean b) { xml = b; } }
/* * Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.simpleworkflow.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.AmazonWebServiceRequest; @Generated("com.amazonaws:aws-java-sdk-code-generator") public class ListDomainsRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable { /** * <p> * If a <code>NextPageToken</code> was returned by a previous call, there are more results available. To retrieve * the next page of results, make the call again using the returned token in <code>nextPageToken</code>. Keep all * other arguments unchanged. * </p> * <p> * The configured <code>maximumPageSize</code> determines how many results can be returned in a single call. * </p> */ private String nextPageToken; /** * <p> * Specifies the registration status of the domains to list. * </p> */ private String registrationStatus; /** * <p> * The maximum number of results that will be returned per call. <code>nextPageToken</code> can be used to obtain * futher pages of results. The default is 1000, which is the maximum allowed page size. You can, however, specify a * page size <i>smaller</i> than the maximum. * </p> * <p> * This is an upper limit only; the actual number of results returned per call may be fewer than the specified * maximum. * </p> */ private Integer maximumPageSize; /** * <p> * When set to <code>true</code>, returns the results in reverse order. 
By default, the results are returned in * ascending alphabetical order by <code>name</code> of the domains. * </p> */ private Boolean reverseOrder; /** * <p> * If a <code>NextPageToken</code> was returned by a previous call, there are more results available. To retrieve * the next page of results, make the call again using the returned token in <code>nextPageToken</code>. Keep all * other arguments unchanged. * </p> * <p> * The configured <code>maximumPageSize</code> determines how many results can be returned in a single call. * </p> * * @param nextPageToken * If a <code>NextPageToken</code> was returned by a previous call, there are more results available. To * retrieve the next page of results, make the call again using the returned token in * <code>nextPageToken</code>. Keep all other arguments unchanged.</p> * <p> * The configured <code>maximumPageSize</code> determines how many results can be returned in a single call. */ public void setNextPageToken(String nextPageToken) { this.nextPageToken = nextPageToken; } /** * <p> * If a <code>NextPageToken</code> was returned by a previous call, there are more results available. To retrieve * the next page of results, make the call again using the returned token in <code>nextPageToken</code>. Keep all * other arguments unchanged. * </p> * <p> * The configured <code>maximumPageSize</code> determines how many results can be returned in a single call. * </p> * * @return If a <code>NextPageToken</code> was returned by a previous call, there are more results available. To * retrieve the next page of results, make the call again using the returned token in * <code>nextPageToken</code>. Keep all other arguments unchanged.</p> * <p> * The configured <code>maximumPageSize</code> determines how many results can be returned in a single call. */ public String getNextPageToken() { return this.nextPageToken; } /** * <p> * If a <code>NextPageToken</code> was returned by a previous call, there are more results available. 
To retrieve * the next page of results, make the call again using the returned token in <code>nextPageToken</code>. Keep all * other arguments unchanged. * </p> * <p> * The configured <code>maximumPageSize</code> determines how many results can be returned in a single call. * </p> * * @param nextPageToken * If a <code>NextPageToken</code> was returned by a previous call, there are more results available. To * retrieve the next page of results, make the call again using the returned token in * <code>nextPageToken</code>. Keep all other arguments unchanged.</p> * <p> * The configured <code>maximumPageSize</code> determines how many results can be returned in a single call. * @return Returns a reference to this object so that method calls can be chained together. */ public ListDomainsRequest withNextPageToken(String nextPageToken) { setNextPageToken(nextPageToken); return this; } /** * <p> * Specifies the registration status of the domains to list. * </p> * * @param registrationStatus * Specifies the registration status of the domains to list. * @see RegistrationStatus */ public void setRegistrationStatus(String registrationStatus) { this.registrationStatus = registrationStatus; } /** * <p> * Specifies the registration status of the domains to list. * </p> * * @return Specifies the registration status of the domains to list. * @see RegistrationStatus */ public String getRegistrationStatus() { return this.registrationStatus; } /** * <p> * Specifies the registration status of the domains to list. * </p> * * @param registrationStatus * Specifies the registration status of the domains to list. * @return Returns a reference to this object so that method calls can be chained together. * @see RegistrationStatus */ public ListDomainsRequest withRegistrationStatus(String registrationStatus) { setRegistrationStatus(registrationStatus); return this; } /** * <p> * Specifies the registration status of the domains to list. 
* </p> * * @param registrationStatus * Specifies the registration status of the domains to list. * @see RegistrationStatus */ public void setRegistrationStatus(RegistrationStatus registrationStatus) { this.registrationStatus = registrationStatus.toString(); } /** * <p> * Specifies the registration status of the domains to list. * </p> * * @param registrationStatus * Specifies the registration status of the domains to list. * @return Returns a reference to this object so that method calls can be chained together. * @see RegistrationStatus */ public ListDomainsRequest withRegistrationStatus(RegistrationStatus registrationStatus) { setRegistrationStatus(registrationStatus); return this; } /** * <p> * The maximum number of results that will be returned per call. <code>nextPageToken</code> can be used to obtain * futher pages of results. The default is 1000, which is the maximum allowed page size. You can, however, specify a * page size <i>smaller</i> than the maximum. * </p> * <p> * This is an upper limit only; the actual number of results returned per call may be fewer than the specified * maximum. * </p> * * @param maximumPageSize * The maximum number of results that will be returned per call. <code>nextPageToken</code> can be used to * obtain futher pages of results. The default is 1000, which is the maximum allowed page size. You can, * however, specify a page size <i>smaller</i> than the maximum.</p> * <p> * This is an upper limit only; the actual number of results returned per call may be fewer than the * specified maximum. */ public void setMaximumPageSize(Integer maximumPageSize) { this.maximumPageSize = maximumPageSize; } /** * <p> * The maximum number of results that will be returned per call. <code>nextPageToken</code> can be used to obtain * futher pages of results. The default is 1000, which is the maximum allowed page size. You can, however, specify a * page size <i>smaller</i> than the maximum. 
* </p> * <p> * This is an upper limit only; the actual number of results returned per call may be fewer than the specified * maximum. * </p> * * @return The maximum number of results that will be returned per call. <code>nextPageToken</code> can be used to * obtain futher pages of results. The default is 1000, which is the maximum allowed page size. You can, * however, specify a page size <i>smaller</i> than the maximum.</p> * <p> * This is an upper limit only; the actual number of results returned per call may be fewer than the * specified maximum. */ public Integer getMaximumPageSize() { return this.maximumPageSize; } /** * <p> * The maximum number of results that will be returned per call. <code>nextPageToken</code> can be used to obtain * futher pages of results. The default is 1000, which is the maximum allowed page size. You can, however, specify a * page size <i>smaller</i> than the maximum. * </p> * <p> * This is an upper limit only; the actual number of results returned per call may be fewer than the specified * maximum. * </p> * * @param maximumPageSize * The maximum number of results that will be returned per call. <code>nextPageToken</code> can be used to * obtain futher pages of results. The default is 1000, which is the maximum allowed page size. You can, * however, specify a page size <i>smaller</i> than the maximum.</p> * <p> * This is an upper limit only; the actual number of results returned per call may be fewer than the * specified maximum. * @return Returns a reference to this object so that method calls can be chained together. */ public ListDomainsRequest withMaximumPageSize(Integer maximumPageSize) { setMaximumPageSize(maximumPageSize); return this; } /** * <p> * When set to <code>true</code>, returns the results in reverse order. By default, the results are returned in * ascending alphabetical order by <code>name</code> of the domains. * </p> * * @param reverseOrder * When set to <code>true</code>, returns the results in reverse order. 
By default, the results are returned * in ascending alphabetical order by <code>name</code> of the domains. */ public void setReverseOrder(Boolean reverseOrder) { this.reverseOrder = reverseOrder; } /** * <p> * When set to <code>true</code>, returns the results in reverse order. By default, the results are returned in * ascending alphabetical order by <code>name</code> of the domains. * </p> * * @return When set to <code>true</code>, returns the results in reverse order. By default, the results are returned * in ascending alphabetical order by <code>name</code> of the domains. */ public Boolean getReverseOrder() { return this.reverseOrder; } /** * <p> * When set to <code>true</code>, returns the results in reverse order. By default, the results are returned in * ascending alphabetical order by <code>name</code> of the domains. * </p> * * @param reverseOrder * When set to <code>true</code>, returns the results in reverse order. By default, the results are returned * in ascending alphabetical order by <code>name</code> of the domains. * @return Returns a reference to this object so that method calls can be chained together. */ public ListDomainsRequest withReverseOrder(Boolean reverseOrder) { setReverseOrder(reverseOrder); return this; } /** * <p> * When set to <code>true</code>, returns the results in reverse order. By default, the results are returned in * ascending alphabetical order by <code>name</code> of the domains. * </p> * * @return When set to <code>true</code>, returns the results in reverse order. By default, the results are returned * in ascending alphabetical order by <code>name</code> of the domains. */ public Boolean isReverseOrder() { return this.reverseOrder; } /** * Returns a string representation of this object; useful for testing and debugging. * * @return A string representation of this object. 
* * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getNextPageToken() != null) sb.append("NextPageToken: ").append(getNextPageToken()).append(","); if (getRegistrationStatus() != null) sb.append("RegistrationStatus: ").append(getRegistrationStatus()).append(","); if (getMaximumPageSize() != null) sb.append("MaximumPageSize: ").append(getMaximumPageSize()).append(","); if (getReverseOrder() != null) sb.append("ReverseOrder: ").append(getReverseOrder()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof ListDomainsRequest == false) return false; ListDomainsRequest other = (ListDomainsRequest) obj; if (other.getNextPageToken() == null ^ this.getNextPageToken() == null) return false; if (other.getNextPageToken() != null && other.getNextPageToken().equals(this.getNextPageToken()) == false) return false; if (other.getRegistrationStatus() == null ^ this.getRegistrationStatus() == null) return false; if (other.getRegistrationStatus() != null && other.getRegistrationStatus().equals(this.getRegistrationStatus()) == false) return false; if (other.getMaximumPageSize() == null ^ this.getMaximumPageSize() == null) return false; if (other.getMaximumPageSize() != null && other.getMaximumPageSize().equals(this.getMaximumPageSize()) == false) return false; if (other.getReverseOrder() == null ^ this.getReverseOrder() == null) return false; if (other.getReverseOrder() != null && other.getReverseOrder().equals(this.getReverseOrder()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getNextPageToken() == null) ? 0 : getNextPageToken().hashCode()); hashCode = prime * hashCode + ((getRegistrationStatus() == null) ? 
0 : getRegistrationStatus().hashCode()); hashCode = prime * hashCode + ((getMaximumPageSize() == null) ? 0 : getMaximumPageSize().hashCode()); hashCode = prime * hashCode + ((getReverseOrder() == null) ? 0 : getReverseOrder().hashCode()); return hashCode; } @Override public ListDomainsRequest clone() { return (ListDomainsRequest) super.clone(); } }
/* * Copyright (c) 2003, 2013, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package sbql4j8.com.sun.tools.doclets.formats.html; import java.io.IOException; import sbql4j8.com.sun.javadoc.*; import sbql4j8.com.sun.tools.doclets.formats.html.markup.*; import sbql4j8.com.sun.tools.doclets.internal.toolkit.*; import sbql4j8.com.sun.tools.doclets.internal.toolkit.builders.*; import sbql4j8.com.sun.tools.doclets.internal.toolkit.util.*; /** * Generate the Class Information Page. * * <p><b>This is NOT part of any supported API. * If you write code that depends on this, you do so at your own risk. 
 * This code and its internal interfaces are subject to change or
 * deletion without notice.</b>
 *
 * @see sbql4j8.com.sun.javadoc.ClassDoc
 *
 * @author Atul M Dambalkar
 * @author Robert Field
 * @author Bhavesh Patel (Modified)
 */
public class AnnotationTypeWriterImpl extends SubWriterHolderWriter
        implements AnnotationTypeWriter {

    // The annotation type being documented by this page.
    protected AnnotationTypeDoc annotationType;

    // Previous class in the documentation ordering; may be null (drives the "Prev" nav link).
    protected Type prev;

    // Next class in the documentation ordering; may be null (drives the "Next" nav link).
    protected Type next;

    /**
     * @param configuration the doclet configuration for the current run.
     * @param annotationType the annotation type being documented.
     * @param prevType the previous class that was documented.
     * @param nextType the next class being documented.
     */
    public AnnotationTypeWriterImpl(ConfigurationImpl configuration,
            AnnotationTypeDoc annotationType, Type prevType, Type nextType)
            throws Exception {
        super(configuration, DocPath.forClass(annotationType));
        this.annotationType = annotationType;
        // Record the "current" class doc so shared configuration state points at this page.
        configuration.currentcd = annotationType.asClassDoc();
        this.prev = prevType;
        this.next = nextType;
    }

    /**
     * Get this package link.
     *
     * @return a content tree for the package link
     */
    protected Content getNavLinkPackage() {
        Content linkContent = getHyperLink(DocPaths.PACKAGE_SUMMARY,
                packageLabel);
        Content li = HtmlTree.LI(linkContent);
        return li;
    }

    /**
     * Get the class link.
     *
     * @return a content tree for the class link
     */
    protected Content getNavLinkClass() {
        // navBarCell1Rev highlights this entry: it is the page currently being viewed.
        Content li = HtmlTree.LI(HtmlStyle.navBarCell1Rev, classLabel);
        return li;
    }

    /**
     * Get the class use link.
     *
     * @return a content tree for the class use link
     */
    protected Content getNavLinkClassUse() {
        Content linkContent = getHyperLink(DocPaths.CLASS_USE.resolve(filename), useLabel);
        Content li = HtmlTree.LI(linkContent);
        return li;
    }

    /**
     * Get link to previous class.
     *
     * @return a content tree for the previous class link
     */
    public Content getNavLinkPrevious() {
        Content li;
        if (prev != null) {
            Content prevLink = getLink(new LinkInfoImpl(configuration,
                    LinkInfoImpl.Kind.CLASS, prev.asClassDoc())
                    .label(prevclassLabel).strong(true));
            li = HtmlTree.LI(prevLink);
        }
        else
            // No previous class: render the label as plain (non-link) text.
            li = HtmlTree.LI(prevclassLabel);
        return li;
    }

    /**
     * Get link to next class.
     *
     * @return a content tree for the next class link
     */
    public Content getNavLinkNext() {
        Content li;
        if (next != null) {
            Content nextLink = getLink(new LinkInfoImpl(configuration,
                    LinkInfoImpl.Kind.CLASS, next.asClassDoc())
                    .label(nextclassLabel).strong(true));
            li = HtmlTree.LI(nextLink);
        }
        else
            // No next class: render the label as plain (non-link) text.
            li = HtmlTree.LI(nextclassLabel);
        return li;
    }

    /**
     * {@inheritDoc}
     */
    public Content getHeader(String header) {
        String pkgname = (annotationType.containingPackage() != null)?
            annotationType.containingPackage().name(): "";
        String clname = annotationType.name();
        Content bodyTree = getBody(true, getWindowTitle(clname));
        addTop(bodyTree);
        addNavLinks(true, bodyTree);
        bodyTree.addContent(HtmlConstants.START_OF_CLASS_DATA);
        HtmlTree div = new HtmlTree(HtmlTag.DIV);
        div.addStyle(HtmlStyle.header);
        // Show the package name as a subtitle only when the type is not in the default package.
        if (pkgname.length() > 0) {
            Content pkgNameContent = new StringContent(pkgname);
            Content pkgNameDiv = HtmlTree.DIV(HtmlStyle.subTitle, pkgNameContent);
            div.addContent(pkgNameDiv);
        }
        LinkInfoImpl linkInfo = new LinkInfoImpl(configuration,
                LinkInfoImpl.Kind.CLASS_HEADER, annotationType);
        Content headerContent = new StringContent(header);
        Content heading = HtmlTree.HEADING(HtmlConstants.CLASS_PAGE_HEADING, true,
                HtmlStyle.title, headerContent);
        heading.addContent(getTypeParameterLinks(linkInfo));
        div.addContent(heading);
        bodyTree.addContent(div);
        return bodyTree;
    }

    /**
     * {@inheritDoc}
     */
    public Content getAnnotationContentHeader() {
        return getContentHeader();
    }

    /**
     * {@inheritDoc}
     */
    public void addFooter(Content contentTree) {
        contentTree.addContent(HtmlConstants.END_OF_CLASS_DATA);
        addNavLinks(false, contentTree);
        addBottom(contentTree);
    }

    /**
     * {@inheritDoc}
     */
    public void printDocument(Content contentTree) throws IOException {
        printHtmlDocument(configuration.metakeywords.getMetaKeywords(annotationType),
                true, contentTree);
    }

    /**
     * {@inheritDoc}
     */
    public Content getAnnotationInfoTreeHeader() {
        return getMemberTreeHeader();
    }

    /**
     * {@inheritDoc}
     */
    public Content getAnnotationInfo(Content annotationInfoTree) {
        return getMemberTree(HtmlStyle.description, annotationInfoTree);
    }

    /**
     * {@inheritDoc}
     */
    public void addAnnotationTypeSignature(String modifiers, Content annotationInfoTree) {
        annotationInfoTree.addContent(new HtmlTree(HtmlTag.BR));
        Content pre = new HtmlTree(HtmlTag.PRE);
        addAnnotationInfo(annotationType, pre);
        pre.addContent(modifiers);
        LinkInfoImpl linkInfo = new LinkInfoImpl(configuration,
                LinkInfoImpl.Kind.CLASS_SIGNATURE, annotationType);
        Content annotationName = new StringContent(annotationType.name());
        Content parameterLinks = getTypeParameterLinks(linkInfo);
        // With -linksource the name becomes a link into the generated source page;
        // otherwise it is a styled, plain-text span.
        if (configuration.linksource) {
            addSrcLink(annotationType, annotationName, pre);
            pre.addContent(parameterLinks);
        } else {
            Content span = HtmlTree.SPAN(HtmlStyle.memberNameLabel, annotationName);
            span.addContent(parameterLinks);
            pre.addContent(span);
        }
        annotationInfoTree.addContent(pre);
    }

    /**
     * {@inheritDoc}
     */
    public void addAnnotationTypeDescription(Content annotationInfoTree) {
        // -nocomment suppresses all description output.
        if(!configuration.nocomment) {
            if (annotationType.inlineTags().length > 0) {
                addInlineComment(annotationType, annotationInfoTree);
            }
        }
    }

    /**
     * {@inheritDoc}
     */
    public void addAnnotationTypeTagInfo(Content annotationInfoTree) {
        if(!configuration.nocomment) {
            addTagsInfo(annotationType, annotationInfoTree);
        }
    }

    /**
     * {@inheritDoc}
     */
    public void addAnnotationTypeDeprecationInfo(Content annotationInfoTree) {
        Content hr = new HtmlTree(HtmlTag.HR);
        annotationInfoTree.addContent(hr);
        Tag[] deprs = annotationType.tags("deprecated");
        if (Util.isDeprecated(annotationType)) {
            Content deprLabel = HtmlTree.SPAN(HtmlStyle.deprecatedLabel,
                    deprecatedPhrase);
            Content div = HtmlTree.DIV(HtmlStyle.block, deprLabel);
            // Append the first @deprecated tag's comment text, if any was written.
            if (deprs.length > 0) {
                Tag[] commentTags = deprs[0].inlineTags();
                if (commentTags.length > 0) {
                    div.addContent(getSpace());
                    addInlineDeprecatedComment(annotationType, deprs[0], div);
                }
            }
            annotationInfoTree.addContent(div);
        }
    }

    /**
     * {@inheritDoc}
     */
    protected Content getNavLinkTree() {
        Content treeLinkContent = getHyperLink(DocPaths.PACKAGE_TREE,
                treeLabel, "", "");
        Content li = HtmlTree.LI(treeLinkContent);
        return li;
    }

    /**
     * Add summary details to the navigation bar.
     *
     * @param subDiv the content tree to which the summary detail links will be added
     */
    protected void addSummaryDetailLinks(Content subDiv) {
        try {
            Content div = HtmlTree.DIV(getNavSummaryLinks());
            div.addContent(getNavDetailLinks());
            subDiv.addContent(div);
        } catch (Exception e) {
            // NOTE(review): the stack trace is printed AND the exception is rethrown
            // wrapped, so failures are reported twice — confirm whether the
            // printStackTrace() call is intentional before removing it.
            e.printStackTrace();
            throw new DocletAbortException(e);
        }
    }

    /**
     * Get summary links for navigation bar.
     *
     * @return the content tree for the navigation summary links
     */
    protected Content getNavSummaryLinks() throws Exception {
        Content li = HtmlTree.LI(summaryLabel);
        li.addContent(getSpace());
        Content ulNav = HtmlTree.UL(HtmlStyle.subNavList, li);
        MemberSummaryBuilder memberSummaryBuilder = (MemberSummaryBuilder)
                configuration.getBuilderFactory().getMemberSummaryBuilder(this);
        // One nav entry per member kind: fields, required members, optional members.
        Content liNavField = new HtmlTree(HtmlTag.LI);
        addNavSummaryLink(memberSummaryBuilder,
                "doclet.navField",
                VisibleMemberMap.ANNOTATION_TYPE_FIELDS, liNavField);
        addNavGap(liNavField);
        ulNav.addContent(liNavField);
        Content liNavReq = new HtmlTree(HtmlTag.LI);
        addNavSummaryLink(memberSummaryBuilder,
                "doclet.navAnnotationTypeRequiredMember",
                VisibleMemberMap.ANNOTATION_TYPE_MEMBER_REQUIRED, liNavReq);
        addNavGap(liNavReq);
        ulNav.addContent(liNavReq);
        Content liNavOpt = new HtmlTree(HtmlTag.LI);
        addNavSummaryLink(memberSummaryBuilder,
                "doclet.navAnnotationTypeOptionalMember",
                VisibleMemberMap.ANNOTATION_TYPE_MEMBER_OPTIONAL, liNavOpt);
        ulNav.addContent(liNavOpt);
        return ulNav;
    }

    /**
     * Add the navigation summary link.
     *
     * @param builder builder for the member to be documented
     * @param label the label for the navigation
     * @param type type to be documented
     * @param liNav the content tree to which the navigation summary link will be added
     */
    protected void addNavSummaryLink(MemberSummaryBuilder builder,
            String label, int type, Content liNav) {
        AbstractMemberWriter writer = ((AbstractMemberWriter) builder.
                getMemberSummaryWriter(type));
        if (writer == null) {
            // No writer for this member kind: emit the plain label instead of a link.
            liNav.addContent(getResource(label));
        } else {
            liNav.addContent(writer.getNavSummaryLink(null,
                    ! builder.getVisibleMemberMap(type).noVisibleMembers()));
        }
    }

    /**
     * Get detail links for the navigation bar.
     *
     * @return the content tree for the detail links
     */
    protected Content getNavDetailLinks() throws Exception {
        Content li = HtmlTree.LI(detailLabel);
        li.addContent(getSpace());
        Content ulNav = HtmlTree.UL(HtmlStyle.subNavList, li);
        MemberSummaryBuilder memberSummaryBuilder = (MemberSummaryBuilder)
                configuration.getBuilderFactory().getMemberSummaryBuilder(this);
        AbstractMemberWriter writerField = ((AbstractMemberWriter) memberSummaryBuilder.
                getMemberSummaryWriter(VisibleMemberMap.ANNOTATION_TYPE_FIELDS));
        AbstractMemberWriter writerOptional = ((AbstractMemberWriter) memberSummaryBuilder.
                getMemberSummaryWriter(VisibleMemberMap.ANNOTATION_TYPE_MEMBER_OPTIONAL));
        AbstractMemberWriter writerRequired = ((AbstractMemberWriter) memberSummaryBuilder.
                getMemberSummaryWriter(VisibleMemberMap.ANNOTATION_TYPE_MEMBER_REQUIRED));
        Content liNavField = new HtmlTree(HtmlTag.LI);
        if (writerField != null){
            writerField.addNavDetailLink(annotationType.fields().length > 0, liNavField);
        } else {
            liNavField.addContent(getResource("doclet.navField"));
        }
        addNavGap(liNavField);
        ulNav.addContent(liNavField);
        // Optional and required members share one detail section; prefer the
        // optional-member writer when both exist, falling back to required.
        if (writerOptional != null){
            Content liNavOpt = new HtmlTree(HtmlTag.LI);
            writerOptional.addNavDetailLink(annotationType.elements().length > 0, liNavOpt);
            ulNav.addContent(liNavOpt);
        } else if (writerRequired != null){
            Content liNavReq = new HtmlTree(HtmlTag.LI);
            writerRequired.addNavDetailLink(annotationType.elements().length > 0, liNavReq);
            ulNav.addContent(liNavReq);
        } else {
            Content liNav = HtmlTree.LI(getResource("doclet.navAnnotationTypeMember"));
            ulNav.addContent(liNav);
        }
        return ulNav;
    }

    /**
     * Add gap between navigation bar elements.
     *
     * @param liNav the content tree to which the gap will be added
     */
    protected void addNavGap(Content liNav) {
        liNav.addContent(getSpace());
        liNav.addContent("|");
        liNav.addContent(getSpace());
    }

    /**
     * {@inheritDoc}
     */
    public AnnotationTypeDoc getAnnotationTypeDoc() {
        return annotationType;
    }
}
// Copyright 2014 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.chromium.net.impl; import android.util.Log; import org.chromium.base.annotations.CalledByNative; import org.chromium.base.annotations.JNINamespace; import org.chromium.base.annotations.SuppressFBWarnings; import org.chromium.net.ChromiumUrlRequestError; import org.chromium.net.ChromiumUrlRequestPriority; import org.chromium.net.ChunkedWritableByteChannel; import org.chromium.net.HttpUrlRequest; import org.chromium.net.HttpUrlRequestListener; import org.chromium.net.ResponseTooLargeException; import java.io.IOException; import java.net.MalformedURLException; import java.net.SocketTimeoutException; import java.net.URL; import java.net.UnknownHostException; import java.nio.ByteBuffer; import java.nio.channels.ReadableByteChannel; import java.nio.channels.WritableByteChannel; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; /** * Network request using the native http stack implementation. * @deprecated Use {@link CronetUrlRequest} instead. */ @JNINamespace("cronet") @Deprecated public class ChromiumUrlRequest implements HttpUrlRequest { /** * Native adapter object, owned by UrlRequest. 
     */
    private long mUrlRequestAdapter;

    private final ChromiumUrlRequestContext mRequestContext;
    private final String mUrl;
    private final int mPriority;
    private final Map<String, String> mHeaders;
    private final WritableByteChannel mSink;
    private Map<String, String> mAdditionalHeaders;
    private String mUploadContentType;
    private String mMethod;
    private byte[] mUploadData;
    private ReadableByteChannel mUploadChannel;
    private boolean mChunkedUpload;
    private IOException mSinkException;
    private volatile boolean mStarted;
    private volatile boolean mCanceled;
    private volatile boolean mFinished;
    private boolean mHeadersAvailable;
    private long mUploadContentLength;
    private final HttpUrlRequestListener mListener;
    private boolean mBufferFullResponse;
    private long mOffset;
    private long mContentLengthLimit;
    private boolean mCancelIfContentLengthOverLimit;
    private boolean mContentLengthOverLimit;
    private boolean mSkippingToOffset;
    private long mSize;

    // Indicates whether redirects have been disabled.
    private boolean mDisableRedirects;

    // Http status code. Default to 0. Populated in onResponseStarted().
    private int mHttpStatusCode = 0;

    // Http status text. Default to null. Populated in onResponseStarted().
    private String mHttpStatusText;

    // Content type. Default to null. Populated in onResponseStarted().
    private String mContentType;

    // Compressed content length as reported by the server. Populated in onResponseStarted().
    private long mContentLength;

    // Native error code. Default to no error. Populated in onRequestComplete().
    private int mErrorCode = ChromiumUrlRequestError.SUCCESS;

    // Native error string. Default to null. Populated in onRequestComplete().
    private String mErrorString;

    // Protects access of mUrlRequestAdapter, mStarted, mCanceled, and mFinished.
    private final Object mLock = new Object();

    // Convenience constructor: buffers the full response in memory via a
    // ChunkedWritableByteChannel sink (see getResponseAsBytes()/getByteBuffer()).
    public ChromiumUrlRequest(ChromiumUrlRequestContext requestContext,
            String url, int priority, Map<String, String> headers,
            HttpUrlRequestListener listener) {
        this(requestContext, url, priority, headers,
                new ChunkedWritableByteChannel(), listener);
        mBufferFullResponse = true;
    }

    /**
     * Constructor.
     *
     * @param requestContext The context.
     * @param url The URL.
     * @param priority Request priority, e.g. {@link #REQUEST_PRIORITY_MEDIUM}.
     * @param headers HTTP headers.
     * @param sink The output channel into which downloaded content will be
     *            written.
     */
    public ChromiumUrlRequest(ChromiumUrlRequestContext requestContext,
            String url, int priority, Map<String, String> headers,
            WritableByteChannel sink, HttpUrlRequestListener listener) {
        if (requestContext == null) {
            throw new NullPointerException("Context is required");
        }
        if (url == null) {
            throw new NullPointerException("URL is required");
        }
        mRequestContext = requestContext;
        mUrl = url;
        mPriority = convertRequestPriority(priority);
        mHeaders = headers;
        mSink = sink;
        // Creates the native peer; it must later be released (mUrlRequestAdapter
        // is set to 0 once the native side is destroyed — see the null checks below).
        mUrlRequestAdapter = nativeCreateRequestAdapter(
                mRequestContext.getUrlRequestContextAdapter(), mUrl, mPriority);
        mListener = listener;
    }

    @Override
    public void setOffset(long offset) {
        mOffset = offset;
        // A non-zero offset is expressed to the server as an HTTP Range request.
        if (offset != 0) {
            addHeader("Range", "bytes=" + offset + "-");
        }
    }

    /**
     * The compressed content length as reported by the server. May be -1 if
     * the server did not provide a length. Some servers may also report the
     * wrong number. Since this is the compressed content length, and only
     * uncompressed content is returned by the consumer, the consumer should
     * not rely on this value.
     */
    @Override
    public long getContentLength() {
        return mContentLength;
    }

    @Override
    public void setContentLengthLimit(long limit, boolean cancelEarly) {
        mContentLengthLimit = limit;
        mCancelIfContentLengthOverLimit = cancelEarly;
    }

    @Override
    public int getHttpStatusCode() {
        // TODO(mef): Investigate the following:
        // If we have been able to successfully resume a previously interrupted
        // download, the status code will be 206, not 200. Since the rest of the
        // application is expecting 200 to indicate success, we need to fake it.
        if (mHttpStatusCode == 206) {
            return 200;
        }
        return mHttpStatusCode;
    }

    @Override
    public String getHttpStatusText() {
        return mHttpStatusText;
    }

    /**
     * Returns an exception if any, or null if the request has not completed or
     * completed successfully.
     */
    @Override
    public IOException getException() {
        // A sink-side write failure takes precedence over any native error code.
        if (mSinkException != null) {
            return mSinkException;
        }
        switch (mErrorCode) {
            case ChromiumUrlRequestError.SUCCESS:
                if (mContentLengthOverLimit) {
                    return new ResponseTooLargeException();
                }
                return null;
            case ChromiumUrlRequestError.UNKNOWN:
                return new IOException(mErrorString);
            case ChromiumUrlRequestError.MALFORMED_URL:
                return new MalformedURLException("Malformed URL: " + mUrl);
            case ChromiumUrlRequestError.CONNECTION_TIMED_OUT:
                return new SocketTimeoutException("Connection timed out");
            case ChromiumUrlRequestError.UNKNOWN_HOST:
                String host;
                try {
                    host = new URL(mUrl).getHost();
                } catch (MalformedURLException e) {
                    // Fall back to the raw URL when the host cannot be parsed out.
                    host = mUrl;
                }
                return new UnknownHostException("Unknown host: " + host);
            case ChromiumUrlRequestError.TOO_MANY_REDIRECTS:
                return new IOException("Request failed because there were too "
                        + "many redirects or redirects have been disabled");
            default:
                throw new IllegalStateException("Unrecognized error code: " + mErrorCode);
        }
    }

    @Override
    public ByteBuffer getByteBuffer() {
        // Only valid for the buffering constructor, where the sink is a
        // ChunkedWritableByteChannel; otherwise this cast will fail at runtime.
        return ((ChunkedWritableByteChannel) getSink()).getByteBuffer();
    }

    @Override
    public byte[] getResponseAsBytes() {
        return ((ChunkedWritableByteChannel) getSink()).getBytes();
    }

    /**
     * Adds a request header. Must be done before request has started.
     */
    public void addHeader(String header, String value) {
        synchronized (mLock) {
            validateNotStarted();
            // Lazily created: most requests never add per-request headers.
            if (mAdditionalHeaders == null) {
                mAdditionalHeaders = new HashMap<String, String>();
            }
            mAdditionalHeaders.put(header, value);
        }
    }

    /**
     * Sets data to upload as part of a POST or PUT request.
     *
     * @param contentType MIME type of the upload content or null if this is not
     *            an upload.
     * @param data The content that needs to be uploaded.
     */
    @Override
    @SuppressFBWarnings("EI_EXPOSE_REP2")
    public void setUploadData(String contentType, byte[] data) {
        synchronized (mLock) {
            validateNotStarted();
            validateContentType(contentType);
            mUploadContentType = contentType;
            mUploadData = data;
            // The three upload modes (byte[], channel, chunked) are mutually
            // exclusive; setting one clears the others.
            mUploadChannel = null;
            mChunkedUpload = false;
        }
    }

    /**
     * Sets a readable byte channel to upload as part of a POST or PUT request.
     *
     * @param contentType MIME type of the upload content or null if this is not
     *            an upload request.
     * @param channel The channel to read to read upload data from if this is an
     *            upload request.
     * @param contentLength The length of data to upload.
     */
    @Override
    public void setUploadChannel(
            String contentType, ReadableByteChannel channel, long contentLength) {
        synchronized (mLock) {
            validateNotStarted();
            validateContentType(contentType);
            mUploadContentType = contentType;
            mUploadChannel = channel;
            mUploadContentLength = contentLength;
            mUploadData = null;
            mChunkedUpload = false;
        }
    }

    /**
     * Sets this request up for chunked uploading. To upload data call
     * {@link #appendChunk(ByteBuffer, boolean)} after {@link #start()}.
     *
     * @param contentType MIME type of the post content or null if this is not a
     *            POST request.
     */
    public void setChunkedUpload(String contentType) {
        synchronized (mLock) {
            validateNotStarted();
            validateContentType(contentType);
            mUploadContentType = contentType;
            mChunkedUpload = true;
            mUploadData = null;
            mUploadChannel = null;
        }
    }

    /**
     * Uploads a new chunk. Must have called {@link #setChunkedUpload(String)}
     * and {@link #start()}.
     *
     * @param chunk The data, which will not be modified. Its current position.
     *            must be zero. The last chunk can be empty, but all other
     *            chunks must be non-empty.
     * @param isLastChunk Whether this chunk is the last one.
     */
    public void appendChunk(ByteBuffer chunk, boolean isLastChunk)
            throws IOException {
        if (!isLastChunk && !chunk.hasRemaining()) {
            throw new IllegalArgumentException("Attempted to write empty buffer.");
        }
        if (chunk.position() != 0) {
            throw new IllegalArgumentException("The position must be zero.");
        }
        synchronized (mLock) {
            if (!mStarted) {
                throw new IllegalStateException("Request not yet started.");
            }
            if (!mChunkedUpload) {
                // NOTE(review): "uploadind" is a typo in this user-visible message;
                // left byte-identical here since it is runtime behavior — fix separately.
                throw new IllegalStateException("Request not set for chunked uploadind.");
            }
            if (mUrlRequestAdapter == 0) {
                throw new IOException("Native peer destroyed.");
            }
            nativeAppendChunk(mUrlRequestAdapter, chunk, chunk.limit(), isLastChunk);
        }
    }

    @Override
    public void setHttpMethod(String method) {
        // NOTE(review): unlike the other mutators, this does not synchronize on
        // mLock around validateNotStarted() — confirm whether that is intentional.
        validateNotStarted();
        mMethod = method;
    }

    @Override
    public void disableRedirects() {
        synchronized (mLock) {
            validateNotStarted();
            validateNativeAdapterNotDestroyed();
            mDisableRedirects = true;
            nativeDisableRedirects(mUrlRequestAdapter);
        }
    }

    public WritableByteChannel getSink() {
        return mSink;
    }

    @Override
    public void start() {
        synchronized (mLock) {
            // A cancel() that raced ahead of start() wins: do nothing.
            if (mCanceled) {
                return;
            }
            validateNotStarted();
            validateNativeAdapterNotDestroyed();
            mStarted = true;
            // Constructor-supplied headers first, then per-request additions.
            if (mHeaders != null && !mHeaders.isEmpty()) {
                for (Entry<String, String> entry : mHeaders.entrySet()) {
                    nativeAddHeader(mUrlRequestAdapter, entry.getKey(),
                            entry.getValue());
                }
            }
            if (mAdditionalHeaders != null) {
                for (Entry<String, String> entry
                        : mAdditionalHeaders.entrySet()) {
                    nativeAddHeader(mUrlRequestAdapter, entry.getKey(),
                            entry.getValue());
                }
            }
            // Exactly one upload mode can be armed (see the upload setters).
            if (mUploadData != null && mUploadData.length > 0) {
                nativeSetUploadData(mUrlRequestAdapter, mUploadContentType,
                        mUploadData);
            } else if (mUploadChannel != null) {
                nativeSetUploadChannel(
                        mUrlRequestAdapter, mUploadContentType, mUploadContentLength);
            } else if (mChunkedUpload) {
                nativeEnableChunkedUpload(mUrlRequestAdapter, mUploadContentType);
            }
            // Note: The above functions to set the upload body also set the
            // method to POST, behind the scenes, so if mMethod is null but
            // there's an upload body, the method will default to POST.
            if (mMethod != null) {
                nativeSetMethod(mUrlRequestAdapter, mMethod);
            }
            nativeStart(mUrlRequestAdapter);
        }
    }

    @Override
    public void cancel() {
        synchronized (mLock) {
            // Idempotent: a second cancel is a no-op.
            if (mCanceled) {
                return;
            }
            mCanceled = true;
            if (mUrlRequestAdapter != 0) {
                nativeCancel(mUrlRequestAdapter);
            }
        }
    }

    @Override
    public boolean isCanceled() {
        synchronized (mLock) {
            return mCanceled;
        }
    }

    @Override
    public String getNegotiatedProtocol() {
        synchronized (mLock) {
            validateNativeAdapterNotDestroyed();
            validateHeadersAvailable();
            return nativeGetNegotiatedProtocol(mUrlRequestAdapter);
        }
    }

    @Override
    public boolean wasCached() {
        synchronized (mLock) {
            validateNativeAdapterNotDestroyed();
            validateHeadersAvailable();
            return nativeGetWasCached(mUrlRequestAdapter);
        }
    }

    @Override
    public String getContentType() {
        return mContentType;
    }

    @Override
    public String getHeader(String name) {
        synchronized (mLock) {
            validateNativeAdapterNotDestroyed();
            validateHeadersAvailable();
            return nativeGetHeader(mUrlRequestAdapter, name);
        }
    }

    // All response headers.
/**
 * Returns all response headers as a name -> list-of-values map, populated
 * by the native side via onAppendResponseHeader().
 */
@Override
public Map<String, List<String>> getAllHeaders() {
    synchronized (mLock) {
        validateNativeAdapterNotDestroyed();
        validateHeadersAvailable();
        ResponseHeadersMap result = new ResponseHeadersMap();
        nativeGetAllHeaders(mUrlRequestAdapter, result);
        return result;
    }
}

/** Returns the URL this request was created for. */
@Override
public String getUrl() {
    return mUrl;
}

// Maps the public REQUEST_PRIORITY_* constants onto the native enum values;
// unknown values fall back to MEDIUM.
private static int convertRequestPriority(int priority) {
    switch (priority) {
        case REQUEST_PRIORITY_IDLE:
            return ChromiumUrlRequestPriority.IDLE;
        case REQUEST_PRIORITY_LOWEST:
            return ChromiumUrlRequestPriority.LOWEST;
        case REQUEST_PRIORITY_LOW:
            return ChromiumUrlRequestPriority.LOW;
        case REQUEST_PRIORITY_MEDIUM:
            return ChromiumUrlRequestPriority.MEDIUM;
        case REQUEST_PRIORITY_HIGHEST:
            return ChromiumUrlRequestPriority.HIGHEST;
        default:
            return ChromiumUrlRequestPriority.MEDIUM;
    }
}

// Marks the response as over the configured content-length limit and
// cancels the request.
private void onContentLengthOverLimit() {
    mContentLengthOverLimit = true;
    cancel();
}

/**
 * A callback invoked when the response has been fully consumed.
 */
private void onRequestComplete() {
    mErrorCode = nativeGetErrorCode(mUrlRequestAdapter);
    mErrorString = nativeGetErrorString(mUrlRequestAdapter);
    // When there is an error or redirects have been disabled,
    // onResponseStarted is often not invoked.
    // Populate status code and status text if that's the case.
    // Note that besides redirects, these two fields may be set on the
    // request for AUTH and CERT requests.
    if (mErrorCode != ChromiumUrlRequestError.SUCCESS) {
        mHttpStatusCode = nativeGetHttpStatusCode(mUrlRequestAdapter);
        mHttpStatusText = nativeGetHttpStatusText(mUrlRequestAdapter);
    }
    mListener.onRequestComplete(this);
}

// Precondition check: the native adapter must still be alive.
private void validateNativeAdapterNotDestroyed() {
    if (mUrlRequestAdapter == 0) {
        throw new IllegalStateException("Adapter has been destroyed");
    }
}

// Precondition check: configuration methods may only run before start().
private void validateNotStarted() {
    if (mStarted) {
        throw new IllegalStateException("Request already started");
    }
}

// Precondition check: header accessors require onResponseStarted to have run.
private void validateHeadersAvailable() {
    if (!mHeadersAvailable) {
        throw new IllegalStateException("Response headers not available");
    }
}

// Precondition check: upload configuration requires a content type.
private void validateContentType(String contentType) {
    if (contentType == null) {
        throw new NullPointerException("contentType is required");
    }
}

// Private methods called by native library.

/**
 * If @CalledByNative method throws an exception, request gets canceled
 * and exception could be retrieved from request using getException().
 */
private void onCalledByNativeException(Exception e) {
    mSinkException = new IOException("CalledByNative method has thrown an exception", e);
    Log.e(ChromiumUrlRequestContext.LOG_TAG, "Exception in CalledByNative method", e);
    try {
        cancel();
    } catch (Exception cancel_exception) {
        // cancel() itself may throw if native state is torn down; log and move on.
        Log.e(ChromiumUrlRequestContext.LOG_TAG, "Exception trying to cancel request",
                cancel_exception);
    }
}

/**
 * A callback invoked when the first chunk of the response has arrived.
 */
@CalledByNative
private void onResponseStarted() {
    try {
        mHttpStatusCode = nativeGetHttpStatusCode(mUrlRequestAdapter);
        mHttpStatusText = nativeGetHttpStatusText(mUrlRequestAdapter);
        mContentType = nativeGetContentType(mUrlRequestAdapter);
        mContentLength = nativeGetContentLength(mUrlRequestAdapter);
        mHeadersAvailable = true;
        // Enforce the optional content-length limit up front when the server
        // announced a length and cancellation-on-overflow was requested.
        if (mContentLengthLimit > 0 && mContentLength > mContentLengthLimit
                && mCancelIfContentLengthOverLimit) {
            onContentLengthOverLimit();
            return;
        }
        // Pre-size the sink when the full response is buffered and the length
        // is known (-1 means unknown length).
        if (mBufferFullResponse && mContentLength != -1 && !mContentLengthOverLimit) {
            ((ChunkedWritableByteChannel) getSink()).setCapacity((int) mContentLength);
        }
        if (mOffset != 0) {
            // The server may ignore the request for a byte range, in which
            // case status code will be 200, instead of 206. Note that we
            // cannot call getHttpStatusCode as it rewrites 206 into 200.
            if (mHttpStatusCode == 200) {
                // TODO(mef): Revisit this logic.
                if (mContentLength != -1) {
                    mContentLength -= mOffset;
                }
                // Range was ignored; skip mOffset bytes in onBytesRead().
                mSkippingToOffset = true;
            } else {
                mSize = mOffset;
            }
        }
        mListener.onResponseStarted(this);
    } catch (Exception e) {
        onCalledByNativeException(e);
    }
}

/**
 * Consumes a portion of the response.
 *
 * @param buffer The ByteBuffer to append. Must be a direct buffer, and
 *            no references to it may be retained after the method ends, as
 *            it wraps code managed on the native heap.
 */
@CalledByNative
private void onBytesRead(ByteBuffer buffer) {
    try {
        if (mContentLengthOverLimit) {
            return;
        }
        int size = buffer.remaining();
        mSize += size;
        if (mSkippingToOffset) {
            // Still inside the byte range the caller asked to skip.
            if (mSize <= mOffset) {
                return;
            } else {
                mSkippingToOffset = false;
                // Position the buffer at the first byte past mOffset.
                buffer.position((int) (mOffset - (mSize - size)));
            }
        }
        boolean contentLengthOverLimit =
                (mContentLengthLimit != 0 && mSize > mContentLengthLimit);
        if (contentLengthOverLimit) {
            // Truncate this buffer so exactly mContentLengthLimit bytes are
            // delivered in total before cancelling.
            buffer.limit(size - (int) (mSize - mContentLengthLimit));
        }
        while (buffer.hasRemaining()) {
            mSink.write(buffer);
        }
        if (contentLengthOverLimit) {
            onContentLengthOverLimit();
        }
    } catch (Exception e) {
        onCalledByNativeException(e);
    }
}

/**
 * Notifies the listener, releases native data structures.
 */
@SuppressWarnings("unused")
@CalledByNative
private void finish() {
    try {
        synchronized (mLock) {
            if (mDisableRedirects) {
                mHeadersAvailable = true;
            }
            mFinished = true;
            if (mUrlRequestAdapter == 0) {
                return;
            }
            try {
                mSink.close();
            } catch (IOException e) {
                // Ignore
            }
            try {
                if (mUploadChannel != null && mUploadChannel.isOpen()) {
                    mUploadChannel.close();
                }
            } catch (IOException e) {
                // Ignore
            }
            onRequestComplete();
            nativeDestroyRequestAdapter(mUrlRequestAdapter);
            // Zeroing the handle marks the native peer as destroyed for all
            // other methods guarded by validateNativeAdapterNotDestroyed().
            mUrlRequestAdapter = 0;
        }
    } catch (Exception e) {
        mSinkException = new IOException("Exception in finish", e);
    }
}

/**
 * Appends header |name| with value |value| to |headersMap|.
 */
@SuppressWarnings("unused")
@CalledByNative
private void onAppendResponseHeader(ResponseHeadersMap headersMap, String name, String value) {
    try {
        if (!headersMap.containsKey(name)) {
            headersMap.put(name, new ArrayList<String>());
        }
        headersMap.get(name).add(value);
    } catch (Exception e) {
        onCalledByNativeException(e);
    }
}

/**
 * Reads a sequence of bytes from upload channel into the given buffer.
 * @param dest The buffer into which bytes are to be transferred.
 * @return Returns number of bytes read (could be 0) or -1 and closes
 *         the channel if error occurred.
 */
@SuppressWarnings("unused")
@CalledByNative
private int readFromUploadChannel(ByteBuffer dest) {
    try {
        if (mUploadChannel == null || !mUploadChannel.isOpen()) return -1;
        int result = mUploadChannel.read(dest);
        if (result < 0) {
            // End of channel: close it and report 0 bytes read.
            mUploadChannel.close();
            return 0;
        }
        return result;
    } catch (Exception e) {
        onCalledByNativeException(e);
    }
    return -1;
}

// Native methods are implemented in chromium_url_request.cc.

private native long nativeCreateRequestAdapter(
        long urlRequestContextAdapter, String url, int priority);

private native void nativeAddHeader(long urlRequestAdapter, String name, String value);

private native void nativeSetMethod(long urlRequestAdapter, String method);

private native void nativeSetUploadData(
        long urlRequestAdapter, String contentType, byte[] content);

private native void nativeSetUploadChannel(
        long urlRequestAdapter, String contentType, long contentLength);

private native void nativeEnableChunkedUpload(long urlRequestAdapter, String contentType);

private native void nativeDisableRedirects(long urlRequestAdapter);

private native void nativeAppendChunk(
        long urlRequestAdapter, ByteBuffer chunk, int chunkSize, boolean isLastChunk);

private native void nativeStart(long urlRequestAdapter);

private native void nativeCancel(long urlRequestAdapter);

private native void nativeDestroyRequestAdapter(long urlRequestAdapter);

private native int nativeGetErrorCode(long urlRequestAdapter);

private native int nativeGetHttpStatusCode(long urlRequestAdapter);

private native String nativeGetHttpStatusText(long urlRequestAdapter);

private native String nativeGetErrorString(long urlRequestAdapter);

private native String nativeGetContentType(long urlRequestAdapter);

private native long nativeGetContentLength(long urlRequestAdapter);

private native String nativeGetHeader(long urlRequestAdapter, String name);

private native void nativeGetAllHeaders(long urlRequestAdapter, ResponseHeadersMap headers);

private native String nativeGetNegotiatedProtocol(long urlRequestAdapter);

private native boolean nativeGetWasCached(long urlRequestAdapter);

// Explicit class to work around JNI-generator generics confusion.
private static class ResponseHeadersMap extends HashMap<String, List<String>> {}
}
/*******************************************************************************
 * Copyright 2011 See AUTHORS file.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 ******************************************************************************/

package com.badlogic.gdx.tools.bmfont;

import com.badlogic.gdx.files.FileHandle;
import com.badlogic.gdx.graphics.Pixmap;
import com.badlogic.gdx.graphics.PixmapIO;
import com.badlogic.gdx.graphics.g2d.BitmapFont.BitmapFontData;
import com.badlogic.gdx.graphics.g2d.BitmapFont.Glyph;
import com.badlogic.gdx.graphics.g2d.PixmapPacker.Page;
import com.badlogic.gdx.utils.Array;

/** A utility to output BitmapFontData to a FNT file. This can be useful for caching the result from TrueTypeFont, for faster load
 * times.
 * <p>
 * The font file format is from the AngelCodeFont BMFont tool.
 * <p>
 * Output is nearly identical to the FreeType setting in the Hiero tool {@link com.badlogic.gdx.tools.hiero.Hiero}.
 * BitmapFontWriter gives more flexibility, e.g. borders and shadows can be used. Hiero is able to avoid outputting the same glyph
 * image more than once if multiple character codes have the exact same glyph.
 * @author mattdesl AKA davedes */
public class BitmapFontWriter {

	/** The output format. */
	public static enum OutputFormat {
		/** AngelCodeFont text format */
		Text,
		/** AngelCodeFont XML format */
		XML;
	}

	/** The output format */
	private static OutputFormat format = OutputFormat.Text;

	/** Sets the AngelCodeFont output format for subsequent writes; can be text (for LibGDX) or XML (for other engines, like
	 * Pixi.js).
	 *
	 * @param fmt the output format to use */
	public static void setOutputFormat (OutputFormat fmt) {
		if (fmt == null) throw new NullPointerException("format cannot be null");
		format = fmt;
	}

	/** Returns the currently used output format.
	 * @return the output format */
	public static OutputFormat getOutputFormat () {
		return format;
	}

	/** The Padding parameter for FontInfo. */
	public static class Padding {
		public int up, down, left, right;

		public Padding () {
		}

		public Padding (int up, int down, int left, int right) {
			this.up = up;
			this.down = down;
			this.left = left;
			this.right = right;
		}
	}

	/** The spacing parameter for FontInfo. */
	public static class Spacing {
		public int horizontal, vertical;
	}

	/** The font "info" line; everything except padding and override metrics is ignored by LibGDX's BitmapFont reader. It is
	 * otherwise just useful for clean and organized output. */
	public static class FontInfo {
		/** Face name */
		public String face;
		/** Font size (pt) */
		public int size = 12;
		/** Whether the font is bold */
		public boolean bold;
		/** Whether the font is italic */
		public boolean italic;
		/** The charset; or null/empty for default */
		public String charset;
		/** Whether the font uses unicode glyphs */
		public boolean unicode = true;
		/** Stretch for height; default to 100% */
		public int stretchH = 100;
		/** Whether smoothing is applied */
		public boolean smooth = true;
		/** Amount of anti-aliasing that was applied to the font */
		public int aa = 2;
		/** Padding that was applied to the font */
		public Padding padding = new Padding();
		/** Horizontal/vertical spacing that was applied to font */
		public Spacing spacing = new Spacing();
		public int outline = 0;

		/** Override metrics */
		public boolean hasOverrideMetrics;
		public float ascent;
		public float descent;
		public float down;
		public float capHeight;
		public float lineHeight;
		public float spaceXAdvance;
		public float xHeight;

		public FontInfo () {
		}

		public FontInfo (String face, int size) {
			this.face = face;
			this.size = size;
		}

		/** Copies the metric fields from the given font data and flags them as overrides for the output. */
		public void overrideMetrics (BitmapFontData data) {
			hasOverrideMetrics = true;
			ascent = data.ascent;
			descent = data.descent;
			down = data.down;
			capHeight = data.capHeight;
			lineHeight = data.lineHeight;
			spaceXAdvance = data.spaceXadvance;
			xHeight = data.xHeight;
		}
	}

	private static String quote (Object params) {
		return quote(params, false);
	}

	// In XML mode, wraps the value in double quotes (optionally followed by a space);
	// in text mode, returns the value unchanged.
	private static String quote (Object params, boolean spaceAfter) {
		if (BitmapFontWriter.getOutputFormat() == OutputFormat.XML)
			return "\"" + params.toString().trim() + "\"" + (spaceAfter ? " " : "");
		else
			return params.toString();
	}

	/** Writes the given BitmapFontData to a file, using the specified <tt>pageRefs</tt> strings as the image paths for each
	 * texture page. The glyphs in BitmapFontData have a "page" id, which references the index of the pageRef you specify here.
	 *
	 * The FontInfo parameter is useful for cleaner output; such as including a size and font face name hint. However, it can be
	 * null to use default values. LibGDX ignores most of the "info" line when reading back fonts, only padding is used. Padding
	 * also affects the size, location, and offset of the glyphs that are output.
	 *
	 * Likewise, the scaleW and scaleH are only for cleaner output. They are currently ignored by LibGDX's reader. For maximum
	 * compatibility with other BMFont tools, you should use the width and height of your texture pages (each page should be the
	 * same size).
	 *
	 * @param fontData the bitmap font
	 * @param pageRefs the references to each texture page image file, generally in the same folder as outFntFile
	 * @param outFntFile the font file to save to (typically ends with '.fnt')
	 * @param info the optional info for the file header; can be null
	 * @param scaleW the width of your texture pages
	 * @param scaleH the height of your texture pages */
	public static void writeFont (BitmapFontData fontData, String[] pageRefs, FileHandle outFntFile, FontInfo info, int scaleW,
		int scaleH) {
		if (info == null) {
			info = new FontInfo();
			info.face = outFntFile.nameWithoutExtension();
		}

		int lineHeight = (int)fontData.lineHeight;
		int pages = pageRefs.length;
		int packed = 0;
		int base = (int)((fontData.capHeight) + (fontData.flipped ? -fontData.ascent : fontData.ascent));
		OutputFormat fmt = BitmapFontWriter.getOutputFormat();
		boolean xml = fmt == OutputFormat.XML;

		StringBuilder buf = new StringBuilder();

		if (xml) {
			buf.append("<font>\n");
		}
		// Per-token decorations that differ between the text and XML output formats.
		String xmlOpen = xml ? "\t<" : "";
		String xmlCloseSelf = xml ? "/>" : "";
		String xmlTab = xml ? "\t" : "";
		String xmlClose = xml ? ">" : "";
		String xmlQuote = xml ? "\"" : "";
		String alphaChnlParams = xml ? " alphaChnl=\"0\" redChnl=\"0\" greenChnl=\"0\" blueChnl=\"0\""
			: " alphaChnl=0 redChnl=0 greenChnl=0 blueChnl=0";

		// INFO LINE
		buf.append(xmlOpen).append("info face=\"").append(info.face == null ? "" : info.face.replaceAll("\"", "'"))
			.append("\" size=").append(quote(info.size)).append(" bold=").append(quote(info.bold ? 1 : 0)).append(" italic=")
			.append(quote(info.italic ? 1 : 0)).append(" charset=\"").append(info.charset == null ? "" : info.charset)
			.append("\" unicode=").append(quote(info.unicode ? 1 : 0)).append(" stretchH=").append(quote(info.stretchH))
			.append(" smooth=").append(quote(info.smooth ? 1 : 0)).append(" aa=").append(quote(info.aa)).append(" padding=")
			.append(xmlQuote).append(info.padding.up).append(",").append(info.padding.right).append(",").append(info.padding.down)
			.append(",").append(info.padding.left).append(xmlQuote).append(" spacing=").append(xmlQuote)
			.append(info.spacing.horizontal).append(",").append(info.spacing.vertical).append(xmlQuote).append(xmlCloseSelf)
			.append("\n");

		// COMMON line
		buf.append(xmlOpen).append("common lineHeight=").append(quote(lineHeight)).append(" base=").append(quote(base))
			.append(" scaleW=").append(quote(scaleW)).append(" scaleH=").append(quote(scaleH)).append(" pages=").append(quote(pages))
			.append(" packed=").append(quote(packed)).append(alphaChnlParams).append(xmlCloseSelf).append("\n");

		if (xml) buf.append("\t<pages>\n");

		// PAGES
		for (int i = 0; i < pageRefs.length; i++) {
			buf.append(xmlTab).append(xmlOpen).append("page id=").append(quote(i)).append(" file=\"").append(pageRefs[i])
				.append("\"").append(xmlCloseSelf).append("\n");
		}

		if (xml) buf.append("\t</pages>\n");

		// CHARS
		// Flatten the two-level glyph table (page x index) into a flat list.
		Array<Glyph> glyphs = new Array<Glyph>(256);
		for (int i = 0; i < fontData.glyphs.length; i++) {
			if (fontData.glyphs[i] == null) continue;

			for (int j = 0; j < fontData.glyphs[i].length; j++) {
				if (fontData.glyphs[i][j] != null) {
					glyphs.add(fontData.glyphs[i][j]);
				}
			}
		}

		buf.append(xmlOpen).append("chars count=").append(quote(glyphs.size)).append(xmlClose).append("\n");

		// NOTE(review): info can no longer be null here (assigned above when null), so this
		// check is always true; padX and padY are computed but never used below.
		int padLeft = 0, padRight = 0, padTop = 0, padX = 0, padY = 0;
		if (info != null) {
			padTop = info.padding.up;
			padLeft = info.padding.left;
			padRight = info.padding.right;
			padX = padLeft + padRight;
			padY = info.padding.up + info.padding.down;
		}

		// CHAR definitions
		for (int i = 0; i < glyphs.size; i++) {
			Glyph g = glyphs.get(i);
			// Zero-size glyphs are written with zeroed position/size fields.
			boolean empty = g.width == 0 || g.height == 0;
			buf.append(xmlTab).append(xmlOpen).append("char id=").append(quote(String.format("%-6s", g.id), true)).append("x=")
				.append(quote(String.format("%-5s", empty ? 0 : g.srcX), true)).append("y=")
				.append(quote(String.format("%-5s", empty ? 0 : g.srcY), true)).append("width=")
				.append(quote(String.format("%-5s", empty ? 0 : g.width), true)).append("height=")
				.append(quote(String.format("%-5s", empty ? 0 : g.height), true)).append("xoffset=")
				.append(quote(String.format("%-5s", g.xoffset - padLeft), true)).append("yoffset=")
				.append(quote(String.format("%-5s", fontData.flipped ? g.yoffset + padTop : -(g.height + (g.yoffset + padTop))), true))
				.append("xadvance=").append(quote(String.format("%-5s", g.xadvance), true)).append("page=")
				.append(quote(String.format("%-5s", g.page), true)).append("chnl=").append(quote(0, true)).append(xmlCloseSelf)
				.append("\n");
		}

		if (xml) buf.append("\t</chars>\n");

		// KERNINGS
		// O(n^2) scan over all glyph pairs; only non-zero kerning values are emitted.
		int kernCount = 0;
		StringBuilder kernBuf = new StringBuilder();
		for (int i = 0; i < glyphs.size; i++) {
			for (int j = 0; j < glyphs.size; j++) {
				Glyph first = glyphs.get(i);
				Glyph second = glyphs.get(j);
				int kern = first.getKerning((char)second.id);
				if (kern != 0) {
					kernCount++;
					kernBuf.append(xmlTab).append(xmlOpen).append("kerning first=").append(quote(first.id)).append(" second=")
						.append(quote(second.id)).append(" amount=").append(quote(kern, true)).append(xmlCloseSelf).append("\n");
				}
			}
		}

		// KERN info
		buf.append(xmlOpen).append("kernings count=").append(quote(kernCount)).append(xmlClose).append("\n");
		buf.append(kernBuf);

		if (xml) {
			buf.append("\t</kernings>\n");
		}

		// Override metrics
		if (info.hasOverrideMetrics) {
			if (xml) buf.append("\t<metrics>\n");
			buf.append(xmlTab).append(xmlOpen)
				.append("metrics ascent=").append(quote(info.ascent, true))
				.append(" descent=").append(quote(info.descent, true))
				.append(" down=").append(quote(info.down, true))
				.append(" capHeight=").append(quote(info.capHeight, true))
				.append(" lineHeight=").append(quote(info.lineHeight, true))
				.append(" spaceXAdvance=").append(quote(info.spaceXAdvance, true))
				.append(" xHeight=").append(quote(info.xHeight, true))
				.append(xmlCloseSelf).append("\n");
			if (xml) buf.append("\t</metrics>\n");
		}

		if (xml) {
			buf.append("</font>");
		}

		// An empty charset string means "default": pass null to writeString.
		String charset = info.charset;
		if (charset != null && charset.length() == 0) charset = null;

		outFntFile.writeString(buf.toString(), false, charset);
	}

	/** A utility method which writes the given font data to a file.
	 *
	 * The specified pixmaps are written to the parent directory of <tt>outFntFile</tt>, using that file's name without an
	 * extension for the PNG file name(s).
	 *
	 * The specified FontInfo is optional, and can be null.
	 *
	 * Typical usage looks like this:
	 *
	 * <pre>
	 * BitmapFontWriter.writeFont(myFontData, myFontPixmaps, Gdx.files.external(&quot;fonts/output.fnt&quot;), new FontInfo(&quot;Arial&quot;, 16));
	 * </pre>
	 *
	 * @param fontData the font data
	 * @param pages the pixmaps to write as PNGs
	 * @param outFntFile the output file for the font definition
	 * @param info the optional font info for the header file, can be null */
	public static void writeFont (BitmapFontData fontData, Pixmap[] pages, FileHandle outFntFile, FontInfo info) {
		String[] pageRefs = writePixmaps(pages, outFntFile.parent(), outFntFile.nameWithoutExtension());

		// write the font data
		writeFont(fontData, pageRefs, outFntFile, info, pages[0].getWidth(), pages[0].getHeight());
	}

	/** A utility method to write the given array of pixmaps to the given output directory, with the specified file name. If the
	 * pages array is of length 1, then the resulting file ref will look like: "fileName.png".
	 *
	 * If the pages array is greater than length 1, the resulting file refs will be appended with "_N", such as "fileName_0.png",
	 * "fileName_1.png", "fileName_2.png" etc.
	 *
	 * The returned string array can then be passed to the <tt>writeFont</tt> method.
	 *
	 * Note: None of the pixmaps will be disposed.
	 *
	 * @param pages the pages of pixmap data to write
	 * @param outputDir the output directory
	 * @param fileName the file names for the output images
	 * @return the array of string references to be used with <tt>writeFont</tt> */
	public static String[] writePixmaps (Pixmap[] pages, FileHandle outputDir, String fileName) {
		if (pages == null || pages.length == 0)
			throw new IllegalArgumentException("no pixmaps supplied to BitmapFontWriter.write");

		String[] pageRefs = new String[pages.length];

		for (int i = 0; i < pages.length; i++) {
			String ref = pages.length == 1 ? (fileName + ".png") : (fileName + "_" + i + ".png");

			// the ref for this image
			pageRefs[i] = ref;

			// write the PNG in that directory
			PixmapIO.writePNG(outputDir.child(ref), pages[i]);
		}
		return pageRefs;
	}

	/** A convenience method to write pixmaps by page; typically returned from a PixmapPacker when used alongside
	 * FreeTypeFontGenerator.
	 *
	 * @param pages the pages containing the Pixmaps
	 * @param outputDir the output directory
	 * @param fileName the file name
	 * @return the file refs */
	public static String[] writePixmaps (Array<Page> pages, FileHandle outputDir, String fileName) {
		Pixmap[] pix = new Pixmap[pages.size];
		for (int i = 0; i < pages.size; i++) {
			pix[i] = pages.get(i).getPixmap();
		}
		return writePixmaps(pix, outputDir, fileName);
	}
}
/*
 *    GeoTools - The Open Source Java GIS Toolkit
 *    http://geotools.org
 *
 *    (C) 2004-2008, Open Source Geospatial Foundation (OSGeo)
 *
 *    This library is free software; you can redistribute it and/or
 *    modify it under the terms of the GNU Lesser General Public
 *    License as published by the Free Software Foundation;
 *    version 2.1 of the License.
 *
 *    This library is distributed in the hope that it will be useful,
 *    but WITHOUT ANY WARRANTY; without even the implied warranty of
 *    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *    Lesser General Public License for more details.
 */
package org.geotools.geometry.jts;

import com.vividsolutions.jts.geom.CoordinateSequence;
import com.vividsolutions.jts.geom.CoordinateSequenceFactory;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.GeometryCollection;
import com.vividsolutions.jts.geom.GeometryFactory;
import com.vividsolutions.jts.geom.LineString;
import com.vividsolutions.jts.geom.LinearRing;
import com.vividsolutions.jts.geom.MultiLineString;
import com.vividsolutions.jts.geom.MultiPoint;
import com.vividsolutions.jts.geom.MultiPolygon;
import com.vividsolutions.jts.geom.Point;
import com.vividsolutions.jts.geom.Polygon;

/**
 * A builder for {@link com.vividsolutions.jts.geom.Geometry} objects. Primarily intended to
 * support fluent programming in test code.
 * <p>
 * Features include:
 * <ul>
 * <li>Both 2D and 3D coordinate dimensions are supported
 * (assuming the provided {@link com.vividsolutions.jts.geom.CoordinateSequenceFactory} supports them)
 * <li>Sequences of ordinate values can be supplied in a number of ways
 * <li>Rings do not need to be explicitly closed; a closing point will be
 * supplied if needed
 * <li>Empty geometries of all types can be created
 * <li>Composite geometries are validated to ensure they have a consistent
 * GeometryFactory and coordinate sequence dimension
 * </ul>
 * <p>
 * Examples of intended usage are:
 *
 * <pre>
 * GeometryBuilder gb = new GeometryBuilder(geomFact);
 * LineString line = gb.linestring(1,2, 3,4);
 * Polygon poly = gb.polygon(0,0, 0,1, 1,1, 1,0);
 * Polygon box = gb.box(0,0, 1,1);
 * Polygon hexagon = gb.circle(0, 0, 1, 6);
 * Polygon polyhole = gb.polygon(gb.linearring(0,0, 0,10, 10,10, 10,0), gb.linearring(1,1, 1,9, 9,9, 9,1))
 * </pre>
 *
 * @author Martin Davis - OpenGeo
 *
 */
public class GeometryBuilder {
    private GeometryFactory geomFact;

    private CoordinateSequenceFactory csFact;

    /**
     * Create a new instance using the default {@link com.vividsolutions.jts.geom.GeometryFactory}.
     */
    public GeometryBuilder() {
        this(new GeometryFactory());
    }

    /**
     * Creates a new instance using a provided GeometryFactory.
     *
     * @param geomFact the factory to use
     */
    public GeometryBuilder(GeometryFactory geomFact) {
        this.geomFact = geomFact;
        csFact = geomFact.getCoordinateSequenceFactory();
    }

    /**
     * Creates an empty Point
     *
     * @return an empty Point
     */
    public Point point() {
        return geomFact.createPoint(createCS(new double[0], 2));
    }

    /**
     * Creates an empty Point with coordinate dimension = 3.
     *
     * @return an empty Point
     */
    public Point pointZ() {
        return geomFact.createPoint(createCS(new double[0], 3));
    }

    /**
     * Creates a 1D Point.
     *
     * @param x the X ordinate
     * @return a Point
     */
    public Point point(double x) {
        // Note the coordinate sequence dimension here is 1 (X only).
        return geomFact.createPoint(createCS(new double[] { x }, 1));
    }

    /**
     * Creates a 2D Point.
     *
     * @param x the X ordinate
     * @param y the Y ordinate
     * @return a Point
     */
    public Point point(double x, double y) {
        return geomFact.createPoint(createCS(new double[] { x, y }, 2));
    }

    /**
     * Creates a 3D Point.
     *
     * @param x the X ordinate
     * @param y the Y ordinate
     * @param z the Z ordinate
     * @return a Point
     */
    public Point pointZ(double x, double y, double z) {
        return geomFact.createPoint(createCS(new double[] { x, y, z }, 3));
    }

    /**
     * Creates an empty 2D LineString
     *
     * @return an empty LineString
     */
    public LineString lineString() {
        return geomFact.createLineString(createCS(new double[0], 2));
    }

    /**
     * Creates an empty 3D LineString
     *
     * @return an empty LineString
     */
    public LineString lineStringZ() {
        return geomFact.createLineString(createCS(new double[0], 3));
    }

    /**
     * Creates a 2D LineString.
     *
     * @param ord the XY ordinates
     * @return a LineString
     */
    public LineString lineString(double... ord) {
        return geomFact.createLineString(createCS(ord, 2));
    }

    /**
     * Creates a 3D LineString.
     *
     * @param ord the XYZ ordinates
     * @return a LineString
     */
    public LineString lineStringZ(double... ord) {
        return geomFact.createLineString(createCS(ord, 3));
    }

    /**
     * Creates an empty 2D LinearRing
     *
     * @return an empty LinearRing
     */
    public LinearRing linearRing() {
        return geomFact.createLinearRing(createRingCS(new double[0], 2));
    }

    /**
     * Creates an empty 3D LinearRing
     *
     * @return an empty LinearRing
     */
    public LinearRing linearRingZ() {
        return geomFact.createLinearRing(createRingCS(new double[0], 3));
    }

    /**
     * Creates a 2D LinearRing. If the supplied coordinate list is not closed, a
     * closing coordinate is added.
     *
     * @param ord the XY ordinates
     * @return a LinearRing
     */
    public LinearRing linearRing(double... ord) {
        return geomFact.createLinearRing(createRingCS(ord, 2));
    }

    /**
     * Creates a 3D LinearRing. If the supplied coordinate list is not closed, a
     * closing coordinate is added.
     *
     * @param ord the XYZ ordinates
     * @return a LinearRing
     */
    public LinearRing linearRingZ(double... ord) {
        return geomFact.createLinearRing(createRingCS(ord, 3));
    }

    /**
     * Creates an empty 2D Polygon.
     *
     * @return an empty Polygon
     */
    public Polygon polygon() {
        return geomFact.createPolygon(linearRing(), null);
    }

    /**
     * Creates an empty 3D Polygon.
     *
     * @return an empty Polygon
     */
    public Polygon polygonZ() {
        return geomFact.createPolygon(linearRingZ(), null);
    }

    /**
     * Creates a Polygon from a list of XY coordinates.
     *
     * @param ord a list of XY ordinates
     * @return a Polygon
     */
    public Polygon polygon(double... ord) {
        return geomFact.createPolygon(linearRing(ord), null);
    }

    /**
     * Creates a Polygon from a list of XYZ coordinates.
     *
     * @param ord a list of XYZ ordinates
     * @return a Polygon
     */
    public Polygon polygonZ(double... ord) {
        return geomFact.createPolygon(linearRingZ(ord), null);
    }

    /**
     * Creates a Polygon from an exterior ring. The coordinate dimension of the
     * Polygon is the dimension of the LinearRing.
     *
     * @param shell the exterior ring
     * @return a Polygon
     */
    public Polygon polygon(LinearRing shell) {
        return geomFact.createPolygon(shell, null);
    }

    /**
     * Creates a Polygon with a hole from an exterior ring and an interior ring.
     *
     * @param shell the exterior ring
     * @param hole the interior ring
     * @return a Polygon with a hole
     */
    public Polygon polygon(LinearRing shell, LinearRing hole) {
        return geomFact.createPolygon(shell, new LinearRing[] { hole });
    }

    /**
     * Creates a Polygon with a hole from an exterior ring and an interior ring
     * supplied by the rings of Polygons.
     *
     * @param shell the exterior ring
     * @param hole the interior ring
     * @return a Polygon with a hole
     */
    public Polygon polygon(Polygon shell, Polygon hole) {
        // Only the exterior rings of the two input polygons are used; any holes
        // they already contain are ignored.
        return geomFact.createPolygon((LinearRing) shell.getExteriorRing(),
                new LinearRing[] { (LinearRing) hole.getExteriorRing() });
    }

    /**
     * Creates a rectangular 2D Polygon from X and Y bounds.
     *
     * @param x1 the lower X bound
     * @param y1 the lower Y bound
     * @param x2 the upper X bound
     * @param y2 the upper Y bound
     * @return a 2D Polygon
     */
    public Polygon box(double x1, double y1, double x2, double y2) {
        double[] ord = new double[] { x1, y1, x1, y2, x2, y2, x2, y1, x1, y1 };
        return polygon(ord);
    }

    /**
     * Creates a rectangular 3D Polygon from X and Y bounds.
     *
     * @param x1 the lower X bound
     * @param y1 the lower Y bound
     * @param x2 the upper X bound
     * @param y2 the upper Y bound
     * @param z the Z value for all coordinates
     * @return a 3D Polygon
     */
    public Polygon boxZ(double x1, double y1, double x2, double y2, double z) {
        double[] ord = new double[] { x1, y1, z, x1, y2, z, x2, y2, z, x2, y1, z, x1, y1, z };
        return polygonZ(ord);
    }

    /**
     * Creates an elliptical Polygon from a bounding box with a given number of
     * sides.
     *
     * @param x1 the lower X bound of the bounding box
     * @param y1 the lower Y bound of the bounding box
     * @param x2 the upper X bound of the bounding box
     * @param y2 the upper Y bound of the bounding box
     * @param nsides the number of sides of the approximating polygon
     * @return a 2D Polygon
     */
    public Polygon ellipse(double x1, double y1, double x2, double y2, int nsides) {
        double rx = Math.abs(x2 - x1) / 2;
        double ry = Math.abs(y2 - y1) / 2;
        double cx = Math.min(x1, x2) + rx;
        double cy = Math.min(y1, y2) + ry;

        double[] ord = new double[2 * nsides + 2];
        double angInc = 2 * Math.PI / nsides;
        // create ring in CW order
        for (int i = 0; i < nsides; i++) {
            double ang = -(i * angInc);
            ord[2 * i] = cx + rx * Math.cos(ang);
            ord[2 * i + 1] = cy + ry * Math.sin(ang);
        }
        // explicitly close the ring with a copy of the first vertex
        ord[2 * nsides] = ord[0];
        ord[2 * nsides + 1] = ord[1];
        return polygon(ord);
    }

    /**
     * Creates a circular Polygon with a given center, radius and number of sides.
     *
     * @param x the center X ordinate
     * @param y the center Y ordinate
     * @param radius the radius
     * @param nsides the number of sides
     * @return a 2D Polygon
     */
    public Polygon circle(double x, double y, double radius, int nsides) {
        return ellipse(x - radius, y - radius, x + radius, y + radius, nsides);
    }

    /**
     * Creates a MultiPoint with 2 2D Points.
     *
     * @param x1 the X ordinate of the first point
     * @param y1 the Y ordinate of the first point
     * @param x2 the X ordinate of the second point
     * @param y2 the Y ordinate of the second point
     * @return A MultiPoint
     */
    public MultiPoint multiPoint(double x1, double y1, double x2, double y2) {
        return geomFact.createMultiPoint(new Point[] { point(x1, y1), point(x2, y2) });
    }

    /**
     * Creates a MultiPoint with 2 3D Points.
     *
     * @param x1 the X ordinate of the first point
     * @param y1 the Y ordinate of the first point
     * @param z1 the Z ordinate of the first point
     * @param x2 the X ordinate of the second point
     * @param y2 the Y ordinate of the second point
     * @param z2 the Z ordinate of the second point
     * @return A 3D MultiPoint
     */
    public MultiPoint multiPointZ(double x1, double y1, double z1, double x2, double y2, double z2) {
        return geomFact.createMultiPoint(new Point[] { pointZ(x1, y1, z1), pointZ(x2, y2, z2) });
    }

    /**
     * Creates a MultiLineString from a set of LineStrings
     *
     * @param lines the component LineStrings
     * @return a MultiLineString
     */
    public MultiLineString multiLineString(LineString... lines) {
        return geomFact.createMultiLineString(lines);
    }

    /**
     * Creates a MultiPolygon from a set of Polygons.
     *
     * @param polys the component polygons
     * @return A MultiPolygon
     */
    public MultiPolygon multiPolygon(Polygon... polys) {
        return geomFact.createMultiPolygon(polys);
    }

    /**
     * Creates a GeometryCollection from a set of Geometrys
     *
     * @param geoms the component Geometrys
     * @return a GeometryCollection
     */
    public GeometryCollection geometryCollection(Geometry... geoms) {
        return geomFact.createGeometryCollection(geoms);
    }

    /**
     * Tests whether a sequence of ordinates of a given dimension is closed (i.e.
     * has the first and last coordinate identical). An empty sequence is
     * considered closed.
     * <p>
     * NOTE(review): only the X and Y ordinates are compared, so a 3D sequence
     * whose endpoints differ only in Z is still reported as closed — confirm
     * this matches JTS's 2D ring-closure convention.
     *
     * @param ord the list of ordinate values
     * @param dim the dimension of each coordinate
     * @return true if the sequence is closed
     */
    private boolean isClosed(double[] ord, int dim) {
        int n = ord.length / dim;
        if (n == 0)
            return true;

        int lastPos = dim * (n - 1);
        double lastx = ord[lastPos];
        double lasty = ord[lastPos + 1];

        boolean isClosed = lastx == ord[0] && lasty == ord[1];
        return isClosed;
    }

    /**
     * Creates a coordinate sequence for a ring, appending a copy of the first
     * coordinate when the supplied ordinates are not already closed.
     *
     * @param ord the list of ordinate values
     * @param dim the dimension of each coordinate
     * @return a closed CoordinateSequence
     */
    private CoordinateSequence createRingCS(double[] ord, int dim) {
        if (isClosed(ord, dim))
            return createCS(ord, dim);
        double[] ord2 = new double[ord.length + dim];
        System.arraycopy(ord, 0, ord2, 0, ord.length);
        // copy first coord to last
        int lastPos = ord.length;
        for (int i = 0; i < dim; i++) {
            ord2[lastPos + i] = ord2[i];
        }
        return createCS(ord2, dim);
    }

    /**
     * Creates a coordinate sequence of the given dimension from a flat array of
     * ordinate values.
     *
     * @param ord the list of ordinate values; its length must be a multiple of dim
     * @param dim the dimension of each coordinate
     * @return a CoordinateSequence containing the supplied coordinates
     * @throws IllegalArgumentException if the ordinate count is not a multiple of dim
     */
    private CoordinateSequence createCS(double[] ord, int dim) {
        if (ord.length % dim != 0)
            throw new IllegalArgumentException("Ordinate array length " + ord.length
                    + " is not a multiple of dimension " + dim);
        int n = ord.length / dim;
        CoordinateSequence cs = csFact.create(n, dim);
        for (int i = 0; i < n; i++) {
            for (int d = 0; d < dim; d++)
                cs.setOrdinate(i, d, ord[dim * i + d]);
        }
        return cs;
    }
}
/**
 * Copyright 2012 Markus Scheidgen
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package de.hub.emffrag.testmodels.testmodel.frag.util;

import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.EPackage;
import org.eclipse.emf.ecore.util.Switch;

import de.hub.emffrag.model.emffrag.ContainmentIndexedMap;
import de.hub.emffrag.model.emffrag.IndexedMap;
import de.hub.emffrag.testmodels.testmodel.TestContainmentIndex;
import de.hub.emffrag.testmodels.testmodel.TestObject;
import de.hub.emffrag.testmodels.testmodel.TestObjectIndex;
import de.hub.emffrag.testmodels.testmodel.TestObjectWithIndexes;
import de.hub.emffrag.testmodels.testmodel.frag.meta.TestModelPackage;

/**
 * <!-- begin-user-doc -->
 * The <b>Switch</b> for the model's inheritance hierarchy.
 * It supports the call {@link #doSwitch(EObject) doSwitch(object)}
 * to invoke the <code>caseXXX</code> method for each class of the model,
 * starting with the actual class of the object
 * and proceeding up the inheritance hierarchy
 * until a non-null result is returned,
 * which is the result of the switch.
 * <!-- end-user-doc -->
 * @see de.hub.emffrag.testmodels.testmodel.frag.meta.TestModelPackage
 * @generated
 */
public class TestModelSwitch<T> extends Switch<T> {
    /**
     * The cached model package
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected static TestModelPackage modelPackage;

    /**
     * Creates an instance of the switch.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public TestModelSwitch() {
        if (modelPackage == null) {
            modelPackage = TestModelPackage.eINSTANCE;
        }
    }

    /**
     * Checks whether this is a switch for the given package.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @param ePackage the package in question.
     * @return whether this is a switch for the given package.
     * @generated
     */
    @Override
    protected boolean isSwitchFor(EPackage ePackage) {
        return ePackage == modelPackage;
    }

    /**
     * Calls <code>caseXXX</code> for each class of the model until one returns a non null result; it yields that result.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @return the first non-null result returned by a <code>caseXXX</code> call.
     * @generated
     */
    @Override
    protected T doSwitch(int classifierID, EObject theEObject) {
        switch (classifierID) {
            case TestModelPackage.TEST_OBJECT: {
                TestObject testObject = (TestObject)theEObject;
                T result = caseTestObject(testObject);
                if (result == null) result = defaultCase(theEObject);
                return result;
            }
            case TestModelPackage.TEST_OBJECT_INDEX: {
                TestObjectIndex testObjectIndex = (TestObjectIndex)theEObject;
                T result = caseTestObjectIndex(testObjectIndex);
                // Falls back along the supertype chain until some case yields a result.
                if (result == null) result = caseIndexedMap(testObjectIndex);
                if (result == null) result = defaultCase(theEObject);
                return result;
            }
            case TestModelPackage.TEST_CONTAINMENT_INDEX: {
                TestContainmentIndex testContainmentIndex = (TestContainmentIndex)theEObject;
                T result = caseTestContainmentIndex(testContainmentIndex);
                if (result == null) result = caseContainmentIndexedMap(testContainmentIndex);
                if (result == null) result = caseIndexedMap(testContainmentIndex);
                if (result == null) result = defaultCase(theEObject);
                return result;
            }
            case TestModelPackage.TEST_OBJECT_WITH_INDEXES: {
                TestObjectWithIndexes testObjectWithIndexes = (TestObjectWithIndexes)theEObject;
                T result = caseTestObjectWithIndexes(testObjectWithIndexes);
                if (result == null) result = caseTestObject(testObjectWithIndexes);
                if (result == null) result = defaultCase(theEObject);
                return result;
            }
            default: return defaultCase(theEObject);
        }
    }

    /**
     * Returns the result of interpreting the object as an instance of '<em>Test Object</em>'.
     * <!-- begin-user-doc -->
     * This implementation returns null;
     * returning a non-null result will terminate the switch.
     * <!-- end-user-doc -->
     * @param object the target of the switch.
     * @return the result of interpreting the object as an instance of '<em>Test Object</em>'.
     * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
     * @generated
     */
    public T caseTestObject(TestObject object) {
        return null;
    }

    /**
     * Returns the result of interpreting the object as an instance of '<em>Test Object Index</em>'.
     * <!-- begin-user-doc -->
     * This implementation returns null;
     * returning a non-null result will terminate the switch.
     * <!-- end-user-doc -->
     * @param object the target of the switch.
     * @return the result of interpreting the object as an instance of '<em>Test Object Index</em>'.
     * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
     * @generated
     */
    public T caseTestObjectIndex(TestObjectIndex object) {
        return null;
    }

    /**
     * Returns the result of interpreting the object as an instance of '<em>Test Containment Index</em>'.
     * <!-- begin-user-doc -->
     * This implementation returns null;
     * returning a non-null result will terminate the switch.
     * <!-- end-user-doc -->
     * @param object the target of the switch.
     * @return the result of interpreting the object as an instance of '<em>Test Containment Index</em>'.
     * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
     * @generated
     */
    public T caseTestContainmentIndex(TestContainmentIndex object) {
        return null;
    }

    /**
     * Returns the result of interpreting the object as an instance of '<em>Test Object With Indexes</em>'.
     * <!-- begin-user-doc -->
     * This implementation returns null;
     * returning a non-null result will terminate the switch.
     * <!-- end-user-doc -->
     * @param object the target of the switch.
     * @return the result of interpreting the object as an instance of '<em>Test Object With Indexes</em>'.
     * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
     * @generated
     */
    public T caseTestObjectWithIndexes(TestObjectWithIndexes object) {
        return null;
    }

    /**
     * Returns the result of interpreting the object as an instance of '<em>Indexed Map</em>'.
     * <!-- begin-user-doc -->
     * This implementation returns null;
     * returning a non-null result will terminate the switch.
     * <!-- end-user-doc -->
     * @param object the target of the switch.
     * @return the result of interpreting the object as an instance of '<em>Indexed Map</em>'.
     * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
     * @generated
     */
    public <K, V> T caseIndexedMap(IndexedMap<K, V> object) {
        return null;
    }

    /**
     * Returns the result of interpreting the object as an instance of '<em>Containment Indexed Map</em>'.
     * <!-- begin-user-doc -->
     * This implementation returns null;
     * returning a non-null result will terminate the switch.
     * <!-- end-user-doc -->
     * @param object the target of the switch.
     * @return the result of interpreting the object as an instance of '<em>Containment Indexed Map</em>'.
     * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
     * @generated
     */
    public <K, V> T caseContainmentIndexedMap(ContainmentIndexedMap<K, V> object) {
        return null;
    }

    /**
     * Returns the result of interpreting the object as an instance of '<em>EObject</em>'.
     * <!-- begin-user-doc -->
     * This implementation returns null;
     * returning a non-null result will terminate the switch, but this is the last case anyway.
     * <!-- end-user-doc -->
     * @param object the target of the switch.
     * @return the result of interpreting the object as an instance of '<em>EObject</em>'.
     * @see #doSwitch(org.eclipse.emf.ecore.EObject)
     * @generated
     */
    @Override
    public T defaultCase(EObject object) {
        return null;
    }

} //TestModelSwitch
/* * Copyright 2003, 2004, 2005, 2006 Research Triangle Institute * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 */ package org.rti.zcore.struts.action; import java.io.IOException; import java.security.Principal; import java.sql.Connection; import java.sql.SQLException; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.struts.action.ActionForm; import org.apache.struts.action.ActionForward; import org.apache.struts.action.ActionMapping; import org.cidrz.project.zeprs.valueobject.BaseEncounter; import org.cidrz.webapp.dynasite.Constants; import org.rti.zcore.DropdownItem; import org.cidrz.webapp.dynasite.valueobject.DynaSiteObjects; import org.cidrz.project.zeprs.valueobject.EncounterData; import org.cidrz.webapp.dynasite.valueobject.Form; import org.cidrz.webapp.dynasite.valueobject.FormField; import org.cidrz.webapp.dynasite.valueobject.PageItem; import org.cidrz.webapp.dynasite.valueobject.Task; import org.cidrz.webapp.dynasite.dao.EncountersDAO; //import org.rti.zcore.dar.report.valueobject.StockReport; //import org.rti.zcore.impl.BaseSessionSubject; //import org.rti.zcore.impl.TimsSessionSubject; import org.cidrz.webapp.dynasite.struts.action.generic.BaseAction; import org.cidrz.webapp.dynasite.utils.DatabaseUtils; import org.cidrz.webapp.dynasite.utils.PatientRecordUtils; import org.cidrz.webapp.dynasite.session.SessionUtil; import org.cidrz.webapp.dynasite.utils.StringManipulation; import org.cidrz.webapp.dynasite.utils.WidgetUtils; /** * 
Note: You can't use this action on patients because it does not extend BasePatientAction.
 * If you send constraintClause and constraintLong in the request, you can get detail listings,
 * which is useful for stock_items.
 * @author ckelley
 *
 */
public class ListAction extends BaseAction {

    /**
     * Commons Logging instance.
     */
    private Log log = LogFactory.getFactory().getInstance(ListAction.class);

    /**
     * Renders a paged listing of records for a dynasite form.
     *
     * Resolves the target form id, pulls the matching records via EncountersDAO
     * (the SQL key and ordering depend on the form type and a handful of special
     * form ids), computes simple offset-based paging attributes, resolves each
     * encounter's enumerations into a map, and forwards to "success".
     *
     * Request parameters/attributes read (parameter wins over attribute in every
     * case): formId, constraintClause, constraintLong, maxRows, offset, prevRows,
     * nextRows.
     *
     * @param mapping the Struts action mapping
     * @param form the Struts action form (unused here)
     * @param request current request; listing results are attached as attributes
     * @param response current response (unused here)
     * @return forward to "success", or "error" if the DAO lookup fails
     * @throws Exception never propagated from the main body (swallowed below);
     *         only SQLException from the finally-block close can escape
     */
    protected ActionForward doExecute(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response)
            throws Exception {
        // Extract attributes we will need
        HttpSession session = request.getSession();
        Principal user = request.getUserPrincipal();
        String username = user.getName();
        Form encounterForm;
        BaseEncounter encounter = null;
        Map encMap = null;
        Long formId = null;
        // NOTE(review): patientId, eventId, detailName and inlineForm are assigned at most
        // in commented-out code below and are effectively unused in the live path.
        Long patientId = null;
        Long eventId = null;
        String constraintClause = null;
        Long constraintLong = null;
        String detailName = null;
        // BaseSessionSubject sessionPatient = null;
        Integer maxRows = 0;
        Integer offset = 0;
        Integer prevRows = 0;
        Integer nextRows = 0;
        Connection conn = null;
        // NOTE(review): className is never assigned in the live path (only in the
        // commented-out code below), so the "MenuItem" branches never fire.
        String className = null;
        try {
            conn = DatabaseUtils.getZEPRSConnection(username);

            // Resolve formId: request parameter first, then request attribute.
            if (request.getParameter("formId") != null) {
                formId = Long.decode(request.getParameter("formId"));
            } else if (request.getAttribute("formId") != null) {
                formId = Long.decode(request.getAttribute("formId").toString());
            }
            // if (request.getParameter("className") != null) {
            //     className = request.getParameter("className");
            //     formId = (Long) DynaSiteObjects.getFormNameMap().get(className);
            // } else if (request.getAttribute("className") != null) {
            //     className = (String) request.getAttribute("className");
            //     formId = (Long) DynaSiteObjects.getFormNameMap().get(className);
            // }

            // Optional constraint pair enables detail listings (e.g. stock items).
            if (request.getParameter("constraintClause") != null) {
                constraintClause = request.getParameter("constraintClause");
            } else if (request.getAttribute("constraintClause") != null) {
                constraintClause = request.getAttribute("constraintClause").toString();
            }
            if (request.getParameter("constraintLong") != null) {
                constraintLong = Long.decode(request.getParameter("constraintLong"));
            } else if (request.getAttribute("constraintLong") != null) {
                constraintLong = Long.decode(request.getAttribute("constraintLong").toString());
            }

            // Paging controls; maxRows defaults to 20 rows per page.
            if (request.getParameter("maxRows") != null) {
                maxRows = Integer.decode(request.getParameter("maxRows"));
            } else if (request.getAttribute("maxRows") != null) {
                maxRows = Integer.decode(request.getAttribute("maxRows").toString());
            } else {
                maxRows = 20;
            }
            if (request.getParameter("offset") != null) {
                offset = Integer.decode(request.getParameter("offset"));
            } else if (request.getAttribute("offset") != null) {
                offset = Integer.decode(request.getAttribute("offset").toString());
            }
            // prevRows, when present, overrides offset (paging backwards).
            if (request.getParameter("prevRows") != null) {
                prevRows = Integer.decode(request.getParameter("prevRows"));
                offset = prevRows;
            } else if (request.getAttribute("prevRows") != null) {
                prevRows = Integer.decode(request.getAttribute("prevRows").toString());
                offset = prevRows;
            }
            if (request.getParameter("nextRows") != null) {
                nextRows = Integer.decode(request.getParameter("nextRows"));
            } else if (request.getAttribute("nextRows") != null) {
                nextRows = Integer.decode(request.getAttribute("nextRows").toString());
            }
            // if (mapping.getParameter() != null && !mapping.getParameter().equals("")) {
            //     String formName = mapping.getParameter();
            //     formId = (Long) DynaSiteObjects.getFormNameMap().get(formName);
            // }

            // Admin pages usually do not have a sessionPatient. This is a hack to use code that uses sessionPatient.
            // sessionPatient = new TimsSessionSubject();
            // SessionUtil.getInstance(session).setSessionPatient(sessionPatient);

            // Fall back to form 125 when no form id was supplied.
            // NOTE(review): magic number; the meaning of form 125 is not visible here — confirm.
            if (formId == null) {
                formId = (long) 125;
            }
            encounterForm = ((Form) DynaSiteObjects.getForms().get(new Long(formId)));
            Long formTypeId = encounterForm.getFormTypeId();

            // populate the records for this class
            List items = null;
            // NOTE(review): if this branch is ever re-enabled, items stays null here and the
            // paging code below would throw a NullPointerException at items.size().
            if (className != null && className.equals("MenuItem")) {
                // items = DynaSiteObjects.getMenuItemList(); //must be sorted
            } else {
                // Resolve the form's value-object class; UserInfo lives in a different package.
                String classname = StringManipulation.fixClassname(encounterForm.getName());
                Class clazz = null;
                try {
                    clazz = Class.forName(Constants.DYNASITE_FORMS_PACKAGE + "." + classname);
                } catch (ClassNotFoundException e1) {
                    if (classname.equals("UserInfo")) {
                        clazz = Class.forName("org.rti.zcore." + classname);
                    }
                }
                try {
                    String orderBy = "id DESC";
                    // SQL key and ordering vary by form type and a few special form ids.
                    switch (formTypeId.intValue()) {
                        case 5: // admin
                            if (constraintLong != null) {
                                /*if (formId == 161) { // stock
                                    // Get the item - form 131
                                    Class clazz = Class.forName(DynaSiteObjects.getDynasiteFormsPackage() + ".Item");
                                    Item stockItem = (Item) EncountersDAO.getOne(conn, constraintLong, "SQL_RETRIEVE_ONE_ADMIN131", clazz);
                                    detailName = stockItem.getField2153();
                                    request.setAttribute("detailName", detailName);
                                }*/
                                //String orderBy = "id DESC";
                                // Constrained (detail) listing — not paged.
                                items = EncountersDAO.getAllConstraintOrderBy(conn, formId, "SQL_RETRIEVE_ALL_ADMIN" + formId, clazz, constraintClause, constraintLong, orderBy);
                            } else {
                                if (formId == 161) { // stock
                                    items = EncountersDAO.getAll(conn, formId, "SQL_RETRIEVE_ALL_ADMIN" + formId, clazz, maxRows, offset, "id DESC");
                                } else if (formId == 128) { // regimen groups
                                    items = EncountersDAO.getAll(conn, formId, "SQL_RETRIEVE_ALL_ADMIN_PAGER" + formId, clazz, maxRows, offset, "name ");
                                } else if (formId == 129) { // regimen
                                    items = EncountersDAO.getAll(conn, formId, "SQL_RETRIEVE_ALL_ADMIN_PAGER" + formId, clazz, maxRows, offset, "code ");
                                } else if (formId == 130) { // item groups
                                    items = EncountersDAO.getAll(conn, formId, "SQL_RETRIEVE_ALL_ADMIN_PAGER" + formId, clazz, maxRows, offset, "name ");
                                } else {
                                    if (className != null && className.equals("MenuItem")) {
                                        // items = DynaSiteObjects.getMenuItemList(); //must be sorted
                                    } else {
                                        if (formId == 181) {
                                            orderBy = "regimen_id DESC";
                                            items = EncountersDAO.getAll(conn, formId, "SQL_RETRIEVE_ALL_ADMIN_PAGER" + formId, clazz, maxRows, offset, orderBy);
                                        } else {
                                            items = EncountersDAO.getAll(conn, formId, "SQL_RETRIEVE_ALL_ADMIN_PAGER" + formId, clazz, maxRows, offset, orderBy);
                                        }
                                    }
                                }
                            }
                            break;
                        case 8: // list - for patients
                            items = EncountersDAO.getAll(conn, formId, "SQL_RETRIEVEALL" + formId, clazz, maxRows, offset);
                            break;
                        default:
                            items = EncountersDAO.getAll(conn, formId, "SQL_RETRIEVEALL" + formId, clazz, maxRows, offset);
                            break;
                    }
                } catch (IOException e) {
                    request.setAttribute("exception", e);
                    return mapping.findForward("error");
                } catch (ServletException e) {
                    request.setAttribute("exception", e);
                    return mapping.findForward("error");
                } catch (SQLException e) {
                    request.setAttribute("exception", e);
                    return mapping.findForward("error");
                }
            }

            // Paging widget attributes: hide the widget on a short first page,
            // otherwise advertise the next offset; expose prevRows when there is
            // a previous page.
            request.setAttribute("maxRows", maxRows);
            nextRows = offset + maxRows;
            if (items.size() < maxRows) {
                if (offset == 0) {
                    request.setAttribute("noNavigationWidget", "1");
                }
            } else {
                request.setAttribute("offset", nextRows);
            }
            if (offset-maxRows >=0) {
                prevRows = offset-maxRows;
                request.setAttribute("prevRows", prevRows);
            }
            request.setAttribute("nextRows", nextRows);

            // Attach a map of encounter values that has enumerations already resolved.
            Form encForm = (Form) DynaSiteObjects.getForms().get(encounterForm.getId());
            for (int i = 0; i < items.size(); i++) {
                encounter = (EncounterData) items.get(i);
                // Form encForm = (Form) DynaSiteObjects.getForms().get(encounter.getFormId());
                encMap = PatientRecordUtils.getEncounterMap(encForm, encounter, "fieldId");
                encounter.setEncounterMap(encMap);
            }
            if (items.size() > 0) {
                request.setAttribute("chartItems", items);
                request.setAttribute("formId", encounterForm.getId());
                // loading of body onload DWRUtil.useLoadingMessage()
                request.setAttribute("dwr", 1);
            }

            // Process the dynamic dropdown lists.
            // NOTE(review): the dropdown population below is entirely commented out,
            // so listMap is currently always attached empty.
            HashMap listMap = new HashMap();
            Form inlineForm = null;
            // HashMap<Long,StockReport> balanceMap = null;
            // if (DynaSiteObjects.getStatusMap().get("balanceMap") != null) {
            //     balanceMap = (HashMap<Long, StockReport>) DynaSiteObjects.getStatusMap().get("balanceMap");
            // }
            // for (Iterator iterator = encounterForm.getPageItems().iterator(); iterator.hasNext();) {
            //     PageItem pageItem = (PageItem) iterator.next();
            //     FormField formField = pageItem.getForm_field();
            //     String identifier = formField.getIdentifier();
            //     if (pageItem.getInputType().equals("dropdown") || pageItem.getInputType().equals("dropdown-add-one") || pageItem.getInputType().equals("dropdown_site")) {
            //         String dropdownConstraint = null;
            //         String pageItemDropdownConstraint = pageItem.getDropdownConstraint();
            //         if (pageItemDropdownConstraint != null && pageItemDropdownConstraint.endsWith("'siteAbbrev'")) {
            //             String siteAbbrev = SessionUtil.getInstance(session).getClientSettings().getSite().getAbbreviation();
            //             dropdownConstraint = pageItemDropdownConstraint.replace("'siteAbbrev'", "'" + siteAbbrev + "'");
            //         } else {
            //             dropdownConstraint = pageItemDropdownConstraint;
            //         }
            //         List<DropdownItem> list = WidgetUtils.getList(conn, pageItem.getDropdownTable(), pageItem.getDropdownColumn(), dropdownConstraint, pageItem.getDropdownOrderByClause(), DropdownItem.class, pageItem.getFkIdentifier());
            //         String formName = encForm.getClassname();
            //         if (formName.equals("StockControl")) {
            //             for (DropdownItem dropdownItem : list) {
            //                 if (balanceMap != null) {
            //                     String itemIdStr = dropdownItem.getDropdownId();
            //                     Long itemId = Long.valueOf(itemIdStr);
            //                     StockReport stockReport = balanceMap.get(itemId);
            //                     Integer balance = 0;
            //                     if (stockReport != null) {
            //                         balance = stockReport.getBalanceBF();
            //                         String label = dropdownItem.getDropdownValue();
            //                         if (balance <= 0) {
            //                             String value = dropdownItem.getDropdownValue();
            //                             dropdownItem.setDropdownValue(value + " ** Out of Stock ** Bal: " + balance);
            //                         } else {
            //                             dropdownItem.setDropdownValue(label + " Bal: " + balance);
            //                         }
            //                     }
            //                 }
            //             }
            //         }
            //         listMap.put(pageItem.getId(), list);
            //         if (pageItem.getInputType().equals("dropdown-add-one")) {
            //             String classNameString = StringManipulation.fixClassname(pageItem.getDropdownTable());
            //             Long inlineFormId = (Long) DynaSiteObjects.getFormNameMap().get(classNameString);
            //             inlineForm = ((Form) DynaSiteObjects.getForms().get(new Long(inlineFormId)));
            //             // Create a list of fieldnames for inline forms.
            //             ArrayList<String> inlineFields = new ArrayList<String>();
            //             for (Iterator iterator2 = inlineForm.getPageItems().iterator(); iterator2.hasNext();) {
            //                 PageItem pageItem2 = (PageItem) iterator2.next();
            //                 if (pageItem2.getForm_field().isEnabled() == true && !pageItem2.getForm_field().getType().equals("Display")) {
            //                     inlineFields.add(pageItem2.getForm_field().getIdentifier());
            //                 }
            //             }
            //             request.setAttribute("inlineForm_"+identifier, inlineForm);
            //             request.setAttribute("inlineFields_"+identifier, inlineFields);
            //             // loading of body onload DWRUtil.useLoadingMessage()
            //             request.setAttribute("dwr", 1);
            //         }
            //     }
            // }
            request.setAttribute("listMap", listMap);
            request.setAttribute("encounterForm", encounterForm);
            List sites = DynaSiteObjects.getClinics();
            request.setAttribute("sites", sites);
            // if (Constants.STOCK_PROBLEMS_ENABLED != null && Constants.STOCK_PROBLEMS_ENABLED.equals("true")) {
            //     //List<Task> stockAlertList = PatientRecordUtils.getStockAlerts();
            //     List<Task> stockAlertList = null;
            //     if (DynaSiteObjects.getStatusMap().get("stockAlertList") != null) {
            //         stockAlertList = (List<Task>) DynaSiteObjects.getStatusMap().get("stockAlertList");
            //     }
            //     request.setAttribute("activeProblems", stockAlertList);
            // }
        } catch (Exception e) {
            // NOTE(review): all failures (after the DAO catches above) are swallowed and
            // still forward to "success"; only the stack trace records the problem.
            e.printStackTrace();
        } finally {
            if (conn != null && !conn.isClosed()) {
                conn.close();
            }
        }
        return mapping.findForward("success");
    }
}
package org.nutz.zdoc.impl;

import static org.junit.Assert.assertEquals;
import static org.nutz.zdoc.ZDocEleType.*;
import static org.nutz.zdoc.ZDocNodeType.*;

import org.junit.Before;
import org.junit.Test;
import org.nutz.am.AmFactory;
import org.nutz.zdoc.BaseParserTest;
import org.nutz.zdoc.ZDocEleType;
import org.nutz.zdoc.ZDocNode;

/**
 * Unit tests for {@link MdParser}: parses Markdown snippets and checks the
 * resulting ZDoc node tree.
 *
 * NOTE(review): the helpers are inherited from {@link BaseParserTest} and are not
 * visible here — presumably PS(s) parses the string and returns the root node;
 * _C(root, type, childCount, attrsJson, text, path...) asserts the node reached by
 * the child-index path, and _CE(node, i, type, attrsJson, text) asserts inline
 * element i of a node. Confirm against BaseParserTest.
 */
public class MdParserTest extends BaseParserTest {

    /** Installs a fresh Markdown parser before every test. */
    @Before
    public void before() {
        parser = new MdParser();
    }

    /** A blockquote directly after a paragraph becomes a sibling node. */
    @Test
    public void test_blockquote_after_p() {
        String s = "**A**\n";
        s += "> q";
        ZDocNode root = PS(s);
        _C(root, NODE, 2, "{}", "");
        _C(root, PARAGRAPH, 0, "{}", "A", 0);
        _C(root, BLOCKQUOTE, 1, "{}", "", 1);
        _C(root, PARAGRAPH, 0, "{}", "q ", 1, 0);
    }

    /** "**Y**" splits the paragraph into three inline elements; only Y is bold. */
    @Test
    public void test_simple_em() {
        String s = "X**Y**Z";
        ZDocNode root = PS(s);
        _C(root, NODE, 1, "{}", "");
        _C(root, PARAGRAPH, 0, "{}", "XYZ", 0);
        ZDocNode nd = root.node(0);
        assertEquals(ZDocEleType.INLINE, nd.eles().get(0).type());
        assertEquals("X", nd.eles().get(0).text());
        assertEquals(null, nd.eles().get(0).style("font-weight"));
        assertEquals(ZDocEleType.INLINE, nd.eles().get(1).type());
        assertEquals("Y", nd.eles().get(1).text());
        assertEquals("bold", nd.eles().get(1).style("font-weight"));
        assertEquals(ZDocEleType.INLINE, nd.eles().get(2).type());
        assertEquals("Z", nd.eles().get(2).text());
        assertEquals(null, nd.eles().get(2).style("font-weight"));
    }

    /** A blockquote after a header is nested under that header. */
    @Test
    public void test_blockquote_00() {
        String s = "#AAA\n";
        s += "> T\n";
        ZDocNode root = PS(s);
        _C(root, NODE, 1, "{}", "");
        _C(root, HEADER, 1, "{tagName:'h1'}", "AAA", 0);
        _C(root, BLOCKQUOTE, 1, "{}", "", 0, 0);
        _C(root, PARAGRAPH, 0, "{}", "T ", 0, 0, 0);
    }

    /** Nested "> >" quotes produce nested BLOCKQUOTE nodes. */
    @Test
    public void test_blockquote_01() {
        String s = "#AAA\n";
        s += "> X\n";
        s += "> > A\n";
        s += "> > B\n";
        s += "> Y\n";
        ZDocNode root = PS(s);
        _C(root, NODE, 1, "{}", "");
        _C(root, HEADER, 1, "{tagName:'h1'}", "AAA", 0);
        _C(root, BLOCKQUOTE, 3, "{}", "", 0, 0);
        _C(root, PARAGRAPH, 0, "{}", "X ", 0, 0, 0);
        _C(root, BLOCKQUOTE, 1, "{}", "", 0, 0, 1);
        _C(root, PARAGRAPH, 0, "{}", "A B ", 0, 0, 1, 0);
        _C(root, PARAGRAPH, 0, "{}", "Y ", 0, 0, 2);
    }

    /** Indented ordered-list items nest by indentation level. */
    @Test
    public void test_hierachy_li_00() {
        String s = "#AAA\n";
        s += " 1. L0\n";
        s += " 2. L1\n";
        s += " 1. L11\n";
        s += " 1. L2\n";
        ZDocNode root = PS(s);
        _C(root, NODE, 1, "{}", "");
        _C(root, HEADER, 1, "{tagName:'h1'}", "AAA", 0);
        _C(root, OL, 3, "{'$line-type':'OL','$line-indent':0}", "", 0, 0);
        _C(root, LI, 0, "{}", "L0", 0, 0, 0);
        _C(root, LI, 1, "{}", "L1", 0, 0, 1);
        _C(root, OL, 1, "{'$line-type':'OL','$line-indent':1}", "", 0, 0, 1, 0);
        _C(root, LI, 0, "{}", "L11", 0, 0, 1, 0, 0);
        _C(root, LI, 0, "{}", "L2", 0, 0, 2);
    }

    /** A fenced block keeps its content verbatim and records the code type. */
    @Test
    public void test_code_00() {
        String code = "abc";
        code += "\n|--|";
        code += "\nyyy";
        String s = "```md\n";
        s += code;
        s += "\n```";
        ZDocNode root = PS(s);
        _C(root, NODE, 1, "{}", "");
        _C(root, CODE, 0, "{'code-type':'md'}", code, 0);
    }

    /** An indented block becomes CODE with the leading indent stripped. */
    @Test
    public void test_code_01() {
        String s = " function(){\n";
        s += " \talert('haha');\n";
        s += " }";
        ZDocNode root = PS(s);
        _C(root, NODE, 1, "{}", "");
        _C(root, CODE, 0, "{}", s.replaceAll(" ", "").replaceAll("\t", " "), 0);
    }

    /** A link with empty text still yields an INLINE element carrying href. */
    @Test
    public void test_link() {
        ZDocNode root = PS("A[](a.zdoc)B");
        _C(root, NODE, 1, "{}", "");
        _C(root, PARAGRAPH, 0, "{}", "AB", 0);
        ZDocNode nd = root.node(0);
        assertEquals(3, nd.eles().size());
        _CE(nd, 0, INLINE, "{}", "A");
        _CE(nd, 1, INLINE, "{href:'a.zdoc'}", "");
        _CE(nd, 2, INLINE, "{}", "B");
    }

    /** Two links in one paragraph produce five inline elements. */
    @Test
    public void test_link2() {
        ZDocNode root = PS("A[A](a.zdoc)B[](http://nutzam.com)C");
        _C(root, NODE, 1, "{}", "");
        _C(root, PARAGRAPH, 0, "{}", "AABC", 0);
        ZDocNode nd = root.node(0);
        assertEquals(5, nd.eles().size());
        _CE(nd, 0, INLINE, "{}", "A");
        _CE(nd, 1, INLINE, "{href:'a.zdoc'}", "A");
        _CE(nd, 2, INLINE, "{}", "B");
        _CE(nd, 3, INLINE, "{href:'http://nutzam.com'}", "");
        _CE(nd, 4, INLINE, "{}", "C");
    }

    /** Image syntax yields IMG elements with a src attribute. */
    @Test
    public void test_image() {
        ZDocNode root = PS("A![](a.png)B![](b.png)C");
        _C(root, NODE, 1, "{}", "");
        _C(root, PARAGRAPH, 0, "{}", "ABC", 0);
        ZDocNode nd = root.node(0);
        assertEquals(5, nd.eles().size());
        _CE(nd, 0, INLINE, "{}", "A");
        _CE(nd, 1, IMG, "{src:'a.png'}", "");
        _CE(nd, 2, INLINE, "{}", "B");
        _CE(nd, 3, IMG, "{src:'b.png'}", "");
        _CE(nd, 4, INLINE, "{}", "C");
    }

    /** A pipe table under a header parses to TABLE/THEAD/TR rows under that header. */
    @Test
    public void test_simple_table() {
        String str = "#AAAAAAA\n";
        str += " H1 | H2 \n";
        str += " --- | --- \n";
        str += " C11 | C12 \n";
        str += " C21 | C22 \n";
        str += "\n";
        str += "XYZ";
        ZDocNode root = PS(str);
        assertEquals(1, root.children().size());
        ZDocNode h1 = root.node(0);
        assertEquals("AAAAAAA", h1.text());
        assertEquals(HEADER, h1.type());
        assertEquals(2, h1.children().size());
        ZDocNode table = h1.node(0);
        _C(table, TABLE, 3, "{$cols:['auto','auto']}", "");
        _C(table, THEAD, 2, "{}", "", 0);
        _C(table, TH, 0, "{}", " H1 ", 0, 0);
        _C(table, TH, 0, "{}", " H2 ", 0, 1);
        _C(table, TR, 2, "{}", "", 1);
        _C(table, TD, 0, "{}", " C11 ", 1, 0);
        _C(table, TD, 0, "{}", " C12 ", 1, 1);
        _C(table, TR, 2, "{}", "", 2);
        _C(table, TD, 0, "{}", " C21 ", 2, 0);
        _C(table, TD, 0, "{}", " C22 ", 2, 1);
        _C(h1, PARAGRAPH, 0, "{}", "XYZ", 1);
    }

    /** Supplies the Markdown automaton factory for the base test fixture. */
    @Override
    protected AmFactory genAmFactory() {
        return NewAmFactory("markdown");
    }

    /** Root automaton name used by the base test fixture. */
    @Override
    protected String getRootAmName() {
        return "mdParagraph";
    }

}
package org.openapitools.model; import java.util.Objects; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonCreator; import io.swagger.annotations.ApiModel; import io.swagger.annotations.ApiModelProperty; import org.openapitools.model.PipelinelatestRun; @javax.annotation.Generated(value = "org.openapitools.codegen.languages.JavaInflectorServerCodegen", date = "2022-02-13T02:15:54.605692Z[Etc/UTC]") public class Pipeline { @JsonProperty("_class") private String propertyClass; @JsonProperty("organization") private String organization; @JsonProperty("name") private String name; @JsonProperty("displayName") private String displayName; @JsonProperty("fullName") private String fullName; @JsonProperty("weatherScore") private Integer weatherScore; @JsonProperty("estimatedDurationInMillis") private Integer estimatedDurationInMillis; @JsonProperty("latestRun") private PipelinelatestRun latestRun; /** **/ public Pipeline propertyClass(String propertyClass) { this.propertyClass = propertyClass; return this; } @ApiModelProperty(value = "") @JsonProperty("_class") public String getPropertyClass() { return propertyClass; } public void setPropertyClass(String propertyClass) { this.propertyClass = propertyClass; } /** **/ public Pipeline organization(String organization) { this.organization = organization; return this; } @ApiModelProperty(value = "") @JsonProperty("organization") public String getOrganization() { return organization; } public void setOrganization(String organization) { this.organization = organization; } /** **/ public Pipeline name(String name) { this.name = name; return this; } @ApiModelProperty(value = "") @JsonProperty("name") public String getName() { return name; } public void setName(String name) { this.name = name; } /** **/ public Pipeline displayName(String displayName) { this.displayName = displayName; return this; } @ApiModelProperty(value = "") @JsonProperty("displayName") public String getDisplayName() { 
return displayName; } public void setDisplayName(String displayName) { this.displayName = displayName; } /** **/ public Pipeline fullName(String fullName) { this.fullName = fullName; return this; } @ApiModelProperty(value = "") @JsonProperty("fullName") public String getFullName() { return fullName; } public void setFullName(String fullName) { this.fullName = fullName; } /** **/ public Pipeline weatherScore(Integer weatherScore) { this.weatherScore = weatherScore; return this; } @ApiModelProperty(value = "") @JsonProperty("weatherScore") public Integer getWeatherScore() { return weatherScore; } public void setWeatherScore(Integer weatherScore) { this.weatherScore = weatherScore; } /** **/ public Pipeline estimatedDurationInMillis(Integer estimatedDurationInMillis) { this.estimatedDurationInMillis = estimatedDurationInMillis; return this; } @ApiModelProperty(value = "") @JsonProperty("estimatedDurationInMillis") public Integer getEstimatedDurationInMillis() { return estimatedDurationInMillis; } public void setEstimatedDurationInMillis(Integer estimatedDurationInMillis) { this.estimatedDurationInMillis = estimatedDurationInMillis; } /** **/ public Pipeline latestRun(PipelinelatestRun latestRun) { this.latestRun = latestRun; return this; } @ApiModelProperty(value = "") @JsonProperty("latestRun") public PipelinelatestRun getLatestRun() { return latestRun; } public void setLatestRun(PipelinelatestRun latestRun) { this.latestRun = latestRun; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } Pipeline pipeline = (Pipeline) o; return Objects.equals(propertyClass, pipeline.propertyClass) && Objects.equals(organization, pipeline.organization) && Objects.equals(name, pipeline.name) && Objects.equals(displayName, pipeline.displayName) && Objects.equals(fullName, pipeline.fullName) && Objects.equals(weatherScore, pipeline.weatherScore) && Objects.equals(estimatedDurationInMillis, 
pipeline.estimatedDurationInMillis) && Objects.equals(latestRun, pipeline.latestRun); } @Override public int hashCode() { return Objects.hash(propertyClass, organization, name, displayName, fullName, weatherScore, estimatedDurationInMillis, latestRun); } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class Pipeline {\n"); sb.append(" propertyClass: ").append(toIndentedString(propertyClass)).append("\n"); sb.append(" organization: ").append(toIndentedString(organization)).append("\n"); sb.append(" name: ").append(toIndentedString(name)).append("\n"); sb.append(" displayName: ").append(toIndentedString(displayName)).append("\n"); sb.append(" fullName: ").append(toIndentedString(fullName)).append("\n"); sb.append(" weatherScore: ").append(toIndentedString(weatherScore)).append("\n"); sb.append(" estimatedDurationInMillis: ").append(toIndentedString(estimatedDurationInMillis)).append("\n"); sb.append(" latestRun: ").append(toIndentedString(latestRun)).append("\n"); sb.append("}"); return sb.toString(); } /** * Convert the given object to string with each line indented by 4 spaces * (except the first line). */ private String toIndentedString(Object o) { if (o == null) { return "null"; } return o.toString().replace("\n", "\n "); } }
/* * The MIT License * * Copyright (c) 2010, InfraDNA, Inc. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/
package hudson.model.labels;

import com.thoughtworks.xstream.converters.MarshallingContext;
import com.thoughtworks.xstream.converters.UnmarshallingContext;
import com.thoughtworks.xstream.io.HierarchicalStreamReader;
import com.thoughtworks.xstream.io.HierarchicalStreamWriter;
import hudson.BulkChange;
import hudson.CopyOnWrite;
import hudson.XmlFile;
import hudson.model.Action;
import hudson.model.Descriptor.FormException;
import hudson.model.Failure;
import hudson.util.EditDistance;
import jenkins.model.Jenkins;
import hudson.model.Label;
import hudson.model.Saveable;
import hudson.model.listeners.SaveableListener;
import hudson.util.DescribableList;
import hudson.util.QuotedStringTokenizer;
import hudson.util.VariableResolver;
import hudson.util.XStream2;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import org.kohsuke.stapler.export.Exported;
import org.kohsuke.stapler.interceptor.RequirePOST;

import javax.servlet.ServletException;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.Vector;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.annotation.CheckForNull;
import javax.annotation.Nullable;

/**
 * Atomic single token label, like "foo" or "bar".
 *
 * @author Kohsuke Kawaguchi
 * @since 1.372
 */
public class LabelAtom extends Label implements Saveable {
    // User-configurable properties attached to this label; persisted via save()/load().
    private DescribableList<LabelAtomProperty,LabelAtomPropertyDescriptor> properties =
            new DescribableList<LabelAtomProperty,LabelAtomPropertyDescriptor>(this);

    // Actions contributed by properties; the whole list is rebuilt and swapped
    // atomically (copy-on-write) in updateTransientActions(), never mutated in place.
    @CopyOnWrite
    protected transient volatile List<Action> transientActions = new Vector<Action>();

    public LabelAtom(String name) {
        super(name);
    }

    /**
     * If the label contains 'unsafe' chars, escape them.
     */
    @Override
    public String getExpression() {
        return escape(name);
    }

    /**
     * {@inheritDoc}
     *
     * <p>
     * Note that this method returns a read-only view of {@link Action}s.
     * {@link LabelAtomProperty}s who want to add a project action
     * should do so by implementing {@link LabelAtomProperty#getActions(LabelAtom)}.
     */
    @SuppressWarnings("deprecation")
    @Override
    public List<Action> getActions() {
        // add all the transient actions, too
        List<Action> actions = new Vector<Action>(super.getActions());
        actions.addAll(transientActions);
        // return the read only list to cause a failure on plugins who try to add an action here
        return Collections.unmodifiableList(actions);
    }

    // Rebuilds transientActions from scratch: an inline "Configure" action (visible
    // only to administrators) plus whatever each property contributes.
    protected void updateTransientActions() {
        Vector<Action> ta = new Vector<Action>();

        // add the config link
        if (!getApplicablePropertyDescriptors().isEmpty()) {
            // if there's no property descriptor, there's nothing interesting to configure.
            ta.add(new Action() {
                public String getIconFileName() {
                    // hide the icon (and thus the link) from non-admins
                    if (Jenkins.getInstance().hasPermission(Jenkins.ADMINISTER))
                        return "setting.png";
                    else
                        return null;
                }

                public String getDisplayName() {
                    return "Configure";
                }

                public String getUrlName() {
                    return "configure";
                }
            });
        }

        for (LabelAtomProperty p : properties)
            ta.addAll(p.getActions(this));

        // single volatile write publishes the new list to readers
        transientActions = ta;
    }

    /**
     * Properties associated with this label.
     */
    public DescribableList<LabelAtomProperty, LabelAtomPropertyDescriptor> getProperties() {
        return properties;
    }

    @Exported
    public List<LabelAtomProperty> getPropertiesList() {
        return properties.toList();
    }

    @Override
    public boolean matches(VariableResolver<Boolean> resolver) {
        return resolver.resolve(name);
    }

    @Override
    public <V, P> V accept(LabelVisitor<V, P> visitor, P param) {
        return visitor.onAtom(this,param);
    }

    @Override
    public Set<LabelAtom> listAtoms() {
        return Collections.singleton(this);
    }

    @Override
    public LabelOperatorPrecedence precedence() {
        return LabelOperatorPrecedence.ATOM;
    }

    // Per-label XML file under $JENKINS_HOME/labels/, serialized with the
    // custom XSTREAM instance below.
    /*package*/ XmlFile getConfigFile() {
        return new XmlFile(XSTREAM, new File(Jenkins.getInstance().root, "labels/"+name+".xml"));
    }

    // NOTE(review): declared to throw IOException but actually catches it and only
    // logs a warning, so callers never see a save failure — confirm this is intended.
    public void save() throws IOException {
        if(BulkChange.contains(this))   return;
        try {
            getConfigFile().write(this);
            SaveableListener.fireOnChange(this, getConfigFile());
        } catch (IOException e) {
            LOGGER.log(Level.WARNING, "Failed to save "+getConfigFile(),e);
        }
    }

    // Loads persisted state if the config file exists (best-effort: failures are
    // logged and ignored), then re-wires ownership and rebuilds transient actions.
    public void load() {
        XmlFile file = getConfigFile();
        if(file.exists()) {
            try {
                file.unmarshal(this);
            } catch (IOException e) {
                LOGGER.log(Level.WARNING, "Failed to load "+file, e);
            }
        }
        properties.setOwner(this);
        updateTransientActions();
    }

    /**
     * Returns all the {@link LabelAtomPropertyDescriptor}s that can be potentially configured
     * on this label.
     */
    public List<LabelAtomPropertyDescriptor> getApplicablePropertyDescriptors() {
        return LabelAtomProperty.all();
    }

    /**
     * Accepts the update to the node configuration.
     */
    @RequirePOST
    public void doConfigSubmit( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException, FormException {
        final Jenkins app = Jenkins.getInstance();

        app.checkPermission(Jenkins.ADMINISTER);

        properties.rebuild(req, req.getSubmittedForm(), getApplicablePropertyDescriptors());
        updateTransientActions();
        save();

        // take the user back to the label top page.
        rsp.sendRedirect2(".");
    }

    /**
     * Obtains an atom by its {@linkplain #getName() name}.
     * @see Jenkins#getLabelAtom
     */
    public static @Nullable LabelAtom get(@CheckForNull String l) {
        return Jenkins.getInstance().getLabelAtom(l);
    }

    // Suggests the existing atom whose name is closest (by edit distance) to the
    // requested one — used for "did you mean" style diagnostics.
    public static LabelAtom findNearest(String name) {
        List<String> candidates = new ArrayList<String>();
        for (LabelAtom a : Jenkins.getInstance().getLabelAtoms()) {
            candidates.add(a.getName());
        }
        return get(EditDistance.findNearest(name, candidates));
    }

    // True when the name cannot appear unquoted in a label expression: either it
    // fails Jenkins' general name check or it contains whitespace/parentheses.
    public static boolean needsEscape(String name) {
        try {
            Jenkins.checkGoodName(name);
            // additional restricted chars
            for( int i=0; i<name.length(); i++ ) {
                char ch = name.charAt(i);
                if(" ()\t\n".indexOf(ch)!=-1)
                    return true;
            }
            return false;
        } catch (Failure failure) {
            return true;
        }
    }

    public static String escape(String name) {
        if (needsEscape(name))
            return QuotedStringTokenizer.quote(name);
        return name;
    }

    private static final Logger LOGGER = Logger.getLogger(LabelAtom.class.getName());

    private static final XStream2 XSTREAM = new XStream2();

    static {
        // Don't want Label.ConverterImpl to be used:
        XSTREAM.registerConverter(new LabelAtomConverter(), 100);
    }

    // class name is not ConverterImpl, to avoid getting picked up by AssociatedConverterImpl
    //
    // Serializes the OUTERMOST LabelAtom as a full object (all fields), but any
    // LabelAtom nested inside it as a plain name string via Label.ConverterImpl.
    // The IN_NESTED flag in the marshalling context tracks which case we are in.
    private static class LabelAtomConverter extends XStream2.PassthruConverter<LabelAtom> {
        private Label.ConverterImpl leafLabelConverter = new Label.ConverterImpl();

        private LabelAtomConverter() {
            super(XSTREAM);
        }

        public boolean canConvert(Class type) {
            return LabelAtom.class.isAssignableFrom(type);
        }

        public void marshal(Object source, HierarchicalStreamWriter writer, MarshallingContext context) {
            if (context.get(IN_NESTED)==null) {
                context.put(IN_NESTED,true);
                try {
                    super.marshal(source,writer,context);
                } finally {
                    context.put(IN_NESTED,false);
                }
            } else
                leafLabelConverter.marshal(source,writer,context);
        }

        public Object unmarshal(HierarchicalStreamReader reader, UnmarshallingContext context) {
            if (context.get(IN_NESTED)==null) {
                context.put(IN_NESTED,true);
                try {
                    return super.unmarshal(reader,context);
                } finally {
                    context.put(IN_NESTED,false);
                }
            } else
                return leafLabelConverter.unmarshal(reader,context);
        }

        @Override
        protected void callback(LabelAtom obj, UnmarshallingContext context) {
            // noop
        }

        private static final Object IN_NESTED = "VisitingInnerLabelAtom";
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.accumulo.core.file;

import static org.apache.accumulo.core.file.blockfile.impl.CacheProvider.NULL_PROVIDER;

import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Objects;
import java.util.Set;

import org.apache.accumulo.core.Constants;
import org.apache.accumulo.core.conf.AccumuloConfiguration;
import org.apache.accumulo.core.conf.Property;
import org.apache.accumulo.core.data.ByteSequence;
import org.apache.accumulo.core.data.Range;
import org.apache.accumulo.core.file.blockfile.impl.CacheProvider;
import org.apache.accumulo.core.file.rfile.RFile;
import org.apache.accumulo.core.spi.crypto.CryptoService;
import org.apache.accumulo.core.util.ratelimit.RateLimiter;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.mapred.FileOutputCommitter;

import com.google.common.cache.Cache;

/**
 * Entry point for reading and writing Accumulo data files. Concrete subclasses supply
 * the actual open/write implementations; callers configure an operation through the
 * fluent builder classes below, all of which funnel their settings into a single
 * immutable {@link FileOptions} value passed to the abstract hooks.
 */
public abstract class FileOperations {
  private static final String HADOOP_JOBHISTORY_LOCATION = "_logs"; // dir related to
                                                                    // hadoop.job.history.user.location

  // File extensions recognized as Accumulo data files.
  private static final HashSet<String> validExtensions =
      new HashSet<>(Arrays.asList(Constants.MAPFILE_EXTENSION, RFile.EXTENSION));

  // Sometimes we want to know what files accumulo bulk processing creates
  private static final HashSet<String> bulkWorkingFiles =
      new HashSet<>(Arrays.asList(Constants.BULK_LOAD_MAPPING, Constants.BULK_RENAME_FILE,
          FileOutputCommitter.SUCCEEDED_FILE_NAME, HADOOP_JOBHISTORY_LOCATION));

  public static Set<String> getValidExtensions() {
    return validExtensions;
  }

  public static Set<String> getBulkWorkingFiles() {
    return bulkWorkingFiles;
  }

  // Extension for newly created files, taken from the table's configured file type.
  public static String getNewFileExtension(AccumuloConfiguration acuconf) {
    return acuconf.get(Property.TABLE_FILE_TYPE);
  }

  public static FileOperations getInstance() {
    return new DispatchingFileFactory();
  }

  //
  // Abstract methods (to be implemented by subclasses)
  //

  protected abstract long getFileSize(FileOptions options) throws IOException;

  protected abstract FileSKVWriter openWriter(FileOptions options) throws IOException;

  protected abstract FileSKVIterator openIndex(FileOptions options) throws IOException;

  protected abstract FileSKVIterator openScanReader(FileOptions options) throws IOException;

  protected abstract FileSKVIterator openReader(FileOptions options) throws IOException;

  //
  // File operations
  //

  /**
   * Construct an operation object allowing one to create a writer for a file. <br>
   * Syntax:
   *
   * <pre>
   * FileSKVWriter writer = fileOperations.newWriterBuilder()
   *     .forFile(...)
   *     .withTableConfiguration(...)
   *     .withRateLimiter(...) // optional
   *     .withCompression(...) // optional
   *     .build();
   * </pre>
   */
  public WriterBuilder newWriterBuilder() {
    return new WriterBuilder();
  }

  /**
   * Construct an operation object allowing one to create an index iterator for a file. <br>
   * Syntax:
   *
   * <pre>
   * FileSKVIterator iterator = fileOperations.newIndexReaderBuilder()
   *     .forFile(...)
   *     .withTableConfiguration(...)
   *     .withRateLimiter(...) // optional
   *     .withBlockCache(...) // optional
   *     .build();
   * </pre>
   */
  public IndexReaderBuilder newIndexReaderBuilder() {
    return new IndexReaderBuilder();
  }

  /**
   * Construct an operation object allowing one to create a "scan" reader for a file. Scan readers
   * do not have any optimizations for seeking beyond their initial position. This is useful for
   * file operations that only need to scan data within a range and do not need to seek. Therefore
   * file metadata such as indexes does not need to be kept in memory while the file is scanned.
   * Also seek optimizations like bloom filters do not need to be loaded. <br>
   * Syntax:
   *
   * <pre>
   * FileSKVIterator scanner = fileOperations.newScanReaderBuilder()
   *     .forFile(...)
   *     .withTableConfiguration(...)
   *     .overRange(...)
   *     .withRateLimiter(...) // optional
   *     .withBlockCache(...) // optional
   *     .build();
   * </pre>
   */
  public ScanReaderBuilder newScanReaderBuilder() {
    return new ScanReaderBuilder();
  }

  /**
   * Construct an operation object allowing one to create a reader for a file. A reader constructed
   * in this manner fully supports seeking, and also enables any optimizations related to seeking
   * (e.g. Bloom filters). <br>
   * Syntax:
   *
   * <pre>
   * FileSKVIterator scanner = fileOperations.newReaderBuilder()
   *     .forFile(...)
   *     .withTableConfiguration(...)
   *     .withRateLimiter(...) // optional
   *     .withBlockCache(...) // optional
   *     .seekToBeginning(...) // optional
   *     .build();
   * </pre>
   */
  public ReaderBuilder newReaderBuilder() {
    return new ReaderBuilder();
  }

  /**
   * Immutable bag of every setting any operation might need. Fields are grouped by
   * which kind of operation actually reads them; unused fields are simply null/false
   * for a given operation.
   *
   * NOTE(review): the 15-argument positional constructor is easy to mis-call — the
   * FileHelper.to*Options() methods below are the only intended call sites.
   */
  public class FileOptions {
    // objects used by all
    public final AccumuloConfiguration tableConfiguration;
    public final String filename;
    public final FileSystem fs;
    public final Configuration fsConf;
    public final RateLimiter rateLimiter;
    // writer only objects
    public final String compression;
    public final FSDataOutputStream outputStream;
    public final boolean enableAccumuloStart;
    // reader only objects
    public final CacheProvider cacheProvider;
    public final Cache<String,Long> fileLenCache;
    public final boolean seekToBeginning;
    public final CryptoService cryptoService;
    // scan reader only objects
    public final Range range;
    public final Set<ByteSequence> columnFamilies;
    public final boolean inclusive;

    public FileOptions(AccumuloConfiguration tableConfiguration, String filename, FileSystem fs,
        Configuration fsConf, RateLimiter rateLimiter, String compression,
        FSDataOutputStream outputStream, boolean enableAccumuloStart, CacheProvider cacheProvider,
        Cache<String,Long> fileLenCache, boolean seekToBeginning, CryptoService cryptoService,
        Range range, Set<ByteSequence> columnFamilies, boolean inclusive) {
      this.tableConfiguration = tableConfiguration;
      this.filename = filename;
      this.fs = fs;
      this.fsConf = fsConf;
      this.rateLimiter = rateLimiter;
      this.compression = compression;
      this.outputStream = outputStream;
      this.enableAccumuloStart = enableAccumuloStart;
      this.cacheProvider = cacheProvider;
      this.fileLenCache = fileLenCache;
      this.seekToBeginning = seekToBeginning;
      // cryptoService is the only mandatory reference — everything else may be null
      this.cryptoService = Objects.requireNonNull(cryptoService);
      this.range = range;
      this.columnFamilies = columnFamilies;
      this.inclusive = inclusive;
    }

    public AccumuloConfiguration getTableConfiguration() {
      return tableConfiguration;
    }

    public String getFilename() {
      return filename;
    }

    public FileSystem getFileSystem() {
      return fs;
    }

    public Configuration getConfiguration() {
      return fsConf;
    }

    public RateLimiter getRateLimiter() {
      return rateLimiter;
    }

    public String getCompression() {
      return compression;
    }

    public FSDataOutputStream getOutputStream() {
      return outputStream;
    }

    public boolean isAccumuloStartEnabled() {
      return enableAccumuloStart;
    }

    public CacheProvider getCacheProvider() {
      return cacheProvider;
    }

    public Cache<String,Long> getFileLenCache() {
      return fileLenCache;
    }

    public boolean isSeekToBeginning() {
      return seekToBeginning;
    }

    public CryptoService getCryptoService() {
      return cryptoService;
    }

    public Range getRange() {
      return range;
    }

    public Set<ByteSequence> getColumnFamilies() {
      return columnFamilies;
    }

    public boolean isRangeInclusive() {
      return inclusive;
    }
  }

  /**
   * Helper class extended by both writers and readers.
   */
  public class FileHelper {
    private AccumuloConfiguration tableConfiguration;
    private String filename;
    private FileSystem fs;
    private Configuration fsConf;
    private RateLimiter rateLimiter;
    private CryptoService cryptoService;

    protected FileHelper fs(FileSystem fs) {
      this.fs = Objects.requireNonNull(fs);
      return this;
    }

    protected FileHelper fsConf(Configuration fsConf) {
      this.fsConf = Objects.requireNonNull(fsConf);
      return this;
    }

    protected FileHelper filename(String filename) {
      this.filename = Objects.requireNonNull(filename);
      return this;
    }

    protected FileHelper tableConfiguration(AccumuloConfiguration tableConfiguration) {
      this.tableConfiguration = Objects.requireNonNull(tableConfiguration);
      return this;
    }

    // rateLimiter is the one optional setting — no null check by design
    protected FileHelper rateLimiter(RateLimiter rateLimiter) {
      this.rateLimiter = rateLimiter;
      return this;
    }

    protected FileHelper cryptoService(CryptoService cs) {
      this.cryptoService = Objects.requireNonNull(cs);
      return this;
    }

    // The to*Options() methods below are the single funnel into the positional
    // FileOptions constructor; note the trailing `true` is `inclusive`, paired
    // with a null range, for the non-scan variants.
    protected FileOptions toWriterBuilderOptions(String compression,
        FSDataOutputStream outputStream, boolean startEnabled) {
      return new FileOptions(tableConfiguration, filename, fs, fsConf, rateLimiter, compression,
          outputStream, startEnabled, NULL_PROVIDER, null, false, cryptoService, null, null, true);
    }

    protected FileOptions toReaderBuilderOptions(CacheProvider cacheProvider,
        Cache<String,Long> fileLenCache, boolean seekToBeginning) {
      return new FileOptions(tableConfiguration, filename, fs, fsConf, rateLimiter, null, null,
          false, cacheProvider == null ? NULL_PROVIDER : cacheProvider, fileLenCache,
          seekToBeginning, cryptoService, null, null, true);
    }

    protected FileOptions toIndexReaderBuilderOptions(Cache<String,Long> fileLenCache) {
      return new FileOptions(tableConfiguration, filename, fs, fsConf, rateLimiter, null, null,
          false, NULL_PROVIDER, fileLenCache, false, cryptoService, null, null, true);
    }

    protected FileOptions toScanReaderBuilderOptions(Range range, Set<ByteSequence> columnFamilies,
        boolean inclusive) {
      return new FileOptions(tableConfiguration, filename, fs, fsConf, rateLimiter, null, null,
          false, NULL_PROVIDER, null, false, cryptoService, range, columnFamilies, inclusive);
    }

    protected AccumuloConfiguration getTableConfiguration() {
      return tableConfiguration;
    }
  }

  /**
   * Operation object for constructing a writer.
   */
  public class WriterBuilder extends FileHelper implements WriterTableConfiguration {
    private String compression;
    private FSDataOutputStream outputStream;
    private boolean enableAccumuloStart = true;

    // Write to an already-open stream; the synthetic "foo"+extension filename is
    // presumably only used for extension-based type dispatch — confirm in subclasses.
    public WriterTableConfiguration forOutputStream(String extension,
        FSDataOutputStream outputStream, Configuration fsConf, CryptoService cs) {
      this.outputStream = outputStream;
      filename("foo" + extension).fsConf(fsConf).cryptoService(cs);
      return this;
    }

    public WriterTableConfiguration forFile(String filename, FileSystem fs, Configuration fsConf,
        CryptoService cs) {
      filename(filename).fs(fs).fsConf(fsConf).cryptoService(cs);
      return this;
    }

    @Override
    public WriterBuilder withTableConfiguration(AccumuloConfiguration tableConfiguration) {
      tableConfiguration(tableConfiguration);
      return this;
    }

    public WriterBuilder withStartDisabled() {
      this.enableAccumuloStart = false;
      return this;
    }

    public WriterBuilder withCompression(String compression) {
      this.compression = compression;
      return this;
    }

    public WriterBuilder withRateLimiter(RateLimiter rateLimiter) {
      rateLimiter(rateLimiter);
      return this;
    }

    public FileSKVWriter build() throws IOException {
      return openWriter(toWriterBuilderOptions(compression, outputStream, enableAccumuloStart));
    }
  }

  // Narrowing interface: after forFile()/forOutputStream(), the caller is forced to
  // supply a table configuration before any other options become visible.
  public interface WriterTableConfiguration {
    public WriterBuilder withTableConfiguration(AccumuloConfiguration tableConfiguration);
  }

  /**
   * Options common to all {@code FileOperations} which perform reads.
   */
  public class ReaderBuilder extends FileHelper implements ReaderTableConfiguration {
    private CacheProvider cacheProvider;
    private Cache<String,Long> fileLenCache;
    private boolean seekToBeginning = false;

    public ReaderTableConfiguration forFile(String filename, FileSystem fs, Configuration fsConf,
        CryptoService cs) {
      filename(filename).fs(fs).fsConf(fsConf).cryptoService(cs);
      return this;
    }

    @Override
    public ReaderBuilder withTableConfiguration(AccumuloConfiguration tableConfiguration) {
      tableConfiguration(tableConfiguration);
      return this;
    }

    /**
     * (Optional) Set the block cache pair to be used to optimize reads within the constructed
     * reader.
     */
    public ReaderBuilder withCacheProvider(CacheProvider cacheProvider) {
      this.cacheProvider = cacheProvider;
      return this;
    }

    public ReaderBuilder withFileLenCache(Cache<String,Long> fileLenCache) {
      this.fileLenCache = fileLenCache;
      return this;
    }

    public ReaderBuilder withRateLimiter(RateLimiter rateLimiter) {
      rateLimiter(rateLimiter);
      return this;
    }

    /**
     * Seek the constructed iterator to the beginning of its domain before returning. Equivalent to
     * {@code seekToBeginning(true)}.
     */
    public ReaderBuilder seekToBeginning() {
      seekToBeginning(true);
      return this;
    }

    /** If true, seek the constructed iterator to the beginning of its domain before returning. */
    public ReaderBuilder seekToBeginning(boolean seekToBeginning) {
      this.seekToBeginning = seekToBeginning;
      return this;
    }

    /** Execute the operation, constructing the specified file reader. */
    public FileSKVIterator build() throws IOException {
      return openReader(toReaderBuilderOptions(cacheProvider, fileLenCache, seekToBeginning));
    }
  }

  public interface ReaderTableConfiguration {
    ReaderBuilder withTableConfiguration(AccumuloConfiguration tableConfiguration);
  }

  /**
   * Operation object for opening an index.
   */
  public class IndexReaderBuilder extends FileHelper implements IndexReaderTableConfiguration {
    private Cache<String,Long> fileLenCache = null;

    public IndexReaderTableConfiguration forFile(String filename, FileSystem fs,
        Configuration fsConf, CryptoService cs) {
      filename(filename).fs(fs).fsConf(fsConf).cryptoService(cs);
      return this;
    }

    @Override
    public IndexReaderBuilder withTableConfiguration(AccumuloConfiguration tableConfiguration) {
      tableConfiguration(tableConfiguration);
      return this;
    }

    public IndexReaderBuilder withFileLenCache(Cache<String,Long> fileLenCache) {
      this.fileLenCache = fileLenCache;
      return this;
    }

    public FileSKVIterator build() throws IOException {
      return openIndex(toIndexReaderBuilderOptions(fileLenCache));
    }
  }

  public interface IndexReaderTableConfiguration {
    IndexReaderBuilder withTableConfiguration(AccumuloConfiguration tableConfiguration);
  }

  /** Operation object for opening a scan reader. */
  public class ScanReaderBuilder extends FileHelper implements ScanReaderTableConfiguration {
    private Range range;
    private Set<ByteSequence> columnFamilies;
    private boolean inclusive;

    public ScanReaderTableConfiguration forFile(String filename, FileSystem fs,
        Configuration fsConf, CryptoService cs) {
      filename(filename).fs(fs).fsConf(fsConf).cryptoService(cs);
      return this;
    }

    @Override
    public ScanReaderBuilder withTableConfiguration(AccumuloConfiguration tableConfiguration) {
      tableConfiguration(tableConfiguration);
      return this;
    }

    /** Set the range over which the constructed iterator will search. */
    public ScanReaderBuilder overRange(Range range, Set<ByteSequence> columnFamilies,
        boolean inclusive) {
      Objects.requireNonNull(range);
      Objects.requireNonNull(columnFamilies);
      this.range = range;
      this.columnFamilies = columnFamilies;
      this.inclusive = inclusive;
      return this;
    }

    /** Execute the operation, constructing a scan iterator. */
    public FileSKVIterator build() throws IOException {
      return openScanReader(toScanReaderBuilderOptions(range, columnFamilies, inclusive));
    }
  }

  public interface ScanReaderTableConfiguration {
    ScanReaderBuilder withTableConfiguration(AccumuloConfiguration tableConfiguration);
  }
}
package jk_5.nailed.api.text.format; import com.google.common.base.Objects; import com.google.common.base.Optional; import jk_5.nailed.api.text.Text; import jk_5.nailed.api.util.text.OptBool; import javax.annotation.Nullable; import static com.google.common.base.Preconditions.checkNotNull; /** * Represents an immutable text style of a {@link Text}. It is a utility that is * not normally present in Minecraft. It can be either empty, a {@link Base} * with an additional legacy formatting code or a composite. * * <p>Combined styles can be created using {@link TextStyles#of(TextStyle...)} * or using one of the {@link #and(TextStyle...)}, {@link #andNot(TextStyle...)} * or {@link #negate()} method.</p> * * <p>Each individual style within a TextStyle, e.g. bold, italic is not just a * boolean, but an {@code Optional&lt;Boolean&gt;} since it can be unapplied * (or, in Optional terms, "absent"). These styles will hereafter be referred to * as properties.<br> See the {@link OptBool} utility class for working with * properties.</p> * * <p>Implementation note: Absent styles should not appear in the final chat * component JSON. Properties that are set to true or false should appear, even * if they override inherited properties.</p> * * @see TextStyles */ public class TextStyle { /** * Whether text where this style is applied is bolded. */ protected final Optional<Boolean> bold; /** * Whether text where this style is applied is italicized. */ protected final Optional<Boolean> italic; /** * Whether text where this style is applied is underlined. */ protected final Optional<Boolean> underline; /** * Whether text where this style is applied has a strikethrough. */ protected final Optional<Boolean> strikethrough; /** * Whether text where this style is applied is obfuscated. */ protected final Optional<Boolean> obfuscated; /** * Constructs a new {@link TextStyle}. 
* * @param bold Whether text where this style is applied is bolded * @param italic Whether text where this style is applied is italicized * @param underline Whether text where this style is applied is underlined * @param obfuscated Whether text where this style is applied is obfuscated * @param strikethrough Whether text where this style is applied has a * strikethrough */ public TextStyle(@Nullable Boolean bold, @Nullable Boolean italic, @Nullable Boolean underline, @Nullable Boolean strikethrough, @Nullable Boolean obfuscated) { this( OptBool.of(bold), OptBool.of(italic), OptBool.of(underline), OptBool.of(strikethrough), OptBool.of(obfuscated) ); } /** * Constructs a new {@link TextStyle}. * * @param bold Whether text where this style is applied is bolded * @param italic Whether text where this style is applied is italicized * @param underline Whether text where this style is applied is underlined * @param obfuscated Whether text where this style is applied is obfuscated * @param strikethrough Whether text where this style is applied has a * strikethrough */ private TextStyle(Optional<Boolean> bold, Optional<Boolean> italic, Optional<Boolean> underline, Optional<Boolean> strikethrough, Optional<Boolean> obfuscated) { this.bold = bold; this.italic = italic; this.underline = underline; this.obfuscated = obfuscated; this.strikethrough = strikethrough; } /** * Constructs an empty {@link TextStyle}. */ TextStyle() { this( OptBool.ABSENT, OptBool.ABSENT, OptBool.ABSENT, OptBool.ABSENT, OptBool.ABSENT ); } /** * Returns whether this {@link TextStyle} is a composite of multiple text * styles. * * @return {@code true} if this text style is a composite */ public boolean isComposite() { // Return true by default as the TextStyle class is composite by default return true; } /** * Returns whether this {@link TextStyle} has no set properties. 
* * @return {@code true} if this style is empty */ public boolean isEmpty() { return !(this.bold.isPresent() || this.italic.isPresent() || this.underline.isPresent() || this.strikethrough.isPresent() || this.obfuscated.isPresent()); } /** * Returns a new {@link TextStyle} with the bold property changed. * * @param bold Whether text where the new style is applied is bolded * @return The new text style */ public TextStyle bold(@Nullable Boolean bold) { return new TextStyle( OptBool.of(bold), this.italic, this.underline, this.strikethrough, this.obfuscated ); } /** * Returns a new {@link TextStyle} with the italic property changed. * * @param italic Whether text where the new style is applied is italicized * @return The new text style */ public TextStyle italic(@Nullable Boolean italic) { return new TextStyle( this.bold, OptBool.of(italic), this.underline, this.strikethrough, this.obfuscated ); } /** * Returns a new {@link TextStyle} with the underline property changed. * * @param underline Whether text where the new style is applied is underline * @return The new text style */ public TextStyle underline(@Nullable Boolean underline) { return new TextStyle( this.bold, this.italic, OptBool.of(underline), this.strikethrough, this.obfuscated ); } /** * Returns a new {@link TextStyle} with the strikethrough property changed. * * @param strikethrough Whether text where the new style is applied has a * strikethrough * @return The new text style */ public TextStyle strikethrough(@Nullable Boolean strikethrough) { return new TextStyle( this.bold, this.italic, this.underline, OptBool.of(strikethrough), this.obfuscated ); } /** * Returns a new {@link TextStyle} with the obfuscated property changed. 
* * @param obfuscated Whether text where the new style is applied is * obfuscated * @return The new text style */ public TextStyle obfuscated(@Nullable Boolean obfuscated) { return new TextStyle( this.bold, this.italic, this.underline, this.strikethrough, OptBool.of(obfuscated) ); } /** * Checks for whether text where this style is applied is bolded. * * @return The value for the bold property, or {@link Optional#absent()} */ public Optional<Boolean> isBold() { return this.bold; } /** * Checks for whether text where this style is applied is italicized. * * @return The value for the italic property, or {@link Optional#absent()} */ public Optional<Boolean> isItalic() { return this.italic; } /** * Checks for whether text where this style is applied has an underline. * * @return The value for the underline property, or * {@link Optional#absent()} */ public Optional<Boolean> hasUnderline() { return this.underline; } /** * Checks for whether text where this style is applied has a strikethrough. * * @return The value for the strikethrough property, or * {@link Optional#absent()} */ public Optional<Boolean> hasStrikethrough() { return this.strikethrough; } /** * Checks for whether text where this style is obfuscated. * * @return The value for the obfuscated property, or * {@link Optional#absent()} */ public Optional<Boolean> isObfuscated() { return this.obfuscated; } /** * Returns whether the given {@link TextStyle} is contained in this * {@link TextStyle}. 
* * <p>For example, a {@link TextStyle} with {@code bold: true, italic: * true}} would return {@code true} for <code>contains( * {@link TextStyles#BOLD})</code> and <code>contains( * {@link TextStyles#ITALIC}).</code></p> * * <p>If the specified {@link TextStyle} is a composite of multiple styles * it returns {@code true} if this style has at least all of the properties * set in the specified style.</p> * * @param styles The text styles to check * @return {@code true} if the given text styles are contained in this text * style */ public boolean contains(TextStyle... styles) { for (TextStyle style : checkNotNull(styles, "styles")) { checkNotNull(style, "style"); if (!propContains(this.bold, style.bold) || !propContains(this.italic, style.italic) || !propContains(this.underline, style.underline) || !propContains(this.strikethrough, style.strikethrough) || !propContains(this.obfuscated, style.obfuscated)) { return false; } } return true; } /** * Negates this {@link TextStyle}. This is useful for undoing text styles * that are inherited from parent messages. * * @return The inverse of this text style */ public TextStyle negate() { // Do a negation of each property return new TextStyle( propNegate(this.obfuscated), propNegate(this.bold), propNegate(this.strikethrough), propNegate(this.underline), propNegate(this.italic) ); } /** * Composes this {@link TextStyle} with the specified text styles. * * @param styles The text styles to compose this one with * @return A new text style composed out of the given text styles */ public TextStyle and(TextStyle... styles) { return compose(styles, false); } /** * Composes this {@link TextStyle} with the passed in TextStyles, but * negates them before composition. This is the same as negating all the * passed in {@link TextStyle} and then using the {@link #and(TextStyle...)} * method. 
* * @param styles The text styles to compose this one with * @return A new text style composed out of the given text styles */ public TextStyle andNot(TextStyle... styles) { return compose(styles, true); } /** * Utility method to compose the current TextStyle with the given styles, * with optional negation. * * @param styles The styles to compose with * @param negate Whether or not to negate the passed-in styles * @return The composed style */ private TextStyle compose(TextStyle[] styles, boolean negate) { checkNotNull(styles, "styles"); if (styles.length == 0) { return this; } else if (this.isEmpty() && styles.length == 1) { TextStyle style = checkNotNull(styles[0], "style"); return negate ? style.negate() : style; } Optional<Boolean> boldAcc = this.bold; Optional<Boolean> italicAcc = this.italic; Optional<Boolean> underlineAcc = this.underline; Optional<Boolean> strikethroughAcc = this.strikethrough; Optional<Boolean> obfuscatedAcc = this.obfuscated; if (negate) { for (TextStyle style : styles) { checkNotNull(style, "style"); boldAcc = propCompose(boldAcc, propNegate(style.bold)); italicAcc = propCompose(italicAcc, propNegate(style.italic)); underlineAcc = propCompose(underlineAcc, propNegate(style.underline)); strikethroughAcc = propCompose(strikethroughAcc, propNegate(style.strikethrough)); obfuscatedAcc = propCompose(obfuscatedAcc, propNegate(style.obfuscated)); } } else { for (TextStyle style : styles) { checkNotNull(style, "style"); boldAcc = propCompose(boldAcc, style.bold); italicAcc = propCompose(italicAcc, style.italic); underlineAcc = propCompose(underlineAcc, style.underline); strikethroughAcc = propCompose(strikethroughAcc, style.strikethrough); obfuscatedAcc = propCompose(obfuscatedAcc, style.obfuscated); } } return new TextStyle( boldAcc, italicAcc, underlineAcc, strikethroughAcc, obfuscatedAcc ); } @Override public boolean equals(Object o) { if (this == o) { return true; } if (!(o instanceof TextStyle)) { return false; } TextStyle that = 
(TextStyle) o; return this.bold.equals(that.bold) && this.italic.equals(that.italic) && this.underline.equals(that.underline) && this.obfuscated.equals(that.obfuscated) && this.strikethrough.equals(that.strikethrough); } @Override public int hashCode() { return Objects.hashCode(this.bold, this.italic, this.underline, this.obfuscated, this.strikethrough); } @Override public String toString() { return Objects.toStringHelper(this) .add("bold", this.bold) .add("italic", this.italic) .add("underline", this.underline) .add("strikethrough", this.strikethrough) .add("obfuscated", this.obfuscated) .toString(); } /** * Utility method to check if the given "super-property" contains the given * "sub-property". * * @param superprop The super property * @param subprop The sub property * @return True if the property is contained, otherwise false */ private static boolean propContains(Optional<Boolean> superprop, Optional<Boolean> subprop) { return !subprop.isPresent() || superprop.equals(subprop); } /** * Utility method to negate a property if it is not null. * * @param prop The property to negate * @return The negated property, or {@link Optional#absent()} */ private static Optional<Boolean> propNegate(Optional<Boolean> prop) { if (prop.isPresent()) { return OptBool.of(!prop.get()); } else { return OptBool.ABSENT; } } /** * Utility method to perform a compose operation between two properties. * * @param prop1 The first property * @param prop2 The second property * @return The composition of the two properties */ private static Optional<Boolean> propCompose(Optional<Boolean> prop1, Optional<Boolean> prop2) { if (!prop1.isPresent()) { return prop2; } else if (!prop2.isPresent()) { return prop1; } else if (!prop1.equals(prop2)) { return OptBool.ABSENT; } else { return prop1; } } /** * Represents a {@link TextStyle} that is not a composite, for example * {@link TextStyles#BOLD}. It is a base text style in Minecraft with a name * and a legacy formatting code. 
* * @see TextStyle * @see Base */ public abstract static class Base extends TextStyle implements BaseFormatting { /** * Constructs a new {@link Base}. * * @param bold Whether text where this style is applied is bolded * @param italic Whether text where this style is applied is italicized * @param underline Whether text where this style is applied is * underlined * @param obfuscated Whether text where this style is applied is * obfuscated * @param strikethrough Whether text where this style is applied has a * strikethrough */ protected Base(@Nullable Boolean bold, @Nullable Boolean italic, @Nullable Boolean underline, @Nullable Boolean strikethrough, @Nullable Boolean obfuscated) { super( bold, italic, underline, strikethrough, obfuscated ); } @Override public boolean isComposite() { // By definition, base TextStyles are not composites return false; } } }
package org.springframework.data.neo4j.transaction;

import static org.junit.Assert.*;
import static org.springframework.transaction.event.TransactionPhase.*;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.util.*;

import org.junit.After;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.event.EventListener;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.transaction.event.TransactionalEventListener;
import org.springframework.transaction.event.TransactionalEventListenerFactory;
import org.springframework.transaction.support.*;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;

/**
 * These tests are copied from
 * <a href="https://github.com/spring-projects/spring-framework/blob/master/spring-tx/src/test/java/org/springframework/transaction/event/TransactionalEventListenerTests.java">the spring framework transactional event listener tests</a>
 * and modified to work with a version of the Neo4jTransactionManager.
 *
 * @author Mark Angrish
 * @see DATAGRAPH-883
 */
public class TransactionalEventListenerTests {

	private ConfigurableApplicationContext context;

	private EventCollector eventCollector;

	private TransactionTemplate transactionTemplate = new TransactionTemplate(new CallCountingTransactionManager());

	@Rule public final ExpectedException thrown = ExpectedException.none();

	/**
	 * Tear down the context created by {@link #load(Class[])}. This must run
	 * AFTER each test (the upstream Spring test uses {@code @After} as well);
	 * with {@code @Before} the context created by the final test would never
	 * be closed, leaking the application context.
	 */
	@After
	public void closeContext() {
		if (this.context != null) {
			this.context.close();
		}
	}

	@Test
	public void immediately() {
		load(ImmediateTestListener.class);
		this.transactionTemplate.execute(status -> {
			// An IMMEDIATELY listener fires synchronously inside the transaction.
			getContext().publishEvent("test");
			getEventCollector().assertEvents(EventCollector.IMMEDIATELY, "test");
			getEventCollector().assertTotalEventsCount(1);
			return null;
		});
		getEventCollector().assertEvents(EventCollector.IMMEDIATELY, "test");
		getEventCollector().assertTotalEventsCount(1);
	}

	@Test
	public void immediatelyImpactsCurrentTransaction() {
		load(ImmediateTestListener.class, BeforeCommitTestListener.class);
		try {
			this.transactionTemplate.execute(status -> {
				getContext().publishEvent("FAIL");
				fail("Should have thrown an exception at this point");
				return null;
			});
		} catch (IllegalStateException e) {
			assertTrue(e.getMessage().contains("Test exception"));
			assertTrue(e.getMessage().contains(EventCollector.IMMEDIATELY));
		}
		getEventCollector().assertEvents(EventCollector.IMMEDIATELY, "FAIL");
		getEventCollector().assertTotalEventsCount(1);
	}

	@Test
	public void afterCompletionCommit() {
		load(AfterCompletionTestListener.class);
		this.transactionTemplate.execute(status -> {
			getContext().publishEvent("test");
			getEventCollector().assertNoEventReceived();
			return null;
		});
		getEventCollector().assertEvents(EventCollector.AFTER_COMPLETION, "test");
		getEventCollector().assertTotalEventsCount(1); // After rollback not invoked
	}

	@Test
	public void afterCompletionRollback() {
		load(AfterCompletionTestListener.class);
		this.transactionTemplate.execute(status -> {
			getContext().publishEvent("test");
			getEventCollector().assertNoEventReceived();
			status.setRollbackOnly();
			return null;
		});
		getEventCollector().assertEvents(EventCollector.AFTER_COMPLETION, "test");
		getEventCollector().assertTotalEventsCount(1); // After rollback not invoked
	}

	@Test
	public void afterCommit() {
		load(AfterCompletionExplicitTestListener.class);
		this.transactionTemplate.execute(status -> {
			getContext().publishEvent("test");
			getEventCollector().assertNoEventReceived();
			return null;
		});
		getEventCollector().assertEvents(EventCollector.AFTER_COMMIT, "test");
		getEventCollector().assertTotalEventsCount(1); // After rollback not invoked
	}

	@Test
	public void afterCommitWithTransactionalComponentListenerProxiedViaDynamicProxy() {
		load(AfterCompletionTestListener.class, TransactionalComponentTestListener.class);
		this.transactionTemplate.execute(status -> {
			getContext().publishEvent("SKIP");
			getEventCollector().assertNoEventReceived();
			return null;
		});
		getEventCollector().assertNoEventReceived();
	}

	@Test
	public void afterRollback() {
		load(AfterCompletionExplicitTestListener.class);
		this.transactionTemplate.execute(status -> {
			getContext().publishEvent("test");
			getEventCollector().assertNoEventReceived();
			status.setRollbackOnly();
			return null;
		});
		getEventCollector().assertEvents(EventCollector.AFTER_ROLLBACK, "test");
		getEventCollector().assertTotalEventsCount(1); // After commit not invoked
	}

	@Test
	public void beforeCommit() {
		load(BeforeCommitTestListener.class);
		this.transactionTemplate.execute(status -> {
			// Synchronizations bracket the @Order(15) listener: order 10 runs
			// before it (no event yet), order 20 runs after it (event seen).
			TransactionSynchronizationManager.registerSynchronization(new EventTransactionSynchronization(10) {
				@Override
				public void beforeCommit(boolean readOnly) {
					getEventCollector().assertNoEventReceived(); // Not seen yet
				}
			});
			TransactionSynchronizationManager.registerSynchronization(new EventTransactionSynchronization(20) {
				@Override
				public void beforeCommit(boolean readOnly) {
					getEventCollector().assertEvents(EventCollector.BEFORE_COMMIT, "test");
					getEventCollector().assertTotalEventsCount(1);
				}
			});
			getContext().publishEvent("test");
			getEventCollector().assertNoEventReceived();
			return null;
		});
		getEventCollector().assertEvents(EventCollector.BEFORE_COMMIT, "test");
		getEventCollector().assertTotalEventsCount(1);
	}

	@Test
	public void beforeCommitWithException() { // Validates the custom synchronization is invoked
		load(BeforeCommitTestListener.class);
		try {
			this.transactionTemplate.execute(status -> {
				TransactionSynchronizationManager.registerSynchronization(new EventTransactionSynchronization(10) {
					@Override
					public void beforeCommit(boolean readOnly) {
						throw new IllegalStateException("test");
					}
				});
				getContext().publishEvent("test");
				getEventCollector().assertNoEventReceived();
				return null;
			});
			fail("Should have thrown an exception");
		} catch (IllegalStateException e) {
			// Test exception - ignore
		}
		getEventCollector().assertNoEventReceived(); // Before commit not invoked
	}

	@Test
	public void regularTransaction() {
		load(ImmediateTestListener.class, BeforeCommitTestListener.class, AfterCompletionExplicitTestListener.class);
		this.transactionTemplate.execute(status -> {
			TransactionSynchronizationManager.registerSynchronization(new EventTransactionSynchronization(10) {
				@Override
				public void beforeCommit(boolean readOnly) {
					getEventCollector().assertTotalEventsCount(1); // Immediate event
					getEventCollector().assertEvents(EventCollector.IMMEDIATELY, "test");
				}
			});
			TransactionSynchronizationManager.registerSynchronization(new EventTransactionSynchronization(20) {
				@Override
				public void beforeCommit(boolean readOnly) {
					getEventCollector().assertEvents(EventCollector.BEFORE_COMMIT, "test");
					getEventCollector().assertTotalEventsCount(2);
				}
			});
			getContext().publishEvent("test");
			getEventCollector().assertTotalEventsCount(1);
			return null;
		});
		getEventCollector().assertEvents(EventCollector.AFTER_COMMIT, "test");
		getEventCollector().assertTotalEventsCount(3); // Immediate, before commit, after commit
	}

	@Test
	public void noTransaction() {
		load(BeforeCommitTestListener.class, AfterCompletionTestListener.class,
				AfterCompletionExplicitTestListener.class);
		this.context.publishEvent("test");
		getEventCollector().assertTotalEventsCount(0);
	}

	@Test
	public void noTransactionWithFallbackExecution() {
		load(FallbackExecutionTestListener.class);
		this.context.publishEvent("test");
		this.eventCollector.assertEvents(EventCollector.BEFORE_COMMIT, "test");
		this.eventCollector.assertEvents(EventCollector.AFTER_COMMIT, "test");
		this.eventCollector.assertEvents(EventCollector.AFTER_ROLLBACK, "test");
		this.eventCollector.assertEvents(EventCollector.AFTER_COMPLETION, "test");
		getEventCollector().assertTotalEventsCount(4);
	}

	@Test
	public void conditionFoundOnTransactionalEventListener() {
		load(ImmediateTestListener.class);
		this.transactionTemplate.execute(status -> {
			getContext().publishEvent("SKIP");
			getEventCollector().assertNoEventReceived();
			return null;
		});
		getEventCollector().assertNoEventReceived();
	}

	@Test
	public void afterCommitMetaAnnotation() throws Exception {
		load(AfterCommitMetaAnnotationTestListener.class);
		this.transactionTemplate.execute(status -> {
			getContext().publishEvent("test");
			getEventCollector().assertNoEventReceived();
			return null;
		});
		getEventCollector().assertEvents(EventCollector.AFTER_COMMIT, "test");
		getEventCollector().assertTotalEventsCount(1);
	}

	@Test
	public void conditionFoundOnMetaAnnotation() {
		load(AfterCommitMetaAnnotationTestListener.class);
		this.transactionTemplate.execute(status -> {
			getContext().publishEvent("SKIP");
			getEventCollector().assertNoEventReceived();
			return null;
		});
		getEventCollector().assertNoEventReceived();
	}

	protected EventCollector getEventCollector() {
		return eventCollector;
	}

	protected ConfigurableApplicationContext getContext() {
		return context;
	}

	/** Builds a context from {@link BasicConfiguration} plus the given listener classes. */
	private void load(Class<?>... classes) {
		List<Class<?>> allClasses = new ArrayList<>();
		allClasses.add(BasicConfiguration.class);
		allClasses.addAll(Arrays.asList(classes));
		doLoad(allClasses.toArray(new Class<?>[allClasses.size()]));
	}

	private void doLoad(Class<?>... classes) {
		this.context = new AnnotationConfigApplicationContext(classes);
		this.eventCollector = this.context.getBean(EventCollector.class);
	}

	@Configuration
	static class BasicConfiguration {

		@Bean // set automatically with tx management
		public TransactionalEventListenerFactory transactionalEventListenerFactory() {
			return new TransactionalEventListenerFactory();
		}

		@Bean
		public EventCollector eventCollector() {
			return new EventCollector();
		}
	}

	@EnableTransactionManagement
	@Configuration
	static class TransactionalConfiguration {

		@Bean
		public CallCountingTransactionManager transactionManager() {
			return new CallCountingTransactionManager();
		}
	}

	/** Records events by phase so tests can assert when listeners fired. */
	static class EventCollector {

		public static final String IMMEDIATELY = "IMMEDIATELY";

		public static final String BEFORE_COMMIT = "BEFORE_COMMIT";

		public static final String AFTER_COMPLETION = "AFTER_COMPLETION";

		public static final String AFTER_COMMIT = "AFTER_COMMIT";

		public static final String AFTER_ROLLBACK = "AFTER_ROLLBACK";

		public static final String[] ALL_PHASES = {IMMEDIATELY, BEFORE_COMMIT, AFTER_COMMIT, AFTER_ROLLBACK};

		private final MultiValueMap<String, Object> events = new LinkedMultiValueMap<>();

		public void addEvent(String phase, Object event) {
			this.events.add(phase, event);
		}

		/**
		 * Returns the events recorded for the given phase, never {@code null}.
		 * (The previous implementation could return {@code null} for a key
		 * that was explicitly mapped to a null value, which would NPE in the
		 * assertion helpers.)
		 */
		public List<Object> getEvents(String phase) {
			List<Object> eventsForPhase = this.events.get(phase);
			return (eventsForPhase != null) ? eventsForPhase : Collections.emptyList();
		}

		public void assertNoEventReceived(String... phases) {
			if (phases.length == 0) { // All values if none set
				phases = ALL_PHASES;
			}
			for (String phase : phases) {
				List<Object> eventsForPhase = getEvents(phase);
				assertEquals("Expected no events for phase '" + phase + "' " + "but got " + eventsForPhase + ":", 0,
						eventsForPhase.size());
			}
		}

		public void assertEvents(String phase, Object... expected) {
			List<Object> actual = getEvents(phase);
			assertEquals("wrong number of events for phase '" + phase + "'", expected.length, actual.size());
			for (int i = 0; i < expected.length; i++) {
				assertEquals("Wrong event for phase '" + phase + "' at index " + i, expected[i], actual.get(i));
			}
		}

		public void assertTotalEventsCount(int number) {
			int size = 0;
			for (Map.Entry<String, List<Object>> entry : this.events.entrySet()) {
				size += entry.getValue().size();
			}
			assertEquals("Wrong number of total events (" + this.events.size() + ") " + "registered phase(s)", number,
					size);
		}
	}

	/** Base listener that records into the collector and fails on the FAIL message. */
	static abstract class BaseTransactionalTestListener {

		static final String FAIL_MSG = "FAIL";

		@Autowired private EventCollector eventCollector;

		public void handleEvent(String phase, String data) {
			this.eventCollector.addEvent(phase, data);
			if (FAIL_MSG.equals(data)) {
				throw new IllegalStateException("Test exception on phase '" + phase + "'");
			}
		}
	}

	@Component
	static class ImmediateTestListener extends BaseTransactionalTestListener {

		@EventListener(condition = "!'SKIP'.equals(#data)")
		public void handleImmediately(String data) {
			handleEvent(EventCollector.IMMEDIATELY, data);
		}
	}

	@Component
	static class AfterCompletionTestListener extends BaseTransactionalTestListener {

		@TransactionalEventListener(phase = AFTER_COMPLETION)
		public void handleAfterCompletion(String data) {
			handleEvent(EventCollector.AFTER_COMPLETION, data);
		}
	}

	@Component
	static class AfterCompletionExplicitTestListener extends BaseTransactionalTestListener {

		@TransactionalEventListener(phase = AFTER_COMMIT)
		public void handleAfterCommit(String data) {
			handleEvent(EventCollector.AFTER_COMMIT, data);
		}

		@TransactionalEventListener(phase = AFTER_ROLLBACK)
		public void handleAfterRollback(String data) {
			handleEvent(EventCollector.AFTER_ROLLBACK, data);
		}
	}

	@Transactional
	@Component
	static interface TransactionalComponentTestListenerInterface {

		// Cannot use #data in condition due to dynamic proxy.
		@TransactionalEventListener(condition = "!'SKIP'.equals(#p0)")
		void handleAfterCommit(String data);
	}

	static class TransactionalComponentTestListener extends BaseTransactionalTestListener
			implements TransactionalComponentTestListenerInterface {

		@Override
		public void handleAfterCommit(String data) {
			handleEvent(EventCollector.AFTER_COMMIT, data);
		}
	}

	@Component
	static class BeforeCommitTestListener extends BaseTransactionalTestListener {

		@TransactionalEventListener(phase = BEFORE_COMMIT)
		@Order(15)
		public void handleBeforeCommit(String data) {
			handleEvent(EventCollector.BEFORE_COMMIT, data);
		}
	}

	@Component
	static class FallbackExecutionTestListener extends BaseTransactionalTestListener {

		@TransactionalEventListener(phase = BEFORE_COMMIT, fallbackExecution = true)
		public void handleBeforeCommit(String data) {
			handleEvent(EventCollector.BEFORE_COMMIT, data);
		}

		@TransactionalEventListener(phase = AFTER_COMMIT, fallbackExecution = true)
		public void handleAfterCommit(String data) {
			handleEvent(EventCollector.AFTER_COMMIT, data);
		}

		@TransactionalEventListener(phase = AFTER_ROLLBACK, fallbackExecution = true)
		public void handleAfterRollback(String data) {
			handleEvent(EventCollector.AFTER_ROLLBACK, data);
		}

		@TransactionalEventListener(phase = AFTER_COMPLETION, fallbackExecution = true)
		public void handleAfterCompletion(String data) {
			handleEvent(EventCollector.AFTER_COMPLETION, data);
		}
	}

	@TransactionalEventListener(phase = AFTER_COMMIT, condition = "!'SKIP'.equals(#p0)")
	@Target(ElementType.METHOD)
	@Retention(RetentionPolicy.RUNTIME)
	@interface AfterCommitEventListener {
	}

	@Component
	static class AfterCommitMetaAnnotationTestListener extends BaseTransactionalTestListener {

		@AfterCommitEventListener
		public void handleAfterCommit(String data) {
			handleEvent(EventCollector.AFTER_COMMIT, data);
		}
	}

	/** Synchronization with an explicit order so tests can interleave assertions. */
	static class EventTransactionSynchronization extends TransactionSynchronizationAdapter {

		private final int order;

		EventTransactionSynchronization(int order) {
			this.order = order;
		}

		@Override
		public int getOrder() {
			return order;
		}
	}

	/** Minimal counting transaction manager (no real resource) for the template. */
	@SuppressWarnings("serial")
	static class CallCountingTransactionManager extends AbstractPlatformTransactionManager {

		public TransactionDefinition lastDefinition;
		public int begun;
		public int commits;
		public int rollbacks;
		public int inflight;

		@Override
		protected Object doGetTransaction() {
			return new Object();
		}

		@Override
		protected void doBegin(Object transaction, TransactionDefinition definition) {
			this.lastDefinition = definition;
			++begun;
			++inflight;
		}

		@Override
		protected void doCommit(DefaultTransactionStatus status) {
			++commits;
			--inflight;
		}

		@Override
		protected void doRollback(DefaultTransactionStatus status) {
			++rollbacks;
			--inflight;
		}

		public void clear() {
			begun = commits = rollbacks = inflight = 0;
		}
	}
}
/*
 * Copyright 2000-2017 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.ide.commander;

import com.intellij.diff.actions.CompareFilesAction;
import com.intellij.ide.PsiCopyPasteManager;
import com.intellij.ide.TwoPaneIdeView;
import com.intellij.ide.projectView.ProjectViewNode;
import com.intellij.ide.projectView.impl.AbstractProjectTreeStructure;
import com.intellij.ide.projectView.impl.ProjectAbstractTreeStructureBase;
import com.intellij.ide.util.treeView.AbstractTreeNode;
import com.intellij.ide.util.treeView.AlphaComparator;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.components.*;
import com.intellij.openapi.keymap.KeymapManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.Splitter;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileManager;
import com.intellij.openapi.wm.*;
import com.intellij.psi.*;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.ui.AutoScrollToSourceHandler;
import com.intellij.util.SmartList;
import org.jdom.Element;
import org.jetbrains.annotations.NonNls;

import javax.swing.*;
import javax.swing.event.ListDataEvent;
import javax.swing.event.ListDataListener;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import java.awt.*;
import java.awt.event.*;
import java.util.List;

import static com.intellij.openapi.wm.ex.IdeFocusTraversalPolicy.getPreferredFocusedComponent;

/**
 * Two-pane "Commander" tool window: a splitter holding a left and a right
 * {@link CommanderPanel}, with back/forward history, panel swapping/syncing,
 * and persistence of the visible roots and splitter proportion into the
 * workspace file (component name "Commander").
 *
 * @author Eugene Belyaev
 */
@State(name = "Commander", storages = @Storage(StoragePathMacros.WORKSPACE_FILE))
public class Commander extends JPanel implements PersistentStateComponent<Element>, DataProvider, TwoPaneIdeView, Disposable {
  private final Project myProject;
  // Left/right are not final: swapPanels() exchanges them.
  private CommanderPanel myLeftPanel;
  private CommanderPanel myRightPanel;
  private final Splitter mySplitter;
  // Shared listeners installed on both panels' lists; removed via Disposer in createPanel().
  private final ListSelectionListener mySelectionListener;
  private final ListDataListener myListDataListener;
  public boolean MOVE_FOCUS = true; // internal option: move focus to editor when class/file/...etc. is created
  // Holds the XML passed to loadState() until processConfigurationElement() consumes it.
  private Element myElement;
  private final FocusWatcher myFocusWatcher;
  private final CommanderHistory myHistory;
  private boolean myAutoScrollMode;
  private final ToolWindowManager myToolWindowManager;
  @NonNls private static final String ACTION_BACKCOMMAND = "backCommand";
  @NonNls private static final String ACTION_FORWARDCOMMAND = "forwardCommand";
  @NonNls private static final String ELEMENT_LEFTPANEL = "leftPanel";
  @NonNls private static final String ATTRIBUTE_MOVE_FOCUS = "MOVE_FOCUS";
  @NonNls private static final String ELEMENT_OPTION = "OPTION";
  @NonNls private static final String ATTRIBUTE_PROPORTION = "proportion";
  @NonNls private static final String ELEMENT_SPLITTER = "splitter";
  @NonNls private static final String ELEMENT_RIGHTPANEL = "rightPanel";
  @NonNls private static final String ATTRIBUTE_URL = "url";
  @NonNls private static final String ATTRIBUTE_CLASS = "class";

  /**
   * FOR USE IN TESTS ONLY!!!
   * Delegates to the full constructor with a {@code null} ToolWindowManager,
   * so {@link #updateToolWindowTitle(CommanderPanel)} must not be reached in tests.
   *
   * @param project       project this Commander belongs to
   * @param keymapManager used to look up Back/Forward keystrokes
   */
  public Commander(final Project project, KeymapManager keymapManager) {
    this(project, keymapManager, null);
  }

  public Commander(final Project project, KeymapManager keymapManager, final ToolWindowManager toolWindowManager) {
    super(new BorderLayout());
    myProject = project;
    myToolWindowManager = toolWindowManager;

    // History navigation actions; note they capture myHistory which is assigned below —
    // safe because they only run on user input, after construction completes.
    final AbstractAction backAction = new AbstractAction() {
      @Override
      public void actionPerformed(final ActionEvent e) {
        myHistory.back();
      }
    };
    final AbstractAction fwdAction = new AbstractAction() {
      @Override
      public void actionPerformed(final ActionEvent e) {
        myHistory.forward();
      }
    };

    final ActionMap actionMap = getActionMap();
    actionMap.put(ACTION_BACKCOMMAND, backAction);
    actionMap.put(ACTION_FORWARDCOMMAND, fwdAction);
    // Bind the IDE's Go Back / Go Forward shortcuts to the history actions,
    // both window-wide and ancestor-of-focus scoped.
    for (KeyStroke stroke : getKeyStrokes(IdeActions.ACTION_GOTO_BACK, keymapManager)) {
      //getInputMap(JComponent.WHEN_IN_FOCUSED_WINDOW).put(stroke, "backCommand");
      //getInputMap(JComponent.WHEN_ANCESTOR_OF_FOCUSED_COMPONENT).put(stroke, "backCommand");
      registerKeyboardAction(backAction, ACTION_BACKCOMMAND, stroke, JComponent.WHEN_IN_FOCUSED_WINDOW);
      registerKeyboardAction(backAction, ACTION_BACKCOMMAND, stroke, JComponent.WHEN_ANCESTOR_OF_FOCUSED_COMPONENT);
    }
    for (KeyStroke stroke : getKeyStrokes(IdeActions.ACTION_GOTO_FORWARD, keymapManager)) {
      //getInputMap(JComponent.WHEN_IN_FOCUSED_WINDOW).put(stroke, "forwardCommand");
      //getInputMap(JComponent.WHEN_ANCESTOR_OF_FOCUSED_COMPONENT).put(stroke, "forwardCommand");
      registerKeyboardAction(fwdAction, ACTION_FORWARDCOMMAND, stroke, JComponent.WHEN_IN_FOCUSED_WINDOW);
      registerKeyboardAction(fwdAction, ACTION_FORWARDCOMMAND, stroke, JComponent.WHEN_ANCESTOR_OF_FOCUSED_COMPONENT);
    }

    myHistory = new CommanderHistory(this);

    // Any selection or model change in either list refreshes the tool window title.
    mySelectionListener = new ListSelectionListener() {
      @Override
      public void valueChanged(final ListSelectionEvent e) {
        updateToolWindowTitle();
      }
    };
    myListDataListener = new ListDataListener() {
      @Override
      public void intervalAdded(final ListDataEvent e) {
        updateToolWindowTitle();
      }

      @Override
      public void intervalRemoved(final ListDataEvent e) {
        updateToolWindowTitle();
      }

      @Override
      public void contentsChanged(final ListDataEvent e) {
        updateToolWindowTitle();
      }
    };

    myFocusWatcher = new FocusWatcher();

    // Each panel records its navigation into the shared history,
    // tagged with which side it came from (true = left).
    myLeftPanel = createPanel();
    myLeftPanel.addHistoryListener(new CommanderHistoryListener() {
      @Override
      public void historyChanged(final PsiElement selectedElement, final boolean elementExpanded) {
        getCommandHistory().saveState(selectedElement, elementExpanded, true);
      }
    });
    myRightPanel = createPanel();
    myRightPanel.addHistoryListener(new CommanderHistoryListener() {
      @Override
      public void historyChanged(final PsiElement selectedElement, final boolean elementExpanded) {
        getCommandHistory().saveState(selectedElement, elementExpanded, false);
      }
    });

    mySplitter = new Splitter();
    mySplitter.setFirstComponent(myLeftPanel);
    mySplitter.setSecondComponent(myRightPanel);

    add(mySplitter, BorderLayout.CENTER);

    final AutoScrollToSourceHandler handler = new AutoScrollToSourceHandler() {
      @Override
      protected boolean isAutoScrollMode() {
        return myAutoScrollMode;
      }

      @Override
      protected void setAutoScrollMode(boolean state) {
        myAutoScrollMode = state;
      }
    };
    handler.install(myLeftPanel.getList());
    handler.install(myRightPanel.getList());

    // No toolbar in unit-test mode.
    final boolean shouldAddToolbar = !ApplicationManager.getApplication().isUnitTestMode();
    if (shouldAddToolbar) {
      final DefaultActionGroup toolbarActions = createToolbarActions();
      toolbarActions.add(handler.createToggleAction());
      final ActionToolbar toolbar = ActionManager.getInstance().createActionToolbar(ActionPlaces.COMMANDER_TOOLBAR, toolbarActions, true);
      add(toolbar.getComponent(), BorderLayout.NORTH);
    }

    myFocusWatcher.install(this);
  }

  public static Commander getInstance(final Project project) {
    return ServiceManager.getService(project, Commander.class);
  }

  public CommanderHistory getCommandHistory() {
    return myHistory;
  }

  /**
   * Applies the workspace XML stashed by {@link #loadState(Element)}: restores the
   * root element of each panel, the splitter proportion and the MOVE_FOCUS option,
   * then clears the stashed element.
   */
  private void processConfigurationElement() {
    if (myElement == null) return;

    Element element = myElement.getChild(ELEMENT_LEFTPANEL);
    if (element != null) {
      final PsiElement parentElement = readParentElement(element);
      if (parentElement != null) {
        myLeftPanel.getBuilder().enterElement(parentElement, PsiUtilCore.getVirtualFile(parentElement));
      }
    }

    element = myElement.getChild(ELEMENT_RIGHTPANEL);
    if (element != null) {
      final PsiElement parentElement = readParentElement(element);
      if (parentElement != null) {
        myRightPanel.getBuilder().enterElement(parentElement, PsiUtilCore.getVirtualFile(parentElement));
      }
    }

    element = myElement.getChild(ELEMENT_SPLITTER);
    if (element != null) {
      final String attribute = element.getAttributeValue(ATTRIBUTE_PROPORTION);
      if (attribute != null) {
        try {
          final float proportion = Float.valueOf(attribute).floatValue();
          // Out-of-range or unparsable proportions are silently ignored.
          if (proportion >= 0 && proportion <= 1) {
            mySplitter.setProportion(proportion);
          }
        }
        catch (NumberFormatException ignored) {
        }
      }
    }

    element = myElement.getChild(ELEMENT_OPTION);
    if (element != null) {
      //noinspection HardCodedStringLiteral
      MOVE_FOCUS = !"false".equals(element.getAttributeValue(ATTRIBUTE_MOVE_FOCUS));
    }

    myLeftPanel.setActive(false);
    myRightPanel.setActive(false);
    myLeftPanel.setMoveFocus(MOVE_FOCUS);
    myRightPanel.setMoveFocus(MOVE_FOCUS);

    myElement = null;
  }

  // Collects the first keystroke of every keyboard shortcut bound to the given action id.
  private static KeyStroke[] getKeyStrokes(String actionId, KeymapManager keymapManager) {
    List<KeyStroke> strokes = new SmartList<>();
    for (Shortcut shortcut : keymapManager.getActiveKeymap().getShortcuts(actionId)) {
      if (shortcut instanceof KeyboardShortcut) {
        strokes.add(((KeyboardShortcut)shortcut).getFirstKeyStroke());
      }
    }
    return strokes.toArray(new KeyStroke[strokes.size()]);
  }

  // Builds the toolbar group: back/forward (presentation copied from the IDE's
  // Go Back / Go Forward actions, enabled state driven by history) + swap/sync actions.
  private DefaultActionGroup createToolbarActions() {
    final ActionManager actionManager = ActionManager.getInstance();
    final DefaultActionGroup group = new DefaultActionGroup();

    final AnAction backAction = new AnAction() {
      @Override
      public void actionPerformed(AnActionEvent e) {
        myHistory.back();
      }

      @Override
      public void update(AnActionEvent e) {
        super.update(e);
        e.getPresentation().setEnabled(myHistory.canGoBack());
      }
    };
    backAction.copyFrom(actionManager.getAction(IdeActions.ACTION_GOTO_BACK));
    group.add(backAction);

    final AnAction forwardAction = new AnAction() {
      @Override
      public void actionPerformed(AnActionEvent e) {
        myHistory.forward();
      }

      @Override
      public void update(AnActionEvent e) {
        super.update(e);
        e.getPresentation().setEnabled(myHistory.canGoForward());
      }
    };
    forwardAction.copyFrom(actionManager.getAction(IdeActions.ACTION_GOTO_FORWARD));
    group.add(forwardAction);

    group.add(actionManager.getAction("CommanderSwapPanels"));
    group.add(actionManager.getAction("CommanderSyncViews"));

    return group;
  }

  /**
   * Creates one side of the commander: a CommanderPanel wired with Escape-clears-copy-paste
   * handling, a project list builder, title-updating focus/selection/model listeners, and
   * listener cleanup registered against this component's disposal.
   */
  private CommanderPanel createPanel() {
    final CommanderPanel panel = new CommanderPanel(myProject, true, false);
    panel.getList().addKeyListener(new KeyAdapter() {
      @Override
      public void keyPressed(final KeyEvent e) {
        if (KeyEvent.VK_ESCAPE == e.getKeyCode()) {
          if (e.isConsumed()) return;
          // Escape cancels a pending PSI cut (but not a copy).
          final PsiCopyPasteManager copyPasteManager = PsiCopyPasteManager.getInstance();
          final boolean[] isCopied = new boolean[1];
          if (copyPasteManager.getElements(isCopied) != null && !isCopied[0]) {
            copyPasteManager.clear();
            e.consume();
          }
        }
      }
    });
    final ProjectAbstractTreeStructureBase treeStructure = createProjectTreeStructure();
    panel.setBuilder(new ProjectListBuilder(myProject, panel, treeStructure, AlphaComparator.INSTANCE, true));
    panel.setProjectTreeStructure(treeStructure);

    final FocusAdapter focusListener = new FocusAdapter() {
      @Override
      public void focusGained(final FocusEvent e) {
        updateToolWindowTitle(panel);
      }
    };
    final JList list = panel.getList();
    list.addFocusListener(focusListener);
    list.getSelectionModel().addListSelectionListener(mySelectionListener);
    list.getModel().addListDataListener(myListDataListener);

    Disposer.register(this, new Disposable() {
      @Override
      public void dispose() {
        list.removeFocusListener(focusListener);
        list.getSelectionModel().removeListSelectionListener(mySelectionListener);
        list.getModel().removeListDataListener(myListDataListener);
      }
    });

    return panel;
  }

  // Tree structure shown in each panel: members visible, everything else off.
  protected AbstractProjectTreeStructure createProjectTreeStructure() {
    return new AbstractProjectTreeStructure(myProject) {
      @Override
      public boolean isShowMembers() {
        return true;
      }

      @Override
      public boolean isHideEmptyMiddlePackages() {
        return false;
      }

      @Override
      public boolean isFlattenPackages() {
        return false;
      }

      @Override
      public boolean isAbbreviatePackageNames() {
        return false;
      }

      @Override
      public boolean isShowLibraryContents() {
        return false;
      }

      @Override
      public boolean isShowModules() {
        return false;
      }
    };
  }

  /**
   * invoked in AWT thread
   */
  private void updateToolWindowTitle() {
    final CommanderPanel panel = getActivePanel();
    updateToolWindowTitle(panel);
  }

  // NOTE(review): myToolWindowManager may be null when constructed via the test-only
  // constructor; this method assumes a real ToolWindowManager — confirm tests never reach it.
  protected void updateToolWindowTitle(CommanderPanel activePanel) {
    final ToolWindow toolWindow = myToolWindowManager.getToolWindow(ToolWindowId.COMMANDER);
    if (toolWindow != null) {
      final AbstractTreeNode node = activePanel.getSelectedNode();
      if (node instanceof ProjectViewNode) {
        toolWindow.setTitle(((ProjectViewNode)node).getTitle());
      }
    }
  }

  public boolean isLeftPanelActive() {
    return isPanelActive(myLeftPanel);
  }

  // A panel is "active" when its list currently owns focus per the focus watcher.
  boolean isPanelActive(final CommanderPanel panel) {
    return panel.getList() == myFocusWatcher.getFocusedComponent();
  }

  public void selectElementInLeftPanel(final Object element, VirtualFile virtualFile) {
    myLeftPanel.getBuilder().selectElement(element, virtualFile);
    if (!isPanelActive(myLeftPanel)) {
      switchActivePanel();
    }
  }

  public void selectElementInRightPanel(final Object element, VirtualFile virtualFile) {
    myRightPanel.getBuilder().selectElement(element, virtualFile);
    if (!isPanelActive(myRightPanel)) {
      switchActivePanel();
    }
  }

  /** Toggles which panel is active and moves keyboard focus to the newly active one. */
  public void switchActivePanel() {
    final CommanderPanel activePanel = getActivePanel();
    final CommanderPanel inactivePanel = getInactivePanel();
    inactivePanel.setActive(true);
    activePanel.setActive(false);
    IdeFocusManager.getGlobalInstance().doWhenFocusSettlesDown(() -> {
      IdeFocusManager.getGlobalInstance().requestFocus(getPreferredFocusedComponent(inactivePanel), true);
    });
  }

  public void enterElementInActivePanel(final PsiElement element) {
    final CommanderPanel activePanel = isLeftPanelActive() ? myLeftPanel : myRightPanel;
    activePanel.getBuilder().enterElement(element, PsiUtilCore.getVirtualFile(element));
  }

  // Exchanges the two panels both visually (splitter) and in the field references.
  public void swapPanels() {
    mySplitter.swapComponents();

    final CommanderPanel tmpPanel = myLeftPanel;
    myLeftPanel = myRightPanel;
    myRightPanel = tmpPanel;
  }

  // Makes the passive panel show the same root as the active one.
  public void syncViews() {
    final CommanderPanel activePanel;
    final CommanderPanel passivePanel;
    if (isLeftPanelActive()) {
      activePanel = myLeftPanel;
      passivePanel = myRightPanel;
    }
    else {
      activePanel = myRightPanel;
      passivePanel = myLeftPanel;
    }
    ProjectViewNode element = (ProjectViewNode)activePanel.getBuilder().getParentNode();
    passivePanel.getBuilder().enterElement(element);
  }

  public CommanderPanel getActivePanel() {
    return isLeftPanelActive() ? myLeftPanel : myRightPanel;
  }

  public CommanderPanel getInactivePanel() {
    return !isLeftPanelActive() ? myLeftPanel : myRightPanel;
  }

  /**
   * DataProvider entry point. Serves help id, project, the inactive panel's root as the
   * target PSI element, and a diff request for the two selected elements (only when neither
   * selected element is an ancestor of the other); everything else is delegated to the
   * active panel.
   */
  @Override
  public Object getData(final String dataId) {
    if (PlatformDataKeys.HELP_ID.is(dataId)) {
      return HelpID.COMMANDER;
    }
    else if (CommonDataKeys.PROJECT.is(dataId)) {
      return myProject;
    }
    else if (LangDataKeys.TARGET_PSI_ELEMENT.is(dataId)) {
      final AbstractTreeNode parentElement = getInactivePanel().getBuilder().getParentNode();
      if (parentElement == null) return null;
      final Object element = parentElement.getValue();
      return element instanceof PsiElement && ((PsiElement)element).isValid() ? element : null;
    }
    else if (CompareFilesAction.DIFF_REQUEST.is(dataId)) {
      PsiElement primary = getActivePanel().getSelectedElement();
      PsiElement secondary = getInactivePanel().getSelectedElement();
      if (primary != null && secondary != null && primary.isValid() && secondary.isValid() &&
          !PsiTreeUtil.isAncestor(primary, secondary, false) &&
          !PsiTreeUtil.isAncestor(secondary, primary, false)) {
        return PsiDiffContentFactory.comparePsiElements(primary, secondary);
      }
      return null;
    }
    else {
      return getActivePanel().getDataImpl(dataId);
    }
  }

  /**
   * Serializes panel roots, splitter proportion and (when disabled) the MOVE_FOCUS
   * option into a "commander" element for the workspace file.
   */
  @Override
  public Element getState() {
    Element element = new Element("commander");
    if (myLeftPanel == null || myRightPanel == null) {
      return element;
    }
    PsiDocumentManager.getInstance(myProject).commitAllDocuments();
    Element e = new Element(ELEMENT_LEFTPANEL);
    element.addContent(e);
    writePanel(myLeftPanel, e);
    e = new Element(ELEMENT_RIGHTPANEL);
    element.addContent(e);
    writePanel(myRightPanel, e);
    e = new Element(ELEMENT_SPLITTER);
    element.addContent(e);
    e.setAttribute(ATTRIBUTE_PROPORTION, Float.toString(mySplitter.getProportion()));
    if (!MOVE_FOCUS) {
      e = new Element(ELEMENT_OPTION);
      element.addContent(e);
      //noinspection HardCodedStringLiteral
      e.setAttribute(ATTRIBUTE_MOVE_FOCUS, "false");
    }
    return element;
  }

  // Persists a panel's root: directories by VFS url, classes by qualified name
  // (walking up to the nearest named enclosing class).
  private static void writePanel(final CommanderPanel panel, final Element element) {
    /*TODO[anton,vova]: it's a patch!!!*/
    final AbstractListBuilder builder = panel.getBuilder();
    if (builder == null) return;

    final AbstractTreeNode parentNode = builder.getParentNode();
    final Object parentElement = parentNode != null ? parentNode.getValue() : null;
    if (parentElement instanceof PsiDirectory) {
      final PsiDirectory directory = (PsiDirectory)parentElement;
      element.setAttribute(ATTRIBUTE_URL, directory.getVirtualFile().getUrl());
    }
    else if (parentElement instanceof PsiClass) {
      for (PsiElement e = (PsiElement)parentElement; e != null && e.isValid(); e = e.getParent()) {
        if (e instanceof PsiClass) {
          final String qualifiedName = ((PsiClass)e).getQualifiedName();
          if (qualifiedName != null) {
            element.setAttribute(ATTRIBUTE_CLASS, qualifiedName);
            break;
          }
        }
      }
    }
  }

  @Override
  public void loadState(Element state) {
    myElement = state;
    processConfigurationElement();
    myElement = null;
  }

  // Inverse of writePanel(): resolves a stored url to a PsiDirectory or a stored
  // qualified name to a PsiClass; null when neither resolves.
  private PsiElement readParentElement(final Element element) {
    if (element.getAttributeValue(ATTRIBUTE_URL) != null) {
      final String url = element.getAttributeValue(ATTRIBUTE_URL);
      final VirtualFile file = VirtualFileManager.getInstance().findFileByUrl(url);
      return file != null ? PsiManager.getInstance(myProject).findDirectory(file) : null;
    }
    if (element.getAttributeValue(ATTRIBUTE_CLASS) != null) {
      final String className = element.getAttributeValue(ATTRIBUTE_CLASS);
      return className != null
             ? JavaPsiFacade.getInstance(myProject).findClass(className, GlobalSearchScope.allScope(myProject))
             : null;
    }
    return null;
  }

  @Override
  public void dispose() {
    if (myLeftPanel == null) {
      // not opened project (default?)
      return;
    }
    myLeftPanel.dispose();
    myRightPanel.dispose();
    myHistory.clearHistory();
  }

  public CommanderPanel getRightPanel() {
    return myRightPanel;
  }

  public CommanderPanel getLeftPanel() {
    return myLeftPanel;
  }

  @Override
  public void selectElement(PsiElement element, boolean selectInActivePanel) {
    CommanderPanel panel = selectInActivePanel ? getActivePanel() : getInactivePanel();
    panel.getBuilder().selectElement(element, PsiUtilCore.getVirtualFile(element));
  }
}
/**
 * Appcelerator Titanium Mobile
 * Copyright (c) 2009-2010 by Appcelerator, Inc. All Rights Reserved.
 * Licensed under the terms of the Apache Public License
 * Please see the LICENSE included with this distribution for details.
 */
package ti.modules.titanium.network.socket;

import java.io.IOException;
import java.io.InputStream;
import java.net.InetSocketAddress;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.UnknownHostException;

import org.appcelerator.kroll.KrollDict;
import org.appcelerator.kroll.KrollProxy;
import org.appcelerator.kroll.annotations.Kroll;
import org.appcelerator.titanium.TiContext;
import org.appcelerator.titanium.io.TiStream;
import org.appcelerator.titanium.kroll.KrollCallback;
import org.appcelerator.titanium.util.Log;
import org.appcelerator.titanium.util.TiConfig;
import org.appcelerator.titanium.util.TiConvert;
import org.appcelerator.titanium.util.TiStreamHelper;

import ti.modules.titanium.BufferProxy;

/**
 * Kroll proxy for a TCP socket, usable either as a client ({@link #connect()}) or a
 * server ({@link #listen()} + {@link #accept(KrollDict)}). Implements {@link TiStream}
 * so connected sockets can be read from / written to with {@link BufferProxy} buffers.
 *
 * State transitions (values from SocketModule): INITIALIZED -> CONNECTED | LISTENING
 * -> CLOSED, with ERROR reachable from any state. "connected", "error" and "accepted"
 * callbacks are fired asynchronously via {@link #updateState(int, String, KrollDict)}.
 */
@Kroll.proxy(creatableInModule = SocketModule.class)
public class TCPProxy extends KrollProxy implements TiStream
{
  private static final String LCAT = "TCPProxy";
  private static final boolean DBG = TiConfig.LOGD;

  //private boolean initialized = false;
  private Socket clientSocket = null;
  private ServerSocket serverSocket = null;
  private boolean accepting = false;          // polled by ListeningSocketThread
  private KrollDict acceptOptions = null;     // options for the next accepted connection
  private int state = 0;                      // one of the SocketModule state constants
  private InputStream inputStream = null;     // lazily fetched from clientSocket in read()

  public TCPProxy(TiContext context)
  {
    super(context);
    state = SocketModule.INITIALIZED;
  }

  /**
   * Starts an asynchronous client connection using the "host" and "port" properties
   * (and "timeout", if set). Completion is reported through the "connected" or
   * "error" callback.
   *
   * @throws IllegalArgumentException if "host" or "port" is missing
   * @throws Exception if the socket is already connected or listening
   */
  @Kroll.method
  public void connect() throws Exception
  {
    if ((state == SocketModule.LISTENING) || (state == SocketModule.CONNECTED)) {
      throw new Exception("Unable to call connect on socket in <" + state + "> state");
    }

    Object host = getProperty("host");
    Object port = getProperty("port");
    if ((host == null) || (port == null)) {
      throw new IllegalArgumentException("unable to call connect, socket must have a valid host and port");
    }

    new ConnectedSocketThread().start();
  }

  /**
   * Binds a server socket on the "port" property (backlog from "listenQueueSize" when
   * present; an unbound socket when neither is set) and starts the accept-polling thread.
   *
   * @throws Exception if the socket is already connected or listening, or binding fails
   *         (the original IOException is chained as the cause)
   */
  @Kroll.method
  public void listen() throws Exception
  {
    if ((state == SocketModule.LISTENING) || (state == SocketModule.CONNECTED)) {
      throw new Exception("Unable to call listen on socket in <" + state + "> state");
    }

    Object port = getProperty("port");
    Object listenQueueSize = getProperty("listenQueueSize");
    try {
      if ((port != null) && (listenQueueSize != null)) {
        serverSocket = new ServerSocket(TiConvert.toInt(port), TiConvert.toInt(listenQueueSize));
      } else if (port != null) {
        serverSocket = new ServerSocket(TiConvert.toInt(port));
      } else {
        serverSocket = new ServerSocket();
      }

      new ListeningSocketThread().start();
      state = SocketModule.LISTENING;
    } catch (IOException e) {
      e.printStackTrace();
      state = SocketModule.ERROR;
      // FIX: chain the original IOException so callers can see the real bind failure.
      throw new Exception("Unable to listen, IO error", e);
    }
  }

  /**
   * Arms the listening thread to accept exactly one pending connection, configured
   * with the given options ("timeout", "error").
   *
   * @throws Exception if the socket is not in the LISTENING state
   */
  @Kroll.method
  public void accept(KrollDict acceptOptions) throws Exception
  {
    if (state != SocketModule.LISTENING) {
      throw new Exception("Socket is not listening, unable to call accept");
    }

    this.acceptOptions = acceptOptions;
    accepting = true;
  }

  // Closes and clears both the client and server sockets, whichever are open.
  private void closeSocket() throws IOException
  {
    if (clientSocket != null) {
      clientSocket.close();
      clientSocket = null;
    }

    if (serverSocket != null) {
      serverSocket.close();
      serverSocket = null;
    }
  }

  @Kroll.setProperty @Kroll.method
  public void setHost(String host)
  {
    setSocketProperty("host", host);
  }

  @Kroll.setProperty @Kroll.method
  public void setPort(int port)
  {
    setSocketProperty("port", port);
  }

  @Kroll.setProperty @Kroll.method
  public void setTimeout(int timeout)
  {
    setSocketProperty("timeout", timeout);
  }

  @Kroll.setProperty @Kroll.method
  public void setOptions()
  {
    // not implemented yet - reserved for future use
    Log.i(LCAT, "setting options on socket is not supported yet");
  }

  @Kroll.setProperty @Kroll.method
  public void setListenQueueSize(int listenQueueSize)
  {
    setSocketProperty("listenQueueSize", listenQueueSize);
  }

  @Kroll.setProperty @Kroll.method
  public void setConnected(KrollCallback connected)
  {
    setSocketProperty("connected", connected);
  }

  @Kroll.setProperty @Kroll.method
  public void setError(KrollCallback error)
  {
    setSocketProperty("error", error);
  }

  @Kroll.setProperty @Kroll.method
  public void setAccepted(KrollCallback accepted)
  {
    setSocketProperty("accepted", accepted);
  }

  // Properties are mutable only while the socket is not yet connected/listening.
  private void setSocketProperty(String propertyName, Object propertyValue)
  {
    if ((state != SocketModule.LISTENING) && (state != SocketModule.CONNECTED)) {
      setProperty(propertyName, propertyValue);
    } else {
      Log.e(LCAT, "Unable to set property <" + propertyName + "> on socket in <" + state + "> state");
    }
  }

  @Kroll.getProperty @Kroll.method
  public int getState()
  {
    return state;
  }

  // Performs the blocking client connect off the main thread and reports the outcome
  // through updateState().
  private class ConnectedSocketThread extends Thread
  {
    public ConnectedSocketThread()
    {
      super("ConnectedSocketThread");
    }

    public void run()
    {
      String host = TiConvert.toString(getProperty("host"));
      Object timeoutProperty = getProperty("timeout");

      try {
        if (timeoutProperty != null) {
          // With a timeout configured, it bounds both the connect and later reads (SO_TIMEOUT).
          int timeout = TiConvert.toInt(timeoutProperty);
          clientSocket = new Socket();
          clientSocket.setSoTimeout(timeout);
          clientSocket.connect(new InetSocketAddress(host, TiConvert.toInt(getProperty("port"))), timeout);
        } else {
          clientSocket = new Socket(host, TiConvert.toInt(getProperty("port")));
        }
        updateState(SocketModule.CONNECTED, "connected", buildConnectedCallbackArgs());
      } catch (UnknownHostException e) {
        e.printStackTrace();
        updateState(SocketModule.ERROR, "error", buildErrorCallbackArgs("Unable to connect, unknown host <" + host + ">", 0));
      } catch (IOException e) {
        e.printStackTrace();
        updateState(SocketModule.ERROR, "error", buildErrorCallbackArgs("Unable to connect, IO error", 0));
      }
    }
  }

  // Polls the "accepting" flag; when armed, blocks in ServerSocket.accept() and wraps the
  // accepted connection in a new TCPProxy handed to the "accepted" callback.
  private class ListeningSocketThread extends Thread
  {
    public ListeningSocketThread()
    {
      super("ListeningSocketThread");
    }

    public void run()
    {
      while (true) {
        if (accepting) {
          try {
            Socket acceptedSocket = serverSocket.accept();

            TCPProxy acceptedTcpProxy = new TCPProxy(context);
            acceptedTcpProxy.clientSocket = acceptedSocket;
            acceptedTcpProxy.setProperty("host", acceptedTcpProxy.clientSocket.getInetAddress());
            acceptedTcpProxy.setProperty("port", acceptedTcpProxy.clientSocket.getPort());

            Object optionValue;
            if ((optionValue = acceptOptions.get("timeout")) != null) {
              acceptedTcpProxy.setProperty("timeout", TiConvert.toInt(optionValue));
            }
            if ((optionValue = acceptOptions.get("error")) != null) {
              if (optionValue instanceof KrollCallback) {
                acceptedTcpProxy.setProperty("error", (KrollCallback) optionValue);
              }
            }
            acceptedTcpProxy.state = SocketModule.CONNECTED;

            Object callback = getProperty("accepted");
            if (callback instanceof KrollCallback) {
              ((KrollCallback) callback).callAsync(buildAcceptedCallbackArgs(acceptedTcpProxy));
            }

            accepting = false;
          } catch (IOException e) {
            // accept() also fails with IOException when close() shuts the server socket down;
            // only report an error if we were still supposed to be listening.
            if (state == SocketModule.LISTENING) {
              e.printStackTrace();
              updateState(SocketModule.ERROR, "error", buildErrorCallbackArgs("Unable to accept new connection, IO error", 0));
            }
            break;
          }
        } else {
          try {
            sleep(500);
          } catch (InterruptedException e) {
            e.printStackTrace();
            Log.e(LCAT, "listening thread interrupted");
          }
        }
      }
    }
  }

  private KrollDict buildConnectedCallbackArgs()
  {
    KrollDict callbackArgs = new KrollDict();
    callbackArgs.put("socket", this);

    return callbackArgs;
  }

  private KrollDict buildErrorCallbackArgs(String error, int errorCode)
  {
    KrollDict callbackArgs = new KrollDict();
    callbackArgs.put("socket", this);
    callbackArgs.put("error", error);
    callbackArgs.put("errorCode", errorCode);

    return callbackArgs;
  }

  private KrollDict buildAcceptedCallbackArgs(TCPProxy acceptedTcpProxy)
  {
    KrollDict callbackArgs = new KrollDict();
    callbackArgs.put("socket", this);
    callbackArgs.put("inbound", acceptedTcpProxy);

    return callbackArgs;
  }

  /**
   * Transitions to the given state and fires the named callback asynchronously.
   * On transition to ERROR, both sockets are closed best-effort first.
   */
  public void updateState(int state, String callbackName, KrollDict callbackArgs)
  {
    this.state = state;

    if (state == SocketModule.ERROR) {
      try {
        if (clientSocket != null) {
          clientSocket.close();
        }
        if (serverSocket != null) {
          serverSocket.close();
        }
      } catch (IOException e) {
        Log.w(LCAT, "unable to close socket in error state");
      }
    }

    Object callback = getProperty(callbackName);
    if (callback instanceof KrollCallback) {
      ((KrollCallback) callback).callAsync(callbackArgs);
    }
  }

  @Kroll.method
  public boolean isConnected()
  {
    // FIX: direct boolean expression instead of if/return true/return false.
    return state == SocketModule.CONNECTED;
  }

  /**
   * Immutable parsed form of the (buffer[, offset, length]) argument list shared by
   * read() and write(). FIX: extracted to remove ~25 duplicated lines in each method.
   */
  private static final class BufferArgs
  {
    final BufferProxy buffer;
    final int offset;
    final int length;

    BufferArgs(BufferProxy buffer, int offset, int length)
    {
      this.buffer = buffer;
      this.offset = offset;
      this.length = length;
    }
  }

  // Coerces an Integer or Double JS number argument to int; anything else is invalid.
  private static int toIntArg(Object arg, String errorMessage)
  {
    if (arg instanceof Integer) {
      return ((Integer) arg).intValue();
    } else if (arg instanceof Double) {
      return ((Double) arg).intValue();
    }
    throw new IllegalArgumentException(errorMessage);
  }

  /**
   * Validates and parses read()/write() arguments. Accepts exactly (buffer) or
   * (buffer, offset, length); with one argument, length defaults to the full buffer.
   *
   * @throws IllegalArgumentException with the same messages the inline parsing used
   */
  private static BufferArgs parseBufferArgs(Object[] args)
  {
    if ((args.length != 1) && (args.length != 3)) {
      throw new IllegalArgumentException("Invalid number of arguments");
    }
    if (!(args[0] instanceof BufferProxy)) {
      throw new IllegalArgumentException("Invalid buffer argument");
    }

    BufferProxy buffer = (BufferProxy) args[0];
    int offset = 0;
    int length = buffer.getLength();
    if (args.length == 3) {
      offset = toIntArg(args[1], "Invalid offset argument");
      length = toIntArg(args[2], "Invalid length argument");
    }

    return new BufferArgs(buffer, offset, length);
  }

  // TiStream interface methods
  @Kroll.method
  public int read(Object args[]) throws IOException
  {
    if (!isConnected()) {
      throw new IOException("Unable to read from socket, not connected");
    }

    BufferArgs bufferArgs = parseBufferArgs(args);

    if (inputStream == null) {
      inputStream = clientSocket.getInputStream();
    }

    try {
      return TiStreamHelper.read(inputStream, bufferArgs.buffer, bufferArgs.offset, bufferArgs.length);
    } catch (IOException e) {
      e.printStackTrace();
      closeSocket();
      updateState(SocketModule.ERROR, "error", buildErrorCallbackArgs("Unable to read from socket, IO error", 0));
      throw new IOException("Unable to read from socket, IO error");
    }
  }

  @Kroll.method
  public int write(Object args[]) throws IOException
  {
    if (!isConnected()) {
      throw new IOException("Unable to write to socket, not connected");
    }

    BufferArgs bufferArgs = parseBufferArgs(args);

    try {
      return TiStreamHelper.write(clientSocket.getOutputStream(), bufferArgs.buffer, bufferArgs.offset, bufferArgs.length);
    } catch (IOException e) {
      e.printStackTrace();
      closeSocket();
      updateState(SocketModule.ERROR, "error", buildErrorCallbackArgs("Unable to write to socket, IO error", 0));
      throw new IOException("Unable to write to socket, IO error");
    }
  }

  @Kroll.method
  public boolean isWritable()
  {
    return isConnected();
  }

  @Kroll.method
  public boolean isReadable()
  {
    return isConnected();
  }

  /**
   * Closes the socket (client or server).
   *
   * @throws IOException if the socket is neither connected nor listening, or closing fails
   */
  @Kroll.method
  public void close() throws IOException
  {
    if ((state != SocketModule.CONNECTED) && (state != SocketModule.LISTENING)) {
      throw new IOException("Socket is not connected or listening, unable to call close on socket in <" + state + "> state");
    }

    try {
      state = 0; // set socket state to uninitialized to prevent use while closing
      closeSocket();
      state = SocketModule.CLOSED;
    } catch (IOException e) {
      e.printStackTrace();
      throw new IOException("Error occured when closing socket");
    }
  }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.fusesource.fabric.zookeeper.curator;

import static org.apache.felix.scr.annotations.ReferenceCardinality.OPTIONAL_MULTIPLE;
import static org.apache.felix.scr.annotations.ReferencePolicy.DYNAMIC;
import static org.fusesource.fabric.zookeeper.curator.Constants.CONNECTION_TIMEOUT;
import static org.fusesource.fabric.zookeeper.curator.Constants.DEFAULT_CONNECTION_TIMEOUT_MS;
import static org.fusesource.fabric.zookeeper.curator.Constants.DEFAULT_RETRY_INTERVAL;
import static org.fusesource.fabric.zookeeper.curator.Constants.DEFAULT_SESSION_TIMEOUT_MS;
import static org.fusesource.fabric.zookeeper.curator.Constants.MAX_RETRIES_LIMIT;
import static org.fusesource.fabric.zookeeper.curator.Constants.RETRY_POLICY_INTERVAL_MS;
import static org.fusesource.fabric.zookeeper.curator.Constants.RETRY_POLICY_MAX_RETRIES;
import static org.fusesource.fabric.zookeeper.curator.Constants.SESSION_TIMEOUT;
import static org.fusesource.fabric.zookeeper.curator.Constants.ZOOKEEPER_PASSWORD;
import static org.fusesource.fabric.zookeeper.curator.Constants.ZOOKEEPER_URL;

import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;

import org.apache.curator.RetryPolicy;
import org.apache.curator.ensemble.fixed.FixedEnsembleProvider;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.CuratorFrameworkFactory;
import org.apache.curator.framework.api.ACLProvider;
import org.apache.curator.framework.state.ConnectionState;
import org.apache.curator.framework.state.ConnectionStateListener;
import org.apache.curator.retry.RetryNTimes;
import org.apache.felix.scr.annotations.Activate;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.ConfigurationPolicy;
import org.apache.felix.scr.annotations.Deactivate;
import org.apache.felix.scr.annotations.Modified;
import org.apache.felix.scr.annotations.Reference;
import org.fusesource.fabric.api.Constants;
import org.fusesource.fabric.api.RuntimeProperties;
import org.fusesource.fabric.api.jcip.ThreadSafe;
import org.fusesource.fabric.api.scr.AbstractComponent;
import org.fusesource.fabric.api.scr.ValidatingReference;
import org.osgi.framework.BundleContext;
import org.osgi.framework.ServiceRegistration;
import org.osgi.service.cm.ConfigurationException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.base.Strings;
import com.google.common.io.Closeables;

/**
 * OSGi-managed factory for the fabric {@link CuratorFramework} ZooKeeper client.
 * Builds a client from the component configuration, registers it as an OSGi
 * service once connected, and rebuilds/re-registers it when the configuration
 * changes in a way that requires a restart or when the ZooKeeper session is LOST.
 * All client (re)builds are serialized on a dedicated single-thread executor.
 */
@ThreadSafe
@Component(name = Constants.ZOOKEEPER_CLIENT_PID, description = "Fabric ZooKeeper Client Factory", policy = ConfigurationPolicy.OPTIONAL, immediate = true)
public final class ManagedCuratorFramework extends AbstractComponent {

    private static final Logger LOGGER = LoggerFactory.getLogger(ManagedCuratorFramework.class);

    @Reference(referenceInterface = RuntimeProperties.class)
    private final ValidatingReference<RuntimeProperties> runtimeProperties = new ValidatingReference<RuntimeProperties>();
    @Reference(referenceInterface = ACLProvider.class)
    private final ValidatingReference<ACLProvider> aclProvider = new ValidatingReference<ACLProvider>();
    @Reference(referenceInterface = ConnectionStateListener.class, bind = "bindConnectionStateListener", unbind = "unbindConnectionStateListener", cardinality = OPTIONAL_MULTIPLE, policy = DYNAMIC)
    private final List<ConnectionStateListener> connectionStateListeners = new CopyOnWriteArrayList<ConnectionStateListener>();

    // private final DynamicEnsembleProvider ensembleProvider = new DynamicEnsembleProvider();
    private BundleContext bundleContext;
    // private CuratorFramework curatorFramework;
    // private ServiceRegistration<CuratorFramework> registration;
    // private Map<String, ?> oldConfiguration;
    // Single thread serializes all client builds/teardowns and state callbacks.
    private final ExecutorService executor = Executors.newSingleThreadExecutor();
    // Holds the currently active State; swapped atomically on (re)configuration.
    private AtomicReference<State> state = new AtomicReference<State>();

    /**
     * One configuration generation of the Curator client. Running it (re)builds
     * the client; it also listens for connection-state changes, registering the
     * client as an OSGi service on CONNECTED and rebuilding it on LOST.
     */
    class State implements ConnectionStateListener, Runnable {
        final Map<String, ?> configuration;
        final AtomicBoolean closed = new AtomicBoolean();
        ServiceRegistration<CuratorFramework> registration;
        CuratorFramework curator;

        State(Map<String, ?> configuration) {
            this.configuration = configuration;
        }

        /**
         * Tears down any existing client/registration and, unless this state
         * has been closed, builds a fresh client from {@link #configuration}.
         * Always executed on the single-thread {@code executor}.
         */
        @Override
        public void run() {
            if (curator != null) {
                curator.getZookeeperClient().stop();
            }
            if (registration != null) {
                registration.unregister();
                registration = null;
            }
            try {
                Closeables.close(curator, true);
            } catch (IOException e) {
                // Should not happen
            }
            curator = null;
            if (!closed.get()) {
                curator = buildCuratorFramework(configuration);
                curator.getConnectionStateListenable().addListener(this, executor);
                // If the client connected before the listener was attached, fire
                // the CONNECTED callback manually so the service gets registered.
                if (curator.getZookeeperClient().isConnected()) {
                    stateChanged(curator, ConnectionState.CONNECTED);
                }
            }
        }

        @Override
        public void stateChanged(CuratorFramework client, ConnectionState newState) {
            if (newState == ConnectionState.CONNECTED) {
                if (registration == null) {
                    registration = bundleContext.registerService(CuratorFramework.class, curator, null);
                }
            }
            // Propagate to all dynamically bound listeners.
            for (ConnectionStateListener listener : connectionStateListeners) {
                listener.stateChanged(client, newState);
            }
            if (newState == ConnectionState.LOST) {
                // Session lost: rebuild the client in place (we are already on
                // the executor thread, so calling run() directly is safe).
                run();
            }
        }

        /**
         * Marks this state closed, stops the client, and waits for the executor
         * to finish the final teardown pass (run() with closed == true).
         */
        public void close() {
            closed.set(true);
            CuratorFramework curator = this.curator;
            if (curator != null) {
                curator.getZookeeperClient().stop();
            }
            try {
                executor.submit(this).get();
            } catch (Exception e) {
                LOGGER.warn("Error while closing curator", e);
            }
        }
    }

    @Activate
    void activate(BundleContext bundleContext, Map<String, ?> configuration) throws ConfigurationException {
        this.bundleContext = bundleContext;
        String zookeeperURL = getZookeeperURL(configuration);
        if (!Strings.isNullOrEmpty(zookeeperURL)) {
            State next = new State(configuration);
            // CAS from null guards against a racing modified()/deactivate().
            if (state.compareAndSet(null, next)) {
                executor.submit(next);
            }
        }
        activateComponent();
    }

    @Modified
    void modified(Map<String, ?> configuration) throws ConfigurationException {
        String zookeeperURL = getZookeeperURL(configuration);
        if (!Strings.isNullOrEmpty(zookeeperURL)) {
            State prev = state.get();
            Map<String, ?> oldConfiguration = prev != null ? prev.configuration : null;
            if (isRestartRequired(oldConfiguration, configuration)) {
                State next = new State(configuration);
                if (state.compareAndSet(prev, next)) {
                    executor.submit(next);
                    if (prev != null) {
                        prev.close();
                    }
                } else {
                    // Lost the race against a concurrent swap; discard our state.
                    next.close();
                }
            }
        }
    }

    @Deactivate
    void deactivate() throws IOException {
        deactivateComponent();
        State prev = state.getAndSet(null);
        if (prev != null) {
            prev.close();
        }
        executor.shutdownNow();
    }

    /**
     * Resolves the ZooKeeper URL from the configuration, falling back to the
     * runtime properties when the configuration value is absent or empty.
     */
    private String getZookeeperURL(Map<String, ?> configuration) {
        String zookeeperURL = null;
        if (configuration != null) {
            RuntimeProperties sysprops = runtimeProperties.get();
            zookeeperURL = (String) configuration.get(ZOOKEEPER_URL);
            zookeeperURL = Strings.isNullOrEmpty(zookeeperURL) ? sysprops.getProperty(ZOOKEEPER_URL) : zookeeperURL;
        }
        return zookeeperURL;
    }

    /**
     * Builds and starts a {@link org.apache.curator.framework.CuratorFramework}
     * from the specified configuration map, adding "digest" authorization when
     * a ZooKeeper password is configured.
     */
    private synchronized CuratorFramework buildCuratorFramework(Map<String, ?> properties) {
        RuntimeProperties sysprops = runtimeProperties.get();
        String connectionString = readString(properties, ZOOKEEPER_URL, sysprops.getProperty(ZOOKEEPER_URL, ""));
        int sessionTimeoutMs = readInt(properties, SESSION_TIMEOUT, DEFAULT_SESSION_TIMEOUT_MS);
        int connectionTimeoutMs = readInt(properties, CONNECTION_TIMEOUT, DEFAULT_CONNECTION_TIMEOUT_MS);
        CuratorFrameworkFactory.Builder builder = CuratorFrameworkFactory.builder()
                .ensembleProvider(new FixedEnsembleProvider(connectionString))
                .connectionTimeoutMs(connectionTimeoutMs)
                .sessionTimeoutMs(sessionTimeoutMs)
                .retryPolicy(buildRetryPolicy(properties));
        if (isAuthorizationConfigured(properties)) {
            String scheme = "digest";
            String password = readString(properties, ZOOKEEPER_PASSWORD, sysprops.getProperty(ZOOKEEPER_PASSWORD, ""));
            byte[] auth = ("fabric:" + password).getBytes();
            builder = builder.authorization(scheme, auth).aclProvider(aclProvider.get());
        }
        CuratorFramework framework = builder.build();
        for (ConnectionStateListener listener : connectionStateListeners) {
            framework.getConnectionStateListenable().addListener(listener);
        }
        framework.start();
        return framework;
    }

    /**
     * Builds a {@link org.apache.curator.retry.RetryNTimes} retry policy from the
     * configuration map (max retries and retry interval).
     */
    private RetryPolicy buildRetryPolicy(Map<String, ?> properties) {
        int maxRetries = readInt(properties, RETRY_POLICY_MAX_RETRIES, MAX_RETRIES_LIMIT);
        int intervalMs = readInt(properties, RETRY_POLICY_INTERVAL_MS, DEFAULT_RETRY_INTERVAL);
        return new RetryNTimes(maxRetries, intervalMs);
    }

    /**
     * Returns true if configuration contains authorization configuration
     * (a ZooKeeper password in the properties or the runtime properties).
     */
    private boolean isAuthorizationConfigured(Map<String, ?> properties) {
        String zkpass = properties != null ? (String) properties.get(ZOOKEEPER_PASSWORD) : null;
        if (zkpass == null) {
            zkpass = runtimeProperties.get().getProperty(ZOOKEEPER_PASSWORD);
        }
        return !Strings.isNullOrEmpty(zkpass);
    }

    /**
     * Returns true if the new configuration differs from the old one in any of
     * the properties that require rebuilding the Curator client.
     */
    private boolean isRestartRequired(Map<String, ?> oldProperties, Map<String, ?> properties) {
        if (!propertyEquals(oldProperties, properties, ZOOKEEPER_URL)) {
            return true;
        } else if (!propertyEquals(oldProperties, properties, ZOOKEEPER_PASSWORD)) {
            return true;
        } else if (!propertyEquals(oldProperties, properties, CONNECTION_TIMEOUT)) {
            return true;
        } else if (!propertyEquals(oldProperties, properties, SESSION_TIMEOUT)) {
            return true;
        } else if (!propertyEquals(oldProperties, properties, RETRY_POLICY_MAX_RETRIES)) {
            return true;
        } else if (!propertyEquals(oldProperties, properties, RETRY_POLICY_INTERVAL_MS)) {
            return true;
        } else {
            return false;
        }
    }

    /**
     * Null-safe comparison of a single property across two maps; two missing
     * values (null map or null entry) are considered equal.
     */
    private boolean propertyEquals(Map<String, ?> left, Map<String, ?> right, String name) {
        if (left == null || right == null || left.get(name) == null || right.get(name) == null) {
            return (left == null || left.get(name) == null) && (right == null || right.get(name) == null);
        } else {
            return left.get(name).equals(right.get(name));
        }
    }

    /**
     * Reads a String value from the specified configuration.
     *
     * @param props The source props.
     * @param key The key of the property to read.
     * @param defaultValue The default value.
     * @return The value read or the defaultValue if any error occurs.
     */
    private static String readString(Map<String, ?> props, String key, String defaultValue) {
        try {
            Object obj = props.get(key);
            if (obj instanceof String) {
                return (String) obj;
            } else {
                return defaultValue;
            }
        } catch (Exception e) {
            return defaultValue;
        }
    }

    /**
     * Reads an int value from the specified configuration.
     *
     * @param props The source props.
     * @param key The key of the property to read.
     * @param defaultValue The default value.
     * @return The value read or the defaultValue if any error occurs.
     */
    private static int readInt(Map<String, ?> props, String key, int defaultValue) {
        try {
            Object obj = props.get(key);
            if (obj instanceof Number) {
                return ((Number) obj).intValue();
            } else if (obj instanceof String) {
                return Integer.parseInt((String) obj);
            } else {
                return defaultValue;
            }
        } catch (Exception e) {
            return defaultValue;
        }
    }

    // SCR dynamic bind: also replays CONNECTED to late-bound listeners so they
    // observe the current connection state.
    void bindConnectionStateListener(ConnectionStateListener connectionStateListener) {
        connectionStateListeners.add(connectionStateListener);
        State curr = state.get();
        CuratorFramework curator = curr != null ? curr.curator : null;
        if (curator != null && curator.getZookeeperClient().isConnected()) {
            connectionStateListener.stateChanged(curator, ConnectionState.CONNECTED);
        }
    }

    void unbindConnectionStateListener(ConnectionStateListener connectionStateListener) {
        connectionStateListeners.remove(connectionStateListener);
    }

    void bindRuntimeProperties(RuntimeProperties service) {
        this.runtimeProperties.bind(service);
    }

    void unbindRuntimeProperties(RuntimeProperties service) {
        this.runtimeProperties.unbind(service);
    }

    void bindAclProvider(ACLProvider aclProvider) {
        this.aclProvider.bind(aclProvider);
    }

    void unbindAclProvider(ACLProvider aclProvider) {
        this.aclProvider.unbind(aclProvider);
    }
}
/*
 * Copyright 2015 The Netty Project
 *
 * The Netty Project licenses this file to you under the Apache License,
 * version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package io.netty.channel.pool;

import io.netty.bootstrap.Bootstrap;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.Channel;
import io.netty.channel.ChannelInboundHandlerAdapter;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.local.LocalAddress;
import io.netty.channel.local.LocalChannel;
import io.netty.channel.local.LocalEventLoopGroup;
import io.netty.channel.local.LocalServerChannel;
import io.netty.util.concurrent.Future;
import org.hamcrest.CoreMatchers;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

import java.util.Queue;
import java.util.concurrent.LinkedBlockingQueue;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;

/**
 * Tests for {@link SimpleChannelPool} using Netty's in-VM local transport
 * (no real sockets): acquire/release round-trips, bounded pool segments, and
 * health-check behavior on release.
 */
public class SimpleChannelPoolTest {
    private static final String LOCAL_ADDR_ID = "test.id";

    @Rule
    public ExpectedException expectedException = ExpectedException.none();

    /**
     * A released channel must be handed back on the next acquire, and a second
     * release of the same channel must fail with IllegalArgumentException.
     */
    @Test
    public void testAcquire() throws Exception {
        EventLoopGroup group = new LocalEventLoopGroup();
        LocalAddress addr = new LocalAddress(LOCAL_ADDR_ID);
        Bootstrap cb = new Bootstrap();
        cb.remoteAddress(addr);
        cb.group(group)
          .channel(LocalChannel.class);

        ServerBootstrap sb = new ServerBootstrap();
        sb.group(group)
          .channel(LocalServerChannel.class)
          .childHandler(new ChannelInitializer<LocalChannel>() {
              @Override
              public void initChannel(LocalChannel ch) throws Exception {
                  ch.pipeline().addLast(new ChannelInboundHandlerAdapter());
              }
          });

        // Start server
        Channel sc = sb.bind(addr).sync().channel();
        CountingChannelPoolHandler handler = new CountingChannelPoolHandler();

        ChannelPool pool = new SimpleChannelPool(cb, handler);

        Channel channel = pool.acquire().sync().getNow();
        pool.release(channel).syncUninterruptibly();

        // Second acquire must reuse the pooled channel, not create a new one.
        Channel channel2 = pool.acquire().sync().getNow();
        assertSame(channel, channel2);
        assertEquals(1, handler.channelCount());
        pool.release(channel2).syncUninterruptibly();

        // Should fail on multiple release calls.
        try {
            pool.release(channel2).syncUninterruptibly();
            fail();
        } catch (IllegalArgumentException e) {
            // expected — the double-released channel also gets closed by the pool
            assertFalse(channel.isActive());
        }

        assertEquals(1, handler.acquiredCount());
        assertEquals(2, handler.releasedCount());

        sc.close().sync();
        group.shutdownGracefully();
    }

    /**
     * A pool whose backing queue holds only one channel must reject the second
     * release with IllegalStateException.
     */
    @Test
    public void testBoundedChannelPoolSegment() throws Exception {
        EventLoopGroup group = new LocalEventLoopGroup();
        LocalAddress addr = new LocalAddress(LOCAL_ADDR_ID);
        Bootstrap cb = new Bootstrap();
        cb.remoteAddress(addr);
        cb.group(group)
          .channel(LocalChannel.class);

        ServerBootstrap sb = new ServerBootstrap();
        sb.group(group)
          .channel(LocalServerChannel.class)
          .childHandler(new ChannelInitializer<LocalChannel>() {
              @Override
              public void initChannel(LocalChannel ch) throws Exception {
                  ch.pipeline().addLast(new ChannelInboundHandlerAdapter());
              }
          });

        // Start server
        Channel sc = sb.bind(addr).sync().channel();
        CountingChannelPoolHandler handler = new CountingChannelPoolHandler();

        // Override the storage with a capacity-1 queue to simulate a bounded segment.
        ChannelPool pool = new SimpleChannelPool(cb, handler, ChannelHealthChecker.ACTIVE) {
            private final Queue<Channel> queue = new LinkedBlockingQueue<Channel>(1);

            @Override
            protected Channel pollChannel() {
                return queue.poll();
            }

            @Override
            protected boolean offerChannel(Channel ch) {
                return queue.offer(ch);
            }
        };

        Channel channel = pool.acquire().sync().getNow();
        Channel channel2 = pool.acquire().sync().getNow();

        pool.release(channel).syncUninterruptibly().getNow();
        try {
            // Queue is full after the first release, so this offer must fail.
            pool.release(channel2).syncUninterruptibly();
            fail();
        } catch (IllegalStateException e) {
            // expected
        }
        channel2.close().sync();

        assertEquals(2, handler.channelCount());
        assertEquals(0, handler.acquiredCount());
        assertEquals(1, handler.releasedCount());
        sc.close().sync();
        channel.close().sync();
        // NOTE(review): channel2 was already closed above; this second close is
        // redundant — presumably a no-op in Netty, but worth confirming/cleaning up.
        channel2.close().sync();
        group.shutdownGracefully();
    }

    /**
     * Tests that if channel was unhealthy it is not offered back to the pool.
     *
     * @throws Exception
     */
    @Test
    public void testUnhealthyChannelIsNotOffered() throws Exception {
        EventLoopGroup group = new LocalEventLoopGroup();
        LocalAddress addr = new LocalAddress(LOCAL_ADDR_ID);
        Bootstrap cb = new Bootstrap();
        cb.remoteAddress(addr);
        cb.group(group)
          .channel(LocalChannel.class);

        ServerBootstrap sb = new ServerBootstrap();
        sb.group(group)
          .channel(LocalServerChannel.class)
          .childHandler(new ChannelInitializer<LocalChannel>() {
              @Override
              public void initChannel(LocalChannel ch) throws Exception {
                  ch.pipeline().addLast(new ChannelInboundHandlerAdapter());
              }
          });

        // Start server
        Channel sc = sb.bind(addr).syncUninterruptibly().channel();
        ChannelPoolHandler handler = new CountingChannelPoolHandler();
        ChannelPool pool = new SimpleChannelPool(cb, handler);
        Channel channel1 = pool.acquire().syncUninterruptibly().getNow();
        pool.release(channel1).syncUninterruptibly();

        Channel channel2 = pool.acquire().syncUninterruptibly().getNow();
        //first check that when returned healthy then it actually offered back to the pool.
        assertSame(channel1, channel2);

        // Releasing a closed (unhealthy) channel must fail the release future.
        expectedException.expect(IllegalStateException.class);
        channel1.close().syncUninterruptibly();
        try {
            pool.release(channel1).syncUninterruptibly();
        } finally {
            sc.close().syncUninterruptibly();
            channel2.close().syncUninterruptibly();
            group.shutdownGracefully();
        }
    }

    /**
     * Tests that if channel was unhealthy it is was offered back to the pool because
     * it was requested not to validate channel health on release.
     *
     * @throws Exception
     */
    @Test
    public void testUnhealthyChannelIsOfferedWhenNoHealthCheckRequested() throws Exception {
        EventLoopGroup group = new LocalEventLoopGroup();
        LocalAddress addr = new LocalAddress(LOCAL_ADDR_ID);
        Bootstrap cb = new Bootstrap();
        cb.remoteAddress(addr);
        cb.group(group)
          .channel(LocalChannel.class);

        ServerBootstrap sb = new ServerBootstrap();
        sb.group(group)
          .channel(LocalServerChannel.class)
          .childHandler(new ChannelInitializer<LocalChannel>() {
              @Override
              public void initChannel(LocalChannel ch) throws Exception {
                  ch.pipeline().addLast(new ChannelInboundHandlerAdapter());
              }
          });

        // Start server
        Channel sc = sb.bind(addr).syncUninterruptibly().channel();
        ChannelPoolHandler handler = new CountingChannelPoolHandler();
        // releaseHealthCheck == false: closed channels may still be offered back.
        ChannelPool pool = new SimpleChannelPool(cb, handler, ChannelHealthChecker.ACTIVE, false);
        Channel channel1 = pool.acquire().syncUninterruptibly().getNow();
        channel1.close().syncUninterruptibly();

        Future<Void> releaseFuture =
                pool.release(channel1, channel1.eventLoop().<Void>newPromise()).syncUninterruptibly();
        assertThat(releaseFuture.isSuccess(), CoreMatchers.is(true));

        // The acquire-side health check still rejects the dead channel, so a
        // fresh one is created.
        Channel channel2 = pool.acquire().syncUninterruptibly().getNow();
        //verifying that in fact the channel2 is different that means is not pulled from the pool
        assertNotSame(channel1, channel2);
        sc.close().syncUninterruptibly();
        channel2.close().syncUninterruptibly();
        group.shutdownGracefully();
    }
}
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ide.actions;

import com.intellij.ide.IdeBundle;
import com.intellij.ide.util.PropertiesComponent;
import com.intellij.idea.ActionsBundle;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.actionSystem.impl.Utils;
import com.intellij.openapi.editor.impl.EditorHeaderComponent;
import com.intellij.openapi.project.DumbAware;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.wm.ToolWindow;
import com.intellij.ui.content.Content;
import com.intellij.ui.content.ContentManager;
import com.intellij.ui.content.ContentManagerEvent;
import com.intellij.ui.content.ContentManagerListener;
import com.intellij.util.SmartList;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import java.awt.*;
import java.util.Collections;
import java.util.List;
import java.util.function.Supplier;

/**
 * Toggle action that shows/hides the {@link ActionToolbar}s found (via UI
 * traversal) under a set of root components — typically a tool window's
 * content — persisting the chosen visibility in {@link PropertiesComponent}
 * under a per-target "*.ShowToolbar" property.
 *
 * @author gregsh
 */
public final class ToggleToolbarAction extends ToggleAction implements DumbAware {
  /** Builds the "View Options" group plus the toolbar toggle for a tool window's gear menu. */
  @NotNull
  public static DefaultActionGroup createToggleToolbarGroup(@NotNull Project project, @NotNull ToolWindow toolWindow) {
    return new DefaultActionGroup(new OptionsGroup(toolWindow),
                                  createToolWindowAction(toolWindow, PropertiesComponent.getInstance(project)));
  }

  /** Creates a toggle for arbitrary components, keyed by a caller-chosen {@code id}. */
  @NotNull
  public static ToggleToolbarAction createAction(@NotNull String id,
                                                 @NotNull PropertiesComponent properties,
                                                 @NotNull Supplier<? extends Iterable<? extends JComponent>> components) {
    return new ToggleToolbarAction(properties, getShowToolbarProperty(id), components);
  }

  /**
   * Creates the toggle for a tool window and wires a content listener so newly
   * added/selected content immediately gets the persisted toolbar visibility.
   */
  @NotNull
  public static ToggleToolbarAction createToolWindowAction(@NotNull ToolWindow toolWindow,
                                                           @NotNull PropertiesComponent properties) {
    updateToolbarsVisibility(toolWindow, properties);
    toolWindow.addContentManagerListener(new ContentManagerListener() {
      @Override
      public void contentAdded(@NotNull ContentManagerEvent event) {
        JComponent component = event.getContent().getComponent();
        setToolbarVisible(Collections.singletonList(component), isToolbarVisible(toolWindow, properties));

        // support nested content managers, e.g. RunnerLayoutUi as content component
        ContentManager contentManager =
          component instanceof DataProvider ? PlatformDataKeys.CONTENT_MANAGER.getData((DataProvider)component) : null;
        if (contentManager != null) {
          contentManager.addContentManagerListener(this);
        }
      }

      @Override
      public void selectionChanged(@NotNull ContentManagerEvent event) {
        if (event.getOperation() != ContentManagerEvent.ContentOperation.remove) {
          updateToolbarsVisibility(toolWindow, properties);
        }
      }
    });
    return new ToggleToolbarAction(properties, getShowToolbarProperty(toolWindow), () -> {
      return Collections.singletonList(toolWindow.getContentManager().getComponent());
    });
  }

  /** Re-applies the persisted visibility to the tool window's toolbars, if its content exists. */
  public static void updateToolbarsVisibility(@NotNull ToolWindow toolWindow,
                                              @NotNull PropertiesComponent properties) {
    if (toolWindow.getContentManagerIfCreated() != null) {
      setToolbarVisible(Collections.singletonList(toolWindow.getComponent()), isToolbarVisible(toolWindow, properties));
    }
  }

  /** Sets (or, when {@code visible} is null, re-applies) toolbar visibility for a tool window. */
  public static void setToolbarVisible(@NotNull ToolWindow toolWindow,
                                       @NotNull PropertiesComponent properties,
                                       @Nullable Boolean visible) {
    boolean state = visible == null ? isToolbarVisible(toolWindow, properties) : visible;
    setToolbarVisibleImpl(getShowToolbarProperty(toolWindow), properties,
                          Collections.singletonList(toolWindow.getComponent()), state);
  }

  /** Sets (or, when {@code visible} is null, re-applies) toolbar visibility for {@code id}-keyed components. */
  public static void setToolbarVisible(@NotNull String id,
                                       @NotNull PropertiesComponent properties,
                                       @NotNull Iterable<? extends JComponent> components,
                                       @Nullable Boolean visible) {
    boolean state = visible == null ? isToolbarVisible(id, properties) : visible;
    setToolbarVisibleImpl(getShowToolbarProperty(id), properties, components, state);
  }

  /** Applies {@code state} to every toolbar found under {@code roots} (no persistence). */
  public static void setToolbarVisible(@NotNull Iterable<? extends JComponent> roots, boolean state) {
    for (ActionToolbar toolbar : iterateToolbars(roots)) {
      JComponent c = toolbar.getComponent();
      c.setVisible(state);
      Container parent = c.getParent();
      // Editor header panels wrap their toolbar; hide the wrapper too.
      if (parent instanceof EditorHeaderComponent) {
        parent.setVisible(state);
      }
    }
  }

  public static boolean isToolbarVisible(@NotNull String property) {
    return isToolbarVisible(property, PropertiesComponent.getInstance());
  }

  public static boolean isToolbarVisible(@NotNull String property, @NotNull Project project) {
    return isToolbarVisible(property, PropertiesComponent.getInstance(project));
  }

  public static boolean isToolbarVisible(@NotNull String property, @NotNull PropertiesComponent properties) {
    return isSelectedImpl(properties, getShowToolbarProperty(property));
  }

  public static boolean isToolbarVisible(@NotNull ToolWindow toolWindow) {
    return isToolbarVisible(toolWindow, PropertiesComponent.getInstance());
  }

  public static boolean isToolbarVisible(@NotNull ToolWindow toolWindow, @NotNull Project project) {
    return isToolbarVisible(toolWindow, PropertiesComponent.getInstance(project));
  }

  public static boolean isToolbarVisible(@NotNull ToolWindow toolWindow, @NotNull PropertiesComponent properties) {
    return isSelectedImpl(properties, getShowToolbarProperty(toolWindow));
  }

  private final PropertiesComponent myPropertiesComponent;
  private final String myProperty;
  // Supplies the root components to traverse for toolbars on each update.
  private final Supplier<? extends Iterable<? extends JComponent>> myProducer;

  private ToggleToolbarAction(@NotNull PropertiesComponent propertiesComponent,
                              @NotNull String property,
                              @NotNull Supplier<? extends Iterable<? extends JComponent>> producer) {
    super(ActionsBundle.messagePointer("action.ShowToolbar.text"));
    myPropertiesComponent = propertiesComponent;
    myProperty = property;
    myProducer = producer;
  }

  @Override
  public void update(@NotNull AnActionEvent e) {
    super.update(e);
    // Hide the action entirely when there is no toolbar to toggle.
    boolean hasToolbars = iterateToolbars(myProducer.get()).iterator().hasNext();
    e.getPresentation().setVisible(hasToolbars);
  }

  @Override
  public boolean isSelected(@NotNull AnActionEvent e) {
    return isSelected();
  }

  @Override
  public void setSelected(@NotNull AnActionEvent e, boolean state) {
    setToolbarVisibleImpl(myProperty, myPropertiesComponent, myProducer.get(), state);
  }

  // Persists the flag (default true) and applies it to the components.
  static void setToolbarVisibleImpl(@NotNull String property,
                                    @NotNull PropertiesComponent propertiesComponent,
                                    @NotNull Iterable<? extends JComponent> components,
                                    boolean visible) {
    propertiesComponent.setValue(property, String.valueOf(visible), String.valueOf(true));
    setToolbarVisible(components, visible);
  }

  boolean isSelected() {
    return isSelectedImpl(myPropertiesComponent, myProperty);
  }

  // Toolbars are visible by default.
  static boolean isSelectedImpl(@NotNull PropertiesComponent properties, @NotNull String property) {
    return properties.getBoolean(property, true);
  }

  @NotNull
  static String getShowToolbarProperty(@NotNull ToolWindow window) {
    return getShowToolbarProperty("ToolWindow" + window.getStripeTitle());
  }

  @NotNull
  static String getShowToolbarProperty(@NotNull @NonNls String s) {
    return s + ".ShowToolbar";
  }

  /** Depth-first UI traversal collecting every {@link ActionToolbar} under the roots. */
  @NotNull
  private static Iterable<ActionToolbar> iterateToolbars(Iterable<? extends JComponent> roots) {
    return UIUtil.uiTraverser(null).withRoots(roots).preOrderDfsTraversal().filter(ActionToolbar.class);
  }

  /**
   * The "View Options" popup: collects the toggle actions of the selected
   * content's currently hidden toolbars so they stay reachable.
   */
  private static class OptionsGroup extends NonTrivialActionGroup implements DumbAware {

    private final ToolWindow myToolWindow;

    OptionsGroup(ToolWindow toolWindow) {
      getTemplatePresentation().setText(IdeBundle.message("group.view.options"));
      setPopup(true);
      myToolWindow = toolWindow;
    }

    @Override
    public AnAction @NotNull [] getChildren(@Nullable AnActionEvent e) {
      ContentManager contentManager = myToolWindow.getContentManagerIfCreated();
      Content selectedContent = contentManager == null ? null : contentManager.getSelectedContent();
      JComponent contentComponent = selectedContent == null ? null : selectedContent.getComponent();
      if (contentComponent == null || e == null) return EMPTY_ARRAY;
      UpdateSession session = Utils.getOrCreateUpdateSession(e);
      List<AnAction> result = new SmartList<>();
      for (final ActionToolbar toolbar : iterateToolbars(Collections.singletonList(contentComponent))) {
        JComponent c = toolbar.getComponent();
        // Only surface actions from toolbars that are hidden (or not laid out).
        if (c.isVisible() || !c.isValid()) continue;
        if (!result.isEmpty() && !(ContainerUtil.getLastItem(result) instanceof Separator)) {
          result.add(Separator.getInstance());
        }

        List<AnAction> actions = toolbar.getActions();
        for (AnAction action : actions) {
          if (action instanceof ToggleAction && !result.contains(action) &&
              session.presentation(action).isVisible()) {
            result.add(action);
          }
          else if (action instanceof Separator) {
            if (!result.isEmpty() && !(ContainerUtil.getLastItem(result) instanceof Separator)) {
              result.add(Separator.getInstance());
            }
          }
        }
      }
      // Collapse into a popup only when more than three real actions were collected.
      boolean popup = ContainerUtil.count(result, it -> !(it instanceof Separator)) > 3;
      setPopup(popup);
      if (!popup && !result.isEmpty()) result.add(Separator.getInstance());
      return result.toArray(AnAction.EMPTY_ARRAY);
    }
  }
}
package com.purediscovery.vennlayout.model.geom; /** * From: http://stackoverflow.com/questions/2263272/how-to-calculate-the-area-of-a-java-awt-geom-area */ import static java.lang.Double.NaN; import java.awt.geom.*; import java.util.ArrayList; public abstract class AreaUtil { public static double approxArea(Area area) { PathIterator i = area.getPathIterator(identity); return approxArea(i); } public static double approxArea(GeneralPath p) { PathIterator i = p.getPathIterator(identity); return approxArea(i); } public static double approxArea(PathIterator i) { double a = 0.0; double[] coords = new double[6]; double startX = NaN, startY = NaN; Line2D segment = new Line2D.Double(NaN, NaN, NaN, NaN); while (!i.isDone()) { int segType = i.currentSegment(coords); double x = coords[0], y = coords[1]; switch (segType) { case PathIterator.SEG_CLOSE: segment.setLine(segment.getX2(), segment.getY2(), startX, startY); a += hexArea(segment); startX = startY = NaN; segment.setLine(NaN, NaN, NaN, NaN); break; case PathIterator.SEG_LINETO: segment.setLine(segment.getX2(), segment.getY2(), x, y); a += hexArea(segment); break; case PathIterator.SEG_MOVETO: startX = x; startY = y; segment.setLine(NaN, NaN, x, y); break; default: throw new IllegalArgumentException("PathIterator contains curved segments"); } i.next(); } if (Double.isNaN(a)) { throw new IllegalArgumentException("PathIterator contains an open path"); } else { return 0.5 * Math.abs(a); } } private static double hexArea(Line2D seg) { return seg.getX1() * seg.getY2() - seg.getX2() * seg.getY1(); } private static final AffineTransform identity = new AffineTransform(); static public GeneralPath makeCircle(double xCenter, double yCenter, double r, int nPoints) { if (nPoints < 4) throw new RuntimeException("too few points. 
n=" + nPoints); GeneralPath gp = new GeneralPath(); for (int i = 0; i < nPoints; i++) { double angle = i / (double) nPoints * Math.PI * 2; double x = r * Math.cos(angle) + xCenter; double y = r * Math.sin(angle) + yCenter; if (i == 0) gp.moveTo(x, y); else gp.lineTo(x, y); } gp.closePath(); return gp; } public static double perimeter(GeneralPath gp) { return perimeter(gp.getPathIterator(identity)); } public static double perimeter(Area area) { PathIterator i = area.getPathIterator(identity); return perimeter(i); } private static double perimeter(PathIterator i) { double perimeter = 0.0; double[] coords = new double[6]; double startX = NaN, startY = NaN; Line2D segment = new Line2D.Double(NaN, NaN, NaN, NaN); while (!i.isDone()) { int segType = i.currentSegment(coords); double x = coords[0], y = coords[1]; switch (segType) { case PathIterator.SEG_CLOSE: segment.setLine(segment.getX2(), segment.getY2(), startX, startY); perimeter += length(segment); startX = startY = NaN; segment.setLine(NaN, NaN, NaN, NaN); break; case PathIterator.SEG_LINETO: segment.setLine(segment.getX2(), segment.getY2(), x, y); perimeter += length(segment); break; case PathIterator.SEG_MOVETO: startX = x; startY = y; segment.setLine(NaN, NaN, x, y); break; default: throw new IllegalArgumentException("PathIterator contains curved segments"); } i.next(); } if (Double.isNaN(perimeter)) throw new IllegalArgumentException("PathIterator contains an open path"); return perimeter; } private static double length(Line2D segment) { double x = segment.getX1() - segment.getX2(); double y = segment.getY1() - segment.getY2(); return Math.sqrt(x * x + y * y); } public static Point2D.Double center(GeneralPath generalPath) { return center(generalPath.getPathIterator(identity)); } public static Point2D.Double center(PathIterator i) { double[] coords = new double[6]; double xTotal = 0; double yTotal = 0; int count = 0; while (!i.isDone()) { int segType = i.currentSegment(coords); if (segType != 
PathIterator.SEG_CLOSE) { xTotal += coords[0]; yTotal += coords[1]; count++; } i.next(); } return new Point2D.Double(xTotal / count, yTotal / count); } public static double averageRadius(GeneralPath generalPath) { Point2D.Double center = center(generalPath.getPathIterator(identity)); PathIterator i = generalPath.getPathIterator(identity); double[] coords = new double[6]; double rTotal = 0; int count = 0; while (!i.isDone()) { int segType = i.currentSegment(coords); if (segType != PathIterator.SEG_CLOSE) { double x = center.x - coords[0]; double y = center.y - coords[1]; rTotal += Math.sqrt(x * x + y * y); count++; } i.next(); } return rTotal / count; } public static int getClosedPathLength(GeneralPath generalPath) { int c = 0; PathIterator p = generalPath.getPathIterator(identity); while (!p.isDone()) { c++; p.next(); } if (c > 0) // for closed paths c--; return c; } public static ArrayList<double[]> getPoints(GeneralPath generalPath) { ArrayList<double[]> list = new ArrayList<double[]>(); PathIterator i = generalPath.getPathIterator(identity); while (!i.isDone()) { double[] coords = new double[2]; int segType = i.currentSegment(coords); if (segType != PathIterator.SEG_CLOSE) { list.add(coords); } i.next(); } return list; } }
/**
 * OLAT - Online Learning and Training<br>
 * http://www.olat.org
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); <br>
 * you may not use this file except in compliance with the License.<br>
 * You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing,<br>
 * software distributed under the License is distributed on an "AS IS" BASIS, <br>
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
 * See the License for the specific language governing permissions and <br>
 * limitations under the License.
 * <p>
 * Copyright (c) 1999-2006 at Multimedia- & E-Learning Services (MELS),<br>
 * University of Zurich, Switzerland.
 * <p>
 */
package org.olat.presentation.filebrowser.components;

import java.text.DateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.commons.lang.StringEscapeUtils;
import org.olat.data.basesecurity.Identity;
import org.olat.data.commons.vfs.AbstractVirtualContainer;
import org.olat.data.commons.vfs.FolderConfig;
import org.olat.data.commons.vfs.OlatRelPathImpl;
import org.olat.data.commons.vfs.VFSConstants;
import org.olat.data.commons.vfs.VFSItem;
import org.olat.data.commons.vfs.VFSLeaf;
import org.olat.data.commons.vfs.version.Versionable;
import org.olat.data.commons.vfs.version.Versions;
import org.olat.data.filebrowser.metadata.MetaInfo;
import org.olat.data.user.UserConstants;
import org.olat.lms.commons.filemetadata.FileMetadataInfoHelper;
import org.olat.lms.commons.filemetadata.FileMetadataInfoService;
import org.olat.lms.user.UserService;
import org.olat.presentation.filebrowser.FileSelection;
import org.olat.presentation.framework.core.control.generic.folder.FolderHelper;
import org.olat.presentation.framework.core.control.winmgr.AJAXFlags;
import org.olat.presentation.framework.core.render.StringOutput;
import org.olat.presentation.framework.core.render.URLBuilder;
import org.olat.presentation.framework.core.translator.Translator;
import org.olat.presentation.framework.core.util.CSSHelper;
import org.olat.system.commons.Formatter;
import org.olat.system.commons.StringHelper;
import org.olat.system.spring.CoreSpringFactory;

/**
 * Renders the contents of a folder (briefcase) as an HTML table: one row per
 * file/directory with name, size, type, optional version number, modification
 * date, lock status and per-row action icons (edit, versions, ePortfolio,
 * metadata).
 * <p>
 * NOTE(review): instances are stateful — {@link #bgFlag} is toggled per row to
 * alternate row background classes — so a single {@code ListRenderer} instance
 * is not safe for concurrent {@code render} calls; confirm callers use one
 * instance per request/component.
 * <p>
 * Initial Date: Feb 12, 2004
 *
 * @author Mike Stock
 */
public class ListRenderer {

    /** Edit parameter identifier. */
    public static final String PARAM_EDTID = "fcedt";
    /** Edit parameter identifier. */
    public static final String PARAM_CONTENTEDITID = "contentedit";
    /** Serve resource identifier */
    public static final String PARAM_SERV = "serv";
    /** Sort parameter identifier. */
    public static final String PARAM_SORTID = "fcsrt";
    /** View version parameter identifier. */
    public static final String PARAM_VERID = "fcver";
    /** Add to ePortfolio parameter identifier. */
    public static final String PARAM_EPORT = "epadd";
    /** View thumbnail */
    public static final String PARAM_SERV_THUMBNAIL = "servthumb";

    /** dummy file types */
    private static final String TYPE_FILE = "file";

    // Toggled once per rendered row to alternate the "b_table_odd" css class.
    private boolean bgFlag = true;

    /**
     * Default constructor.
     */
    public ListRenderer() {
        super();
    }

    /**
     * Render contents of directory to a html table.
     *
     * @param fc the folder component whose current container's children are rendered
     * @param ubu URL builder used to create sort/action links
     * @param translator translator for column headers and labels
     * @param iframePostEnabled when true, links target the AJAX background iframe
     * @return Render results.
     */
    public String render(FolderComponent fc, URLBuilder ubu, Translator translator, boolean iframePostEnabled) {
        StringOutput sb = new StringOutput();
        List<VFSItem> children = fc.getCurrentContainerChildren();
        // folder empty?
        if (children.size() == 0) {
            sb.append("<div class=\"b_briefcase_empty\">");
            sb.append(translator.translate("NoFiles"));
            sb.append("</div>");
            return sb.toString();
        }
        boolean canVersion = FolderConfig.versionsEnabled(fc.getCurrentContainer());

        sb.append("<table class=\"b_briefcase_filetable\">");
        // header
        sb.append("<thead><tr><th class=\"b_briefcase_col_name b_first_child\">");
        // TODO:laeb: set css class depending on sorting state like following and add sort arrow pics as css background image
        // String cssClass;
        // if (FolderComponent.sortCol.equals(FolderComponent.SORT_NAME)) {
        // if (FolderComponent.sortAsc) cssClass = "o_cfc_col_srt_asc";
        // else cssClass = "o_cfc_col_srt_desc";
        // } else cssClass = "o_cfc_col_unsorted";
        sb.append("<a href=\""); // file name column
        ubu.buildURI(sb, new String[] { PARAM_SORTID }, new String[] { FolderComponent.SORT_NAME }, iframePostEnabled ? AJAXFlags.MODE_TOBGIFRAME
                : AJAXFlags.MODE_NORMAL);
        sb.append("\"");
        if (iframePostEnabled) { // add ajax iframe target
            StringOutput so = new StringOutput();
            ubu.appendTarget(so);
            sb.append(so.toString());
        }
        sb.append(" ext:qtip=\"").append(StringEscapeUtils.escapeHtml(translator.translate("header.Name"))).append("\">").append(translator.translate("header.Name"))
                .append("</a>");
        sb.append("</th><th class=\"b_briefcase_col_size\">");
        sb.append("<a href=\""); // file size column
        ubu.buildURI(sb, new String[] { PARAM_SORTID }, new String[] { FolderComponent.SORT_SIZE }, iframePostEnabled ? AJAXFlags.MODE_TOBGIFRAME
                : AJAXFlags.MODE_NORMAL);
        sb.append("\"");
        if (iframePostEnabled) { // add ajax iframe target
            StringOutput so = new StringOutput();
            ubu.appendTarget(so);
            sb.append(so.toString());
        }
        sb.append(" ext:qtip=\"").append(StringEscapeUtils.escapeHtml(translator.translate("header.Size"))).append("\">").append(translator.translate("header.Size"))
                .append("</a>");
        sb.append("</th><th class=\"b_briefcase_col_type\">");
        sb.append("<a href=\""); // file type column
        ubu.buildURI(sb, new String[] { PARAM_SORTID }, new String[] { FolderComponent.SORT_TYPE }, iframePostEnabled ? AJAXFlags.MODE_TOBGIFRAME
                : AJAXFlags.MODE_NORMAL);
        sb.append("\"");
        if (iframePostEnabled) { // add ajax iframe target
            StringOutput so = new StringOutput();
            ubu.appendTarget(so);
            sb.append(so.toString());
        }
        sb.append(" ext:qtip=\"").append(StringEscapeUtils.escapeHtml(translator.translate("header.Type"))).append("\">").append(translator.translate("header.Type"))
                .append("</a>");
        // version-number column only when versioning is enabled for this container
        if (canVersion) {
            sb.append("</th><th class=\"b_briefcase_col_rev\">").append("<a href=\""); // file size column
            ubu.buildURI(sb, new String[] { PARAM_SORTID }, new String[] { FolderComponent.SORT_REV }, iframePostEnabled ? AJAXFlags.MODE_TOBGIFRAME
                    : AJAXFlags.MODE_NORMAL);
            sb.append("\"");
            if (iframePostEnabled) { // add ajax iframe target
                StringOutput so = new StringOutput();
                ubu.appendTarget(so);
                sb.append(so.toString());
            }
            sb.append(" ext:qtip=\"").append(StringEscapeUtils.escapeHtml(translator.translate("header.Version"))).append("\">")
                    .append(translator.translate("header.Version")).append("</a>");
        }
        sb.append("</th><th class=\"b_briefcase_col_date\">");
        sb.append("<a href=\""); // file modification date column
        ubu.buildURI(sb, new String[] { PARAM_SORTID }, new String[] { FolderComponent.SORT_DATE }, iframePostEnabled ? AJAXFlags.MODE_TOBGIFRAME
                : AJAXFlags.MODE_NORMAL);
        sb.append("\"");
        if (iframePostEnabled) { // add ajax iframe target
            StringOutput so = new StringOutput();
            ubu.appendTarget(so);
            sb.append(so.toString());
        }
        sb.append(" ext:qtip=\"").append(StringEscapeUtils.escapeHtml(translator.translate("header.Modified"))).append("\">")
                .append(translator.translate("header.Modified")).append("</a>");
        sb.append("</th>");
        sb.append("<th class=\"b_briefcase_col_info\">");
        sb.append("<a href=\""); // file lock
        ubu.buildURI(sb, new String[] { PARAM_SORTID }, new String[] { FolderComponent.SORT_LOCK }, iframePostEnabled ? AJAXFlags.MODE_TOBGIFRAME
                : AJAXFlags.MODE_NORMAL);
        sb.append("\"");
        if (iframePostEnabled) { // add ajax iframe target
            StringOutput so = new StringOutput();
            ubu.appendTarget(so);
            sb.append(so.toString());
        }
        sb.append(" ext:qtip=\"").append(StringEscapeUtils.escapeHtml(translator.translate("header.Status"))).append("\">").append(translator.translate("header.Status"))
                .append("</a>");
        // meta data column
        sb.append("</th><th class=\"b_briefcase_col_info b_last_child\"><span");
        sb.append(" ext:qtip=\"").append(StringEscapeUtils.escapeHtml(translator.translate("header.Info"))).append("\">");
        sb.append(translator.translate("header.Info"));
        sb.append("</span></th></tr></thead>");

        // render directory contents
        String currentContainerPath = fc.getCurrentContainerPath();
        // strip a leading '/' so row links are built relative to the container
        if (currentContainerPath.length() > 0 && currentContainerPath.charAt(0) == '/')
            currentContainerPath = currentContainerPath.substring(1);
        bgFlag = true;
        sb.append("<tbody>");
        // cache lock-holder identities so repeated lockers are looked up only once per render
        Map<Long, Identity> identityMap = new HashMap<Long, Identity>();
        for (int i = 0; i < children.size(); i++) {
            VFSItem child = children.get(i);
            if (child instanceof VFSLeaf) {
                // leaves that vanished between listing and rendering are skipped
                if (child.exists()) {
                    appendRenderedFile(fc, child, currentContainerPath, sb, ubu, translator, iframePostEnabled, canVersion, identityMap, i);
                }
            } else {
                appendRenderedFile(fc, child, currentContainerPath, sb, ubu, translator, iframePostEnabled, canVersion, identityMap, i);
            }
        }
        sb.append("</tbody></table>");
        return sb.toString();
    } // getRenderedDirectoryContent

    /**
     * Render a single file or folder as one table row.
     *
     * @param fc the folder component (identity environment, date format)
     * @param child The file or folder to render
     * @param currentContainerPath container path prefix (no leading '/')
     * @param sb StringOutput to append generated html code
     * @param ubu URL builder for action links
     * @param translator translator for labels and tooltips
     * @param iframePostEnabled when true, links target the AJAX background iframe
     * @param canContainerVersion whether the enclosing container has versioning enabled
     * @param identityMap shared cache of lock-holder identities, keyed by identity key
     * @param pos position of this child in the listing; encoded into action link parameters
     */
    private void appendRenderedFile(FolderComponent fc, VFSItem child, String currentContainerPath, StringOutput sb, URLBuilder ubu, Translator translator,
            boolean iframePostEnabled, boolean canContainerVersion, Map<Long, Identity> identityMap, int pos) {

        // asume full access unless security callback tells us something different.
        boolean canWrite = child.getParentContainer().canWrite() == VFSConstants.YES;
        boolean canDelete = child.getParentContainer().canDelete() == VFSConstants.YES;
        boolean isAbstract = (child instanceof AbstractVirtualContainer);
        Versions versions = null;
        if (canContainerVersion && child instanceof Versionable) {
            Versionable versionable = (Versionable) child;
            if (versionable.getVersions().isVersioned()) {
                versions = versionable.getVersions();
            }
        }
        // only offer the versions action when revisions actually exist
        boolean canVersion = versions != null && !versions.getRevisions().isEmpty();
        boolean canAddToEPortfolio = FolderConfig.isEPortfolioAddEnabled();
        VFSLeaf leaf = null;
        if (child instanceof VFSLeaf) {
            leaf = (VFSLeaf) child;
        }
        boolean isContainer = (leaf == null); // if not a leaf, it must be a container...

        MetaInfo metaInfo = null;
        if (child instanceof OlatRelPathImpl) {
            FileMetadataInfoService metaInfoService = CoreSpringFactory.getBean(FileMetadataInfoService.class);
            metaInfo = metaInfoService.createMetaInfoFor((OlatRelPathImpl) child);
        }
        boolean lockedForUser = FileMetadataInfoHelper.isLocked(metaInfo, fc.getIdentityEnvironnement());

        String name = child.getName();
        String pathAndName = currentContainerPath;
        if (pathAndName.length() > 0 && !pathAndName.endsWith("/"))
            pathAndName = pathAndName + "/";
        pathAndName = pathAndName + name;
        String type = FolderHelper.extractFileType(child.getName(), translator.getLocale());

        // tr begin, set alternating bgcolor
        sb.append("<tr");
        bgFlag = !bgFlag;
        if (bgFlag) {
            sb.append(" class=\"b_table_odd\"");
        }
        sb.append("><td class=\"b_first_child\">");

        // add checkbox for actions if user can write or delete to this directory
        if (canWrite || canDelete) {
            sb.append("<input type=\"checkbox\" class=\"b_checkbox\" name=\"");
            sb.append(FileSelection.FORM_ID);
            sb.append("\" value=\"");
            sb.append(StringEscapeUtils.escapeHtml(name));
            sb.append("\" />");
        }

        // browse link pre
        sb.append("<a href=\"");
        if (isContainer) { // for directories... normal module URIs
            ubu.buildURI(sb, null, null, pathAndName, iframePostEnabled ? AJAXFlags.MODE_TOBGIFRAME : AJAXFlags.MODE_NORMAL);
            sb.append("\"");
            if (iframePostEnabled) { // add ajax iframe target
                StringOutput so = new StringOutput();
                ubu.appendTarget(so);
                sb.append(so.toString());
            }
        } else { // for files, add PARAM_SERV command
            ubu.buildURI(sb, new String[] { PARAM_SERV }, new String[] { "x" }, pathAndName, AJAXFlags.MODE_NORMAL);
            sb.append("\" target=\"_blank\"");
        }
        // icon css
        sb.append(" class=\"b_with_small_icon_left ");
        if (isContainer)
            sb.append(CSSHelper.CSS_CLASS_FILETYPE_FOLDER);
        else
            sb.append(CSSHelper.createFiletypeIconCssClassFor(name));
        sb.append("\"");

        // file metadata rendered as an ext:qtip tooltip (title, comment, thumbnail, author)
        if (metaInfo != null) {
            sb.append(" ext:qtip=\"<div class='b_ext_tooltip_wrapper b_briefcase_meta'>");
            if (StringHelper.containsNonWhitespace(metaInfo.getTitle())) {
                String title = StringHelper.escapeHtml(metaInfo.getTitle()); // TODO: LD: 1024: NOK
                sb.append("<h5>").append(Formatter.escapeDoubleQuotes(title)).append("</h5>");
            }
            if (StringHelper.containsNonWhitespace(metaInfo.getComment())) {
                String comment = StringHelper.escapeHtml(metaInfo.getComment());
                sb.append(Formatter.escapeDoubleQuotes(comment));
            }
            if (metaInfo.isThumbnailAvailable()) {
                sb.append("<div class='b_briefcase_preview' style='width:200px; height:200px; background-image:url(");
                ubu.buildURI(sb, new String[] { PARAM_SERV_THUMBNAIL }, new String[] { "x" }, pathAndName, AJAXFlags.MODE_NORMAL);
                sb.append("); background-repeat:no-repeat; background-position:50% 50%;'>&nbsp;</div>");
            }
            String author = StringHelper.escapeHtml(metaInfo.getAuthor());
            if (StringHelper.containsNonWhitespace(author)) {
                sb.append("<p>").append(Formatter.escapeDoubleQuotes(translator.translate("mf.author")));
                sb.append(": ").append(Formatter.escapeDoubleQuotes(author)).append("</p>");
            }
            sb.append("</div>\"");
        }
        sb.append(">");

        // name; abstract (virtual) containers are shown in italics
        if (isAbstract)
            sb.append("<i>");
        sb.append(name);
        if (isAbstract)
            sb.append("</i>");
        sb.append("</a></td><td>");

        // filesize
        if (!isContainer) {
            // append filesize
            sb.append(StringHelper.formatMemory(leaf.getSize()));
        }
        sb.append("</td><td>");

        // type
        if (isContainer) {
            sb.append(translator.translate("Directory"));
        } else {
            if (type.equals(TYPE_FILE)) {
                sb.append(translator.translate("UnknownFile"));
            } else {
                // NOTE(review): toUpperCase() uses the default JVM locale, not the
                // translator's locale — may misrender in e.g. Turkish locales; confirm.
                sb.append(type.toUpperCase());
                sb.append(" ").append(translator.translate(TYPE_FILE));
            }
        }
        sb.append("</td><td>");

        // version-number cell, present only when the container supports versioning
        if (canContainerVersion) {
            if (canVersion)
                sb.append(versions.getRevisionNr());
            sb.append("</td><td>");
        }

        // last modified
        long lastModified = child.getLastModified();
        if (lastModified != VFSConstants.UNDEFINED)
            sb.append(DateFormat.getDateTimeInstance(DateFormat.SHORT, DateFormat.SHORT, translator.getLocale()).format(new Date(lastModified)));
        else
            sb.append("-");
        sb.append("</td><td>");

        // locked
        if (metaInfo != null) {
            if (metaInfo.isLocked()) {
                // resolve and cache the lock holder's identity
                Identity lockedBy = identityMap.get(metaInfo.getLockedBy());
                if (lockedBy == null) {
                    lockedBy = metaInfo.getLockedByIdentity();
                    if (lockedBy != null) {
                        identityMap.put(lockedBy.getKey(), lockedBy);
                    }
                }
                sb.append("<span class=\"b_small_icon b_briefcase_locked_file_icon\" ext:qtip=\"");
                if (lockedBy != null) {
                    String firstName = getUserService().getUserProperty(lockedBy.getUser(), UserConstants.FIRSTNAME, translator.getLocale());
                    String lastName = getUserService().getUserProperty(lockedBy.getUser(), UserConstants.LASTNAME, translator.getLocale());
                    String date = "";
                    if (metaInfo.getLockedDate() != null) {
                        date = fc.getDateTimeFormat().format(metaInfo.getLockedDate());
                    }
                    sb.append(translator.translate("Locked", new String[] { firstName, lastName, date }));
                }
                sb.append("\">&#160;</span>");
            }
        }
        sb.append("</td><td class=\"b_last_child\">");

        // Info link
        if (canWrite) {
            sb.append("<table class=\"b_briefcase_actions\"><tr><td>");
            // versions action
            if (canVersion) {
                // Versions link
                if (lockedForUser) {
                    // disabled icon when the file is locked for the current user
                    sb.append("<span ext:qtip=\"").append(StringEscapeUtils.escapeHtml(translator.translate("versions")))
                            .append("\" class=\" b_small_icon b_briefcase_versions_dis_icon\">&#160;</span>");
                } else {
                    sb.append("<a href=\"");
                    ubu.buildURI(sb, new String[] { PARAM_VERID }, new String[] { Integer.toString(pos) }, iframePostEnabled ? AJAXFlags.MODE_TOBGIFRAME
                            : AJAXFlags.MODE_NORMAL);
                    sb.append("\"");
                    if (iframePostEnabled) { // add ajax iframe target
                        StringOutput so = new StringOutput();
                        ubu.appendTarget(so);
                        sb.append(so.toString());
                    }
                    sb.append(" ext:qtip=\"").append(StringEscapeUtils.escapeHtml(translator.translate("versions")))
                            .append("\" class=\" b_small_icon b_briefcase_versions_icon\">&#160;</a>");
                }
            } else {
                sb.append("<span class=\"b_small_icon b_briefcase_noicon\">&#160;</span>");
            }
            sb.append("</td><td>");

            // show content edit action if HTML, text or CSS file which is not locked
            String nameLowerCase = name.toLowerCase();
            if (!isContainer && !lockedForUser
                    && (nameLowerCase.endsWith(".html") || nameLowerCase.endsWith(".htm") || nameLowerCase.endsWith(".txt") || nameLowerCase.endsWith(".css"))) {
                sb.append("<a href=\"");
                ubu.buildURI(sb, new String[] { PARAM_CONTENTEDITID }, new String[] { Integer.toString(pos) }, iframePostEnabled ? AJAXFlags.MODE_TOBGIFRAME
                        : AJAXFlags.MODE_NORMAL);
                sb.append("\"");
                if (iframePostEnabled) { // add ajax iframe target
                    StringOutput so = new StringOutput();
                    ubu.appendTarget(so);
                    sb.append(so.toString());
                }
                sb.append(" ext:qtip=\"").append(StringEscapeUtils.escapeHtml(translator.translate("editor")));
                sb.append("\" class=\"b_small_icon b_briefcase_edit_file_icon\">&#160;</a>");
            } else {
                sb.append("<span class=\"b_small_icon b_briefcase_noicon\">&#160;</span>");
            }
            sb.append("</td><td>");

            // eportfolio collect action
            // get a link for adding a file to ePortfolio, if file-owner is the current user
            if (canAddToEPortfolio && !isContainer) {
                if (metaInfo != null) {
                    Identity author = metaInfo.getAuthorIdentity();
                    if (author != null && fc.getIdentityEnvironnement().getIdentity().getKey().equals(author.getKey())) {
                        sb.append("<a href=\"");
                        ubu.buildURI(sb, new String[] { PARAM_EPORT }, new String[] { Integer.toString(pos) }, iframePostEnabled ? AJAXFlags.MODE_TOBGIFRAME
                                : AJAXFlags.MODE_NORMAL);
                        sb.append("\"");
                        if (iframePostEnabled) { // add ajax iframe target
                            StringOutput so = new StringOutput();
                            ubu.appendTarget(so);
                            sb.append(so.toString());
                        }
                        sb.append(" ext:qtip=\"").append(StringEscapeUtils.escapeHtml(translator.translate("eportfolio")))
                                .append("\" class=\" b_small_icon b_eportfolio_add\">&#160;</a>");
                    } else {
                        sb.append("<span class=\"b_small_icon b_briefcase_noicon\">&#160;</span>");
                    }
                }
                // NOTE(review): when metaInfo == null in this branch, no placeholder
                // icon is appended and the cell stays empty, unlike every sibling
                // branch — confirm this asymmetry is intended.
            } else {
                sb.append("<span class=\"b_small_icon b_briefcase_noicon\">&#160;</span>");
            }
            sb.append("</td><td>");

            // meta edit action (rename etc)
            boolean canMetaData = FileMetadataInfoHelper.canMetaInfo(child);
            if (canMetaData) {
                if (lockedForUser) {
                    // Metadata link disabled...
                    sb.append("<span ext:qtip=\"").append(StringEscapeUtils.escapeHtml(translator.translate("edit")))
                            .append("\" class=\" b_small_icon b_briefcase_edit_meta_dis_icon\">&#160;</span>");
                } else {
                    // Metadata edit link... also handles rename for non-OlatRelPathImpls
                    sb.append("<a href=\"");
                    ubu.buildURI(sb, new String[] { PARAM_EDTID }, new String[] { Integer.toString(pos) }, iframePostEnabled ? AJAXFlags.MODE_TOBGIFRAME
                            : AJAXFlags.MODE_NORMAL);
                    sb.append("\"");
                    if (iframePostEnabled) { // add ajax iframe target
                        StringOutput so = new StringOutput();
                        ubu.appendTarget(so);
                        sb.append(so.toString());
                    }
                    sb.append(" ext:qtip=\"").append(StringEscapeUtils.escapeHtml(translator.translate("mf.edit")))
                            .append("\" class=\" b_small_icon b_briefcase_edit_meta_icon\">&#160;</a>");
                }
            } else {
                sb.append("<span class=\"b_small_icon b_briefcase_noicon\">&#160;</span>");
            }
            sb.append("</td></tr></table>");
        } else {
            sb.append("&nbsp;");
        }
        sb.append("</td></tr>");
    }

    /** Look up the UserService bean; used to resolve lock-holder display names. */
    private UserService getUserService() {
        return CoreSpringFactory.getBean(UserService.class);
    }
}
/*
 * This file is part of the Jikes RVM project (http://jikesrvm.org).
 *
 * This file is licensed to You under the Common Public License (CPL);
 * You may not use this file except in compliance with the License. You
 * may obtain a copy of the License at
 *
 * http://www.opensource.org/licenses/cpl1.0.php
 *
 * See the COPYRIGHT.txt file distributed with this work for information
 * regarding copyright ownership.
 */
package org.mmtk.utility.heap;

import org.mmtk.utility.alloc.EmbeddedMetaData;
import org.mmtk.utility.options.Options;
import org.mmtk.policy.Space;
import org.mmtk.utility.Conversions;
import org.mmtk.utility.Constants;
import org.mmtk.vm.VM;

import org.vmmagic.pragma.*;
import org.vmmagic.unboxed.*;

/**
 * This class manages the allocation of pages for a space. When a
 * page is requested by the space both a page budget and the use of
 * virtual address space are checked. If the request for space can't
 * be satisfied (for either reason) a GC may be triggered.<p>
 *
 * Allocation is monotone: a cursor is simply bumped towards a sentinel
 * and is only ever reset wholesale (see {@link #reset()}).
 * Note: unqualified {@code Map} and {@code Mmapper} below refer to the
 * same-package heap classes, not {@code java.util}.
 */
@Uninterruptible
public final class MonotonePageResource extends PageResource implements Constants {

    /****************************************************************************
     *
     * Instance variables
     */
    // next free address; bumped on every allocation (protected by lock())
    private Address cursor;
    // exclusive upper bound of the current virtual address range
    private Address sentinel;
    // pages of per-region metadata skipped at each region start (0 = none)
    private final int metaDataPagesPerRegion;
    // chunk containing the most recent allocation; used to detect chunk crossings
    private Address currentChunk = Address.zero();

    /**
     * Constructor
     *
     * Contiguous monotone resource. The address range is pre-defined at
     * initialization time and is immutable.
     *
     * @param pageBudget The budget of pages available to this memory
     * manager before it must poll the collector.
     * @param space The space to which this resource is attached
     * @param start The start of the address range allocated to this resource
     * @param bytes The size of the address rage allocated to this resource
     * @param metaDataPagesPerRegion The number of pages of meta data
     * that are embedded in each region.
     */
    public MonotonePageResource(int pageBudget, Space space, Address start, Extent bytes, int metaDataPagesPerRegion) {
        super(pageBudget, space, start);
        this.cursor = start;
        this.sentinel = start.plus(bytes);
        this.metaDataPagesPerRegion = metaDataPagesPerRegion;
    }

    /**
     * Constructor
     *
     * Discontiguous monotone resource. The address range is <i>not</i>
     * pre-defined at initialization time and is dynamically defined to
     * be some set of pages, according to demand and availability.
     *
     * CURRENTLY UNIMPLEMENTED
     *
     * @param pageBudget The budget of pages available to this memory
     * manager before it must poll the collector.
     * @param space The space to which this resource is attached
     * @param metaDataPagesPerRegion The number of pages of meta data
     * that are embedded in each region.
     */
    public MonotonePageResource(int pageBudget, Space space, int metaDataPagesPerRegion) {
        super(pageBudget, space);
        /* unimplemented */
        this.start = Address.zero();
        this.cursor = Address.zero();
        this.sentinel = Address.zero();
        this.metaDataPagesPerRegion = metaDataPagesPerRegion;
    }

    /**
     * Return the number of available physical pages for this resource.
     * This includes all pages currently unused by this resource's page
     * cursor. If the resource is using discontiguous space it also includes
     * currently unassigned discontiguous space.<p>
     *
     * Note: This just considers physical pages (ie virtual memory pages
     * allocated for use by this resource). This calculation is orthogonal
     * to and does not consider any restrictions on the number of pages
     * this resource may actually use at any time (ie the number of
     * committed and reserved pages).<p>
     *
     * Note: The calculation is made on the assumption that all space that
     * could be assigned to this resource would be assigned to this resource
     * (ie the unused discontiguous space could just as likely be assigned
     * to another competing resource).
     *
     * @return The number of available physical pages for this resource.
     */
    @Override
    public int getAvailablePhysicalPages() {
        int rtn = Conversions.bytesToPages(sentinel.diff(cursor));
        if (!contiguous)
            rtn += Map.getAvailableDiscontiguousChunks() * Space.PAGES_IN_CHUNK;
        return rtn;
    }

    /**
     * Allocate <code>pages</code> pages from this resource. Simply
     * bump the cursor, and fail if we hit the sentinel.<p>
     *
     * If the request can be satisfied, then ensure the pages are
     * mmpapped and zeroed before returning the address of the start of
     * the region. If the request cannot be satisfied, return zero.
     *
     * @param requestPages The number of pages to be allocated.
     * @return The start of the first page if successful, zero on
     * failure.
     */
    @Inline
    protected Address allocPages(int requestPages) {
        int pages = requestPages;
        boolean newChunk = false;
        lock();
        Address rtn = cursor;
        // detect whether this allocation is the first in a new chunk
        if (Space.chunkAlign(rtn, true).NE(currentChunk)) {
            newChunk = true;
            currentChunk = Space.chunkAlign(rtn, true);
        }

        if (metaDataPagesPerRegion != 0) {
            /* adjust allocation for metadata */
            Address regionStart = getRegionStart(cursor.plus(Conversions.pagesToBytes(pages)));
            Offset regionDelta = regionStart.diff(cursor);
            if (regionDelta.sGE(Offset.zero())) {
                /* start new region, so adjust pages and return address accordingly */
                pages += Conversions.bytesToPages(regionDelta) + metaDataPagesPerRegion;
                rtn = regionStart.plus(Conversions.pagesToBytes(metaDataPagesPerRegion));
            }
        }
        Extent bytes = Conversions.pagesToBytes(pages);
        Address tmp = cursor.plus(bytes);

        if (!contiguous && tmp.GT(sentinel)) {
            /* we're out of virtual memory within our discontiguous region, so ask for more */
            int requiredChunks = Space.requiredChunks(pages);
            start = space.growDiscontiguousSpace(requiredChunks);
            cursor = start;
            // a zero start means the grow failed; leave sentinel == cursor so the
            // subsequent bound check fails and zero is returned
            sentinel = cursor.plus(start.isZero() ? 0 : requiredChunks << Space.LOG_BYTES_IN_CHUNK);
            rtn = cursor;
            tmp = cursor.plus(bytes);
            newChunk = true;
        }
        if (VM.VERIFY_ASSERTIONS)
            VM.assertions._assert(rtn.GE(cursor) && rtn.LT(cursor.plus(bytes)));
        if (tmp.GT(sentinel)) {
            // out of space: release the lock and signal failure with a zero address
            unlock();
            return Address.zero();
        } else {
            Address old = cursor;
            cursor = tmp;
            commitPages(requestPages, pages);
            space.growSpace(old, bytes, newChunk);
            unlock();
            // map and zero outside the lock; only the newly consumed range is touched
            Mmapper.ensureMapped(old, pages);
            VM.memory.zero(old, bytes);
            return rtn;
        }
    }

    /**
     * Adjust a page request to include metadata requirements, if any.<p>
     *
     * In this case we simply report the expected page cost. We can't use
     * worst case here because we would exhaust our budget every time.
     *
     * NOTE(review): despite the "inclusive" wording below, the expression
     * returns only the pro-rated metadata cost, not pages plus metadata —
     * confirm against callers/the sibling overload.
     *
     * @param pages The size of the pending allocation in pages
     * @return The number of required pages, inclusive of any metadata
     */
    public int adjustForMetaData(int pages) {
        return (metaDataPagesPerRegion * pages) / EmbeddedMetaData.PAGES_IN_REGION;
    }

    /**
     * Adjust a page request to include metadata requirements, if any.<p>
     *
     * Note that there could be a race here, with multiple threads each
     * adjusting their request on account of the same single metadata
     * region. This should not be harmful, as the failing requests will
     * just retry, and if multiple requests succeed, only one of them
     * will actually have the metadata accounted against it, the others
     * will simply have more space than they originally requested.
     *
     * @param pages The size of the pending allocation in pages
     * @param begin The start address of the region assigned to this pending
     * request
     * @return The number of required pages, inclusive of any metadata
     */
    public int adjustForMetaData(int pages, Address begin) {
        // only the request that lands exactly after the region's metadata pays for it
        if (getRegionStart(begin).plus(metaDataPagesPerRegion << LOG_BYTES_IN_PAGE).EQ(begin))
            pages += metaDataPagesPerRegion;
        return pages;
    }

    /** Round an address down to the start of its embedded-metadata region. */
    private static Address getRegionStart(Address addr) {
        return addr.toWord().and(Word.fromIntSignExtend(EmbeddedMetaData.BYTES_IN_REGION - 1).not()).toAddress();
    }

    /**
     * Reset this page resource, freeing all pages and resetting
     * reserved and committed pages appropriately.
     */
    @Inline
    public void reset() {
        lock();
        reserved = 0;
        committed = 0;
        releasePages();
        unlock();
    }

    /**
     * Notify that several pages are no longer in use.
     *
     * @param pages The number of pages
     */
    public void unusePages(int pages) {
        lock();
        reserved -= pages;
        committed -= pages;
        unlock();
    }

    /**
     * Notify that previously unused pages are in use again.
     *
     * @param pages The number of pages
     */
    public void reusePages(int pages) {
        lock();
        reserved += pages;
        committed += pages;
        unlock();
    }

    /**
     * Release all pages associated with this page resource, optionally
     * zeroing on release and optionally memory protecting on release.
     */
    @Inline
    private void releasePages() {
        Address first = start;
        // walk every chunk tied down by this resource (one iteration if contiguous)
        do {
            Extent bytes = cursor.diff(start).toWord().toExtent();
            releasePages(start, bytes);
            cursor = start;
        } while (!contiguous && moveToNextChunk());
        if (!contiguous) {
            sentinel = Address.zero();
            Map.freeAllChunks(first);
        }
    }

    /**
     * Adjust the start and cursor fields to point to the next chunk
     * in the linked list of chunks tied down by this page resource.
     *
     * @return True if we moved to the next chunk; false if we hit the
     * end of the linked list.
     */
    private boolean moveToNextChunk() {
        start = Map.getNextContiguousRegion(start);
        if (start.isZero())
            return false;
        else {
            cursor = start.plus(Map.getContiguousRegionSize(start));
            return true;
        }
    }

    /**
     * Release a range of pages associated with this page resource, optionally
     * zeroing on release and optionally memory protecting on release.
     *
     * @param first start address of the range to release
     * @param bytes size of the range; must be a whole number of pages
     */
    @Inline
    private void releasePages(Address first, Extent bytes) {
        int pages = Conversions.bytesToPages(bytes);
        if (VM.VERIFY_ASSERTIONS)
            VM.assertions._assert(bytes.EQ(Conversions.pagesToBytes(pages)));
        if (ZERO_ON_RELEASE)
            VM.memory.zero(first, bytes);
        if (Options.protectOnRelease.getValue())
            Mmapper.protect(first, pages);
    }
}
package com.uc4.deploymentservice; import java.rmi.RemoteException; import java.util.Calendar; import com.uc4.deploymentservice.binding.DeploymentServiceLocator; public class DeploymentServiceProxy implements com.uc4.deploymentservice.DeploymentService { private String _endpoint = null; private com.uc4.deploymentservice.DeploymentService deploymentService = null; public DeploymentServiceProxy() { _initDeploymentServiceProxy(); } public DeploymentServiceProxy(String endpoint) { _endpoint = endpoint; _initDeploymentServiceProxy(); } private void _initDeploymentServiceProxy() { try { deploymentService = (new DeploymentServiceLocator()).getBasicHttpBinding_DeploymentService(); if (deploymentService != null) { if (_endpoint != null) ((javax.xml.rpc.Stub)deploymentService)._setProperty("javax.xml.rpc.service.endpoint.address", _endpoint); else _endpoint = (String)((javax.xml.rpc.Stub)deploymentService)._getProperty("javax.xml.rpc.service.endpoint.address"); } } catch (javax.xml.rpc.ServiceException serviceException) {} } public String getEndpoint() { return _endpoint; } public void setEndpoint(String endpoint) { _endpoint = endpoint; if (deploymentService != null) ((javax.xml.rpc.Stub)deploymentService)._setProperty("javax.xml.rpc.service.endpoint.address", _endpoint); } public com.uc4.deploymentservice.DeploymentService getDeploymentService_PortType() { if (deploymentService == null) _initDeploymentServiceProxy(); return deploymentService; } @Override public String getWebserviceVersion() throws RemoteException { if (deploymentService == null) _initDeploymentServiceProxy(); return deploymentService.getWebserviceVersion(); } @Override public ExecutionWorkflowResult executeApplicationWorkflow(String user, String pwd, String workflowName, String appName, String packageName, String profileName, Calendar startDate, String queueName, Boolean needsManualStart, String manualConfirmer, InstallationMode installationMode, DynamicProperty[] properties) throws RemoteException { if 
(deploymentService == null) _initDeploymentServiceProxy(); return deploymentService.executeApplicationWorkflow(user, pwd, workflowName, appName, packageName, profileName, startDate, queueName, needsManualStart, manualConfirmer, installationMode, properties); } @Override public ExecutionWorkflowResult executeGeneralWorkflow(String user, String pwd, String workflowName, Calendar startDate, String queueName, Boolean needsManualStart, String manualConfirmer, DynamicProperty[] properties) throws RemoteException { if (deploymentService == null) _initDeploymentServiceProxy(); return deploymentService.executeGeneralWorkflow(user, pwd, workflowName, startDate, queueName, needsManualStart, manualConfirmer, properties); } @Override public RunQueueResult runQueue(String triggerAuthenticationKey, String queueName) throws RemoteException { if (deploymentService == null) _initDeploymentServiceProxy(); return deploymentService.runQueue(triggerAuthenticationKey, queueName); } @Override public ObjectsCreationResult createActivitiesFromTemplate(String user, String pwd, String activityTemplate, String templateActivity, Calendar plannedStart, String owner, String folder) throws RemoteException { if (deploymentService == null) _initDeploymentServiceProxy(); return deploymentService.createActivitiesFromTemplate(user, pwd, activityTemplate, templateActivity, plannedStart, owner, folder); } @Override public ObjectsCreationResult createDeploymentTargetsFromTemplate(String user, String pwd, String templateTarget, String[] targetNames, String owner, String folder) throws RemoteException { if (deploymentService == null) _initDeploymentServiceProxy(); return deploymentService.createDeploymentTargetsFromTemplate(user, pwd, templateTarget, targetNames, owner, folder); } @Override public TypePermissionsResult getTypePermissions(String user, String secret, String technicalName) throws RemoteException { if (deploymentService == null) _initDeploymentServiceProxy(); return 
deploymentService.getTypePermissions(user, secret, technicalName); } @Override public GetPropertiesResult getComponentProperties(String user, String secret, String applicationName, String componentName) throws RemoteException { if (deploymentService == null) _initDeploymentServiceProxy(); return deploymentService.getComponentProperties(user, secret, applicationName, componentName); } @Override public ActionResult addDeploymentTargetSnapshot(String user, String secret, String environmentSnapshotGuid, Integer processRunId, String guid, String hash, String variables, Long componentId, Long deploymentTargetId, String snapshotType) throws RemoteException { if (deploymentService == null) _initDeploymentServiceProxy(); return deploymentService.addDeploymentTargetSnapshot(user, secret, environmentSnapshotGuid, processRunId, guid, hash, variables, componentId, deploymentTargetId, snapshotType); } @Override public ActionResult addDeploymentTargetSnapshotReport(String user, String secret, Long reportId, Integer processRunId, String snapshotGuid, String reportType, String format, String compression, String xsltName, String status, String data) throws RemoteException { if (deploymentService == null) _initDeploymentServiceProxy(); return deploymentService.addDeploymentTargetSnapshotReport(user, secret, reportId, processRunId, snapshotGuid, reportType, format, compression, xsltName, status, data); } @Override public ActionResult uploadDetailCompareFile(String user, String secret, Integer processRunId, String data) throws RemoteException { // TODO Auto-generated method stub if (deploymentService == null) _initDeploymentServiceProxy(); return deploymentService.uploadDetailCompareFile(user, secret, processRunId, data); } @Override public EnvironmentSnapshotValidationResult validateEnvironmentSnapshots( String user, String secret, long[] environmentSnapshotIds) throws RemoteException { if (deploymentService == null) _initDeploymentServiceProxy(); return 
deploymentService.validateEnvironmentSnapshots(user, secret, environmentSnapshotIds); } @Override public Long getClientSettings() throws RemoteException { if (deploymentService == null) _initDeploymentServiceProxy(); return deploymentService.getClientSettings(); } @Override public DeploymentDescriptorResult getDeploymentDescriptor(String user, String secret, Integer runID) throws RemoteException { if (deploymentService == null) _initDeploymentServiceProxy(); return deploymentService.getDeploymentDescriptor(user, secret, runID); } @Override public ApprovalRuleDefinitionsResult getApprovalRuleDefinitions(String language) throws RemoteException { if (deploymentService == null) _initDeploymentServiceProxy(); return deploymentService.getApprovalRuleDefinitions(language); } }
/*
 * Copyright (c) 2019 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package com.google.api.services.samples.authorizedbuyers.realtimebidding;

import com.google.api.client.http.HttpRequestInitializer;
import com.google.api.services.realtimebidding.v1.model.*;
import com.google.auth.http.HttpCredentialsAdapter;
import com.google.auth.oauth2.GoogleCredentials;
import com.google.auth.oauth2.ServiceAccountCredentials;
import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.json.jackson2.JacksonFactory;
import com.google.api.services.pubsub.Pubsub;
import com.google.api.services.pubsub.PubsubScopes;
import com.google.api.services.realtimebidding.v1.RealTimeBidding;
import com.google.api.services.realtimebidding.v1.RealTimeBiddingScopes;
import java.io.FileInputStream;
import java.io.IOException;
import java.security.GeneralSecurityException;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.joda.time.LocalDate;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;

/**
 * Utilities used by the Authorized Buyers Real-time Bidding API samples.
 */
public class Utils {

  /**
   * Specify the name of your application. If the application name is {@code null} or blank, the
   * application will log a warning. Suggested format is "MyCompany-ProductName/1.0".
   */
  private static final String APPLICATION_NAME = "";

  /** Full path to JSON Key file - include file name. */
  private static final java.io.File JSON_FILE = new java.io.File("INSERT_PATH_TO_JSON_FILE");

  /**
   * Global instance of a DateTimeFormatter used to parse LocalDate instances and convert them to
   * String.
   * NOTE(review): pattern "Y/M/d" produces unpadded month/day, while
   * convertDateToString below emits zero-padded "%d/%02d/%02d" — the two
   * representations are not round-trippable; confirm which is intended.
   */
  private static final DateTimeFormatter dateFormatter = DateTimeFormat.forPattern("Y/M/d");

  /** Global instance of the JSON factory. */
  private static final JsonFactory JSON_FACTORY = JacksonFactory.getDefaultInstance();

  /**
   * Global instance of the maximum page size, which will be the default page size for samples with
   * pagination.
   */
  private static final Integer MAXIMUM_PAGE_SIZE = 50;

  /**
   * Authorizes the application to access the user's protected data.
   *
   * Reads a service account key from {@code JSON_FILE} and scopes it for both
   * the Real-time Bidding API and Cloud Pub/Sub.
   *
   * @throws IOException if the {@code JSON_FILE} can not be read.
   * @return An instantiated GoogleCredentials instance.
   */
  private static GoogleCredentials authorize() throws IOException {
    GoogleCredentials credentials;

    try (FileInputStream serviceAccountStream = new FileInputStream((JSON_FILE))) {
      Set<String> scopes = new HashSet<>(RealTimeBiddingScopes.all());
      scopes.add(PubsubScopes.PUBSUB);

      credentials = ServiceAccountCredentials
          .fromStream(serviceAccountStream)
          .createScoped(scopes);
    }

    return credentials;
  }

  /**
   * Converts a {@code Date} instance into a human-readable String format.
   *
   * @param date a {@code Date} instance.
   * @return A human-readable {@code String} representation of the given {@code Date} instance.
   */
  public static String convertDateToString(Date date) {
    return String.format("%d/%02d/%02d", date.getYear(), date.getMonth(), date.getDay());
  }

  /**
   * Converts a {@code LocalDate} instance to a corresponding {@code Date} instance.
   *
   * @param date A {@code LocalDate} instance to be converted to the {@code Date} type used by the
   *     Real-time Bidding API client library.
   * @return An instantiated {@code Date} instance.
   */
  public static Date convertJodaLocalDateToRTBDate(LocalDate date) {
    return new Date()
        .setDay(date.getDayOfMonth())
        .setMonth(date.getMonthOfYear())
        .setYear(date.getYear());
  }

  /**
   * Retrieve a {@code DateTimeFormatter} instance used to parse and serialize {@code LocalDate}.
   *
   * @return An initialized {@code DateTimeFormatter} instance.
   */
  public static DateTimeFormatter getDateTimeFormatterForLocalDate() {
    return dateFormatter;
  }

  /**
   * Performs all necessary setup steps for running requests against the Google Cloud Pubsub API.
   *
   * @return An initialized {@code Pubsub} service object.
   */
  public static Pubsub getPubsubClient() throws IOException, GeneralSecurityException {
    GoogleCredentials credentials = authorize();
    HttpRequestInitializer requestInitializer = new HttpCredentialsAdapter(credentials);
    HttpTransport httpTransport = GoogleNetHttpTransport.newTrustedTransport();

    return new Pubsub.Builder(
        httpTransport, JSON_FACTORY, requestInitializer)
        .setApplicationName(APPLICATION_NAME).build();
  }

  /**
   * Retrieve the default maximum page size.
   *
   * @return An Integer representing the default maximum page size for samples with pagination.
   */
  public static Integer getMaximumPageSize() {
    return MAXIMUM_PAGE_SIZE;
  }

  /**
   * Performs all necessary setup steps for running requests against the Real-time Bidding API.
   *
   * @return An initialized RealTimeBidding service object.
   */
  public static RealTimeBidding getRealTimeBiddingClient()
      throws IOException, GeneralSecurityException {
    GoogleCredentials credentials = authorize();
    HttpRequestInitializer requestInitializer = new HttpCredentialsAdapter(credentials);
    HttpTransport httpTransport = GoogleNetHttpTransport.newTrustedTransport();

    return new RealTimeBidding.Builder(
        httpTransport, JSON_FACTORY, requestInitializer)
        .setApplicationName(APPLICATION_NAME).build();
  }

  /**
   * Prints a {@code Bidder} instance in a human-readable format.
*/ public static void printBidder(Bidder bidder) { System.out.printf("* Bidder name: %s\n", bidder.getName()); String cookieMatchingUrl = bidder.getCookieMatchingUrl(); if (cookieMatchingUrl != null) { System.out.printf("\t- Cookie Matching URL: %s\n", cookieMatchingUrl); } String cookieMatchingNetworkId = bidder.getCookieMatchingNetworkId(); if (cookieMatchingNetworkId != null) { System.out.printf("\t- Cookie Matching Network ID: %s\n", cookieMatchingNetworkId); } Boolean bypassNonGuaranteedDealsPretargeting = bidder.getBypassNonguaranteedDealsPretargeting(); if (bypassNonGuaranteedDealsPretargeting != null) { System.out.printf("\t- Bypass Non-Guaranteed Deals Pretargeting: %s\n", bypassNonGuaranteedDealsPretargeting); } String dealsBillingId = bidder.getDealsBillingId(); if (dealsBillingId != null) { System.out.printf("\t- Deals ID: %s\n", dealsBillingId); } } /** * Prints a {@code Buyer} instance in a human-readable format. */ public static void printBuyer(Buyer buyer) { System.out.printf("* Buyer name: %s\n", buyer.getName()); String displayName = buyer.getDisplayName(); if (displayName != null) { System.out.printf("\t- Display name: %s\n", displayName); } String bidder = buyer.getBidder(); if (bidder != null) { System.out.printf("\t- Bidder: %s\n", bidder); } Long activeCreativeCount = buyer.getActiveCreativeCount(); if (activeCreativeCount != null) { System.out.printf("\t- Active creative count: %s\n", activeCreativeCount); } Long maximumActiveCreativeCount = buyer.getMaximumActiveCreativeCount(); if (maximumActiveCreativeCount != null) { System.out.printf("\t- Maximum active creative count: %s\n", maximumActiveCreativeCount); } List<String> billingIds = buyer.getBillingIds(); if (billingIds != null && !billingIds.isEmpty()) { System.out.println("\t- Billing IDs:"); for (String billingId : billingIds) { System.out.printf("\t\t%s\n", billingId); } } } /** * Prints a {@code Creative} instance in a human-readable format. 
*/ public static void printCreative(Creative creative) { System.out.printf("* Creative name: %s\n", creative.getName()); String advertiserName = creative.getAdvertiserName(); if (advertiserName != null) { System.out.printf("\t- Advertiser name: %s\n", advertiserName); } Integer version = creative.getVersion(); if (version != null) { System.out.printf("\t- Version: %d\n", version); } System.out.printf("\t- Creative format: %s\n", creative.getCreativeFormat()); CreativeServingDecision servingDecision = creative.getCreativeServingDecision(); if (servingDecision != null) { System.out.println("\t- Creative serving decision"); System.out.printf("\t\t- Deals policy compliance status: %s\n", servingDecision.getDealsPolicyCompliance().getStatus()); System.out.printf("\t\t- Network policy compliance status: %s\n", servingDecision.getNetworkPolicyCompliance().getStatus()); System.out.printf("\t\t- Platform policy compliance status: %s\n", servingDecision.getPlatformPolicyCompliance().getStatus()); System.out.printf("\t\t- China policy compliance status: %s\n", servingDecision.getChinaPolicyCompliance().getStatus()); System.out.printf("\t\t- Russia policy compliance status: %s\n", servingDecision.getRussiaPolicyCompliance().getStatus()); } List<String> declaredClickThroughUrls = creative.getDeclaredClickThroughUrls(); if (declaredClickThroughUrls != null && !declaredClickThroughUrls.isEmpty()) { System.out.println("\t- Declared click-through URLs:"); for (String declaredClickThroughUrl : declaredClickThroughUrls) { System.out.printf("\t\t%s\n", declaredClickThroughUrl); } } List<String> declaredAttributes = creative.getDeclaredAttributes(); if (declaredAttributes != null && !declaredAttributes.isEmpty()) { System.out.println("\t- Declared attributes:"); for (String declaredAttribute : declaredAttributes) { System.out.printf("\t\t%s\n", declaredAttribute); } } List<Integer> declaredVendorIds = creative.getDeclaredVendorIds(); if (declaredVendorIds != null && 
!declaredVendorIds.isEmpty()) { System.out.println("\t- Declared vendor IDs:"); for (Integer declaredVendorId : declaredVendorIds) { System.out.printf("\t\t%d\n", declaredVendorId); } } List<String> declaredRestrictedCategories = creative.getDeclaredRestrictedCategories(); if (declaredRestrictedCategories != null && !declaredRestrictedCategories.isEmpty()) { System.out.println("\t- Declared restricted categories:"); for (String declaredRestrictedCategory : declaredRestrictedCategories) { System.out.printf("\t\t%s\n", declaredRestrictedCategory); } } HtmlContent htmlContent = creative.getHtml(); if (htmlContent != null) { System.out.println("\t- HTML creative contents:"); System.out.printf("\t\tSnippet: %s\n", htmlContent.getSnippet()); System.out.printf("\t\tHeight: %d\n", htmlContent.getHeight()); System.out.printf("\t\tSnippet: %d\n", htmlContent.getWidth()); } NativeContent nativeContent = creative.getNative(); if (nativeContent != null) { System.out.println("\t- Native creative contents:"); System.out.printf("\t\tHeadline: %s\n", nativeContent.getHeadline()); System.out.printf("\t\tBody: %s\n", nativeContent.getBody()); System.out.printf("\t\tCallToAction: %s\n", nativeContent.getCallToAction()); System.out.printf("\t\tAdvertiser name: %s\n", nativeContent.getAdvertiserName()); System.out.printf("\t\tStar rating: %f\n", nativeContent.getStarRating()); System.out.printf("\t\tClick link URL: %s\n", nativeContent.getClickLinkUrl()); System.out.printf("\t\tClick tracking URL: %s\n", nativeContent.getClickTrackingUrl()); System.out.printf("\t\tPrice display text: %s\n", nativeContent.getPriceDisplayText()); Image image = nativeContent.getImage(); if (image != null) { System.out.println("\t\tImage contents:"); System.out.printf("\t\t\tURL: %s\n", image.getUrl()); System.out.printf("\t\t\tHeight: %d\n", image.getHeight()); System.out.printf("\t\t\tWidth: %d\n", image.getWidth()); } Image logo = nativeContent.getLogo(); if (logo != null) { System.out.println("\t\tLogo 
contents:"); System.out.printf("\t\t\tURL: %s\n", logo.getUrl()); System.out.printf("\t\t\tHeight: %d\n", logo.getHeight()); System.out.printf("\t\t\tWidth: %d\n", logo.getWidth()); } Image appIcon = nativeContent.getAppIcon(); if (appIcon != null) { System.out.println("\t\tApp icon contents:"); System.out.printf("\t\t\tURL: %s\n", appIcon.getUrl()); System.out.printf("\t\t\tHeight: %d\n", appIcon.getHeight()); System.out.printf("\t\t\tWidth: %d\n", appIcon.getWidth()); } } VideoContent videoContent = creative.getVideo(); if (videoContent != null) { System.out.println("\t- Video creative contents:"); String videoUrl = videoContent.getVideoUrl(); if (videoUrl != null) { System.out.printf("\t\tVideo URL: %s\n", videoUrl); } String videoVastXML = videoContent.getVideoVastXml(); if (videoVastXML != null) { System.out.printf("\t\tVideo VAST XML: %s\n", videoVastXML); } } } /** * Prints a {@code Endpoint} instance in a human-readable format. */ public static void printEndpoint(Endpoint endpoint) { System.out.printf("* Endpoint name: %s\n", endpoint.getName()); String url = endpoint.getUrl(); if (url != null) { System.out.printf("\t- URL: %s\n", url); } Long maximumQps = endpoint.getMaximumQps(); if (maximumQps != null) { System.out.printf("\t- Maximum QPS: %d\n", maximumQps); } String tradingLocation = endpoint.getTradingLocation(); if (tradingLocation != null) { System.out.printf("\t- Trading location: %s\n", tradingLocation); } String bidProtocol = endpoint.getBidProtocol(); if (bidProtocol != null) { System.out.printf("\t- Bid protocol: %s\n", bidProtocol); } } /** * Prints a {@code PretargetingConfig} instance in a human-readable format. 
   */
  public static void printPretargetingConfig(PretargetingConfig pretargetingConfig) {
    // Always-present scalar fields.
    System.out.printf("* Pretargeting configuration name: %s\n", pretargetingConfig.getName());
    System.out.printf("\t- Display name: %s\n", pretargetingConfig.getDisplayName());
    System.out.printf("\t- Billing ID: %s\n", pretargetingConfig.getBillingId());
    System.out.printf("\t- State: %s\n", pretargetingConfig.getState());
    // Optional scalars: printed only when set.
    Long maximumQps = pretargetingConfig.getMaximumQps();
    if (maximumQps != null) {
      System.out.printf("\t- Maximum QPS: %s\n", maximumQps);
    }
    String interstitialTargeting = pretargetingConfig.getInterstitialTargeting();
    if (interstitialTargeting != null) {
      System.out.printf("\t- Interstitial targeting: %s\n", interstitialTargeting);
    }
    Integer minimumViewabilityDecile = pretargetingConfig.getMinimumViewabilityDecile();
    if (minimumViewabilityDecile != null) {
      System.out.printf("\t- Minimum viewability decile: %s\n", minimumViewabilityDecile);
    }
    List<String> includedFormats = pretargetingConfig.getIncludedFormats();
    if (includedFormats != null && !includedFormats.isEmpty()) {
      System.out.println("\t- Included formats:");
      for (String includedFormat : includedFormats) {
        System.out.printf("\t\t%s\n", includedFormat);
      }
    }
    // Numeric dimensions carry separate include/exclude ID lists.
    NumericTargetingDimension geoTargeting = pretargetingConfig.getGeoTargeting();
    if (geoTargeting != null) {
      System.out.println("\t- Geo targeting:");
      List<Long> includedIds = geoTargeting.getIncludedIds();
      if (includedIds != null && !includedIds.isEmpty()) {
        System.out.println("\t\t- Included geo IDs:");
        for (Long id : includedIds) {
          System.out.printf("\t\t\t%s\n", id);
        }
      }
      List<Long> excludedIds = geoTargeting.getExcludedIds();
      if (excludedIds != null && !excludedIds.isEmpty()) {
        System.out.println("\t\t- Excluded geo IDs:");
        for (Long id : excludedIds) {
          System.out.printf("\t\t\t%s\n", id);
        }
      }
    }
    List<Long> invalidGeoIds = pretargetingConfig.getInvalidGeoIds();
    if (invalidGeoIds != null && !invalidGeoIds.isEmpty()) {
      System.out.println("\t- Invalid geo IDs:");
      for (Long id : invalidGeoIds) {
        System.out.printf("\t\t%s\n", id);
      }
    }
    NumericTargetingDimension userListTargeting = pretargetingConfig.getUserListTargeting();
    if (userListTargeting != null) {
      System.out.println("\t- User list targeting:");
      List<Long> includedIds = userListTargeting.getIncludedIds();
      if (includedIds != null && !includedIds.isEmpty()) {
        System.out.println("\t\t- Included user list IDs:");
        for (Long id : includedIds) {
          System.out.printf("\t\t\t%s\n", id);
        }
      }
      List<Long> excludedIds = userListTargeting.getExcludedIds();
      if (excludedIds != null && !excludedIds.isEmpty()) {
        System.out.println("\t\t- Excluded user list IDs:");
        for (Long id : excludedIds) {
          System.out.printf("\t\t\t%s\n", id);
        }
      }
    }
    List<String> allowedUserTargetingModes = pretargetingConfig.getAllowedUserTargetingModes();
    if (allowedUserTargetingModes != null && !allowedUserTargetingModes.isEmpty()) {
      System.out.println("\t- Allowed user targeting modes:");
      for (String userTargetingMode : allowedUserTargetingModes) {
        System.out.printf("\t\t%s\n", userTargetingMode);
      }
    }
    List<Long> excludedContentLabelIds = pretargetingConfig.getExcludedContentLabelIds();
    if (excludedContentLabelIds != null && !excludedContentLabelIds.isEmpty()) {
      System.out.println("\t- Excluded content label IDs:");
      for (Long id : excludedContentLabelIds) {
        System.out.printf("\t\t%s\n", id);
      }
    }
    List<String> includedUserIdTypes = pretargetingConfig.getIncludedUserIdTypes();
    if (includedUserIdTypes != null && !includedUserIdTypes.isEmpty()) {
      System.out.println("\t- Included user ID types:");
      for (String userIdType : includedUserIdTypes) {
        System.out.printf("\t\t%s\n", userIdType);
      }
    }
    List<String> includedLanguages = pretargetingConfig.getIncludedLanguages();
    if (includedLanguages != null && !includedLanguages.isEmpty()) {
      System.out.println("\t- Included languages:");
      for (String language : includedLanguages) {
        System.out.printf("\t\t%s\n", language);
      }
    }
    List<Long> includedMobileOSIds = pretargetingConfig.getIncludedMobileOperatingSystemIds();
    if (includedMobileOSIds != null && !includedMobileOSIds.isEmpty()) {
      System.out.println("\t- Included mobile operating system IDs:");
      for (Long id : includedMobileOSIds) {
        System.out.printf("\t\t%s\n", id);
      }
    }
    NumericTargetingDimension verticalTargeting = pretargetingConfig.getVerticalTargeting();
    if (verticalTargeting != null) {
      System.out.println("\t- Vertical targeting:");
      List<Long> includedIds = verticalTargeting.getIncludedIds();
      if (includedIds != null && !includedIds.isEmpty()) {
        System.out.println("\t\t- Included vertical IDs:");
        for (Long id : includedIds) {
          System.out.printf("\t\t\t%s\n", id);
        }
      }
      List<Long> excludedIds = verticalTargeting.getExcludedIds();
      if (excludedIds != null && !excludedIds.isEmpty()) {
        System.out.println("\t\t- Excluded vertical IDs:");
        for (Long id : excludedIds) {
          System.out.printf("\t\t\t%s\n", id);
        }
      }
    }
    List<String> includedPlatforms = pretargetingConfig.getIncludedPlatforms();
    if (includedPlatforms != null && !includedPlatforms.isEmpty()) {
      System.out.println("\t- Included platforms:");
      for (String platform : includedPlatforms) {
        System.out.printf("\t\t%s\n", platform);
      }
    }
    List<CreativeDimensions> creativeDimensions =
        pretargetingConfig.getIncludedCreativeDimensions();
    if (creativeDimensions != null && !creativeDimensions.isEmpty()) {
      System.out.println("\t- Included creative dimensions:");
      for (CreativeDimensions dimensions : creativeDimensions) {
        System.out.printf("\t\tHeight: %s; Width: %s\n",
            dimensions.getHeight(), dimensions.getWidth());
      }
    }
    List<String> includedEnvironments = pretargetingConfig.getIncludedEnvironments();
    if (includedEnvironments != null && !includedEnvironments.isEmpty()) {
      System.out.println("\t- Included environments:");
      for (String environment : includedEnvironments) {
        System.out.printf("\t\t%s\n", environment);
      }
    }
    // String dimensions carry a targeting mode plus a list of values.
    StringTargetingDimension webTargeting = pretargetingConfig.getWebTargeting();
    if (webTargeting != null) {
      System.out.println("\t- Web targeting:");
      System.out.printf("\t\t- Targeting mode: %s\n", webTargeting.getTargetingMode());
      System.out.println("\t\t- Site URLs:");
      for (String siteUrl : webTargeting.getValues()) {
        System.out.printf("\t\t\t%s\n", siteUrl);
      }
    }
    AppTargeting appTargeting = pretargetingConfig.getAppTargeting();
    if (appTargeting != null) {
      System.out.println("\t- App targeting:");
      StringTargetingDimension mobileAppTargeting = appTargeting.getMobileAppTargeting();
      if (mobileAppTargeting != null) {
        System.out.println("\t\t- Mobile app targeting:");
        System.out.printf("\t\t\t- Targeting mode: %s\n", mobileAppTargeting.getTargetingMode());
        System.out.println("\t\t\t- Mobile App IDs:");
        for (String appId : mobileAppTargeting.getValues()) {
          System.out.printf("\t\t\t\t%s\n", appId);
        }
      }
      NumericTargetingDimension mobileAppCategoryTargeting =
          appTargeting.getMobileAppCategoryTargeting();
      if (mobileAppCategoryTargeting != null) {
        System.out.println("\t\t- Mobile app category targeting:");
        List<Long> includedIds = mobileAppCategoryTargeting.getIncludedIds();
        if (includedIds != null && !includedIds.isEmpty()) {
          System.out.println("\t\t- Included mobile app category targeting IDs:");
          for (Long id : includedIds) {
            System.out.printf("\t\t\t%s\n", id);
          }
        }
        List<Long> excludedIds = mobileAppCategoryTargeting.getExcludedIds();
        if (excludedIds != null && !excludedIds.isEmpty()) {
          System.out.println("\t\t- Excluded mobile app category targeting IDs:");
          for (Long id : excludedIds) {
            System.out.printf("\t\t\t%s\n", id);
          }
        }
      }
    }
    StringTargetingDimension publisherTargeting = pretargetingConfig.getPublisherTargeting();
    if (publisherTargeting != null) {
      System.out.println("\t- Publisher targeting:");
      System.out.printf("\t\t- Targeting mode: %s\n", publisherTargeting.getTargetingMode());
      System.out.println("\t\t- Publisher IDs:");
      for (String publisherId : publisherTargeting.getValues()) {
        System.out.printf("\t\t\t%s\n", publisherId);
      }
    }
  }

  /**
   * Prints a {@code UserList} instance
in a human-readable format. */ public static void printUserList(UserList userList) { System.out.printf("* UserList name: '%s'\n", userList.getName()); String displayName = userList.getDisplayName(); if(displayName != null) { System.out.printf("\tUserList display name: '%s'\n", displayName); } String description = userList.getDescription(); if(description != null) { System.out.printf("\tUserList description: '%s'\n", description); } UrlRestriction urlRestriction = userList.getUrlRestriction(); if(urlRestriction != null) { System.out.println("\tURL Restriction:"); System.out.printf("\t\tURL: '%s'\n", urlRestriction.getUrl()); System.out.printf("\t\tRestriction Type: '%s'\n", urlRestriction.getRestrictionType()); Date startDate = urlRestriction.getStartDate(); if(startDate != null) { System.out.printf("\t\tStart Date: '%s'\n", convertDateToString(startDate)); } Date endDate = urlRestriction.getEndDate(); if(endDate != null) { System.out.printf("\t\tEnd Date: '%s'\n", convertDateToString(endDate)); } } System.out.printf("\tUserList status: '%s'\n", userList.getStatus()); System.out.printf("\tMembership duration days: %s\n\n", userList.getMembershipDurationDays()); } }
/*
 * Copyright WSO2 Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.wso2.carbon.apimgt.gateway.handlers.security;

import org.apache.axis2.Constants;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.http.HttpStatus;
import org.apache.synapse.ManagedLifecycle;
import org.apache.synapse.Mediator;
import org.apache.synapse.MessageContext;
import org.apache.synapse.config.xml.rest.VersionStrategyFactory;
import org.apache.synapse.core.SynapseEnvironment;
import org.apache.synapse.core.axis2.Axis2MessageContext;
import org.apache.synapse.rest.*;
import org.apache.synapse.rest.dispatch.RESTDispatcher;
import org.wso2.carbon.apimgt.gateway.APIMgtGatewayConstants;
import org.wso2.carbon.apimgt.gateway.handlers.Utils;
import org.wso2.carbon.apimgt.gateway.internal.ServiceReferenceHolder;
import org.wso2.carbon.apimgt.impl.APIConstants;
import org.wso2.carbon.apimgt.impl.utils.APIUtil;
import org.wso2.carbon.metrics.manager.MetricManager;
import org.wso2.carbon.metrics.manager.Timer;

import java.util.*;

/**
 * Synapse handler that applies the gateway's CORS policy to incoming REST
 * requests, resolving the invoked API resource and responding to preflight
 * (OPTIONS) requests where the back-end does not handle them itself.
 */
public class CORSRequestHandler extends AbstractHandler implements ManagedLifecycle {

    private static final Log log = LogFactory.getLog(CORSRequestHandler.class);
    private String apiImplementationType;
    // CORS policy values, loaded lazily from APIUtil configuration.
    private String allowHeaders;
    // NOTE(review): allowCredentials is never assigned anywhere in this class,
    // so the null check in initializeHeaders() is always true; the effective
    // flag is allowCredentialsEnabled — confirm this is intentional.
    private String allowCredentials;
    private Set<String> allowedOrigins;
    // True once the header values above have been initialized.
    private boolean initializeHeaderValues;
    private String allowedMethods;
    private boolean allowCredentialsEnabled;

    /**
     * Eagerly initializes the CORS header values if the API manager
     * configuration service is already available; otherwise they are loaded
     * lazily on the first request.
     */
    public void init(SynapseEnvironment synapseEnvironment) {
        if (log.isDebugEnabled()) {
            log.debug("Initializing CORSRequest Handler instance");
        }
        if (ServiceReferenceHolder.getInstance().getApiManagerConfigurationService() != null) {
            initializeHeaders();
        }
    }

    /**
     * Initializes the CORS header values (allowed headers, origins, methods
     * and the allow-credentials flag) from the gateway configuration.
     * Each value is only loaded once; subsequent calls are no-ops for
     * already-populated fields.
     */
    void initializeHeaders() {
        if (allowHeaders == null) {
            allowHeaders = APIUtil.getAllowedHeaders();
        }
        if (allowedOrigins == null) {
            // Configuration is a comma-separated origin list; empty means unset.
            String allowedOriginsList = APIUtil.getAllowedOrigins();
            if (!allowedOriginsList.isEmpty()) {
                allowedOrigins = new HashSet<String>(Arrays.asList(allowedOriginsList.split(",")));
            }
        }
        if (allowCredentials == null) {
            allowCredentialsEnabled = APIUtil.isAllowCredentials();
        }
        if (allowedMethods == null) {
            allowedMethods = APIUtil.getAllowedMethods();
        }
        initializeHeaderValues = true;
    }

    public void destroy() {
        if (log.isDebugEnabled()) {
            log.debug("Destroying CORSRequest Handler instance");
        }
    }

    public boolean handleRequest(MessageContext messageContext) {
        long executionStartTime = System.currentTimeMillis();
        // Time the whole CORS handling pass under the gateway metrics timer.
        Timer timer = MetricManager.timer(org.wso2.carbon.metrics.manager.Level.INFO, MetricManager.name(
                APIConstants.METRICS_PREFIX, this.getClass().getSimpleName()));
        Timer.Context context = timer.start();
        try {
            if (!initializeHeaderValues) {
                initializeHeaders();
            }
            String apiContext = (String) messageContext.getProperty(RESTConstants.REST_API_CONTEXT);
            String apiVersion = (String) messageContext.getProperty(RESTConstants.SYNAPSE_REST_API_VERSION);
            String apiName = (String) messageContext.getProperty(RESTConstants.SYNAPSE_REST_API);
            // HTTP method comes from the underlying Axis2 message context.
            String httpMethod = (String) ((Axis2MessageContext) messageContext).getAxis2MessageContext().
getProperty(Constants.Configuration.HTTP_METHOD); API selectedApi = messageContext.getConfiguration().getAPI(apiName); Resource selectedResource = null; String subPath = null; String path = RESTUtils.getFullRequestPath(messageContext); if(selectedApi != null) { if (VersionStrategyFactory.TYPE_URL.equals(selectedApi.getVersionStrategy().getVersionType())) { subPath = path.substring( selectedApi.getContext().length() + selectedApi.getVersionStrategy().getVersion().length() + 1); } else { subPath = path.substring(selectedApi.getContext().length()); } } if ("".equals(subPath)) { subPath = "/"; } messageContext.setProperty(RESTConstants.REST_SUB_REQUEST_PATH, subPath); if(selectedApi != null){ Resource[] allAPIResources = selectedApi.getResources(); Set<Resource> acceptableResources = new HashSet<Resource>(); for(Resource resource : allAPIResources){ //If the requesting method is OPTIONS or if the Resource contains the requesting method if (RESTConstants.METHOD_OPTIONS.equals(httpMethod) || (resource.getMethods() != null && Arrays.asList(resource.getMethods()).contains(httpMethod))) { acceptableResources.add(resource); } } if (acceptableResources.size() > 0) { for (RESTDispatcher dispatcher : RESTUtils.getDispatchers()) { Resource resource = dispatcher.findResource(messageContext, acceptableResources); if (resource != null) { selectedResource = resource; break; } } } //If no acceptable resources are found else{ //We're going to send a 405 or a 404. Run the following logic to determine which. 
handleResourceNotFound(messageContext, Arrays.asList(allAPIResources)); return false; } //No matching resource found if(selectedResource == null){ //Respond with a 404 onResourceNotFoundError(messageContext, HttpStatus.SC_NOT_FOUND, APIMgtGatewayConstants.RESOURCE_NOT_FOUND_ERROR_MSG); return false; } } String resourceString = selectedResource.getDispatcherHelper().getString(); String resourceCacheKey = APIUtil .getResourceInfoDTOCacheKey(apiContext, apiVersion, resourceString, httpMethod); messageContext.setProperty(APIConstants.API_ELECTED_RESOURCE, resourceString); messageContext.setProperty(APIConstants.API_RESOURCE_CACHE_KEY, resourceCacheKey); //If this is an OPTIONS request if (APIConstants.SupportedHTTPVerbs.OPTIONS.name().equalsIgnoreCase(httpMethod)) { //If the OPTIONS method is explicity specified in the resource if (Arrays.asList(selectedResource.getMethods()).contains( APIConstants.SupportedHTTPVerbs.OPTIONS.name())) { //We will not handle the CORS headers, let the back-end do it. 
return true; } setCORSHeaders(messageContext, selectedResource); Mediator corsSequence = messageContext.getSequence(APIConstants.CORS_SEQUENCE_NAME); if (corsSequence != null) { corsSequence.mediate(messageContext); } Utils.send(messageContext, HttpStatus.SC_OK); return false; } else if (APIConstants.IMPLEMENTATION_TYPE_INLINE.equalsIgnoreCase(apiImplementationType)) { setCORSHeaders(messageContext, selectedResource); messageContext.getSequence(APIConstants.CORS_SEQUENCE_NAME).mediate(messageContext); } setCORSHeaders(messageContext, selectedResource); return true; } finally { context.stop(); } } public boolean handleResponse(MessageContext messageContext) { Mediator corsSequence = messageContext.getSequence(APIConstants.CORS_SEQUENCE_NAME); if (corsSequence != null) { corsSequence.mediate(messageContext); } return true; } private void handleResourceNotFound(MessageContext messageContext, List<Resource> allAPIResources) { Resource uriMatchingResource = null; for (RESTDispatcher dispatcher : RESTUtils.getDispatchers()) { uriMatchingResource = dispatcher.findResource(messageContext, allAPIResources); //If a resource with a matching URI was found. if (uriMatchingResource != null) { onResourceNotFoundError(messageContext, HttpStatus.SC_METHOD_NOT_ALLOWED, APIMgtGatewayConstants.METHOD_NOT_FOUND_ERROR_MSG); return; } } //If a resource with a matching URI was not found. if(uriMatchingResource == null){ //Respond with a 404. 
onResourceNotFoundError(messageContext, HttpStatus.SC_NOT_FOUND, APIMgtGatewayConstants.RESOURCE_NOT_FOUND_ERROR_MSG); return; } } private void onResourceNotFoundError(MessageContext messageContext, int statusCode, String errorMessage){ messageContext.setProperty(APIConstants.CUSTOM_HTTP_STATUS_CODE, statusCode); messageContext.setProperty(APIConstants.CUSTOM_ERROR_CODE, statusCode); messageContext.setProperty(APIConstants.CUSTOM_ERROR_MESSAGE, errorMessage); Mediator resourceMisMatchedSequence = messageContext.getSequence(RESTConstants.NO_MATCHING_RESOURCE_HANDLER); if (resourceMisMatchedSequence != null) { resourceMisMatchedSequence.mediate(messageContext); } } /** * This method used to set CORS headers into message context * * @param messageContext message context for set cors headers as properties * @param selectedResource resource according to the request */ public void setCORSHeaders(MessageContext messageContext, Resource selectedResource) { org.apache.axis2.context.MessageContext axis2MC = ((Axis2MessageContext) messageContext).getAxis2MessageContext(); Map<String, String> headers = (Map) axis2MC.getProperty(org.apache.axis2.context.MessageContext.TRANSPORT_HEADERS); String requestOrigin = headers.get("Origin"); String allowedOrigin = getAllowedOrigins(requestOrigin); //Set the access-Control-Allow-Credentials header in the response only if it is specified to true in the api-manager configuration //and the allowed origin is not the wildcard (*) if (allowCredentialsEnabled && !"*".equals(allowedOrigin)) { messageContext.setProperty(APIConstants.CORSHeaders.ACCESS_CONTROL_ALLOW_CREDENTIALS, Boolean.TRUE); } messageContext.setProperty(APIConstants.CORSHeaders.ACCESS_CONTROL_ALLOW_ORIGIN, allowedOrigin); String allowedMethods = ""; StringBuffer allowedMethodsBuffer = new StringBuffer(); if (selectedResource != null) { String[] methods = selectedResource.getMethods(); for (String method : methods) { allowedMethodsBuffer.append(method).append(','); } 
allowedMethods = allowedMethodsBuffer.toString(); if (methods.length != 0) { allowedMethods = allowedMethods.substring(0, allowedMethods.length() - 1); } } else { allowedMethods = this.allowedMethods; } if ("*".equals(allowHeaders)) { allowHeaders = headers.get("Access-Control-Request-Headers"); } messageContext.setProperty(APIConstants.CORS_CONFIGURATION_ENABLED, APIUtil.isCORSEnabled()); messageContext.setProperty(APIConstants.CORSHeaders.ACCESS_CONTROL_ALLOW_METHODS, allowedMethods); messageContext.setProperty(APIConstants.CORSHeaders.ACCESS_CONTROL_ALLOW_HEADERS, allowHeaders); } public String getAllowHeaders() { return allowHeaders; } public void setAllowHeaders(String allowHeaders) { this.allowHeaders = allowHeaders; } public String getAllowedOrigins(String origin) { if (allowedOrigins.contains("*")) { return "*"; } else if (allowedOrigins.contains(origin)) { return origin; } else { return null; } } public void setAllowedOrigins(String allowedOrigins) { this.allowedOrigins = new HashSet<String>(Arrays.asList(allowedOrigins.split(","))); } public String getApiImplementationType() { return apiImplementationType; } public void setApiImplementationType(String apiImplementationType) { this.apiImplementationType = apiImplementationType; } // For backward compatibility with 1.9.0 since the property name is inline public String getInline() { return getApiImplementationType(); } // For backward compatibility with 1.9.0 since the property name is inline public void setInline(String inlineType) { setApiImplementationType(inlineType); } public String isAllowCredentials() { return allowCredentials; } public void setAllowCredentials(String allowCredentials) { this.allowCredentialsEnabled = Boolean.parseBoolean(allowCredentials); this.allowCredentials = allowCredentials; } public String getAllowedMethods() { return allowedMethods; } public void setAllowedMethods(String allowedMethods) { this.allowedMethods = allowedMethods; } }
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/pubsub/v1beta2/pubsub.proto
//
// NOTE(review): protoc output -- comments below were added for readability
// only; regenerate from the .proto instead of hand-editing this file.

package com.google.pubsub.v1beta2;

/**
 * <pre>
 * Request for the ListSubscriptions method.
 * </pre>
 *
 * Protobuf type {@code google.pubsub.v1beta2.ListSubscriptionsRequest}
 */
public final class ListSubscriptionsRequest extends
    com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:google.pubsub.v1beta2.ListSubscriptionsRequest)
    ListSubscriptionsRequestOrBuilder {
  // Use ListSubscriptionsRequest.newBuilder() to construct.
  private ListSubscriptionsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Proto3 defaults: empty strings, zero page size.
  private ListSubscriptionsRequest() {
    project_ = "";
    pageSize_ = 0;
    pageToken_ = "";
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
  getUnknownFields() {
    return com.google.protobuf.UnknownFieldSet.getDefaultInstance();
  }

  // Wire-format parsing constructor: consumes tag/value pairs until tag 0
  // (end of stream); unknown fields are skipped.
  private ListSubscriptionsRequest(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    int mutable_bitField0_ = 0;
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          default: {
            if (!input.skipField(tag)) {
              done = true;
            }
            break;
          }
          case 10: {
            java.lang.String s = input.readStringRequireUtf8();
            project_ = s;
            break;
          }
          case 16: {
            pageSize_ = input.readInt32();
            break;
          }
          case 26: {
            java.lang.String s = input.readStringRequireUtf8();
            pageToken_ = s;
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.pubsub.v1beta2.PubsubProto.internal_static_google_pubsub_v1beta2_ListSubscriptionsRequest_descriptor;
  }

  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.pubsub.v1beta2.PubsubProto.internal_static_google_pubsub_v1beta2_ListSubscriptionsRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.pubsub.v1beta2.ListSubscriptionsRequest.class, com.google.pubsub.v1beta2.ListSubscriptionsRequest.Builder.class);
  }

  public static final int PROJECT_FIELD_NUMBER = 1;
  // Holds either a String or a ByteString; lazily converted on first access.
  private volatile java.lang.Object project_;
  /**
   * <pre>
   * The name of the cloud project that subscriptions belong to.
   * </pre>
   *
   * <code>optional string project = 1;</code>
   */
  public java.lang.String getProject() {
    java.lang.Object ref = project_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      project_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * The name of the cloud project that subscriptions belong to.
   * </pre>
   *
   * <code>optional string project = 1;</code>
   */
  public com.google.protobuf.ByteString
      getProjectBytes() {
    java.lang.Object ref = project_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      project_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int PAGE_SIZE_FIELD_NUMBER = 2;
  private int pageSize_;
  /**
   * <pre>
   * Maximum number of subscriptions to return.
   * </pre>
   *
   * <code>optional int32 page_size = 2;</code>
   */
  public int getPageSize() {
    return pageSize_;
  }

  public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
  private volatile java.lang.Object pageToken_;
  /**
   * <pre>
   * The value returned by the last ListSubscriptionsResponse; indicates that
   * this is a continuation of a prior ListSubscriptions call, and that the
   * system should return the next page of data.
   * </pre>
   *
   * <code>optional string page_token = 3;</code>
   */
  public java.lang.String getPageToken() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      pageToken_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * The value returned by the last ListSubscriptionsResponse; indicates that
   * this is a continuation of a prior ListSubscriptions call, and that the
   * system should return the next page of data.
   * </pre>
   *
   * <code>optional string page_token = 3;</code>
   */
  public com.google.protobuf.ByteString
      getPageTokenBytes() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      pageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  // -1 = unknown, 0 = not initialized, 1 = initialized (memoized).
  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  // Fields equal to their proto3 default are not written to the wire.
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    if (!getProjectBytes().isEmpty()) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, project_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(2, pageSize_);
    }
    if (!getPageTokenBytes().isEmpty()) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
    }
  }

  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!getProjectBytes().isEmpty()) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, project_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream
        .computeInt32Size(2, pageSize_);
    }
    if (!getPageTokenBytes().isEmpty()) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
    }
    memoizedSize = size;
    return size;
  }

  private static final long serialVersionUID = 0L;
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.pubsub.v1beta2.ListSubscriptionsRequest)) {
      return super.equals(obj);
    }
    com.google.pubsub.v1beta2.ListSubscriptionsRequest other = (com.google.pubsub.v1beta2.ListSubscriptionsRequest) obj;

    boolean result = true;
    result = result && getProject()
        .equals(other.getProject());
    result = result && (getPageSize()
        == other.getPageSize());
    result = result && getPageToken()
        .equals(other.getPageToken());
    return result;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptorForType().hashCode();
    hash = (37 * hash) + PROJECT_FIELD_NUMBER;
    hash = (53 * hash) + getProject().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.pubsub.v1beta2.ListSubscriptionsRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.pubsub.v1beta2.ListSubscriptionsRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.pubsub.v1beta2.ListSubscriptionsRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.pubsub.v1beta2.ListSubscriptionsRequest parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.pubsub.v1beta2.ListSubscriptionsRequest parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.pubsub.v1beta2.ListSubscriptionsRequest parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.pubsub.v1beta2.ListSubscriptionsRequest parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.pubsub.v1beta2.ListSubscriptionsRequest parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.pubsub.v1beta2.ListSubscriptionsRequest parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.pubsub.v1beta2.ListSubscriptionsRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.pubsub.v1beta2.ListSubscriptionsRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Request for the ListSubscriptions method.
   * </pre>
   *
   * Protobuf type {@code google.pubsub.v1beta2.ListSubscriptionsRequest}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.pubsub.v1beta2.ListSubscriptionsRequest)
      com.google.pubsub.v1beta2.ListSubscriptionsRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.pubsub.v1beta2.PubsubProto.internal_static_google_pubsub_v1beta2_ListSubscriptionsRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.pubsub.v1beta2.PubsubProto.internal_static_google_pubsub_v1beta2_ListSubscriptionsRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.pubsub.v1beta2.ListSubscriptionsRequest.class, com.google.pubsub.v1beta2.ListSubscriptionsRequest.Builder.class);
    }

    // Construct using com.google.pubsub.v1beta2.ListSubscriptionsRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
      }
    }
    public Builder clear() {
      super.clear();
      project_ = "";

      pageSize_ = 0;

      pageToken_ = "";

      return this;
    }

    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.pubsub.v1beta2.PubsubProto.internal_static_google_pubsub_v1beta2_ListSubscriptionsRequest_descriptor;
    }

    public com.google.pubsub.v1beta2.ListSubscriptionsRequest getDefaultInstanceForType() {
      return com.google.pubsub.v1beta2.ListSubscriptionsRequest.getDefaultInstance();
    }

    public com.google.pubsub.v1beta2.ListSubscriptionsRequest build() {
      com.google.pubsub.v1beta2.ListSubscriptionsRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    public com.google.pubsub.v1beta2.ListSubscriptionsRequest buildPartial() {
      com.google.pubsub.v1beta2.ListSubscriptionsRequest result = new com.google.pubsub.v1beta2.ListSubscriptionsRequest(this);
      result.project_ = project_;
      result.pageSize_ = pageSize_;
      result.pageToken_ = pageToken_;
      onBuilt();
      return result;
    }

    public Builder clone() {
      return (Builder) super.clone();
    }
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        Object value) {
      return (Builder) super.setField(field, value);
    }
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return (Builder) super.clearField(field);
    }
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return (Builder) super.clearOneof(oneof);
    }
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, Object value) {
      return (Builder) super.setRepeatedField(field, index, value);
    }
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        Object value) {
      return (Builder) super.addRepeatedField(field, value);
    }
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.pubsub.v1beta2.ListSubscriptionsRequest) {
        return mergeFrom((com.google.pubsub.v1beta2.ListSubscriptionsRequest)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Non-default fields of `other` overwrite this builder's values.
    public Builder mergeFrom(com.google.pubsub.v1beta2.ListSubscriptionsRequest other) {
      if (other == com.google.pubsub.v1beta2.ListSubscriptionsRequest.getDefaultInstance()) return this;
      if (!other.getProject().isEmpty()) {
        project_ = other.project_;
        onChanged();
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        onChanged();
      }
      onChanged();
      return this;
    }

    public final boolean isInitialized() {
      return true;
    }

    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.pubsub.v1beta2.ListSubscriptionsRequest parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (com.google.pubsub.v1beta2.ListSubscriptionsRequest) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private java.lang.Object project_ = "";
    /**
     * <pre>
     * The name of the cloud project that subscriptions belong to.
     * </pre>
     *
     * <code>optional string project = 1;</code>
     */
    public java.lang.String getProject() {
      java.lang.Object ref = project_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        project_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * The name of the cloud project that subscriptions belong to.
     * </pre>
     *
     * <code>optional string project = 1;</code>
     */
    public com.google.protobuf.ByteString
        getProjectBytes() {
      java.lang.Object ref = project_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        project_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * The name of the cloud project that subscriptions belong to.
     * </pre>
     *
     * <code>optional string project = 1;</code>
     */
    public Builder setProject(
        java.lang.String value) {
      if (value == null) {
    throw new NullPointerException();
  }

      project_ = value;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The name of the cloud project that subscriptions belong to.
     * </pre>
     *
     * <code>optional string project = 1;</code>
     */
    public Builder clearProject() {

      project_ = getDefaultInstance().getProject();
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The name of the cloud project that subscriptions belong to.
     * </pre>
     *
     * <code>optional string project = 1;</code>
     */
    public Builder setProjectBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);

      project_ = value;
      onChanged();
      return this;
    }

    private int pageSize_ ;
    /**
     * <pre>
     * Maximum number of subscriptions to return.
     * </pre>
     *
     * <code>optional int32 page_size = 2;</code>
     */
    public int getPageSize() {
      return pageSize_;
    }
    /**
     * <pre>
     * Maximum number of subscriptions to return.
     * </pre>
     *
     * <code>optional int32 page_size = 2;</code>
     */
    public Builder setPageSize(int value) {

      pageSize_ = value;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Maximum number of subscriptions to return.
     * </pre>
     *
     * <code>optional int32 page_size = 2;</code>
     */
    public Builder clearPageSize() {

      pageSize_ = 0;
      onChanged();
      return this;
    }

    private java.lang.Object pageToken_ = "";
    /**
     * <pre>
     * The value returned by the last ListSubscriptionsResponse; indicates that
     * this is a continuation of a prior ListSubscriptions call, and that the
     * system should return the next page of data.
     * </pre>
     *
     * <code>optional string page_token = 3;</code>
     */
    public java.lang.String getPageToken() {
      java.lang.Object ref = pageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        pageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * The value returned by the last ListSubscriptionsResponse; indicates that
     * this is a continuation of a prior ListSubscriptions call, and that the
     * system should return the next page of data.
     * </pre>
     *
     * <code>optional string page_token = 3;</code>
     */
    public com.google.protobuf.ByteString
        getPageTokenBytes() {
      java.lang.Object ref = pageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        pageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * The value returned by the last ListSubscriptionsResponse; indicates that
     * this is a continuation of a prior ListSubscriptions call, and that the
     * system should return the next page of data.
     * </pre>
     *
     * <code>optional string page_token = 3;</code>
     */
    public Builder setPageToken(
        java.lang.String value) {
      if (value == null) {
    throw new NullPointerException();
  }

      pageToken_ = value;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The value returned by the last ListSubscriptionsResponse; indicates that
     * this is a continuation of a prior ListSubscriptions call, and that the
     * system should return the next page of data.
     * </pre>
     *
     * <code>optional string page_token = 3;</code>
     */
    public Builder clearPageToken() {

      pageToken_ = getDefaultInstance().getPageToken();
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The value returned by the last ListSubscriptionsResponse; indicates that
     * this is a continuation of a prior ListSubscriptions call, and that the
     * system should return the next page of data.
     * </pre>
     *
     * <code>optional string page_token = 3;</code>
     */
    public Builder setPageTokenBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);

      pageToken_ = value;
      onChanged();
      return this;
    }
    // Proto3 messages discard unknown fields, hence the no-op overrides.
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return this;
    }

    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return this;
    }


    // @@protoc_insertion_point(builder_scope:google.pubsub.v1beta2.ListSubscriptionsRequest)
  }

  // @@protoc_insertion_point(class_scope:google.pubsub.v1beta2.ListSubscriptionsRequest)
  private static final com.google.pubsub.v1beta2.ListSubscriptionsRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.pubsub.v1beta2.ListSubscriptionsRequest();
  }

  public static com.google.pubsub.v1beta2.ListSubscriptionsRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<ListSubscriptionsRequest>
      PARSER = new com.google.protobuf.AbstractParser<ListSubscriptionsRequest>() {
    public ListSubscriptionsRequest parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
        return new ListSubscriptionsRequest(input, extensionRegistry);
    }
  };

  public static com.google.protobuf.Parser<ListSubscriptionsRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListSubscriptionsRequest> getParserForType() {
    return PARSER;
  }

  public com.google.pubsub.v1beta2.ListSubscriptionsRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.wss4j.common.saml.bean;

import java.util.ArrayList;
import java.util.List;
import java.util.Objects;

/**
 * Represents the raw data used by the <code>SamlAssertionWrapper</code> when
 * creating SAML Authorization Decision Statements.
 */
public class AuthDecisionStatementBean {

    /**
     * The SAML subject
     */
    private SubjectBean subject;

    /**
     * enum representing the possible decision types as specified in the SAML spec
     */
    public enum Decision {PERMIT, INDETERMINATE, DENY}

    /**
     * The decision rendered by the SAML authority with respect to the specified resource
     */
    private Decision decision;

    /**
     * A URI reference identifying the resource to which access authorization is sought
     */
    private String resource;

    /**
     * The set of actions authorized to be performed on the specified resource (one or more)
     */
    private List<ActionBean> actionBeans;

    /**
     * A set of assertions that the SAML authority relied on in making the decision (optional)
     */
    private Object evidence;

    /**
     * Creates an empty AuthDecisionStatementBean with a mutable, empty action list.
     */
    public AuthDecisionStatementBean() {
        actionBeans = new ArrayList<>();
    }

    /**
     * Creates a fully-populated AuthDecisionStatementBean.
     *
     * @param decision    of type Decision
     * @param resource    of type String
     * @param subject     of type SubjectBean
     * @param evidence    of type Object
     * @param actionBeans of type List&lt;ActionBean&gt;
     */
    public AuthDecisionStatementBean(
        Decision decision,
        String resource,
        SubjectBean subject,
        Object evidence,
        List<ActionBean> actionBeans
    ) {
        this.decision = decision;
        this.resource = resource;
        this.subject = subject;
        this.evidence = evidence;
        this.actionBeans = actionBeans;
    }

    /**
     * @return the URI reference identifying the resource (may be null)
     */
    public String getResource() {
        return resource;
    }

    /**
     * @param resource the resource URI reference to set
     */
    public void setResource(String resource) {
        this.resource = resource;
    }

    /**
     * @return the authorized actions (may be null when set explicitly)
     */
    public List<ActionBean> getActions() {
        return actionBeans;
    }

    /**
     * @param actionBeans the authorized actions to set
     */
    public void setActions(List<ActionBean> actionBeans) {
        this.actionBeans = actionBeans;
    }

    /**
     * @return the decision rendered by the SAML authority (may be null)
     */
    public Decision getDecision() {
        return decision;
    }

    /**
     * @param decision the decision to set
     */
    public void setDecision(Decision decision) {
        this.decision = decision;
    }

    /**
     * @return the evidence the authority relied on (may be null)
     */
    public Object getEvidence() {
        return evidence;
    }

    /**
     * @param evidence the evidence to set
     */
    public void setEvidence(Object evidence) {
        this.evidence = evidence;
    }

    /**
     * Get the Subject
     * @return the Subject
     */
    public SubjectBean getSubject() {
        return subject;
    }

    /**
     * Set the Subject
     * @param subject the SubjectBean instance to set
     */
    public void setSubject(SubjectBean subject) {
        this.subject = subject;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof AuthDecisionStatementBean)) {
            return false;
        }
        AuthDecisionStatementBean that = (AuthDecisionStatementBean) o;
        // Objects.equals replaces the previous hand-rolled null checks;
        // Decision is an enum, so identity comparison is sufficient.
        return Objects.equals(subject, that.subject)
            && decision == that.decision
            && Objects.equals(evidence, that.evidence)
            && Objects.equals(actionBeans, that.actionBeans)
            && Objects.equals(resource, that.resource);
    }

    @Override
    public int hashCode() {
        // Consistent with equals(): same fields, same order.
        return Objects.hash(subject, decision, evidence, actionBeans, resource);
    }
}
package testBenchmarks; import java.io.ByteArrayOutputStream; import java.io.FileDescriptor; import java.io.FileOutputStream; import java.io.PrintStream; import java.util.*; import static org.junit.Assert.*; import java.io.FileReader; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.List; import org.junit.Test; import LTLparser.LTLNode; import LTLparser.LTLParserProvider; import RegexParser.RegexParserProvider; import RegexParser.RegexNode; import TreeAutomataParser.TreeNode; import TreeAutomataParser.TreeParserProvider; import MonaParser.MonaParserProvider; /* * This class parses and removes duplicate benchmarks, * The parser for regular expression currently does not support non regular operators * The MONA predicate features is not supported by the mona parser, thus the Regsy and STRAND set in WS1S are not tested here. * The above features will be implemented in the near future. * */ public class TestAllBenchmarks { @Test public void testLTL() { //redirect and gather error message to String for easier debugging ByteArrayOutputStream errMsgs = new ByteArrayOutputStream(); System.setErr(new PrintStream(errMsgs)); boolean noFail = true; HashSet<String> set = new HashSet<>(); //this array will store duplicate LTLs ArrayList<String> dupFile = new ArrayList<>(); try { Files.walk(Paths.get("../LTL/")).forEach(filePath -> { if (Files.isRegularFile(filePath) && (filePath.toString().endsWith(".ltl"))) { try { FileReader file = new FileReader(filePath.toFile()); List<LTLNode> nodes = LTLParserProvider.parse(file); for (LTLNode node : nodes) { String cur = node.toString(); if (set.contains(cur)) { dupFile.add(cur); } else { set.add(cur); } } System.out.println(filePath); } catch (Exception e1) { e1.printStackTrace(); } } }); } catch (Exception e) { e.printStackTrace(); noFail = false; }finally { //redirect system.err back System.setErr(new PrintStream(new FileOutputStream(FileDescriptor.err))); } if (dupFile.isEmpty()) { 
System.out.println("No duplicate formulas"); } else { System.err.println("Found duplicate formulas"); for (String s : dupFile) { System.out.println(s); } } assertTrue(noFail); } @Test public void testRegex() { ByteArrayOutputStream errMsgs = new ByteArrayOutputStream(); System.setErr(new PrintStream(errMsgs)); boolean noFail = true; HashSet<String> set = new HashSet<>(); ArrayList<String> dupFile = new ArrayList<>(); try { Files.walk(Paths.get("../regex/")).forEach(filePath -> { if (Files.isRegularFile(filePath) && (filePath.toString().endsWith(".re"))) { try { FileReader file = new FileReader(filePath.toFile()); List<RegexNode> nodes = RegexParserProvider.parse(file); for (RegexNode node : nodes) { String cur = node.toString(); if (set.contains(cur)) { dupFile.add(cur); } else { set.add(cur); } } System.out.println(filePath); } catch (Exception e1) { e1.printStackTrace(); } } }); } catch (Exception e) { e.printStackTrace(); noFail = false; } finally { System.setErr(new PrintStream(new FileOutputStream(FileDescriptor.err))); } if (dupFile.isEmpty()) { System.out.println("No duplicate formulas"); } else { System.err.println("Found duplicate formulas"); for (String s : dupFile) { System.out.println(s); } } String errors = errMsgs.toString(); System.err.println(errors); assertTrue(noFail); } @Test public void testNFA() { ByteArrayOutputStream errMsgs = new ByteArrayOutputStream(); System.setErr(new PrintStream(errMsgs)); boolean noFail = true; HashSet<String> set = new HashSet<>(); ArrayList<String> dupFilePath = new ArrayList<>(); try { Files.walk(Paths.get("../NFA/")).forEach(filePath -> { if (Files.isRegularFile(filePath) && (filePath.toString().endsWith(".timbuk"))) { try { FileReader file = new FileReader(filePath.toFile()); System.out.println(filePath); TreeNode node = TreeParserProvider.parse(file); System.out.println(filePath + " parsed correctly"); String cur = node.toString(); if (set.contains(cur)) { dupFilePath.add(filePath.toString()); } else { 
set.add(cur); } } catch (Exception e1) { System.err.println(filePath); e1.printStackTrace(); } } }); } catch (Exception e) { e.printStackTrace(); noFail = false; }finally { System.setErr(new PrintStream(new FileOutputStream(FileDescriptor.err))); } if (dupFilePath.isEmpty()) { System.out.println("No duplicate formulas"); } else { System.err.println("Found duplicate formulas"); for (String s : dupFilePath) { System.err.println("Duplicate found at: "+ s); Path p = Paths.get(s); try{ Files.delete(p); System.out.println("deleted "+ s); }catch(Exception e){ System.out.println("Error deleting " +s +": "+ e); } } } String errors = errMsgs.toString(); if(errors.length()>0){ System.err.println("Found errors!"); System.err.println(errors); } assertTrue(noFail); } @Test public void testTreeAutomata() { ByteArrayOutputStream errMsgs = new ByteArrayOutputStream(); System.setErr(new PrintStream(errMsgs)); boolean noFail = true; HashSet<String> set = new HashSet<>(); ArrayList<String> dupFilePath = new ArrayList<>(); try { Files.walk(Paths.get("../tree-automata/")).forEach(filePath -> { if (Files.isRegularFile(filePath) && (filePath.toString().endsWith(".timbuk"))) { try { FileReader file = new FileReader(filePath.toFile()); System.out.println(filePath); TreeNode node = TreeParserProvider.parse(file); System.out.println(filePath + "parsed correctly"); String cur = node.toString(); if (set.contains(cur)) { dupFilePath.add(filePath.toString()); } else { set.add(cur); } } catch (Exception e1) { System.err.println(filePath); e1.printStackTrace(); } } }); } catch (Exception e) { e.printStackTrace(); System.err.println(e); noFail = false; }finally { System.setErr(new PrintStream(new FileOutputStream(FileDescriptor.err))); } if (dupFilePath.isEmpty()) { System.out.println("No duplicate formulas"); } else { System.err.println("Found duplicate formulas"); for (String s : dupFilePath) { System.err.println("Duplicate found at: "+ s); Path p = Paths.get(s); try{ Files.delete(p); 
System.out.println("deleted "+ s); }catch(Exception e){ System.out.println("Error deleting " +s +": "+ e); } } } String errors = errMsgs.toString(); if(errors.length()>0){ System.err.println("Found errors!"); System.err.println(errors); } assertTrue(noFail); } @Test public void testM2L() { //redirect and gather error message to String for easier debugging ByteArrayOutputStream errMsgs = new ByteArrayOutputStream(); System.setErr(new PrintStream(errMsgs)); boolean noFail = true; try { Files.walk(Paths.get("../m2l-str/")).forEach(filePath -> { if (Files.isRegularFile(filePath) && (filePath.toString().endsWith(".mona"))) { try { FileReader file = new FileReader(filePath.toFile()); MonaParserProvider monaProvider= new MonaParserProvider(file); System.out.println("parsing "+ filePath); monaProvider.parseFormula(); System.out.println("successfully parsed "+ filePath); } catch (Exception e1) { e1.printStackTrace(); } } }); } catch (Exception e) { e.printStackTrace(); noFail = false; }finally { //redirect system.err back System.setErr(new PrintStream(new FileOutputStream(FileDescriptor.err))); } String errors = errMsgs.toString(); if(errors.length()>0){ System.err.println("Found errors!"); System.err.println(errors); } assertTrue(noFail); } }
package view;

import java.awt.BorderLayout;
import java.awt.Dimension;
import java.awt.Rectangle;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.io.IOException;
import java.util.ArrayList;
import java.util.concurrent.TimeUnit;

import javax.swing.JButton;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTextField;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import controller.client.Broadcaster;
import controller.server.Server;
import model.InformationServer;
import model.Language;

/**
 * Window is the main frame of the application
 *
 * @author Afkid
 */
public class Window extends JFrame {

    private static final Logger log = LogManager.getLogger();

    // Top-bar widgets: alias text field, its label, and the "create group" button.
    private JButton buttonCreate = new JButton();
    private JPanel panelTop = new JPanel();
    private BorderLayout layout = new BorderLayout();
    private JScrollPane scrollList = new JScrollPane();
    private JTextField fieldAlias = new JTextField();
    private JLabel labelAlias = new JLabel();

    // Default maximum number of clients for a group created from this window.
    private int size = 25;
    // "public" or "private", as chosen in the server-creation dialog.
    private String serverType;
    // Listens for group announcements and carries password/acceptance exchanges.
    private Broadcaster broadcast;
    private ServerList serverList;
    private int selectedIndex;
    // Tabbed discussions window; created lazily on the first connection.
    private DiscussionsWindow groups;
    // The server hosted by this window, if the user created a group here.
    private Server server;
    // True until the first group tab has been opened.
    private boolean noGroup = true;
    private Language language;

    /**
     * Constructor of window, depends on the language
     *
     * @param lang language used for all user-visible labels and messages
     */
    public Window(Language lang) {
        this.language = lang;
        this.setSize(new Dimension(400, 600));
        serverList = new ServerList(this, language);
        broadcast = new Broadcaster(this, serverList);
        // The broadcaster runs on its own thread for the lifetime of the window.
        Thread t = new Thread(broadcast);
        log.info("Window created");
        t.start();
        this.setLayout(layout);
        this.add(panelTop, BorderLayout.NORTH);
        panelTop.add(labelAlias, null);
        panelTop.add(fieldAlias, null);
        panelTop.add(buttonCreate, null);
        JPanel panel = new JPanel();
        panel.setLayout(new BorderLayout());
        panel.add(scrollList);
        this.add(panel, BorderLayout.CENTER);
        scrollList.setViewportView(serverList);
        fieldAlias.setColumns(15);
        labelAlias.setText("pseudo :");
        scrollList.setBounds(new Rectangle(84, 5, 25, 130));
        buttonCreate.setText(language.getValue("CREATE_GROUP"));
        buttonCreate.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                boutonCreateActionPerformed(e);
            }
        });
        this.addWindowListener(new WindowAdapter() {
            public void windowClosing(WindowEvent e) {
                windowClosingEvent(e);
            }
        });
    }

    /**
     * Verifies a connection to the given group: rejects full groups, then
     * joins directly (public) or after a login/password exchange (private).
     *
     * @param infos descriptor of the group the user selected
     */
    public void verifyConnection(InformationServer infos) {
        log.info("Verification of the connection to: " + infos.getName());
        if (infos.getClients() == infos.getClientsMax()) {
            // The group is already at capacity; inform the user and stop.
            log.info("Group " + infos.getName() + " is full");
            JOptionPane.showMessageDialog(null, language.getValue("GROUP_FULL"),
                    language.getValue("NO_PLACE"), JOptionPane.ERROR_MESSAGE);
        } else if (infos.getType().equals("public")) {
            if (fieldAlias.getText().length() != 0) {
                // An alias is already typed in; connect with it directly.
                log.info("Connection to public group: " + infos.getName());
                Connection(fieldAlias.getText(), infos);
            } else {
                String pseudo = "";
                try {
                    // Keep prompting until a non-empty alias is entered.
                    // showInputDialog returns null when the dialog is cancelled,
                    // which makes pseudo.equals("") throw; the catch below
                    // treats cancellation as "no alias".
                    while (pseudo.equals("")) {
                        pseudo = JOptionPane.showInputDialog(null,
                                language.getValue("ALIAS_QUESTION"),
                                language.getValue("INDETIFICATION"),
                                JOptionPane.INFORMATION_MESSAGE);
                    }
                } catch (NullPointerException e) {
                    pseudo = "";
                }
                // Aliases are capped at 30 characters.
                if (pseudo.length() > 30) {
                    pseudo = pseudo.substring(0, 30);
                }
                if (!pseudo.equals("")) {
                    fieldAlias.setText(pseudo);
                    log.info("Connection to public group: " + infos.getName());
                    Connection(pseudo, infos);
                }
            }
        } else if (infos.getType().equals("private")) {
            String pseudo = fieldAlias.getText();
            // arret ("stop") ends the retry loop on success or cancel.
            boolean arret = false;
            while (!arret) {
                for (;;) {
                    // Show the login dialog pre-filled with the current alias.
                    LoginPanel panel = new LoginPanel(pseudo, language);
                    int n = JOptionPane.showConfirmDialog(null, panel,
                            language.getValue("LOGIN"),
                            JOptionPane.OK_CANCEL_OPTION,
                            JOptionPane.PLAIN_MESSAGE, null);
                    if (n == JOptionPane.OK_OPTION) {
                        try {
                            pseudo = panel.getUserName();
                            if (!pseudo.equals("")) {
                                // Send the password and give the server a short
                                // window (100 ms) to answer before checking
                                // acceptance. NOTE(review): this is a timing
                                // assumption, not a handshake — confirm with
                                // Broadcaster's protocol.
                                broadcast.sendPassword(panel.getPassword());
                                TimeUnit.MILLISECONDS.sleep(100);
                                if (broadcast.isAccepted() == true) {
                                    log.info("Connection to private group: " + infos.getName());
                                    Connection(pseudo, infos);
                                    fieldAlias.setText(pseudo);
                                    arret = true;
                                    break;
                                }
                                // Password rejected: keep the alias and re-show
                                // the login dialog.
                                fieldAlias.setText(pseudo);
                            }
                        } catch (InterruptedException e) {
                            log.error("Error on connecting to a private group");
                            showErrorMessage(language.getValue("ERROR_OCCURED"),
                                    language.getValue("Error"));
                        }
                    } else {
                        // User cancelled the login dialog.
                        arret = true;
                        break;
                    }
                }
            }
            fieldAlias.setText(pseudo);
        }
    }

    /**
     * Opens a chat tab for the given group, creating the discussions window
     * on the first connection.
     */
    private void Connection(String pseudo, InformationServer infos) {
        broadcast.setAcceptedConnection(true);
        if (noGroup) {
            // First connection ever: create the tabbed discussions window.
            groups = new DiscussionsWindow(this, language);
            noGroup = false;
            AllChat od = new AllChat(pseudo, infos, groups, server, language);
            groups.addTab(od);
        } else {
            AllChat od = new AllChat(pseudo, infos, groups, server, language);
            groups.addTab(od);
        }
    }

    /**
     * Handles the "create group" button: collects group name, alias, size and
     * type, starts a local Server thread and joins it.
     */
    private void boutonCreateActionPerformed(ActionEvent e) {
        CreateServerPanel panel = new CreateServerPanel(language);
        int n = JOptionPane.showConfirmDialog(null, panel,
                language.getValue("NEW_GROUP"),
                JOptionPane.OK_CANCEL_OPTION, JOptionPane.PLAIN_MESSAGE, null);
        if (n == JOptionPane.OK_OPTION) {
            serverType = panel.getServerType();
            try {
                // Re-prompt until a non-empty group name is given; a cancelled
                // dialog returns null and the NPE is handled by the outer catch.
                String nameS = panel.getServerName();
                while (nameS.equals("")) {
                    nameS = JOptionPane.showInputDialog(null,
                            language.getValue("CHOOSE_NAME_GROUP"),
                            language.getValue("NAME_GROUP"),
                            JOptionPane.WARNING_MESSAGE);
                }
                String name;
                name = fieldAlias.getText();
                boolean connected = false;
                while (name.equals("")) {
                    name = JOptionPane.showInputDialog(null,
                            language.getValue("ALIAS_QUESTION"),
                            language.getValue("IDENTIFICATION"),
                            JOptionPane.OK_CANCEL_OPTION);
                }
                if (!name.equals("")) {
                    try {
                        size = panel.getServerSize();
                        server = new Server(nameS, name, size, serverType, panel.getPassword());
                        Thread t = new Thread(server);
                        t.start();
                        // Only one hosted group per window: disable the button.
                        buttonCreate.setEnabled(false);
                        InformationServer info = server.getInfos();
                        connected = true;
                        log.info("Connection to group: " + info.getName());
                        Connection(name, info);
                    } catch (IOException i) {
                        // NOTE(review): the button is left disabled on failure,
                        // so the user cannot retry creating a group — confirm
                        // whether setEnabled(true) was intended here.
                        buttonCreate.setEnabled(false);
                        log.error("Error creating a server");
                        showErrorMessage(language.getValue("ERROR_GROUP"),
                                language.getValue("ERROR"));
                    }
                }
                fieldAlias.setText(name);
                // NOTE(review): this truncation happens after the alias was
                // already used/stored above, so it only affects the local copy.
                if (name.length() > 30) {
                    name = name.substring(0, 30);
                }
            } catch (NullPointerException npe) {
                // A cancelled input dialog returned null (see loops above).
                log.error("Error creating a server");
                showErrorMessage(language.getValue("ERROR_OCCURED"),
                        language.getValue("ERROR"));
            }
        }
    }

    /**
     * Cleanly leaves every joined group and stops any hosted server before
     * terminating the application.
     */
    private void windowClosingEvent(WindowEvent e) {
        if (!noGroup) {
            ArrayList<AllChat> tabOnglets = groups.getTabDiscussions();
            for (int i = 0; i < tabOnglets.size(); i++) {
                tabOnglets.get(i).quit();
                if (tabOnglets.get(i) != null) {
                    if (tabOnglets.get(i).getServer() != null) {
                        tabOnglets.get(i).getServer().stopServer();
                    }
                }
            }
        }
        System.exit(0);
    }

    /**
     * Shows an error message
     * @param txt message body
     * @param titre dialog title
     */
    public void showErrorMessage(String txt, String titre) {
        JOptionPane.showMessageDialog(null, txt, titre, JOptionPane.ERROR_MESSAGE);
    }

    /**
     * @return the create button
     */
    public JButton getCreateButton() {
        return buttonCreate;
    }

    /**
     * @return the selected index
     */
    public int getSelectedIndex() {
        return selectedIndex;
    }

    /**
     * Clear the groups
     */
    public void clearGroups() {
        noGroup = true;
    }
}
/*
 *
 * Copyright (c) 2015 Caricah <info@caricah.com>.
 *
 * Caricah licenses this file to you under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License. You may obtain a copy
 * of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under
 * the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
 * OF ANY KIND, either express or implied. See the License for the specific language
 * governing permissions and limitations under the License.
 *
 *
 *
 */

package com.caricah.iotracah.core.worker;

import com.caricah.iotracah.bootstrap.data.messages.*;
import com.caricah.iotracah.bootstrap.data.messages.base.IOTMessage;
import com.caricah.iotracah.bootstrap.data.models.subscriptions.IotSubscription;
import com.caricah.iotracah.bootstrap.exceptions.RetriableException;
import com.caricah.iotracah.bootstrap.exceptions.UnRetriableException;
import com.caricah.iotracah.bootstrap.security.realm.state.IOTClient;
import com.caricah.iotracah.core.handlers.*;
import com.caricah.iotracah.core.modules.Worker;
import com.caricah.iotracah.core.security.DefaultSecurityHandler;
import com.caricah.iotracah.core.worker.exceptions.ShutdownException;
import com.caricah.iotracah.core.worker.state.SessionResetManager;
import com.mashape.unirest.http.Unirest;
import org.apache.commons.configuration.Configuration;
import org.apache.shiro.session.Session;
import rx.Observable;

import java.io.IOException;

/**
 * Default {@link Worker} implementation: registers the MQTT message handlers
 * and routes each incoming {@link IOTMessage} to the matching handler.
 *
 * @author <a href="mailto:bwire@caricah.com"> Peter Bwire </a>
 * @version 1.0 8/15/15
 */
public class DefaultWorker extends Worker {

    /**
     * <code>configure</code> allows the base system to configure itself by getting
     * all the settings it requires and storing them internally. The plugin is only expected to
     * pick the settings it has registered on the configuration file for its particular use.
     *
     * @param configuration source of the worker settings
     * @throws UnRetriableException if configuration cannot be applied
     */
    @Override
    public void configure(Configuration configuration) throws UnRetriableException {

        boolean configAnnoymousLoginEnabled = configuration.getBoolean(
                CORE_CONFIG_WORKER_ANNONYMOUS_LOGIN_ENABLED,
                CORE_CONFIG_WORKER_ANNONYMOUS_LOGIN_ENABLED_DEFAULT_VALUE);
        // Fixed misspelling "Anonnymous" in the diagnostic messages below.
        log.debug(" configure : Anonymous login is configured to be enabled [{}]",
                configAnnoymousLoginEnabled);
        setAnnonymousLoginEnabled(configAnnoymousLoginEnabled);

        String configAnnoymousLoginUsername = configuration.getString(
                CORE_CONFIG_WORKER_ANNONYMOUS_LOGIN_USERNAME,
                CORE_CONFIG_ENGINE_WORKER_ANNONYMOUS_LOGIN_USERNAME_DEFAULT_VALUE);
        log.debug(" configure : Anonymous login username is configured to be [{}]",
                configAnnoymousLoginUsername);
        setAnnonymousLoginUsername(configAnnoymousLoginUsername);

        String configAnnoymousLoginPassword = configuration.getString(
                CORE_CONFIG_WORKER_ANNONYMOUS_LOGIN_PASSWORD,
                CORE_CONFIG_ENGINE_WORKER_ANNONYMOUS_LOGIN_PASSWORD_DEFAULT_VALUE);
        // NOTE(review): this logs a credential (even if an anonymous one) at
        // debug level — consider masking it.
        log.debug(" configure : Anonymous login password is configured to be [{}]",
                configAnnoymousLoginPassword);
        setAnnonymousLoginPassword(configAnnoymousLoginPassword);

        int keepaliveInSeconds = configuration.getInt(
                CORE_CONFIG_WORKER_CLIENT_KEEP_ALIVE_IN_SECONDS,
                CORE_CONFIG_WORKER_CLIENT_KEEP_ALIVE_IN_SECONDS_DEFAULT_VALUE);
        log.debug(" configure : Keep alive maximum is configured to be [{}]", keepaliveInSeconds);
        setKeepAliveInSeconds(keepaliveInSeconds);

        String defaultPartitionName = configuration.getString(
                DefaultSecurityHandler.CONFIG_SYSTEM_SECURITY_DEFAULT_PARTITION_NAME,
                DefaultSecurityHandler.CONFIG_SYSTEM_SECURITY_DEFAULT_PARTITION_NAME_VALUE_DEFAULT);
        setDefaultPartitionName(defaultPartitionName);
    }

    /**
     * <code>initiate</code> starts the operations of this system handler.
     * All execution code for the plugins is expected to begin at this point.
     *
     * @throws UnRetriableException if the worker cannot start
     */
    @Override
    public void initiate() throws UnRetriableException {

        // Register one handler per MQTT message type.
        addHandler(new ConnectionHandler());
        addHandler(new DisconnectHandler());
        addHandler(new PingRequestHandler());
        addHandler(new PublishAcknowledgeHandler());
        addHandler(new PublishCompleteHandler());
        addHandler(new PublishInHandler());
        addHandler(new PublishOutHandler());
        addHandler(new PublishReceivedHandler());
        addHandler(new PublishReleaseHandler());
        addHandler(new SubscribeHandler());
        addHandler(new UnSubscribeHandler());

        // Initiate the session reset manager.
        SessionResetManager sessionResetManager = new SessionResetManager();
        sessionResetManager.setWorker(this);
        sessionResetManager.setDatastore(this.getDatastore());
        setSessionResetManager(sessionResetManager);

        // Initiate unirest properties (connection and socket timeouts, ms).
        Unirest.setTimeouts(5000, 5000);
    }

    /**
     * <code>terminate</code> halts execution of this plugin.
     * This provides a clean way to exit /stop operations of this particular plugin.
     */
    @Override
    public void terminate() {
        // Shutdown unirest.
        try {
            Unirest.shutdown();
        } catch (IOException e) {
            log.warn(" terminate : problem closing unirest", e);
        }
    }

    /**
     * Provides the Observer with a new item to observe.
     * <p>
     * The {@link com.caricah.iotracah.core.modules.Server} may call this method 0 or more times.
     * <p>
     * The {@code Observable} will not call this method again after it calls either
     * {@link #onCompleted} or {@link #onError}.
     *
     * @param iotMessage the item emitted by the Observable
     */
    @Override
    public void onNext(IOTMessage iotMessage) {

        log.debug(" onNext : received {}", iotMessage);
        try {
            handleReceivedMessage(iotMessage);
        } catch (ShutdownException e) {
            // A handler asked us to drop the client: push any final response,
            // then force a disconnect for the offending transmission.
            IOTMessage response = e.getResponse();
            if (null != response) {
                pushToServer(response);
            }
            try {
                DisconnectMessage disconnectMessage = DisconnectMessage.from(true);
                disconnectMessage.copyTransmissionData(iotMessage);
                getHandler(DisconnectHandler.class).handle(disconnectMessage);
            } catch (RetriableException | UnRetriableException finalEx) {
                log.error(" onNext : Problems disconnecting.", finalEx);
            }
        } catch (Exception e) {
            log.error(" onNext : Serious error that requires attention ", e);
        }
    }

    /**
     * Dispatches a message to the handler registered for its type. Anonymous
     * CONNECTs are rewritten with the configured anonymous credentials, and a
     * missing keep-alive falls back to the configured default.
     *
     * @throws ShutdownException for unknown message types
     */
    private void handleReceivedMessage(IOTMessage iotMessage)
            throws UnRetriableException, RetriableException {

        switch (iotMessage.getMessageType()) {
            case ConnectMessage.MESSAGE_TYPE:
                ConnectMessage connectMessage = (ConnectMessage) iotMessage;
                if (connectMessage.isAnnonymousSession() && isAnnonymousLoginEnabled()) {
                    connectMessage.setUserName(getAnnonymousLoginUsername());
                    connectMessage.setPassword(getAnnonymousLoginPassword());
                }
                if (connectMessage.getKeepAliveTime() <= 0) {
                    connectMessage.setKeepAliveTime(getKeepAliveInSeconds());
                }
                getHandler(ConnectionHandler.class).handle(connectMessage);
                break;
            case SubscribeMessage.MESSAGE_TYPE:
                getHandler(SubscribeHandler.class).handle((SubscribeMessage) iotMessage);
                break;
            case UnSubscribeMessage.MESSAGE_TYPE:
                getHandler(UnSubscribeHandler.class).handle((UnSubscribeMessage) iotMessage);
                break;
            case Ping.MESSAGE_TYPE:
                getHandler(PingRequestHandler.class).handle((Ping) iotMessage);
                break;
            case PublishMessage.MESSAGE_TYPE:
                getHandler(PublishInHandler.class).handle((PublishMessage) iotMessage);
                break;
            case PublishReceivedMessage.MESSAGE_TYPE:
                getHandler(PublishReceivedHandler.class).handle((PublishReceivedMessage) iotMessage);
                break;
            case ReleaseMessage.MESSAGE_TYPE:
                getHandler(PublishReleaseHandler.class).handle((ReleaseMessage) iotMessage);
                break;
            case CompleteMessage.MESSAGE_TYPE:
                getHandler(PublishCompleteHandler.class).handle((CompleteMessage) iotMessage);
                break;
            case DisconnectMessage.MESSAGE_TYPE:
                getHandler(DisconnectHandler.class).handle((DisconnectMessage) iotMessage);
                break;
            case AcknowledgeMessage.MESSAGE_TYPE:
                getHandler(PublishAcknowledgeHandler.class).handle((AcknowledgeMessage) iotMessage);
                break;
            default:
                // Fixed misspelling "propergated" in the exception message.
                throw new ShutdownException("Unknown messages being propagated");
        }
    }

    @Override
    public void onStart(Session session) {

    }

    /** Publishes the client's will message when its session is stopped. */
    @Override
    public void onStop(Session session) {
        publishWill((IOTClient) session);
    }

    @Override
    public void onExpiration(Session session) {

        log.debug(" onExpiration : -----------------------------------------------------");
        log.debug(" onExpiration : ------- We have an expired session {} -------", session);
        log.debug(" onExpiration : -----------------------------------------------------");
    }
}
/*
 * Copyright 2016, The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package <%= appPackage %>.data.source;

import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;

import android.content.Context;

import <%= appPackage %>.data.Task;

import com.google.common.collect.Lists;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;

import java.util.List;

/**
 * Unit tests for the implementation of the in-memory repository with cache.
 */
public class TasksRepositoryTest {

    private static final String TASK_TITLE = "title";

    private static final String TASK_TITLE2 = "title2";

    private static final String TASK_TITLE3 = "title3";

    // final: the reference is shared across tests and must never be reassigned.
    // The list itself stays a mutable ArrayList because it is handed to the
    // repository under test.
    private static final List<Task> TASKS = Lists.newArrayList(
            new Task("Title1", "Description1"), new Task("Title2", "Description2"));

    private TasksRepository mTasksRepository;

    @Mock
    private TasksDataSource mTasksRemoteDataSource;

    @Mock
    private TasksDataSource mTasksLocalDataSource;

    // NOTE(review): not referenced by any test in this class; kept for template
    // variants that may inject a Context.
    @Mock
    private Context mContext;

    @Mock
    private TasksDataSource.GetTaskCallback mGetTaskCallback;

    @Mock
    private TasksDataSource.LoadTasksCallback mLoadTasksCallback;

    /**
     * {@link ArgumentCaptor} is a powerful Mockito API to capture argument values and use them to
     * perform further actions or assertions on them.
     */
    @Captor
    private ArgumentCaptor<TasksDataSource.LoadTasksCallback> mTasksCallbackCaptor;

    /**
     * {@link ArgumentCaptor} is a powerful Mockito API to capture argument values and use them to
     * perform further actions or assertions on them.
     */
    @Captor
    private ArgumentCaptor<TasksDataSource.GetTaskCallback> mTaskCallbackCaptor;

    @Before
    public void setupTasksRepository() {
        // Mockito has a very convenient way to inject mocks by using the @Mock annotation. To
        // inject the mocks in the test the initMocks method needs to be called.
        MockitoAnnotations.initMocks(this);

        // Get a reference to the class under test
        mTasksRepository = TasksRepository.getInstance(
                mTasksRemoteDataSource, mTasksLocalDataSource);
    }

    @After
    public void destroyRepositoryInstance() {
        // The repository is a singleton; reset it so tests stay independent.
        TasksRepository.destroyInstance();
    }

    @Test
    public void getTasks_repositoryCachesAfterFirstApiCall() {
        // Given a setup Captor to capture callbacks
        // When two calls are issued to the tasks repository
        twoTasksLoadCallsToRepository(mLoadTasksCallback);

        // Then tasks were only requested once from Service API
        verify(mTasksRemoteDataSource).getTasks(any(TasksDataSource.LoadTasksCallback.class));
    }

    @Test
    public void getTasks_requestsAllTasksFromLocalDataSource() {
        // When tasks are requested from the tasks repository
        mTasksRepository.getTasks(mLoadTasksCallback);

        // Then tasks are loaded from the local data source
        verify(mTasksLocalDataSource).getTasks(any(TasksDataSource.LoadTasksCallback.class));
    }

    @Test
    public void saveTask_savesTaskToServiceAPI() {
        // Given a stub task with title and description
        Task newTask = new Task(TASK_TITLE, "Some Task Description");

        // When a task is saved to the tasks repository
        mTasksRepository.saveTask(newTask);

        // Then the service API and persistent repository are called and the cache is updated
        verify(mTasksRemoteDataSource).saveTask(newTask);
        verify(mTasksLocalDataSource).saveTask(newTask);
        assertThat(mTasksRepository.mCachedTasks.size(), is(1));
    }

    @Test
    public void completeTask_completesTaskToServiceAPIUpdatesCache() {
        // Given a stub active task with title and description added in the repository
        Task newTask = new Task(TASK_TITLE, "Some Task Description");
        mTasksRepository.saveTask(newTask);

        // When a task is completed to the tasks repository
        mTasksRepository.completeTask(newTask);

        // Then the service API and persistent repository are called and the cache is updated
        verify(mTasksRemoteDataSource).completeTask(newTask);
        verify(mTasksLocalDataSource).completeTask(newTask);
        assertThat(mTasksRepository.mCachedTasks.size(), is(1));
        assertThat(mTasksRepository.mCachedTasks.get(newTask.getId()).isActive(), is(false));
    }

    @Test
    public void completeTaskId_completesTaskToServiceAPIUpdatesCache() {
        // Given a stub active task with title and description added in the repository
        Task newTask = new Task(TASK_TITLE, "Some Task Description");
        mTasksRepository.saveTask(newTask);

        // When a task is completed using its id to the tasks repository
        mTasksRepository.completeTask(newTask.getId());

        // Then the service API and persistent repository are called and the cache is updated
        verify(mTasksRemoteDataSource).completeTask(newTask);
        verify(mTasksLocalDataSource).completeTask(newTask);
        assertThat(mTasksRepository.mCachedTasks.size(), is(1));
        assertThat(mTasksRepository.mCachedTasks.get(newTask.getId()).isActive(), is(false));
    }

    @Test
    public void activateTask_activatesTaskToServiceAPIUpdatesCache() {
        // Given a stub completed task with title and description in the repository
        Task newTask = new Task(TASK_TITLE, "Some Task Description", true);
        mTasksRepository.saveTask(newTask);

        // When a completed task is activated to the tasks repository
        mTasksRepository.activateTask(newTask);

        // Then the service API and persistent repository are called and the cache is updated
        verify(mTasksRemoteDataSource).activateTask(newTask);
        verify(mTasksLocalDataSource).activateTask(newTask);
        assertThat(mTasksRepository.mCachedTasks.size(), is(1));
        assertThat(mTasksRepository.mCachedTasks.get(newTask.getId()).isActive(), is(true));
    }

    @Test
    public void activateTaskId_activatesTaskToServiceAPIUpdatesCache() {
        // Given a stub completed task with title and description in the repository
        Task newTask = new Task(TASK_TITLE, "Some Task Description", true);
        mTasksRepository.saveTask(newTask);

        // When a completed task is activated with its id to the tasks repository
        mTasksRepository.activateTask(newTask.getId());

        // Then the service API and persistent repository are called and the cache is updated
        verify(mTasksRemoteDataSource).activateTask(newTask);
        verify(mTasksLocalDataSource).activateTask(newTask);
        assertThat(mTasksRepository.mCachedTasks.size(), is(1));
        assertThat(mTasksRepository.mCachedTasks.get(newTask.getId()).isActive(), is(true));
    }

    @Test
    public void getTask_requestsSingleTaskFromLocalDataSource() {
        // When a task is requested from the tasks repository
        mTasksRepository.getTask(TASK_TITLE, mGetTaskCallback);

        // Then the task is loaded from the database
        verify(mTasksLocalDataSource).getTask(eq(TASK_TITLE), any(
                TasksDataSource.GetTaskCallback.class));
    }

    @Test
    public void deleteCompletedTasks_deleteCompletedTasksToServiceAPIUpdatesCache() {
        // Given 2 stub completed tasks and 1 stub active tasks in the repository
        Task newTask = new Task(TASK_TITLE, "Some Task Description", true);
        mTasksRepository.saveTask(newTask);
        Task newTask2 = new Task(TASK_TITLE2, "Some Task Description");
        mTasksRepository.saveTask(newTask2);
        Task newTask3 = new Task(TASK_TITLE3, "Some Task Description", true);
        mTasksRepository.saveTask(newTask3);

        // When a completed tasks are cleared to the tasks repository
        mTasksRepository.clearCompletedTasks();

        // Then the service API and persistent repository are called and the cache is updated
        verify(mTasksRemoteDataSource).clearCompletedTasks();
        verify(mTasksLocalDataSource).clearCompletedTasks();

        assertThat(mTasksRepository.mCachedTasks.size(), is(1));
        assertTrue(mTasksRepository.mCachedTasks.get(newTask2.getId()).isActive());
        assertThat(mTasksRepository.mCachedTasks.get(newTask2.getId()).getTitle(), is(TASK_TITLE2));
    }

    @Test
    public void deleteAllTasks_deleteTasksToServiceAPIUpdatesCache() {
        // Given 2 stub completed tasks and 1 stub active tasks in the repository
        Task newTask = new Task(TASK_TITLE, "Some Task Description", true);
        mTasksRepository.saveTask(newTask);
        Task newTask2 = new Task(TASK_TITLE2, "Some Task Description");
        mTasksRepository.saveTask(newTask2);
        Task newTask3 = new Task(TASK_TITLE3, "Some Task Description", true);
        mTasksRepository.saveTask(newTask3);

        // When all tasks are deleted to the tasks repository
        mTasksRepository.deleteAllTasks();

        // Verify the data sources were called
        verify(mTasksRemoteDataSource).deleteAllTasks();
        verify(mTasksLocalDataSource).deleteAllTasks();

        assertThat(mTasksRepository.mCachedTasks.size(), is(0));
    }

    @Test
    public void deleteTask_deleteTaskToServiceAPIRemovedFromCache() {
        // Given a task in the repository
        Task newTask = new Task(TASK_TITLE, "Some Task Description", true);
        mTasksRepository.saveTask(newTask);
        assertThat(mTasksRepository.mCachedTasks.containsKey(newTask.getId()), is(true));

        // When deleted
        mTasksRepository.deleteTask(newTask.getId());

        // Verify the data sources were called
        verify(mTasksRemoteDataSource).deleteTask(newTask.getId());
        verify(mTasksLocalDataSource).deleteTask(newTask.getId());

        // Verify it's removed from repository
        assertThat(mTasksRepository.mCachedTasks.containsKey(newTask.getId()), is(false));
    }

    @Test
    public void getTasksWithDirtyCache_tasksAreRetrievedFromRemote() {
        // When calling getTasks in the repository with dirty cache
        mTasksRepository.refreshTasks();
        mTasksRepository.getTasks(mLoadTasksCallback);

        // And the remote data source has data available
        setTasksAvailable(mTasksRemoteDataSource, TASKS);

        // Verify the tasks from the remote data source are returned, not the local
        verify(mTasksLocalDataSource, never()).getTasks(mLoadTasksCallback);
        verify(mLoadTasksCallback).onTasksLoaded(TASKS);
    }

    @Test
    public void getTasksWithLocalDataSourceUnavailable_tasksAreRetrievedFromRemote() {
        // When calling getTasks in the repository
        mTasksRepository.getTasks(mLoadTasksCallback);

        // And the local data source has no data available
        setTasksNotAvailable(mTasksLocalDataSource);

        // And the remote data source has data available
        setTasksAvailable(mTasksRemoteDataSource, TASKS);

        // Verify the tasks from the remote data source are returned
        verify(mLoadTasksCallback).onTasksLoaded(TASKS);
    }

    @Test
    public void getTasksWithBothDataSourcesUnavailable_firesOnDataUnavailable() {
        // When calling getTasks in the repository
        mTasksRepository.getTasks(mLoadTasksCallback);

        // And the local data source has no data available
        setTasksNotAvailable(mTasksLocalDataSource);

        // And the remote data source has no data available
        setTasksNotAvailable(mTasksRemoteDataSource);

        // Verify no data is returned
        verify(mLoadTasksCallback).onDataNotAvailable();
    }

    @Test
    public void getTaskWithBothDataSourcesUnavailable_firesOnDataUnavailable() {
        // Given a task id
        final String taskId = "123";

        // When calling getTask in the repository
        mTasksRepository.getTask(taskId, mGetTaskCallback);

        // And the local data source has no data available
        setTaskNotAvailable(mTasksLocalDataSource, taskId);

        // And the remote data source has no data available
        setTaskNotAvailable(mTasksRemoteDataSource, taskId);

        // Verify no data is returned
        verify(mGetTaskCallback).onDataNotAvailable();
    }

    @Test
    public void getTasks_refreshesLocalDataSource() {
        // Mark cache as dirty to force a reload of data from remote data source.
        mTasksRepository.refreshTasks();

        // When calling getTasks in the repository
        mTasksRepository.getTasks(mLoadTasksCallback);

        // Make the remote data source return data
        setTasksAvailable(mTasksRemoteDataSource, TASKS);

        // Verify that the data fetched from the remote data source was saved in local.
        verify(mTasksLocalDataSource, times(TASKS.size())).saveTask(any(Task.class));
    }

    /**
     * Convenience method that issues two calls to the tasks repository
     */
    private void twoTasksLoadCallsToRepository(TasksDataSource.LoadTasksCallback callback) {
        // When tasks are requested from repository
        mTasksRepository.getTasks(callback); // First call to API

        // Use the Mockito Captor to capture the callback
        verify(mTasksLocalDataSource).getTasks(mTasksCallbackCaptor.capture());

        // Local data source doesn't have data yet
        mTasksCallbackCaptor.getValue().onDataNotAvailable();

        // Verify the remote data source is queried
        verify(mTasksRemoteDataSource).getTasks(mTasksCallbackCaptor.capture());

        // Trigger callback so tasks are cached
        mTasksCallbackCaptor.getValue().onTasksLoaded(TASKS);

        mTasksRepository.getTasks(callback); // Second call to API
    }

    /** Makes the given data source report "no tasks available" to its captured callback. */
    private void setTasksNotAvailable(TasksDataSource dataSource) {
        verify(dataSource).getTasks(mTasksCallbackCaptor.capture());
        mTasksCallbackCaptor.getValue().onDataNotAvailable();
    }

    /** Makes the given data source deliver the given tasks to its captured callback. */
    private void setTasksAvailable(TasksDataSource dataSource, List<Task> tasks) {
        verify(dataSource).getTasks(mTasksCallbackCaptor.capture());
        mTasksCallbackCaptor.getValue().onTasksLoaded(tasks);
    }

    /** Makes the given data source report the task id as unavailable. */
    private void setTaskNotAvailable(TasksDataSource dataSource, String taskId) {
        verify(dataSource).getTask(eq(taskId), mTaskCallbackCaptor.capture());
        mTaskCallbackCaptor.getValue().onDataNotAvailable();
    }

    /** Makes the given data source deliver the given task to its captured callback. */
    private void setTaskAvailable(TasksDataSource dataSource, Task task) {
        verify(dataSource).getTask(eq(task.getId()), mTaskCallbackCaptor.capture());
        mTaskCallbackCaptor.getValue().onTaskLoaded(task);
    }
}
/* Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package javax.portlet.tck.portlets;

import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import javax.portlet.ActionParameters;
import javax.portlet.ActionRequest;
import javax.portlet.ActionResponse;
import javax.portlet.ActionURL;
import javax.portlet.GenericPortlet;
import javax.portlet.MimeResponse;
import javax.portlet.MutableRenderParameters;
import javax.portlet.PortletException;
import javax.portlet.PortletMode;
import javax.portlet.PortletModeException;
import javax.portlet.PortletSession;
import javax.portlet.RenderMode;
import javax.portlet.RenderParameters;
import javax.portlet.RenderRequest;
import javax.portlet.RenderResponse;
import javax.portlet.RenderURL;
import javax.portlet.WindowState;
import javax.portlet.WindowStateException;
import javax.portlet.tck.beans.TestButton;
import javax.portlet.tck.beans.TestResult;
import javax.portlet.tck.constants.Constants;
import javax.portlet.tck.util.ModuleTestCaseDetails;
import static javax.portlet.tck.util.ModuleTestCaseDetails.*;

/**
 * This portlet implements several test cases for the JSR 362 TCK. The test case
 * names are defined in the /src/main/resources/xml-resources/additionalTCs.xml
 * file. The build process will integrate the test case names defined in the
 * additionalTCs.xml file into the complete list of test case names for
 * execution by the driver.
 *
 * <p>Overall protocol: {@link #render} emits one {@link TestButton} per test case
 * whose action phase has not yet run; clicking a button drives
 * {@link #processAction}, which records a {@link TestResult} into the portlet
 * session, and the following render pass writes that result back to the page.
 */
public class RenderStateTests_SPEC2_12_MutableRenderState extends GenericPortlet {

   /**
    * Action phase: dispatches on the clicked test-case button and either mutates
    * the render state (mode / window state / render parameters) for the next
    * render pass to verify, or verifies an expected exception here and stores
    * the result string in the portlet session under
    * {@code Constants.RESULT_ATTR_PREFIX + "RenderStateTests_SPEC2_12_MutableRenderState"}.
    */
   @Override
   public void processAction(ActionRequest actionRequest, ActionResponse actionResponse)
         throws PortletException, IOException {

      ModuleTestCaseDetails tcd = new ModuleTestCaseDetails();
      StringWriter writer = new StringWriter();
      ActionParameters actionParams = actionRequest.getActionParameters();
      String action = actionParams.getValue(Constants.BUTTON_PARAM_NAME);
      MutableRenderParameters renderParams = actionResponse.getRenderParameters();

      if (action != null) {
         if (action.equals(V3RENDERSTATETESTS_SPEC2_12_MUTABLERENDERSTATE_GETRENDERPARAMETERS)) {
            /*
             * TestCase: V3RenderStateTests_SPEC2_12_MutableRenderState_getRenderParameters
             */
            /*
             * Details: "Returns an MutableRenderParameters object representing the private and
             * public render parameters."
             */
            // tr0_public / tr0_private were set on the ActionURL during render; success
            // means both arrived here and tr0_public is recognized as a public parameter.
            if (!renderParams.isEmpty() && renderParams.isPublic("tr0_public")
                  && renderParams.getValue("tr0_public") != null
                  && renderParams.getValue("tr0_public").equals("true")
                  && renderParams.getValue("tr0_private") != null
                  && renderParams.getValue("tr0_private").equals("true")) {
               TestResult result = tcd.getTestResultFailed(
                     V3RENDERSTATETESTS_SPEC2_12_MUTABLERENDERSTATE_GETRENDERPARAMETERS);
               result.setTcSuccess(true);
               result.writeTo(writer);
            }
         } else if (action.equals(
               V3RENDERSTATETESTS_SPEC2_12_MUTABLERENDERSTATE_GETRENDERPARAMETERS2)) {
            /*
             * TestCase: V3RenderStateTests_SPEC2_12_MutableRenderState_getRenderParameters2
             */
            /*
             * Details: "The MutableRenderParameters object return by this method is mutable."
             */
            // Mutate the render parameters; the next render pass checks for tr1=true.
            renderParams.setValue("tr1", "true");
         } else if (action.equals(
               V3RENDERSTATETESTS_SPEC2_12_MUTABLERENDERSTATE_SETPORTLETMODE)) {
            /*
             * TestCase: V3RenderStateTests_SPEC2_12_MutableRenderState_setPortletMode
             */
            /*
             * Details: "Sets the portlet mode of a portlet to the given portlet mode."
             */
            actionResponse.setPortletMode(PortletMode.HELP);
         } else if (action.equals(
               V3RENDERSTATETESTS_SPEC2_12_MUTABLERENDERSTATE_SETPORTLETMODE2)) {
            /*
             * TestCase: V3RenderStateTests_SPEC2_12_MutableRenderState_setPortletMode2
             */
            /*
             * Details: "A custom portlet mode declared in deployment descriptor could be set."
             */
            // Verified in doView(), which is mapped to render mode "custom1".
            actionResponse.setPortletMode(new PortletMode("custom1"));
            renderParams.setValue("tr_setPortlet2", "true");
         } else if (action.equals(
               V3RENDERSTATETESTS_SPEC2_12_MUTABLERENDERSTATE_SETPORTLETMODE3)) {
            /*
             * TestCase: V3RenderStateTests_SPEC2_12_MutableRenderState_setPortletMode3
             */
            /*
             * Details: "Not more than one portlet mode can be set. If more than one portlet mode
             * is set, only the last one set is valid."
             */
            actionResponse.setPortletMode(PortletMode.EDIT);
            actionResponse.setPortletMode(PortletMode.HELP);
         } else if (action.equals(
               V3RENDERSTATETESTS_SPEC2_12_MUTABLERENDERSTATE_SETPORTLETMODE4)) {
            /*
             * TestCase: V3RenderStateTests_SPEC2_12_MutableRenderState_setPortletMode4
             */
            /*
             * Details: "Throws PortletModeException if the portlet cannot switch to this mode."
             */
            TestResult result = tcd.getTestResultFailed(
                  V3RENDERSTATETESTS_SPEC2_12_MUTABLERENDERSTATE_SETPORTLETMODE4);
            try {
               actionResponse.setPortletMode(PortletMode.UNDEFINED);
            } catch (PortletModeException e) {
               result.setTcSuccess(true);
               result.appendTcDetail(e.toString());
            }
            result.writeTo(writer);
         } else if (action.equals(
               V3RENDERSTATETESTS_SPEC2_12_MUTABLERENDERSTATE_SETPORTLETMODE5)) {
            /*
             * TestCase: V3RenderStateTests_SPEC2_12_MutableRenderState_setPortletMode5
             */
            /*
             * Details: "Throws IllegalStateException if the method is invoked after sendRedirect
             * has been called."
             */
            TestResult result = tcd.getTestResultFailed(
                  V3RENDERSTATETESTS_SPEC2_12_MUTABLERENDERSTATE_SETPORTLETMODE5);
            try {
               RenderURL redirectURL = actionResponse.createRedirectURL(
                     MimeResponse.Copy.NONE);
               actionResponse.sendRedirect(redirectURL.toString());
               actionResponse.setPortletMode(PortletMode.UNDEFINED);
            } catch (IllegalStateException e) {
               result.setTcSuccess(true);
               result.appendTcDetail(e.toString());
            }
            result.writeTo(writer);
         } else if (action.equals(
               V3RENDERSTATETESTS_SPEC2_12_MUTABLERENDERSTATE_SETPORTLETMODE6)) {
            /*
             * TestCase: V3RenderStateTests_SPEC2_12_MutableRenderState_setPortletMode6
             */
            /*
             * Details: "Throws IllegalArgumentException if the input parameter is null."
             */
            TestResult result = tcd.getTestResultFailed(
                  V3RENDERSTATETESTS_SPEC2_12_MUTABLERENDERSTATE_SETPORTLETMODE6);
            try {
               actionResponse.setPortletMode(null);
            } catch (IllegalArgumentException e) {
               result.setTcSuccess(true);
               result.appendTcDetail(e.toString());
            }
            result.writeTo(writer);
         } else if (action.equals(
               V3RENDERSTATETESTS_SPEC2_12_MUTABLERENDERSTATE_SETWINDOWSTATE)) {
            /*
             * TestCase: V3RenderStateTests_SPEC2_12_MutableRenderState_setWindowState
             */
            /*
             * Details: "Sets the window state of a portlet to the given window state."
             */
            actionResponse.setWindowState(WindowState.NORMAL);
            renderParams.setValue("tr_setWindow", "true");
         } else if (action.equals(
               V3RENDERSTATETESTS_SPEC2_12_MUTABLERENDERSTATE_SETWINDOWSTATE2)) {
            /*
             * TestCase: V3RenderStateTests_SPEC2_12_MutableRenderState_setWindowState2
             */
            /*
             * Details: "A custom window state declared in deployment descriptor could be set."
             */
            // No window-state change here; the render pass reports this case as
            // implementation specific.
            renderParams.setValue("tr_setWindow2", "true");
         } else if (action.equals(
               V3RENDERSTATETESTS_SPEC2_12_MUTABLERENDERSTATE_SETWINDOWSTATE3)) {
            /*
             * TestCase: V3RenderStateTests_SPEC2_12_MutableRenderState_setWindowState3
             */
            /*
             * Details: "Not more than one window state can be set. If more than one window state
             * is set, only the last one set is valid."
             */
            actionResponse.setWindowState(WindowState.MAXIMIZED);
            actionResponse.setWindowState(WindowState.NORMAL);
            renderParams.setValue("tr_setWindow3", "true");
         } else if (action.equals(
               V3RENDERSTATETESTS_SPEC2_12_MUTABLERENDERSTATE_SETWINDOWSTATE4)) {
            /*
             * TestCase: V3RenderStateTests_SPEC2_12_MutableRenderState_setWindowState4
             */
            /*
             * Details: "Throws WindowStateException if the portlet cannot switch to this state."
             */
            TestResult result = tcd.getTestResultFailed(
                  V3RENDERSTATETESTS_SPEC2_12_MUTABLERENDERSTATE_SETWINDOWSTATE4);
            try {
               actionResponse.setWindowState(WindowState.UNDEFINED);
            } catch (WindowStateException e) {
               result.setTcSuccess(true);
               result.appendTcDetail(e.toString());
            }
            result.writeTo(writer);
         } else if (action.equals(
               V3RENDERSTATETESTS_SPEC2_12_MUTABLERENDERSTATE_SETWINDOWSTATE5)) {
            /*
             * TestCase: V3RenderStateTests_SPEC2_12_MutableRenderState_setWindowState5
             */
            /*
             * Details: "Throws IllegalStateException if the method is invoked after sendRedirect
             * has been called."
             */
            TestResult result = tcd.getTestResultFailed(
                  V3RENDERSTATETESTS_SPEC2_12_MUTABLERENDERSTATE_SETWINDOWSTATE5);
            try {
               RenderURL redirectURL = actionResponse.createRedirectURL(MimeResponse.Copy.NONE);
               actionResponse.sendRedirect(redirectURL.toString());
               actionResponse.setWindowState(WindowState.UNDEFINED);
            } catch (IllegalStateException e) {
               result.setTcSuccess(true);
               result.appendTcDetail(e.toString());
            }
            result.writeTo(writer);
         } else if (action.equals(
               V3RENDERSTATETESTS_SPEC2_12_MUTABLERENDERSTATE_SETWINDOWSTATE6)) {
            /*
             * TestCase: V3RenderStateTests_SPEC2_12_MutableRenderState_setWindowState6
             */
            /*
             * Details: "Throws IllegalArgumentException if the input parameter is null."
             */
            TestResult result = tcd.getTestResultFailed(
                  V3RENDERSTATETESTS_SPEC2_12_MUTABLERENDERSTATE_SETWINDOWSTATE6);
            try {
               actionResponse.setWindowState(null);
            } catch (IllegalArgumentException e) {
               result.setTcSuccess(true);
               result.appendTcDetail(e.toString());
            }
            result.writeTo(writer);
         }
      }

      // Stash whatever results this action produced for the next render pass to display.
      PortletSession ps = actionRequest.getPortletSession();
      ps.setAttribute(
            Constants.RESULT_ATTR_PREFIX + "RenderStateTests_SPEC2_12_MutableRenderState",
            writer.toString());
   }

   /**
    * Render phase: for each test case, either verifies the state mutated by the
    * previous action phase (writing a TestResult) or emits the button that will
    * trigger that action. Finally flushes any action-phase results stored in the
    * session.
    */
   @Override
   public void render(RenderRequest portletReq, RenderResponse portletResp)
         throws PortletException, IOException {

      portletResp.setContentType("text/html");
      PrintWriter writer = portletResp.getWriter();
      ModuleTestCaseDetails tcd = new ModuleTestCaseDetails();
      RenderParameters renderParams = portletReq.getRenderParameters();

      /*
       * TestCase: V3RenderStateTests_SPEC2_12_MutableRenderState_getRenderParameters
       */
      /*
       * Details: "Returns an MutableRenderParameters object representing the private and public
       * render parameters."
       */
      {
         // Pre-load the action URL with one private and one public render parameter;
         // processAction() checks that both survive the round trip.
         ActionURL actionURL = portletResp.createActionURL();
         MutableRenderParameters mutableRenderParams = actionURL
               .getRenderParameters();
         mutableRenderParams.setValue("tr0_private", "true");
         mutableRenderParams.setValue("tr0_public", "true");
         TestButton tb = new TestButton(
               V3RENDERSTATETESTS_SPEC2_12_MUTABLERENDERSTATE_GETRENDERPARAMETERS, actionURL);
         tb.writeTo(writer);
      }

      /*
       * TestCase: V3RenderStateTests_SPEC2_12_MutableRenderState_getRenderParameters2
       */
      /*
       * Details: "The MutableRenderParameters object return by this method is mutable."
       */
      if (renderParams.getValue("tr1") != null
            && renderParams.getValue("tr1").equals("true")) {
         TestResult result = tcd.getTestResultFailed(
               V3RENDERSTATETESTS_SPEC2_12_MUTABLERENDERSTATE_GETRENDERPARAMETERS2);
         result.setTcSuccess(true);
         result.writeTo(writer);
      } else {
         ActionURL actionURL = portletResp.createActionURL();
         TestButton tb = new TestButton(
               V3RENDERSTATETESTS_SPEC2_12_MUTABLERENDERSTATE_GETRENDERPARAMETERS2, actionURL);
         tb.writeTo(writer);
      }

      /*
       * TestCase: V3RenderStateTests_SPEC2_12_MutableRenderState_setPortletMode
       */
      /*
       * Details: "Sets the portlet mode of a portlet to the given portlet mode."
       */
      if (portletReq.getPortletMode().equals(PortletMode.HELP)) {
         TestResult result = tcd.getTestResultFailed(
               V3RENDERSTATETESTS_SPEC2_12_MUTABLERENDERSTATE_SETPORTLETMODE);
         result.setTcSuccess(true);
         result.writeTo(writer);
      } else {
         ActionURL actionURL = portletResp.createActionURL();
         TestButton tb = new TestButton(
               V3RENDERSTATETESTS_SPEC2_12_MUTABLERENDERSTATE_SETPORTLETMODE, actionURL);
         tb.writeTo(writer);
      }

      /*
       * TestCase: V3RenderStateTests_SPEC2_12_MutableRenderState_setPortletMode2
       */
      /*
       * Details: "A custom portlet mode declared in deployment descriptor could be set."
       */
      if (renderParams.getValue("tr_setPortlet2") == null) {
         ActionURL actionURL = portletResp.createActionURL();
         TestButton tb = new TestButton(
               V3RENDERSTATETESTS_SPEC2_12_MUTABLERENDERSTATE_SETPORTLETMODE2, actionURL);
         tb.writeTo(writer);
      } else {
         // Delegate to GenericPortlet dispatch so the @RenderMode("custom1") doView()
         // below can verify the custom mode took effect.
         super.render(portletReq, portletResp);
      }

      /*
       * TestCase: V3RenderStateTests_SPEC2_12_MutableRenderState_setPortletMode3
       */
      /*
       * Details: "Not more than one portlet mode can be set. If more than one portlet mode is
       * set, only the last one set is valid."
       */
      if (portletReq.getPortletMode().equals(PortletMode.HELP)) {
         TestResult result = tcd.getTestResultFailed(
               V3RENDERSTATETESTS_SPEC2_12_MUTABLERENDERSTATE_SETPORTLETMODE3);
         result.setTcSuccess(true);
         result.writeTo(writer);
      } else {
         ActionURL actionURL = portletResp.createActionURL();
         TestButton tb = new TestButton(
               V3RENDERSTATETESTS_SPEC2_12_MUTABLERENDERSTATE_SETPORTLETMODE3, actionURL);
         tb.writeTo(writer);
      }

      /*
       * TestCase: V3RenderStateTests_SPEC2_12_MutableRenderState_setPortletMode4
       */
      /*
       * Details: "Throws PortletModeException if the portlet cannot switch to this mode."
       */
      // Exception cases are fully verified in processAction(); render only emits the button.
      {
         ActionURL actionURL = portletResp.createActionURL();
         TestButton tb = new TestButton(
               V3RENDERSTATETESTS_SPEC2_12_MUTABLERENDERSTATE_SETPORTLETMODE4, actionURL);
         tb.writeTo(writer);
      }

      /*
       * TestCase: V3RenderStateTests_SPEC2_12_MutableRenderState_setPortletMode5
       */
      /*
       * Details: "Throws IllegalStateException if the method is invoked after sendRedirect has
       * been called."
       */
      {
         ActionURL actionURL = portletResp.createActionURL();
         TestButton tb = new TestButton(
               V3RENDERSTATETESTS_SPEC2_12_MUTABLERENDERSTATE_SETPORTLETMODE5, actionURL);
         tb.writeTo(writer);
      }

      /*
       * TestCase: V3RenderStateTests_SPEC2_12_MutableRenderState_setPortletMode6
       */
      /*
       * Details: "Throws IllegalArgumentException if the input parameter is null."
       */
      {
         ActionURL actionURL = portletResp.createActionURL();
         TestButton tb = new TestButton(
               V3RENDERSTATETESTS_SPEC2_12_MUTABLERENDERSTATE_SETPORTLETMODE6, actionURL);
         tb.writeTo(writer);
      }

      /*
       * TestCase: V3RenderStateTests_SPEC2_12_MutableRenderState_setWindowState
       */
      /*
       * Details: "Sets the window state of a portlet to the given window state."
       */
      if (renderParams.getValue("tr_setWindow") != null
            && renderParams.getValue("tr_setWindow").equals("true")
            && portletReq.getWindowState().equals(WindowState.NORMAL)) {
         TestResult result = tcd.getTestResultFailed(
               V3RENDERSTATETESTS_SPEC2_12_MUTABLERENDERSTATE_SETWINDOWSTATE);
         result.setTcSuccess(true);
         result.writeTo(writer);
      } else {
         ActionURL actionURL = portletResp.createActionURL();
         TestButton tb = new TestButton(
               V3RENDERSTATETESTS_SPEC2_12_MUTABLERENDERSTATE_SETWINDOWSTATE, actionURL);
         tb.writeTo(writer);
      }

      /*
       * TestCase: V3RenderStateTests_SPEC2_12_MutableRenderState_setWindowState2
       */
      /*
       * Details: "A custom window state declared in deployment descriptor could be set."
       */
      if (renderParams.getValue("tr_setWindow2") == null) {
         ActionURL actionURL = portletResp.createActionURL();
         TestButton tb = new TestButton(
               V3RENDERSTATETESTS_SPEC2_12_MUTABLERENDERSTATE_SETWINDOWSTATE2, actionURL);
         tb.writeTo(writer);
      } else {
         TestResult result = tcd.getTestResultFailed(
               V3RENDERSTATETESTS_SPEC2_12_MUTABLERENDERSTATE_SETWINDOWSTATE2);
         result.setTcSuccess(true);
         result.appendTcDetail("Cannot be tested as it is implementation specific");
         result.writeTo(writer);
      }

      /*
       * TestCase: V3RenderStateTests_SPEC2_12_MutableRenderState_setWindowState3
       */
      /*
       * Details: "Not more than one window state can be set. If more than one window state is
       * set, only the last one set is valid."
       */
      if (renderParams.getValue("tr_setWindow3") != null
            && renderParams.getValue("tr_setWindow3").equals("true")
            && portletReq.getWindowState().equals(WindowState.NORMAL)) {
         TestResult result = tcd.getTestResultFailed(
               V3RENDERSTATETESTS_SPEC2_12_MUTABLERENDERSTATE_SETWINDOWSTATE3);
         result.setTcSuccess(true);
         result.writeTo(writer);
      } else {
         ActionURL actionURL = portletResp.createActionURL();
         TestButton tb = new TestButton(
               V3RENDERSTATETESTS_SPEC2_12_MUTABLERENDERSTATE_SETWINDOWSTATE3, actionURL);
         tb.writeTo(writer);
      }

      /*
       * TestCase: V3RenderStateTests_SPEC2_12_MutableRenderState_setWindowState4
       */
      /*
       * Details: "Throws WindowStateException if the portlet cannot switch to this state."
       */
      {
         ActionURL actionURL = portletResp.createActionURL();
         TestButton tb = new TestButton(
               V3RENDERSTATETESTS_SPEC2_12_MUTABLERENDERSTATE_SETWINDOWSTATE4, actionURL);
         tb.writeTo(writer);
      }

      /*
       * TestCase: V3RenderStateTests_SPEC2_12_MutableRenderState_setWindowState5
       */
      /*
       * Details: "Throws IllegalStateException if the method is invoked after sendRedirect has
       * been called."
       */
      {
         ActionURL actionURL = portletResp.createActionURL();
         TestButton tb = new TestButton(
               V3RENDERSTATETESTS_SPEC2_12_MUTABLERENDERSTATE_SETWINDOWSTATE5, actionURL);
         tb.writeTo(writer);
      }

      /*
       * TestCase: V3RenderStateTests_SPEC2_12_MutableRenderState_setWindowState6
       */
      /*
       * Details: "Throws IllegalArgumentException if the input parameter is null."
       */
      {
         ActionURL actionURL = portletResp.createActionURL();
         TestButton tb = new TestButton(
               V3RENDERSTATETESTS_SPEC2_12_MUTABLERENDERSTATE_SETWINDOWSTATE6, actionURL);
         tb.writeTo(writer);
      }

      // Flush any results recorded by the last action phase, then clear them so a
      // page refresh does not repeat them.
      PortletSession ps = portletReq.getPortletSession();
      String msg = (String) ps.getAttribute(Constants.RESULT_ATTR_PREFIX
            + "RenderStateTests_SPEC2_12_MutableRenderState");
      if (msg != null && msg.length() > 0) {
         writer.write("<p>" + msg + "</p>\n");
      }
      ps.removeAttribute(Constants.RESULT_ATTR_PREFIX
            + "RenderStateTests_SPEC2_12_MutableRenderState");
   }

   /**
    * Render handler for the custom portlet mode "custom1". Reached via
    * {@code super.render(...)} dispatch after setPortletMode2's action phase
    * switched the mode; confirms the request really carries the custom mode.
    */
   @RenderMode(name = "custom1")
   public void doView(RenderRequest portletReq, RenderResponse portletResp)
         throws PortletException, IOException {

      PrintWriter writer = portletResp.getWriter();
      ModuleTestCaseDetails tcd = new ModuleTestCaseDetails();

      /*
       * TestCase: V3RenderStateTests_SPEC2_12_MutableRenderState_setPortletMode2
       */
      /*
       * Details: "A custom portlet mode declared in deployment descriptor could be set."
       */
      RenderParameters renderParams = portletReq.getRenderParameters();
      if (renderParams.getValue("tr_setPortlet2") != null
            && renderParams.getValue("tr_setPortlet2").equals("true")) {
         TestResult result = tcd.getTestResultFailed(
               V3RENDERSTATETESTS_SPEC2_12_MUTABLERENDERSTATE_SETPORTLETMODE2);
         if (portletReq.getPortletMode().equals(new PortletMode("custom1"))) {
            result.setTcSuccess(true);
         } else {
            result.appendTcDetail(
                  "Failed because portlet mode is not CUSTOM1 but "
                        + portletReq.getPortletMode().toString());
         }
         result.writeTo(writer);
      }
   }
}
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.ide.util.projectWizard;

import com.intellij.ide.IdeBundle;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectBundle;
import com.intellij.openapi.project.ProjectManager;
import com.intellij.openapi.projectRoots.ProjectJdkTable;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.projectRoots.SdkType;
import com.intellij.openapi.projectRoots.ui.ProjectJdksEditor;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.roots.ui.configuration.ProjectStructureConfigurable;
import com.intellij.openapi.roots.ui.configuration.projectRoot.ProjectSdksModel;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.wm.ex.WindowManagerEx;
import com.intellij.ui.ScrollPaneFactory;
import com.intellij.ui.components.JBList;
import com.intellij.util.ArrayUtil;
import gnu.trove.TIntArrayList;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.util.*;

/**
 * Swing panel that lists the configured JDKs/SDKs and lets the user pick one,
 * optionally filtered by SDK type. Also provides a modal dialog wrapper
 * ({@link MyDialog}) and the static {@link #chooseAndSetJDK} entry point that
 * assigns the chosen SDK as the project SDK.
 *
 * <p>NOTE(review): uses raw {@code JList}/{@code DefaultListModel} -- pre-generics
 * Swing code kept as-is; elements are always {@link Sdk} instances.
 */
public class JdkChooserPanel extends JPanel {
  // Raw-typed list of Sdk elements; myCurrentJdk tracks the list selection.
  private JList myList = null;
  private DefaultListModel myListModel = null;
  private Sdk myCurrentJdk;
  // Null project means "global" context: the panel edits the application-wide JDK table.
  private final Project myProject;
  // Optional whitelist of SDK types to show; null means show all (see setAllowedJdkTypes).
  private SdkType[] myAllowedJdkTypes = null;

  public JdkChooserPanel(Project project) {
    super(new BorderLayout());
    myProject = project;
    myListModel = new DefaultListModel();
    myList = new JBList(myListModel);
    myList.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
    myList.setCellRenderer(new ProjectJdkListRenderer(myList.getCellRenderer()));
    //noinspection HardCodedStringLiteral
    myList.setPrototypeCellValue("XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX");
    // Keep myCurrentJdk in sync with whatever the user selects.
    myList.addListSelectionListener(new ListSelectionListener() {
      public void valueChanged(ListSelectionEvent e) {
        myCurrentJdk = (Sdk)myList.getSelectedValue();
      }
    });
    // Double-click opens the JDK table editor, but only in the global (no-project) context.
    myList.addMouseListener(new MouseAdapter() {
      public void mouseClicked(MouseEvent e) {
        if (e.getClickCount() == 2 && myProject == null) {
          editJdkTable();
        }
      }
    });

    JPanel panel = new JPanel(new BorderLayout());
    panel.add(ScrollPaneFactory.createScrollPane(myList), BorderLayout.CENTER);
    add(panel, BorderLayout.CENTER);
    // NOTE(review): the model is freshly created and still empty here, so this branch
    // never fires during construction; callers populate the list via fillList() later.
    if (myListModel.getSize() > 0) {
      myList.setSelectedIndex(0);
    }
  }

  /**
   * Sets the JDK types which may be shown in the panel.
   *
   * @param allowedJdkTypes the array of JDK types which may be shown, or null if all JDK types are allowed.
   * @since 7.0.3
   */
  public void setAllowedJdkTypes(@Nullable final SdkType[] allowedJdkTypes) {
    myAllowedJdkTypes = allowedJdkTypes;
  }

  /** @return the SDK currently selected in the list, or null if none. */
  public Sdk getChosenJdk() {
    return myCurrentJdk;
  }

  /**
   * Opens the modal JDK-table editor seeded with the current selection and,
   * if the user confirms, refreshes the list and reselects the edited SDK.
   */
  public void editJdkTable() {
    ProjectJdksEditor editor = new ProjectJdksEditor((Sdk)myList.getSelectedValue(),
                                                     myProject != null ? myProject : ProjectManager.getInstance().getDefaultProject(),
                                                     myList);
    editor.show();
    if (editor.isOK()) {
      Sdk selectedJdk = editor.getSelectedJdk();
      updateList(selectedJdk, null);
    }
  }

  /**
   * Repopulates the list (optionally filtered by {@code type}) and restores the
   * selection: by the given SDK's name if provided, otherwise by the previous
   * selection indices, falling back to the first row.
   */
  public void updateList(final Sdk selectedJdk, final SdkType type) {
    final int[] selectedIndices = myList.getSelectedIndices();
    fillList(type);
    // restore selection
    if (selectedJdk != null) {
      // Match by name rather than identity: the editor may have replaced the Sdk instance.
      TIntArrayList list = new TIntArrayList();
      for (int i = 0; i < myListModel.size(); i++) {
        final Sdk jdk = (Sdk)myListModel.getElementAt(i);
        if (Comparing.strEqual(jdk.getName(), selectedJdk.getName())){
          list.add(i);
        }
      }
      final int[] indicesToSelect = list.toNativeArray();
      if (indicesToSelect.length > 0) {
        myList.setSelectedIndices(indicesToSelect);
      }
      else if (myList.getModel().getSize() > 0) {
        myList.setSelectedIndex(0);
      }
    }
    else {
      if (selectedIndices.length > 0) {
        myList.setSelectedIndices(selectedIndices);
      }
      else {
        myList.setSelectedIndex(0);
      }
    }

    myCurrentJdk = (Sdk)myList.getSelectedValue();
  }

  public JList getPreferredFocusedComponent() {
    return myList;
  }

  /**
   * Clears and refills the model with compatible SDKs, sorted case-insensitively
   * by name. With no project the global ProjectJdkTable is used; otherwise the
   * (lazily initialized) ProjectSdksModel of the project's structure configurable.
   */
  public void fillList(final SdkType type) {
    myListModel.clear();
    final Sdk[] jdks;
    if (myProject == null) {
      final Sdk[] allJdks = ProjectJdkTable.getInstance().getAllJdks();
      jdks = getCompatibleJdks(type, Arrays.asList(allJdks));
    }
    else {
      final ProjectSdksModel projectJdksModel = ProjectStructureConfigurable.getInstance(myProject).getProjectJdksModel();
      if (!projectJdksModel.isInitialized()){ //should be initialized
        projectJdksModel.reset(myProject);
      }
      final Collection<Sdk> collection = projectJdksModel.getProjectSdks().values();
      jdks = getCompatibleJdks(type, collection);
    }
    Arrays.sort(jdks, new Comparator<Sdk>() {
      public int compare(final Sdk o1, final Sdk o2) {
        return o1.getName().compareToIgnoreCase(o2.getName());
      }
    });
    for (Sdk jdk : jdks) {
      myListModel.addElement(jdk);
    }
  }

  // Filters the given SDKs through isCompatibleJdk(); the intermediate Set also
  // de-duplicates before the caller sorts the resulting array.
  private Sdk[] getCompatibleJdks(final SdkType type, final Collection<Sdk> collection) {
    final Set<Sdk> compatibleJdks = new HashSet<Sdk>();
    for (Sdk projectJdk : collection) {
      if (isCompatibleJdk(projectJdk, type)) {
        compatibleJdks.add(projectJdk);
      }
    }
    return compatibleJdks.toArray(new Sdk[compatibleJdks.size()]);
  }

  // An explicit type filter wins over the allowed-types whitelist; with neither
  // configured, every SDK is accepted.
  private boolean isCompatibleJdk(final Sdk projectJdk, final SdkType type) {
    if (type != null) {
      return projectJdk.getSdkType() == type;
    }
    if (myAllowedJdkTypes != null) {
      return ArrayUtil.indexOf(myAllowedJdkTypes, projectJdk.getSdkType()) >= 0;
    }
    return true;
  }

  public JComponent getDefaultFocusedComponent() {
    return myList;
  }

  /** Selects the given SDK in the list if present; no-op otherwise. */
  public void selectJdk(Sdk defaultJdk) {
    final int index = myListModel.indexOf(defaultJdk);
    if (index >= 0) {
      myList.setSelectedIndex(index);
    }
  }

  // Shows the chooser as a modal dialog; double-click acts as OK. Returns the
  // chosen SDK or null if the dialog was cancelled.
  private static Sdk showDialog(final Project project, String title, final Component parent, Sdk jdkToSelect) {
    final JdkChooserPanel jdkChooserPanel = new JdkChooserPanel(project);
    jdkChooserPanel.fillList(null);
    final MyDialog dialog = jdkChooserPanel.new MyDialog(parent);
    if (title != null) {
      dialog.setTitle(title);
    }
    if (jdkToSelect != null) {
      jdkChooserPanel.selectJdk(jdkToSelect);
    }
    jdkChooserPanel.myList.addMouseListener(new MouseAdapter() {
      @Override
      public void mouseClicked(final MouseEvent e) {
        if (e.getClickCount() == 2 && e.getButton() == MouseEvent.BUTTON1) {
          dialog.clickDefaultButton();
        }
      }
    });
    dialog.show();
    return dialog.isOK() ? jdkChooserPanel.getChosenJdk() : null;
  }

  /**
   * Shows the chooser pre-selected with the current project SDK and, if the user
   * picks one, stores it as the project SDK inside a write action.
   *
   * @return the newly chosen SDK, or null if the dialog was cancelled.
   */
  public static Sdk chooseAndSetJDK(final Project project) {
    final Sdk projectJdk = ProjectRootManager.getInstance(project).getProjectSdk();
    final Sdk jdk = showDialog(project, ProjectBundle.message("module.libraries.target.jdk.select.title"),
                               WindowManagerEx.getInstanceEx().getFrame(project), projectJdk);
    if (jdk == null) {
      return null;
    }
    ApplicationManager.getApplication().runWriteAction(new Runnable() {
      public void run() {
        ProjectRootManager.getInstance(project).setProjectSdk(jdk);
      }
    });
    return jdk;
  }

  /**
   * Modal wrapper around the enclosing panel. Tracks the list selection to keep
   * the OK button enabled only while an SDK is selected, and adds a "Configure"
   * action that opens the JDK-table editor.
   */
  public class MyDialog extends DialogWrapper implements ListSelectionListener {

    public MyDialog(Component parent) {
      super(parent, true);
      setTitle(IdeBundle.message("title.select.jdk"));
      init();
      myList.addListSelectionListener(this);
      updateOkButton();
    }

    protected String getDimensionServiceKey() {
      return "#com.intellij.ide.util.projectWizard.JdkChooserPanel.MyDialog";
    }

    public void valueChanged(ListSelectionEvent e) {
      updateOkButton();
    }

    // OK is only meaningful while something is selected.
    private void updateOkButton() {
      setOKActionEnabled(myList.getSelectedValue() != null);
    }

    // Unhook the selection listener so the dialog does not leak via the list.
    public void dispose() {
      myList.removeListSelectionListener(this);
      super.dispose();
    }

    protected JComponent createCenterPanel() {
      return JdkChooserPanel.this;
    }

    protected Action[] createActions() {
      return new Action[]{new ConfigureAction(), getOKAction(), getCancelAction()};
    }

    public JComponent getPreferredFocusedComponent() {
      return myList;
    }

    private final class ConfigureAction extends AbstractAction {
      public ConfigureAction() {
        super(IdeBundle.message("button.configure.e"));
        putValue(Action.MNEMONIC_KEY, new Integer('E'));
      }

      public void actionPerformed(ActionEvent e) {
        editJdkTable();
      }
    }
  }
}
/*
 * Copyright 2000-2016 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.openapi.roots.ui.configuration;

import com.intellij.core.JavaCoreBundle;
import com.intellij.icons.AllIcons;
import com.intellij.ide.impl.ProjectUtil;
import com.intellij.ide.util.BrowseFilesListener;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.fileChooser.FileChooserDescriptor;
import com.intellij.openapi.fileChooser.FileChooserDescriptorFactory;
import com.intellij.openapi.fileChooser.FileChooserFactory;
import com.intellij.openapi.options.ConfigurationException;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectBundle;
import com.intellij.openapi.project.ex.ProjectEx;
import com.intellij.openapi.projectRoots.JavaSdk;
import com.intellij.openapi.projectRoots.JavaSdkVersion;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.roots.CompilerProjectExtension;
import com.intellij.openapi.roots.LanguageLevelProjectExtension;
import com.intellij.openapi.roots.ModifiableRootModel;
import com.intellij.openapi.roots.impl.LanguageLevelProjectExtensionImpl;
import com.intellij.openapi.roots.ui.configuration.projectRoot.ProjectSdksModel;
import com.intellij.openapi.roots.ui.configuration.projectRoot.ProjectStructureElementConfigurable;
import com.intellij.openapi.roots.ui.configuration.projectRoot.StructureConfigurableContext;
import com.intellij.openapi.roots.ui.configuration.projectRoot.daemon.ProjectStructureDaemonAnalyzer;
import com.intellij.openapi.roots.ui.configuration.projectRoot.daemon.ProjectStructureElement;
import com.intellij.openapi.ui.DetailsComponent;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.EmptyRunnable;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VfsUtilCore;
import com.intellij.pom.java.LanguageLevel;
import com.intellij.ui.DocumentAdapter;
import com.intellij.ui.FieldPanel;
import com.intellij.ui.InsertPathAction;
import com.intellij.util.ui.JBUI;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import javax.swing.border.EmptyBorder;
import javax.swing.event.DocumentEvent;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.IOException;

/**
 * Settings page for the top-level "Project" node of the Project Structure dialog.
 * Edits the project name (directory-based projects only), project SDK, language level,
 * and the project-wide compiler output path, and pushes changes to the corresponding
 * project extensions on {@link #apply()}.
 *
 * @author Eugene Zhuravlev
 * Date: Dec 15, 2003
 */
public class ProjectConfigurable extends ProjectStructureElementConfigurable<Project> implements DetailsComponent.Facade {
  private final Project myProject;

  // UI widgets. myLanguageLevelCombo and myProjectCompilerOutput are created in
  // createUIComponents() (GUI-designer convention); the rest are built in init().
  private LanguageLevelCombo myLanguageLevelCombo;
  private ProjectJdkConfigurable myProjectJdkConfigurable;
  private FieldPanel myProjectCompilerOutput;
  private JTextField myProjectName;
  private JPanel myPanel;

  private final StructureConfigurableContext myContext;
  private final ModulesConfigurator myModulesConfigurator;
  private JPanel myWholePanel;

  // When true, document/selection listeners ignore events; set during reset() so that
  // programmatic field population is not mistaken for a user edit.
  private boolean myFreeze = false;
  private DetailsComponent myDetailsComponent;
  private final GeneralProjectSettingsElement mySettingsElement;

  /**
   * Builds the configurable and wires a module-change listener so the daemon analyzer
   * revalidates the general project settings whenever any module's roots change.
   */
  public ProjectConfigurable(Project project,
                             final StructureConfigurableContext context,
                             ModulesConfigurator configurator,
                             ProjectSdksModel model) {
    myProject = project;
    myContext = context;
    myModulesConfigurator = configurator;
    mySettingsElement = new GeneralProjectSettingsElement(context);
    final ProjectStructureDaemonAnalyzer daemonAnalyzer = context.getDaemonAnalyzer();
    myModulesConfigurator.addAllModuleChangeListener(new ModuleEditor.ChangeListener() {
      @Override
      public void moduleStateChanged(ModifiableRootModel moduleRootModel) {
        daemonAnalyzer.queueUpdate(mySettingsElement);
      }
    });
    init(model);
  }

  @Override
  public ProjectStructureElement getProjectStructureElement() {
    return mySettingsElement;
  }

  @Override
  public DetailsComponent getDetailsComponent() {
    return myDetailsComponent;
  }

  /** Lazily wraps the main panel in a DetailsComponent with the banner slogan as its header. */
  @Override
  public JComponent createOptionsPanel() {
    myDetailsComponent = new DetailsComponent(false, false);
    myDetailsComponent.setContent(myPanel);
    myDetailsComponent.setText(getBannerSlogan());

    myProjectJdkConfigurable.createComponent(); //reload changed jdks

    return myDetailsComponent.getComponent();
  }

  /**
   * Assembles the panel: optional project-name row (directory-based projects only),
   * the JDK chooser, then the designer-built myWholePanel; also hooks listeners that
   * propagate compiler-output and language-level edits while editing is in progress.
   */
  private void init(final ProjectSdksModel model) {
    myPanel = new JPanel(new GridBagLayout());
    myPanel.setPreferredSize(JBUI.size(700, 500));

    if (ProjectUtil.isDirectoryBased(myProject)) {
      final JPanel namePanel = new JPanel(new BorderLayout());
      final JLabel label = new JLabel("<html><body><b>Project name:</b></body></html>", SwingConstants.LEFT);
      namePanel.add(label, BorderLayout.NORTH);

      myProjectName = new JTextField();
      myProjectName.setColumns(40);

      final JPanel nameFieldPanel = new JPanel();
      nameFieldPanel.setLayout(new BoxLayout(nameFieldPanel, BoxLayout.X_AXIS));
      nameFieldPanel.add(Box.createHorizontalStrut(4));
      nameFieldPanel.add(myProjectName);

      namePanel.add(nameFieldPanel, BorderLayout.CENTER);
      final JPanel wrapper = new JPanel(new FlowLayout(FlowLayout.LEFT, 0, 0));
      wrapper.add(namePanel);
      wrapper.setAlignmentX(0);
      myPanel.add(wrapper, new GridBagConstraints(0, GridBagConstraints.RELATIVE, 1, 1, 0.0, 0.0, GridBagConstraints.WEST,
                                                  GridBagConstraints.HORIZONTAL, JBUI.insets(4, 0, 10, 0), 0, 0));
    }

    myProjectJdkConfigurable = new ProjectJdkConfigurable(myProject, model);
    myPanel.add(myProjectJdkConfigurable.createComponent(), new GridBagConstraints(0, GridBagConstraints.RELATIVE, 1, 1, 0.0, 0.0,
                                                                                   GridBagConstraints.WEST, GridBagConstraints.HORIZONTAL,
                                                                                   JBUI.insetsTop(4), 0, 0));

    myPanel.add(myWholePanel, new GridBagConstraints(0, GridBagConstraints.RELATIVE, 1, 1, 1.0, 1.0, GridBagConstraints.NORTHWEST,
                                                     GridBagConstraints.NONE, JBUI.insetsTop(4), 0, 0));
    myPanel.setBorder(new EmptyBorder(0, 10, 0, 10));

    // Push compiler-output edits to the modules configurator as the user types,
    // unless we are programmatically resetting fields (myFreeze).
    myProjectCompilerOutput.getTextField().getDocument().addDocumentListener(new DocumentAdapter() {
      @Override
      protected void textChanged(DocumentEvent e) {
        if (myFreeze) return;
        myModulesConfigurator.processModuleCompilerOutputChanged(getCompilerOutputUrl());
      }
    });

    // When the project JDK changes, let the language-level combo recompute its
    // "default" entry and immediately apply the (possibly changed) current level.
    myProjectJdkConfigurable.addChangeListener(new ActionListener() {
      @Override
      public void actionPerformed(ActionEvent e) {
        myLanguageLevelCombo.sdkUpdated(myProjectJdkConfigurable.getSelectedProjectJdk(), myProject.isDefault());
        LanguageLevelProjectExtensionImpl.getInstanceImpl(myProject).setCurrentLevel(myLanguageLevelCombo.getSelectedLevel());
      }
    });

    // Keep the extension's "current level" in sync with the combo selection.
    myLanguageLevelCombo.addActionListener(new ActionListener() {
      @Override
      public void actionPerformed(ActionEvent e) {
        LanguageLevelProjectExtensionImpl.getInstanceImpl(myProject).setCurrentLevel(myLanguageLevelCombo.getSelectedLevel());
      }
    });
  }

  @Override
  public void disposeUIResources() {
    if (myProjectJdkConfigurable != null) {
      myProjectJdkConfigurable.disposeUIResources();
    }
  }

  /**
   * Repopulates all fields from the current project state. myFreeze suppresses the
   * document listener while fields are filled so the reset is not treated as a user edit.
   */
  @Override
  public void reset() {
    myFreeze = true;
    try {
      myProjectJdkConfigurable.reset();
      final String compilerOutput = getOriginalCompilerOutputUrl();
      if (compilerOutput != null) {
        myProjectCompilerOutput.setText(FileUtil.toSystemDependentName(VfsUtilCore.urlToPath(compilerOutput)));
      }
      myLanguageLevelCombo.reset(myProject);
      if (myProjectName != null) {
        myProjectName.setText(myProject.getName());
      }
    }
    finally {
      myFreeze = false;
    }
    myContext.getDaemonAnalyzer().queueUpdate(mySettingsElement);
  }

  /**
   * Commits the edited values under a write action: compiler output URL first (so
   * RootsChanged handlers fired by the JDK change see the new path), then language
   * level, JDK, and finally the project name.
   *
   * @throws ConfigurationException if the project name field is blank
   */
  @Override
  public void apply() throws ConfigurationException {
    final CompilerProjectExtension compilerProjectExtension = CompilerProjectExtension.getInstance(myProject);
    assert compilerProjectExtension != null : myProject;

    if (myProjectName != null && StringUtil.isEmptyOrSpaces(myProjectName.getText())) {
      throw new ConfigurationException("Please, specify project name!");
    }

    ApplicationManager.getApplication().runWriteAction(() -> {
      // set the output path first so that handlers of RootsChanged event sent after JDK is set
      // would see the updated path
      String canonicalPath = myProjectCompilerOutput.getText();
      if (canonicalPath != null && canonicalPath.length() > 0) {
        try {
          canonicalPath = FileUtil.resolveShortWindowsName(canonicalPath);
        }
        catch (IOException e) {
          //file doesn't exist yet
        }
        canonicalPath = FileUtil.toSystemIndependentName(canonicalPath);
        compilerProjectExtension.setCompilerOutputUrl(VfsUtilCore.pathToUrl(canonicalPath));
      }
      else {
        compilerProjectExtension.setCompilerOutputPointer(null);
      }

      LanguageLevelProjectExtension extension = LanguageLevelProjectExtension.getInstance(myProject);
      LanguageLevel level = myLanguageLevelCombo.getSelectedLevel();
      if (level != null) {
        extension.setLanguageLevel(level);
      }
      extension.setDefault(myLanguageLevelCombo.isDefault());
      myProjectJdkConfigurable.apply();

      if (myProjectName != null) {
        ((ProjectEx)myProject).setProjectName(getProjectName());
        if (myDetailsComponent != null) myDetailsComponent.setText(getBannerSlogan());
      }
    });
  }

  @Override
  public void setDisplayName(final String name) {
    //do nothing
  }

  @Override
  public Project getEditableObject() {
    return myProject;
  }

  @Override
  public String getBannerSlogan() {
    return ProjectBundle.message("project.roots.project.banner.text", myProject.getName());
  }

  @Override
  public String getDisplayName() {
    return ProjectBundle.message("project.roots.project.display.name");
  }

  @Override
  public Icon getIcon(boolean open) {
    return AllIcons.Nodes.Project;
  }

  @Override
  @Nullable
  @NonNls
  public String getHelpTopic() {
    return "reference.settingsdialog.project.structure.general";
  }

  /** Compares each edited field against the stored project state. */
  @Override
  @SuppressWarnings({"SimplifiableIfStatement"})
  public boolean isModified() {
    LanguageLevelProjectExtension extension = LanguageLevelProjectExtension.getInstance(myProject);
    if (extension.isDefault() != myLanguageLevelCombo.isDefault() ||
        !extension.isDefault() && !extension.getLanguageLevel().equals(myLanguageLevelCombo.getSelectedLevel())) {
      return true;
    }
    final String compilerOutput = getOriginalCompilerOutputUrl();
    if (!Comparing.strEqual(FileUtil.toSystemIndependentName(VfsUtilCore.urlToPath(compilerOutput)),
                            FileUtil.toSystemIndependentName(myProjectCompilerOutput.getText()))) return true;
    if (myProjectJdkConfigurable.isModified()) return true;
    if (!getProjectName().equals(myProject.getName())) return true;
    return false;
  }

  /** Returns the trimmed name from the field, or the project's current name when the field is absent. */
  @NotNull
  public String getProjectName() {
    return myProjectName != null ? myProjectName.getText().trim() : myProject.getName();
  }

  @Nullable
  private String getOriginalCompilerOutputUrl() {
    final CompilerProjectExtension extension = CompilerProjectExtension.getInstance(myProject);
    return extension != null ? extension.getCompilerOutputUrl() : null;
  }

  /**
   * GUI-designer hook: builds the custom-created components — the language-level combo
   * (whose default entry tracks the selected JDK's max level) and the compiler-output
   * field with a folder chooser and path completion.
   */
  private void createUIComponents() {
    myLanguageLevelCombo = new LanguageLevelCombo(JavaCoreBundle.message("default.language.level.description")) {
      @Override
      protected LanguageLevel getDefaultLevel() {
        Sdk sdk = myProjectJdkConfigurable.getSelectedProjectJdk();
        if (sdk == null) return null;
        JavaSdkVersion version = JavaSdk.getInstance().getVersion(sdk);
        return version == null ? null : version.getMaxLanguageLevel();
      }
    };
    final JTextField textField = new JTextField();
    final FileChooserDescriptor outputPathsChooserDescriptor = FileChooserDescriptorFactory.createSingleFolderDescriptor();
    InsertPathAction.addTo(textField, outputPathsChooserDescriptor);
    outputPathsChooserDescriptor.setHideIgnored(false);
    BrowseFilesListener listener = new BrowseFilesListener(textField, "", ProjectBundle.message("project.compiler.output"), outputPathsChooserDescriptor);
    myProjectCompilerOutput = new FieldPanel(textField, null, null, listener, EmptyRunnable.getInstance());
    FileChooserFactory.getInstance().installFileCompletion(myProjectCompilerOutput.getTextField(), outputPathsChooserDescriptor, true, null);
  }

  public String getCompilerOutputUrl() {
    return VfsUtilCore.pathToUrl(myProjectCompilerOutput.getText().trim());
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.druid.server.lookup.namespace; import com.google.common.base.Strings; import org.apache.druid.data.input.MapPopulator; import org.apache.druid.java.util.common.ISE; import org.apache.druid.java.util.common.JodaUtils; import org.apache.druid.java.util.common.Pair; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.common.logger.Logger; import org.apache.druid.query.lookup.namespace.CacheGenerator; import org.apache.druid.query.lookup.namespace.JdbcExtractionNamespace; import org.apache.druid.server.lookup.namespace.cache.CacheScheduler; import org.apache.druid.utils.JvmUtils; import org.skife.jdbi.v2.DBI; import org.skife.jdbi.v2.Handle; import org.skife.jdbi.v2.ResultIterator; import org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException; import org.skife.jdbi.v2.util.TimestampMapper; import javax.annotation.Nullable; import java.sql.Timestamp; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; /** * */ public final class JdbcCacheGenerator implements CacheGenerator<JdbcExtractionNamespace> { private static final Logger LOG = new Logger(JdbcCacheGenerator.class); private 
static final String NO_SUITABLE_DRIVER_FOUND_ERROR = "No suitable driver found"; private static final String JDBC_DRIVER_JAR_FILES_MISSING_ERROR = "JDBC driver JAR files missing from extensions/druid-lookups-cached-global directory"; private static final long MAX_MEMORY = JvmUtils.getRuntimeInfo().getMaxHeapSizeBytes(); private final ConcurrentMap<CacheScheduler.EntryImpl<JdbcExtractionNamespace>, DBI> dbiCache = new ConcurrentHashMap<>(); @Override @Nullable public CacheScheduler.VersionedCache generateCache( final JdbcExtractionNamespace namespace, final CacheScheduler.EntryImpl<JdbcExtractionNamespace> entryId, final String lastVersion, final CacheScheduler scheduler ) { final long lastCheck = lastVersion == null ? JodaUtils.MIN_INSTANT : Long.parseLong(lastVersion); final Long lastDBUpdate; final long dbQueryStart; try { lastDBUpdate = lastUpdates(entryId, namespace); if (lastDBUpdate != null && lastDBUpdate <= lastCheck) { return null; } } catch (UnableToObtainConnectionException e) { if (e.getMessage().contains(NO_SUITABLE_DRIVER_FOUND_ERROR)) { throw new ISE( e, JDBC_DRIVER_JAR_FILES_MISSING_ERROR ); } else { throw e; } } dbQueryStart = System.currentTimeMillis(); LOG.debug("Updating %s", entryId); final String newVersion; if (lastDBUpdate != null) { newVersion = lastDBUpdate.toString(); } else { newVersion = StringUtils.format("%d", dbQueryStart); } final CacheScheduler.VersionedCache versionedCache = scheduler.createVersionedCache(entryId, newVersion); final long startNs = System.nanoTime(); try ( Handle handle = getHandle(entryId, namespace); ResultIterator<Pair<String, String>> pairs = getLookupPairs(handle, namespace)) { final Map<String, String> cache = versionedCache.getCache(); final MapPopulator.PopulateResult populateResult = MapPopulator.populateAndWarnAtByteLimit( pairs, cache, (long) (MAX_MEMORY * namespace.getMaxHeapPercentage() / 100.0), null == entryId ? 
null : entryId.toString() ); final long duration = System.nanoTime() - startNs; LOG.info( "Finished loading %,d values (%d bytes) for [%s] in %,d ns", populateResult.getEntries(), populateResult.getBytes(), entryId, duration ); return versionedCache; } catch (UnableToObtainConnectionException e) { if (e.getMessage().contains(NO_SUITABLE_DRIVER_FOUND_ERROR)) { throw new ISE( e, JDBC_DRIVER_JAR_FILES_MISSING_ERROR ); } else { throw e; } } catch (Throwable t) { try { versionedCache.close(); } catch (Exception e) { t.addSuppressed(e); } throw t; } } private Handle getHandle( final CacheScheduler.EntryImpl<JdbcExtractionNamespace> key, final JdbcExtractionNamespace namespace ) { final DBI dbi = ensureDBI(key, namespace); return dbi.open(); } private ResultIterator<Pair<String, String>> getLookupPairs( final Handle handle, final JdbcExtractionNamespace namespace ) { final String table = namespace.getTable(); final String filter = namespace.getFilter(); final String valueColumn = namespace.getValueColumn(); final String keyColumn = namespace.getKeyColumn(); return handle.createQuery(buildLookupQuery(table, filter, keyColumn, valueColumn)) .map((index1, r1, ctx1) -> new Pair<>(r1.getString(keyColumn), r1.getString(valueColumn))) .iterator(); } private static String buildLookupQuery(String table, String filter, String keyColumn, String valueColumn) { if (Strings.isNullOrEmpty(filter)) { return StringUtils.format( "SELECT %s, %s FROM %s WHERE %s IS NOT NULL", keyColumn, valueColumn, table, valueColumn ); } return StringUtils.format( "SELECT %s, %s FROM %s WHERE %s AND %s IS NOT NULL", keyColumn, valueColumn, table, filter, valueColumn ); } private DBI ensureDBI(CacheScheduler.EntryImpl<JdbcExtractionNamespace> key, JdbcExtractionNamespace namespace) { DBI dbi = null; if (dbiCache.containsKey(key)) { dbi = dbiCache.get(key); } if (dbi == null) { final DBI newDbi = new DBI( namespace.getConnectorConfig().getConnectURI(), namespace.getConnectorConfig().getUser(), 
namespace.getConnectorConfig().getPassword() ); dbiCache.putIfAbsent(key, newDbi); dbi = dbiCache.get(key); } return dbi; } @Nullable private Long lastUpdates(CacheScheduler.EntryImpl<JdbcExtractionNamespace> key, JdbcExtractionNamespace namespace) { final DBI dbi = ensureDBI(key, namespace); final String table = namespace.getTable(); final String tsColumn = namespace.getTsColumn(); if (tsColumn == null) { return null; } final Timestamp update = dbi.withHandle( handle -> { final String query = StringUtils.format( "SELECT MAX(%s) FROM %s", tsColumn, table ); return handle .createQuery(query) .map(TimestampMapper.FIRST) .first(); } ); return update.getTime(); } }
/*
 * Copyright (c) 1995, 2010, Oracle and/or its affiliates. All rights reserved.
 * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 */

package java.net;

import java.io.FileDescriptor;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.channels.FileChannel;

import sun.net.ConnectionResetException;

/**
 * This stream extends FileInputStream to implement a
 * SocketInputStream. Note that this class should <b>NOT</b> be
 * public.
 *
 * @author      Jonathan Payne
 * @author      Arthur van Hoff
 */
class SocketInputStream extends FileInputStream
{
    static {
        // Load/initialize native state before any instance is used.
        init();
    }

    // Set once the native read reports end-of-stream; subsequent reads return -1.
    private boolean eof;
    private AbstractPlainSocketImpl impl = null;
    // Scratch buffer for the single-byte read(); reallocated on each call.
    private byte temp[];
    private Socket socket = null;

    /**
     * Creates a new SocketInputStream. Can only be called
     * by a Socket. This method needs to hang on to the owner Socket so
     * that the fd will not be closed.
     * @param impl the implemented socket input stream
     */
    SocketInputStream(AbstractPlainSocketImpl impl) throws IOException {
        super(impl.getFileDescriptor());
        this.impl = impl;
        socket = impl.getSocket();
    }

    /**
     * Returns the unique {@link java.nio.channels.FileChannel FileChannel}
     * object associated with this file input stream.</p>
     *
     * The <code>getChannel</code> method of <code>SocketInputStream</code>
     * returns <code>null</code> since it is a socket based stream.</p>
     *
     * @return  the file channel associated with this file input stream
     *
     * @since 1.4
     * @spec JSR-51
     */
    public final FileChannel getChannel() {
        // Socket-backed streams have no FileChannel; SocketChannel is obtained
        // from the Socket itself.
        return null;
    }

    /**
     * Reads into an array of bytes at the specified offset using
     * the received socket primitive.
     * @param fd the FileDescriptor
     * @param b the buffer into which the data is read
     * @param off the start offset of the data
     * @param len the maximum number of bytes read
     * @param timeout the read timeout in ms
     * @return the actual number of bytes read, -1 is
     *          returned when the end of the stream is reached.
     * @exception IOException If an I/O error has occurred.
     */
    private native int socketRead0(FileDescriptor fd,
                                   byte b[], int off, int len,
                                   int timeout)
        throws IOException;

    /**
     * Reads into a byte array data from the socket.
     * @param b the buffer into which the data is read
     * @return the actual number of bytes read, -1 is
     *          returned when the end of the stream is reached.
     * @exception IOException If an I/O error has occurred.
     */
    public int read(byte b[]) throws IOException {
        return read(b, 0, b.length);
    }

    /**
     * Reads into a byte array <i>b</i> at offset <i>off</i>,
     * <i>length</i> bytes of data.
     * @param b the buffer into which the data is read
     * @param off the start offset of the data
     * @param len the maximum number of bytes read
     * @return the actual number of bytes read, -1 is
     *          returned when the end of the stream is reached.
     * @exception IOException If an I/O error has occurred.
     */
    public int read(byte b[], int off, int length) throws IOException {
        return read(b, off, length, impl.getTimeout());
    }

    // Core read: delegates to the native primitive while holding the impl's FD,
    // and translates a native "connection reset" into the impl's reset state machine
    // (reset-pending first, so bytes already buffered on the socket can still be read).
    int read(byte b[], int off, int length, int timeout) throws IOException {
        int n;

        // EOF already encountered
        if (eof) {
            return -1;
        }

        // connection reset
        if (impl.isConnectionReset()) {
            throw new SocketException("Connection reset");
        }

        // bounds check
        // NOTE(review): off + length can overflow int for very large arguments,
        // bypassing this check — presumably callers never pass such values; confirm.
        if (length <= 0 || off < 0 || off + length > b.length) {
            if (length == 0) {
                return 0;
            }
            throw new ArrayIndexOutOfBoundsException();
        }

        boolean gotReset = false;

        // acquire file descriptor and do the read
        FileDescriptor fd = impl.acquireFD();
        try {
            n = socketRead0(fd, b, off, length, timeout);
            if (n > 0) {
                return n;
            }
        } catch (ConnectionResetException rstExc) {
            gotReset = true;
        } finally {
            impl.releaseFD();
        }

        /*
         * We receive a "connection reset" but there may be bytes still
         * buffered on the socket
         */
        if (gotReset) {
            impl.setConnectionResetPending();
            impl.acquireFD();
            try {
                n = socketRead0(fd, b, off, length, timeout);
                if (n > 0) {
                    return n;
                }
            } catch (ConnectionResetException rstExc) {
                // Second reset: fall through to the EOF/reset handling below.
            } finally {
                impl.releaseFD();
            }
        }

        /*
         * If we get here we are at EOF, the socket has been closed,
         * or the connection has been reset.
         */
        if (impl.isClosedOrPending()) {
            throw new SocketException("Socket closed");
        }
        if (impl.isConnectionResetPending()) {
            impl.setConnectionReset();
        }
        if (impl.isConnectionReset()) {
            throw new SocketException("Connection reset");
        }
        eof = true;
        return -1;
    }

    /**
     * Reads a single byte from the socket.
     */
    public int read() throws IOException {
        if (eof) {
            return -1;
        }
        temp = new byte[1];
        int n = read(temp, 0, 1);
        if (n <= 0) {
            return -1;
        }
        // Mask to return an unsigned value in 0..255, per InputStream contract.
        return temp[0] & 0xff;
    }

    /**
     * Skips n bytes of input.
     * @param n the number of bytes to skip
     * @return  the actual number of bytes skipped.
     * @exception IOException If an I/O error has occurred.
     */
    public long skip(long numbytes) throws IOException {
        if (numbytes <= 0) {
            return 0;
        }
        long n = numbytes;
        // Skip by reading into a throwaway buffer of at most 1 KiB per pass.
        int buflen = (int) Math.min(1024, n);
        byte data[] = new byte[buflen];
        while (n > 0) {
            int r = read(data, 0, (int) Math.min((long) buflen, n));
            if (r < 0) {
                break;
            }
            n -= r;
        }
        return numbytes - n;
    }

    /**
     * Returns the number of bytes that can be read without blocking.
     * @return the number of immediately available bytes
     */
    public int available() throws IOException {
        return impl.available();
    }

    /**
     * Closes the stream.
     */
    // Guard against close() re-entering itself via Socket.close() (BugId 4484411).
    private boolean closing = false;
    public void close() throws IOException {
        // Prevent recursion. See BugId 4484411
        if (closing)
            return;
        closing = true;
        if (socket != null) {
            if (!socket.isClosed())
                socket.close();
        } else
            impl.close();
        closing = false;
    }

    void setEOF(boolean eof) {
        this.eof = eof;
    }

    /**
     * Overrides finalize, the fd is closed by the Socket.
     */
    protected void finalize() {}

    /**
     * Perform class load-time initializations.
     */
    private native static void init();
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.beam.sdk.coders;

import com.google.common.collect.ImmutableList;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.OutputStream;
import java.io.Serializable;
import java.lang.reflect.Method;
import java.util.List;
import javax.annotation.Nullable;
import org.apache.beam.sdk.PipelineRunner;
import org.apache.beam.sdk.values.TypeDescriptor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * A {@link Coder} for Java classes that implement {@link Serializable}.
 *
 * <p>To use, specify the coder type on a PCollection:
 *
 * <pre>{@code
 * PCollection<MyRecord> records =
 *     foo.apply(...).setCoder(SerializableCoder.of(MyRecord.class));
 * }</pre>
 *
 * <p>{@link SerializableCoder} does not guarantee a deterministic encoding, as Java serialization
 * may produce different binary encodings for two equivalent objects.
 *
 * @param <T> the type of elements handled by this coder
 */
public class SerializableCoder<T extends Serializable> extends CustomCoder<T> {
  private static final Logger LOG = LoggerFactory.getLogger(SerializableCoder.class);

  /**
   * Returns a {@link SerializableCoder} instance for the provided element type.
   *
   * @param <T> the element type
   */
  public static <T extends Serializable> SerializableCoder<T> of(TypeDescriptor<T> type) {
    // Safe: a TypeDescriptor<T>'s raw type is assignable to Class<T> for this use.
    @SuppressWarnings("unchecked")
    Class<T> clazz = (Class<T>) type.getRawType();
    return new SerializableCoder<>(clazz, type);
  }

  @Override
  public boolean consistentWithEquals() {
    // Java serialization may encode equal objects differently, so encoded-byte
    // equality does not imply Object#equals equality (and vice versa).
    return false;
  }

  /**
   * The structural value of the object is the object itself. The {@link SerializableCoder} should
   * be only used for objects with a proper {@link Object#equals} implementation.
   */
  @Override
  public Object structuralValue(T value) {
    return value;
  }

  /**
   * Returns a {@link SerializableCoder} instance for the provided element class.
   *
   * @param <T> the element type
   */
  public static <T extends Serializable> SerializableCoder<T> of(Class<T> clazz) {
    checkEqualsMethodDefined(clazz);
    return new SerializableCoder<>(clazz, TypeDescriptor.of(clazz));
  }

  // Logs a warning when the element class (or interface) appears to rely on the
  // default Object#equals, since structuralValue() above depends on a real equals.
  private static <T extends Serializable> void checkEqualsMethodDefined(Class<T> clazz) {
    // For interfaces we cannot inspect the implementing class, so always warn.
    boolean warn = clazz.isInterface();
    if (!warn) {
      Method method;
      try {
        method = clazz.getMethod("equals", Object.class);
      } catch (NoSuchMethodException e) {
        // All concrete classes have an equals method declared in their class hierarchy.
        throw new AssertionError(String.format("Concrete class %s has no equals method", clazz));
      }
      // Check if not default Object#equals implementation.
      warn = Object.class.equals(method.getDeclaringClass());
    }
    if (warn) {
      LOG.warn(
          "Can't verify serialized elements of type {} have well defined equals method. "
              + "This may produce incorrect results on some {}",
          clazz.getSimpleName(),
          PipelineRunner.class.getSimpleName());
    }
  }

  /**
   * Returns a {@link CoderProvider} which uses the {@link SerializableCoder} if possible for all
   * types.
   *
   * <p>This method is invoked reflectively from {@link DefaultCoder}.
   */
  @SuppressWarnings("unused")
  public static CoderProvider getCoderProvider() {
    return new SerializableCoderProvider();
  }

  /**
   * A {@link CoderProviderRegistrar} which registers a {@link CoderProvider} which can handle
   * serializable types.
   */
  public static class SerializableCoderProviderRegistrar implements CoderProviderRegistrar {

    @Override
    public List<CoderProvider> getCoderProviders() {
      return ImmutableList.of(getCoderProvider());
    }
  }

  /**
   * A {@link CoderProvider} that constructs a {@link SerializableCoder} for any class that
   * implements serializable.
   */
  static class SerializableCoderProvider extends CoderProvider {
    @Override
    public <T> Coder<T> coderFor(
        TypeDescriptor<T> typeDescriptor, List<? extends Coder<?>> componentCoders)
        throws CannotProvideCoderException {
      if (Serializable.class.isAssignableFrom(typeDescriptor.getRawType())) {
        return SerializableCoder.of((TypeDescriptor) typeDescriptor);
      }
      throw new CannotProvideCoderException(
          "Cannot provide SerializableCoder because "
              + typeDescriptor
              + " does not implement Serializable");
    }
  }

  // The coder's element class; also the basis of equals()/hashCode().
  private final Class<T> type;

  /** Access via {@link #getEncodedTypeDescriptor()}. */
  // Transient: rebuilt lazily from 'type' after this coder is itself deserialized.
  @Nullable private transient TypeDescriptor<T> typeDescriptor;

  protected SerializableCoder(Class<T> type, TypeDescriptor<T> typeDescriptor) {
    this.type = type;
    this.typeDescriptor = typeDescriptor;
  }

  public Class<T> getRecordType() {
    return type;
  }

  @Override
  public void encode(T value, OutputStream outStream) throws IOException {
    // The ObjectOutputStream is intentionally not closed: closing it would close
    // the caller-owned outStream. Flush so all buffered bytes reach outStream.
    ObjectOutputStream oos = new ObjectOutputStream(outStream);
    oos.writeObject(value);
    oos.flush();
  }

  @Override
  public T decode(InputStream inStream) throws IOException, CoderException {
    // NOTE(review): Java-native deserialization of the incoming stream — assumes the
    // stream originates from trusted pipeline infrastructure, not arbitrary input.
    try {
      ObjectInputStream ois = new ObjectInputStream(inStream);
      return type.cast(ois.readObject());
    } catch (ClassNotFoundException e) {
      throw new CoderException("unable to deserialize record", e);
    }
  }

  /**
   * {@inheritDoc}
   *
   * @throws NonDeterministicException always. Java serialization is not deterministic with respect
   *     to {@link Object#equals} for all types.
   */
  @Override
  public void verifyDeterministic() throws NonDeterministicException {
    throw new NonDeterministicException(this, "Java Serialization may be non-deterministic.");
  }

  @Override
  public boolean equals(Object other) {
    // Two SerializableCoders are equal iff they encode the same element class.
    return !(other == null || getClass() != other.getClass())
        && type == ((SerializableCoder<?>) other).type;
  }

  @Override
  public int hashCode() {
    return type.hashCode();
  }

  @Override
  public TypeDescriptor<T> getEncodedTypeDescriptor() {
    // Lazily rebuild after Java deserialization (field is transient).
    if (typeDescriptor == null) {
      typeDescriptor = TypeDescriptor.of(type);
    }
    return typeDescriptor;
  }

  // This coder inherits isRegisterByteSizeObserverCheap,
  // getEncodedElementByteSize and registerByteSizeObserver
  // from StructuredCoder. Looks like we cannot do much better
  // in this case.
}
package com.thebluealliance.androidclient.database;

import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
import android.database.sqlite.SQLiteQueryBuilder;

import androidx.annotation.VisibleForTesting;
import androidx.annotation.WorkerThread;

import com.google.gson.Gson;
import com.thebluealliance.androidclient.TbaLogger;
import com.thebluealliance.androidclient.database.tables.AwardsTable;
import com.thebluealliance.androidclient.database.tables.DistrictTeamsTable;
import com.thebluealliance.androidclient.database.tables.DistrictsTable;
import com.thebluealliance.androidclient.database.tables.EventDetailsTable;
import com.thebluealliance.androidclient.database.tables.EventTeamsTable;
import com.thebluealliance.androidclient.database.tables.EventsTable;
import com.thebluealliance.androidclient.database.tables.FavoritesTable;
import com.thebluealliance.androidclient.database.tables.MatchesTable;
import com.thebluealliance.androidclient.database.tables.MediasTable;
import com.thebluealliance.androidclient.database.tables.NotificationsTable;
import com.thebluealliance.androidclient.database.tables.SubscriptionsTable;
import com.thebluealliance.androidclient.database.tables.TeamsTable;

/**
 * Singleton {@link SQLiteOpenHelper} that owns the app's SQLite schema: one table per
 * model type plus FTS3 search indexes. Construct via {@link #getInstance}.
 */
//SUPPRESS CHECKSTYLE FinalClass
public class Database extends SQLiteOpenHelper {
    // Preference-key prefixes used to record which data sets have been fully synced.
    public static final String ALL_TEAMS_LOADED_TO_DATABASE_FOR_PAGE = "all_teams_loaded_for_page_";
    public static final String ALL_EVENTS_LOADED_TO_DATABASE_FOR_YEAR = "all_events_loaded_for_year_";
    public static final String ALL_DISTRICTS_LOADED_TO_DATABASE_FOR_YEAR = "all_districts_loaded_for_year_";

    // Bump this when the schema changes; onUpgrade() walks one version at a time.
    static final int DATABASE_VERSION = 36;
    private static final String DATABASE_NAME = "the-blue-alliance-android-database";
    // Legacy response-cache table; dropped in upgrade step 25, kept only for that migration.
    static final @Deprecated String TABLE_API = "api";

    // Table names. Declaration continues in the following constant group.
    public static final String TABLE_TEAMS = "teams",
            TABLE_EVENTS = "events",
            TABLE_AWARDS = "awards",
            TABLE_MATCHES = "matches",
TABLE_MEDIAS = "medias", TABLE_EVENTTEAMS = "eventTeams", TABLE_DISTRICTS = "districts", TABLE_DISTRICTTEAMS = "districtTeams", TABLE_EVENTDETAILS = "eventDetails", TABLE_FAVORITES = "favorites", TABLE_SUBSCRIPTIONS = "subscriptions", TABLE_SEARCH = "search", TABLE_SEARCH_TEAMS = "search_teams", TABLE_SEARCH_EVENTS = "search_events", TABLE_NOTIFICATIONS = "notifications"; public static final String CREATE_TEAMS = "CREATE TABLE IF NOT EXISTS " + TABLE_TEAMS + "(" + TeamsTable.KEY + " TEXT PRIMARY KEY NOT NULL, " + TeamsTable.NUMBER + " INTEGER NOT NULL, " + TeamsTable.NAME + " TEXT DEFAULT '', " + TeamsTable.SHORTNAME + " TEXT DEFAULT '', " + TeamsTable.LOCATION + " TEXT DEFAULT '', " + TeamsTable.ADDRESS + " TEXT DEFAULT '', " + TeamsTable.LOCATION_NAME + " TEXT DEFAULT '', " + TeamsTable.WEBSITE + " TEXT DEFAULT '', " + TeamsTable.YEARS_PARTICIPATED + " TEXT DEFAULT '', " + TeamsTable.MOTTO + " TEXT DEFAULT '', " + TeamsTable.LAST_MODIFIED + " TIMESTAMP" + ")"; public static final String CREATE_EVENTS = "CREATE TABLE IF NOT EXISTS " + TABLE_EVENTS + "(" + EventsTable.KEY + " TEXT PRIMARY KEY NOT NULL, " + EventsTable.YEAR + " INTEGER NOT NULL, " + EventsTable.NAME + " TEXT DEFAULT '', " + EventsTable.SHORTNAME + " TEXT DEFAULT '', " + EventsTable.LOCATION + " TEXT DEFAULT '', " + EventsTable.CITY + " TEXT DEFAULT '', " + EventsTable.VENUE + " TEXT DEFAULT '', " + EventsTable.ADDRESS + " TEXT DEFAULT '', " + EventsTable.TYPE + " INTEGER DEFAULT -1, " + EventsTable.START + " TIMESTAMP, " + EventsTable.END + " TIMESTAMP, " + EventsTable.WEEK + " INTEGER DEFAULT -1, " + EventsTable.WEBCASTS + " TEXT DEFAULT '', " + EventsTable.WEBSITE + " TEXT DEFAULT '', " + EventsTable.DISTRICT_KEY + " TEXT DEFAULT '', " + EventsTable.LAST_MODIFIED + " TIMESTAMP" + ")"; public static final String CREATE_AWARDS = "CREATE TABLE IF NOT EXISTS " + TABLE_AWARDS + "(" + AwardsTable.KEY + " TEXT PRIMARY KEY NOT NULL, " + AwardsTable.ENUM + " INTEGER DEFAULT -1, " + AwardsTable.EVENTKEY + " 
TEXT DEFAULT '', " + AwardsTable.NAME + " TEXT DEFAULT '', " + AwardsTable.YEAR + " INTEGER DEFAULT -1, " + AwardsTable.WINNERS + " TEXT DEFAULT '', " + AwardsTable.LAST_MODIFIED + " TIMESTAMP" + ")"; public static final String CREATE_MATCHES = "CREATE TABLE IF NOT EXISTS " + TABLE_MATCHES + "(" + MatchesTable.KEY + " TEXT PRIMARY KEY NOT NULL, " + MatchesTable.SETNUM + " INTEGER DEFAULT -1," + MatchesTable.MATCHNUM + " INTEGER DEFAULT -1," + MatchesTable.EVENT + " TEXT DEFAULT '', " + MatchesTable.TIME + " TIMESTAMP, " + MatchesTable.ALLIANCES + " TEXT DEFAULT '', " + MatchesTable.WINNER + " TEXT DEFAULT '', " + MatchesTable.VIDEOS + " TEXT DEFAULT '', " + MatchesTable.BREAKDOWN + " TEXT DEFAULT '', " + MatchesTable.LAST_MODIFIED + " TIMESTAMP" + ")"; public static final String CREATE_MEDIAS = "CREATE TABLE IF NOT EXISTS " + TABLE_MEDIAS + "(" + MediasTable.TYPE + " TEXT DEFAULT '', " + MediasTable.FOREIGNKEY + " TEXT DEFAULT '', " + MediasTable.TEAMKEY + " TEXT DEFAULT '', " + MediasTable.DETAILS + " TEXT DEFAULT '', " + MediasTable.B64_IMAGE + " TEXT DEFAULT '', " + MediasTable.YEAR + " INTEGER DEFAULT -1, " + MediasTable.LAST_MODIFIED + " TIMESTAMP" + ")"; public static final String CREATE_EVENTTEAMS = "CREATE TABLE IF NOT EXISTS " + TABLE_EVENTTEAMS + "(" + EventTeamsTable.KEY + " TEXT PRIMARY KEY NOT NULL, " + EventTeamsTable.TEAMKEY + " TEXT DEFAULT '', " + EventTeamsTable.EVENTKEY + " TEXT DEFAULT '', " + EventTeamsTable.YEAR + " INTEGER DEFAULT -1, " + EventTeamsTable.STATUS + " TEXT DEFAULT '', " + EventTeamsTable.LAST_MODIFIED + " TIMESTAMP" + ")"; public static final String CREATE_DISTRICTS = "CREATE TABLE IF NOT EXISTS " + TABLE_DISTRICTS + "(" + DistrictsTable.KEY + " TEXT PRIMARY KEY NOT NULL, " + DistrictsTable.ABBREV + " TEXT NOT NULL, " + DistrictsTable.YEAR + " INTEGER NOT NULL, " + DistrictsTable.NAME + " TEXT DEFAULT '', " + DistrictsTable.LAST_MODIFIED + " TIMESTAMP" + ")"; public static final String CREATE_DISTRICTTEAMS = "CREATE TABLE IF NOT 
EXISTS " + TABLE_DISTRICTTEAMS + "(" + DistrictTeamsTable.KEY + " TEXT PRIMARY KEY NOT NULL, " + DistrictTeamsTable.TEAM_KEY + " TEXT NOT NULL, " + DistrictTeamsTable.DISTRICT_KEY + " TEXT NOT NULL, " + DistrictTeamsTable.RANK + " INTEGER DEFAULT -1, " + DistrictTeamsTable.EVENT1_KEY + " TEXT DEFAULT '', " + DistrictTeamsTable.EVENT1_POINTS + " INTEGER DEFAULT 0, " + DistrictTeamsTable.EVENT2_KEY + " TEXT DEFAULT '', " + DistrictTeamsTable.EVENT2_POINTS + " INTEGER DEFAULT 0, " + DistrictTeamsTable.CMP_KEY + " TEXT DEFAULT '', " + DistrictTeamsTable.CMP_POINTS + " INTEGER DEFAULT 0, " + DistrictTeamsTable.ROOKIE_POINTS + " INTEGER DEFAULT 0, " + DistrictTeamsTable.TOTAL_POINTS + " INTEGER DEFAULT 0, " + DistrictTeamsTable.LAST_MODIFIED + " TIMESTAMP" + ")"; public static final String CREATE_EVENTDETAILS = "CREATE TABLE IF NOT EXISTS " + TABLE_EVENTDETAILS + "(" + EventDetailsTable.KEY + " TEXT PRIMARY KEY NOT NULL, " + EventDetailsTable.EVENT_KEY + " TEXT NOT NULL, " + EventDetailsTable.DETAIL_TYPE + " INTEGER NOT NULL, " + EventDetailsTable.JSON_DATA + " TEXT DEFAULT '', " + EventDetailsTable.LAST_MODIFIED + " TIMESTAMP" + ")"; public static final String CREATE_FAVORITES = "CREATE TABLE IF NOT EXISTS " + TABLE_FAVORITES + "(" + FavoritesTable.KEY + " TEXT PRIMARY KEY NOT NULL," + FavoritesTable.USER_NAME + " TEXT NOT NULL, " + FavoritesTable.MODEL_KEY + " TEXT NOT NULL," + FavoritesTable.MODEL_ENUM + " INTEGER NOT NULL" + ")"; public static final String CREATE_SUBSCRIPTIONS = "CREATE TABLE IF NOT EXISTS " + TABLE_SUBSCRIPTIONS + "(" + SubscriptionsTable.KEY + " TEXT PRIMARY KEY NOT NULL," + SubscriptionsTable.USER_NAME + " TEXT NOT NULL," + SubscriptionsTable.MODEL_KEY + " TEXT NOT NULL," + SubscriptionsTable.MODEL_ENUM + " INTEGER NOT NULL," + SubscriptionsTable.NOTIFICATION_SETTINGS + " TEXT DEFAULT '[]'" + ")"; public static final String CREATE_SEARCH_TEAMS = "CREATE VIRTUAL TABLE " + TABLE_SEARCH_TEAMS + " USING fts3 (" + SearchTeam.KEY + " TEXT PRIMARY KEY, " 
+ SearchTeam.TITLES + " TEXT, " + SearchTeam.NUMBER + " TEXT, " + ")"; public static final String CREATE_SEARCH_EVENTS = "CREATE VIRTUAL TABLE " + TABLE_SEARCH_EVENTS + " USING fts3 (" + SearchEvent.KEY + " TEXT PRIMARY KEY, " + SearchEvent.TITLES + " TEXT, " + SearchEvent.YEAR + " TEXT, " + ")"; public static final String CREATE_NOTIFICATIONS = "CREATE TABLE IF NOT EXISTS " + TABLE_NOTIFICATIONS + "(" + NotificationsTable.ID + " INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, " + NotificationsTable.TYPE + " TEXT NOT NULL, " + NotificationsTable.TITLE + " TEXT DEFAULT '', " + NotificationsTable.BODY + " TEXT DEFAULT '', " + NotificationsTable.INTENT + " TEXT DEFAULT '', " + NotificationsTable.TIME + " TIMESTAMP, " + NotificationsTable.SYSTEM_ID + " INTEGER NOT NULL, " + NotificationsTable.ACTIVE + " INTEGER DEFAULT 1, " + NotificationsTable.MSG_DATA + " TEXT DEFAULT '')"; private static Database sDbInstance = null; protected SQLiteDatabase mDb; private TeamsTable mTeamsTable; private EventsTable mEventsTable; private AwardsTable mAwardsTable; private MatchesTable mMatchesTable; private MediasTable mMediasTable; private EventTeamsTable mEventTeamsTable; private DistrictsTable mDistrictsTable; private DistrictTeamsTable mDistrictTeamsTable; private EventDetailsTable mEventDetailsTable; private FavoritesTable mFavoritesTable; private SubscriptionsTable mSubscriptionsTable; private NotificationsTable mNotificationsTable; private Gson mGson; protected Database(Context context, Gson gson) { super(context, DATABASE_NAME, null, DATABASE_VERSION); mGson = gson; mDb = getWritableDatabase(); mTeamsTable = new TeamsTable(mDb, mGson); mAwardsTable = new AwardsTable(mDb, mGson); mMatchesTable = new MatchesTable(mDb, mGson); mMediasTable = new MediasTable(mDb, mGson); mEventTeamsTable = new EventTeamsTable(mDb, mGson); mDistrictsTable = new DistrictsTable(mDb, mGson); mEventsTable = new EventsTable(mDb, mGson, mDistrictsTable); mEventDetailsTable = new EventDetailsTable(mDb, mGson); 
        mDistrictTeamsTable = new DistrictTeamsTable(mDb, mGson);
        // The following tables store no JSON blobs, so they do not need Gson.
        mFavoritesTable = new FavoritesTable(mDb);
        mSubscriptionsTable = new SubscriptionsTable(mDb);
        mNotificationsTable = new NotificationsTable(mDb);
    }

    /**
     * Returns the process-wide singleton, creating it on first call.
     * Uses the application context to avoid leaking an Activity.
     */
    public static synchronized Database getInstance(Context context, Gson gson) {
        if (sDbInstance == null) {
            sDbInstance = new Database(context.getApplicationContext(), gson);
            // WAL lets readers proceed concurrently with a writer.
            sDbInstance.setWriteAheadLoggingEnabled(true);
        }
        return sDbInstance;
    }

    public TeamsTable getTeamsTable() {
        return mTeamsTable;
    }

    public EventsTable getEventsTable() {
        return mEventsTable;
    }

    public EventDetailsTable getEventDetailsTable() {
        return mEventDetailsTable;
    }

    public AwardsTable getAwardsTable() {
        return mAwardsTable;
    }

    public MatchesTable getMatchesTable() {
        return mMatchesTable;
    }

    public MediasTable getMediasTable() {
        return mMediasTable;
    }

    public EventTeamsTable getEventTeamsTable() {
        return mEventTeamsTable;
    }

    public DistrictsTable getDistrictsTable() {
        return mDistrictsTable;
    }

    public DistrictTeamsTable getDistrictTeamsTable() {
        return mDistrictTeamsTable;
    }

    public FavoritesTable getFavoritesTable() {
        return mFavoritesTable;
    }

    public SubscriptionsTable getSubscriptionsTable() {
        return mSubscriptionsTable;
    }

    public NotificationsTable getNotificationsTable() {
        return mNotificationsTable;
    }

    /** Begins a transaction on the shared handle. Must run off the main thread. */
    @WorkerThread
    public void beginTransaction() {
        beginTransaction(mDb);
    }

    @WorkerThread
    public static void beginTransaction(SQLiteDatabase db) {
        /* DISABLED 04-05-2017, possibly causes deadlocks. -PJL
           Retry loop kept for reference:
        do {
            try {
                db.beginTransaction();
                break;
            } catch (SQLiteDatabaseLockedException ex) {
                // Ignored. Retry in a bit...
} TbaLogger.i("Unable to get a DB lock to write, backing off..."); try { Thread.sleep(100); } catch (InterruptedException e) { // Ignored } } while (true); */ db.beginTransaction(); } public void setTransactionSuccessful() { mDb.setTransactionSuccessful(); } public void endTransaction() { mDb.endTransaction(); } public SQLiteDatabase getDb() { return mDb; } @Override public void onCreate(SQLiteDatabase db) { db.execSQL(CREATE_TEAMS); db.execSQL(CREATE_EVENTS); db.execSQL(CREATE_AWARDS); db.execSQL(CREATE_MATCHES); db.execSQL(CREATE_MEDIAS); db.execSQL(CREATE_EVENTTEAMS); db.execSQL(CREATE_DISTRICTS); db.execSQL(CREATE_EVENTDETAILS); db.execSQL(CREATE_DISTRICTTEAMS); db.execSQL(CREATE_FAVORITES); db.execSQL(CREATE_SUBSCRIPTIONS); db.execSQL(CREATE_NOTIFICATIONS); if (!tableExists(db, TABLE_SEARCH_EVENTS)) { db.execSQL(CREATE_SEARCH_EVENTS); } if (!tableExists(db, TABLE_SEARCH_TEAMS)) { db.execSQL(CREATE_SEARCH_TEAMS); } } @VisibleForTesting boolean tableExists(SQLiteDatabase db, String tableName) { if (tableName == null || db == null || !db.isOpen()) { return false; } Cursor cursor = db.rawQuery("SELECT 1 FROM sqlite_master WHERE type = ? 
AND name = ?", new String[] {"table", tableName}); if (cursor.moveToFirst()) { cursor.close(); return true; } else { cursor.close(); return false; } } @VisibleForTesting boolean columnExists(SQLiteDatabase db, String tableName, String columnName) { if (tableName == null || db == null || columnName == null) { return false; } Cursor cursor = db.rawQuery("PRAGMA table_info(" + tableName + ")", null); if (cursor.moveToFirst()) { do { int value = cursor.getColumnIndex("name"); if (value != -1 && cursor.getString(value).equals(columnName)) { cursor.close(); return true; } } while (cursor.moveToNext()); } cursor.close(); return false; } @Override public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) { TbaLogger.w("Upgrading database from version " + oldVersion + " to " + newVersion); int upgradeTo = oldVersion + 1; while (upgradeTo <= newVersion) { switch (upgradeTo) { case 14: //add districts tables db.execSQL(CREATE_DISTRICTS); db.execSQL(CREATE_DISTRICTTEAMS); if (!columnExists(db, TABLE_EVENTS, EventsTable.DISTRICT_POINTS)) { db.execSQL("ALTER TABLE " + TABLE_EVENTS + " ADD COLUMN " + EventsTable.DISTRICT_POINTS + " TEXT DEFAULT '' "); } break; case 15: //add favorites and subscriptions db.execSQL(CREATE_FAVORITES); db.execSQL(CREATE_SUBSCRIPTIONS); break; case 16: // add column for individual notification settings and sorting by model type if (!columnExists(db, TABLE_SUBSCRIPTIONS, SubscriptionsTable.NOTIFICATION_SETTINGS)) { db.execSQL("ALTER TABLE " + TABLE_SUBSCRIPTIONS + " ADD COLUMN " + SubscriptionsTable.NOTIFICATION_SETTINGS + " TEXT DEFAULT '[]' "); } if (!columnExists(db, TABLE_SUBSCRIPTIONS, SubscriptionsTable.MODEL_ENUM)) { db.execSQL("ALTER TABLE " + TABLE_SUBSCRIPTIONS + " ADD COLUMN " + SubscriptionsTable.MODEL_ENUM + " INTEGER NOT NULL DEFAULT -1"); } if (!columnExists(db, TABLE_FAVORITES, FavoritesTable.MODEL_ENUM)) { db.execSQL("ALTER TABLE " + TABLE_FAVORITES + " ADD COLUMN " + FavoritesTable.MODEL_ENUM + " INTEGER NOT NULL 
DEFAULT -1"); } break; case 17: // add column for district name if (!columnExists(db, TABLE_DISTRICTS, DistrictsTable.NAME)) { db.execSQL("ALTER TABLE " + TABLE_DISTRICTS + " ADD COLUMN " + DistrictsTable.NAME + " TEXT DEFAULT '' "); } break; case 18: // add column for event short name if (!columnExists(db, TABLE_EVENTS, EventsTable.SHORTNAME)) { db.execSQL("ALTER TABLE " + TABLE_EVENTS + " ADD COLUMN " + EventsTable.SHORTNAME + " TEXT DEFAULT '' "); } break; case 20: // Create table for recent notification db.execSQL(CREATE_NOTIFICATIONS); break; case 23: case 24: // remove and recreate search indexes to we can create them with foreign keys db.execSQL("DROP TABLE IF EXISTS " + TABLE_SEARCH_TEAMS); db.execSQL("DROP TABLE IF EXISTS " + TABLE_SEARCH_EVENTS); onCreate(db); break; case 25: // delete deprecated responses table db.execSQL("DROP TABLE IF EXISTS " + TABLE_API); break; case 28: // recreate stored notifications table db.beginTransaction(); try { db.execSQL("DROP TABLE IF EXISTS " + TABLE_NOTIFICATIONS); db.execSQL(CREATE_NOTIFICATIONS); db.setTransactionSuccessful(); } finally { db.endTransaction(); } break; case 29: // Add team motto db.beginTransaction(); try { if (!columnExists(db, TABLE_TEAMS, TeamsTable.MOTTO)) { db.execSQL("ALTER TABLE " + TABLE_TEAMS + " ADD COLUMN " + TeamsTable.MOTTO + " TEXT DEFAULT '' "); } db.setTransactionSuccessful(); } finally { db.endTransaction(); } break; case 30: // Add match breakdown db.beginTransaction(); try { if (!columnExists(db, TABLE_MATCHES, MatchesTable.BREAKDOWN)) { db.execSQL("ALTER TABLE " + TABLE_MATCHES + " ADD COLUMN " + MatchesTable.BREAKDOWN + " TEXT DEFAULT '' "); } db.setTransactionSuccessful(); } finally { db.endTransaction(); } break; case 31: // Add last_modified columns String[] tables = {TABLE_AWARDS, TABLE_DISTRICTS, TABLE_DISTRICTTEAMS, TABLE_EVENTS, TABLE_EVENTTEAMS, TABLE_MATCHES, TABLE_MEDIAS, TABLE_TEAMS}; db.beginTransaction(); try { for (int i = 0; i < tables.length; i++) { if 
                            (!columnExists(db, tables[i], "last_modified")) {
                                db.execSQL(String.format(
                                        "ALTER TABLE %1$s ADD COLUMN last_modified TIMESTAMP",
                                        tables[i]));
                            }
                        }
                        db.setTransactionSuccessful();
                    } finally {
                        db.endTransaction();
                    }
                    break;
                case 32:
                    // Updates for apiv3 - just start over
                    db.beginTransaction();
                    String[] tables32 = {TABLE_EVENTS, TABLE_TEAMS, TABLE_DISTRICTTEAMS,
                            TABLE_EVENTTEAMS, TABLE_MATCHES};
                    try {
                        for (int i = 0; i < tables32.length; i++) {
                            db.execSQL("DROP TABLE IF EXISTS " + tables32[i]);
                        }
                        // Recreate Events, Teams. Create EventDetail
                        onCreate(db);
                        db.setTransactionSuccessful();
                    } finally {
                        db.endTransaction();
                    }
                    break;
                case 33:
                    // Add Event city column
                    db.beginTransaction();
                    try {
                        if (!columnExists(db, TABLE_EVENTS, EventsTable.CITY)) {
                            db.execSQL(String.format(
                                    "ALTER TABLE %1$s ADD COLUMN %2$s TEXT DEFAULT ''",
                                    TABLE_EVENTS, EventsTable.CITY));
                        }
                        db.setTransactionSuccessful();
                    } finally {
                        db.endTransaction();
                    }
                    break;
                case 34:
                    // Due to a bug, we weren't storing media properly. Wipe it now and start over
                    db.beginTransaction();
                    try {
                        db.execSQL("DROP TABLE IF EXISTS " + TABLE_MEDIAS);
                        db.execSQL(CREATE_MEDIAS);
                        db.setTransactionSuccessful();
                    } finally {
                        db.endTransaction();
                    }
                    break;
                case 35:
                    // Recreate the districts table to drop the enum column
                    db.beginTransaction();
                    try {
                        db.execSQL("DROP TABLE IF EXISTS " + TABLE_DISTRICTS);
                        db.execSQL(CREATE_DISTRICTS);
                        db.setTransactionSuccessful();
                    } finally {
                        db.endTransaction();
                    }
                    break;
                case 36:
                    // Add the b64_image column to Media table
                    db.beginTransaction();
                    try {
                        if (!columnExists(db, TABLE_MEDIAS, MediasTable.B64_IMAGE)) {
                            db.execSQL(String.format(
                                    "ALTER TABLE %1$s ADD COLUMN %2$s TEXT DEFAULT ''",
                                    TABLE_MEDIAS, MediasTable.B64_IMAGE));
                        }
                        db.setTransactionSuccessful();
                    } finally {
                        db.endTransaction();
                    }
                    break;
            }
            upgradeTo++;
        }
    }

    /** Column names of the teams FTS index. */
    public final class SearchTeam {
        public static final String KEY = "key", TITLES = "titles", NUMBER = "number";

        private SearchTeam() {
            // unused
        }
    }

    /** Column names of the events FTS index. */
    public final class SearchEvent {
        public
static final String KEY = "key", TITLES = "titles", YEAR = "year"; private SearchEvent() { // unused } } public Cursor getMatchesForTeamQuery(String query) { String selection = SearchTeam.TITLES + " MATCH ?"; String[] selectionArgs = new String[]{query}; SQLiteQueryBuilder builder = new SQLiteQueryBuilder(); builder.setTables(TABLE_SEARCH_TEAMS); builder.setDistinct(true); Cursor cursor = builder.query(mDb, new String[]{SearchTeam.KEY + " as _id", SearchTeam.TITLES, SearchTeam.NUMBER}, selection, selectionArgs, null, null, SearchTeam.NUMBER + " ASC"); if (cursor == null) { return null; } else if (!cursor.moveToFirst()) { cursor.close(); return null; } return cursor; } public Cursor getMatchesForEventQuery(String query) { String selection = SearchEvent.TITLES + " MATCH ?"; String[] selectionArgs = new String[]{query}; SQLiteQueryBuilder builder = new SQLiteQueryBuilder(); builder.setTables(TABLE_SEARCH_EVENTS); builder.setDistinct(true); Cursor cursor = builder.query(mDb, new String[]{SearchEvent.KEY + " as _id", SearchEvent.TITLES, SearchEvent.YEAR}, selection, selectionArgs, null, null, SearchEvent.YEAR + " DESC"); if (cursor == null) { return null; } else if (!cursor.moveToFirst()) { cursor.close(); return null; } return cursor; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.processors.query.h2.twostep;

import java.util.AbstractList;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.RandomAccess;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
import javax.cache.CacheException;
import org.apache.ignite.IgniteException;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.internal.GridKernalContext;
import org.apache.ignite.internal.processors.query.h2.twostep.messages.GridQueryNextPageResponse;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.h2.engine.Session;
import org.h2.index.BaseIndex;
import org.h2.index.Cursor;
import org.h2.index.IndexType;
import org.h2.message.DbException;
import org.h2.result.Row;
import org.h2.result.SearchRow;
import org.h2.table.Column;
import org.h2.table.IndexColumn;
import org.h2.value.Value;
import org.jetbrains.annotations.Nullable;

import static java.util.Objects.requireNonNull;
import static org.apache.ignite.IgniteSystemProperties.IGNITE_SQL_MERGE_TABLE_MAX_SIZE;
import static org.apache.ignite.IgniteSystemProperties.IGNITE_SQL_MERGE_TABLE_PREFETCH_SIZE;
import static org.apache.ignite.IgniteSystemProperties.getInteger;

/**
 * Merge index: an H2 index implementation that merges result pages streamed in
 * from multiple remote (map) nodes during a two-step distributed SQL query.
 */
public abstract class GridMergeIndex extends BaseIndex {
    /** Hard cap on rows kept in memory; exceeding it evicts the oldest block. */
    private static final int MAX_FETCH_SIZE = getInteger(IGNITE_SQL_MERGE_TABLE_MAX_SIZE, 10_000);

    /** Block size for the fetched-row list; must be a power of 2 (see static check below). */
    private static final int PREFETCH_SIZE = getInteger(IGNITE_SQL_MERGE_TABLE_PREFETCH_SIZE, 1024);

    /** CAS updater for lazily installing the {@link #lastPages} map without locking. */
    private static final AtomicReferenceFieldUpdater<GridMergeIndex, ConcurrentMap> lastPagesUpdater =
        AtomicReferenceFieldUpdater.newUpdater(GridMergeIndex.class, ConcurrentMap.class, "lastPages");

    static {
        // PREFETCH_SIZE is used for shift/mask indexing in BlockList, hence the pow2 requirement.
        if (!U.isPow2(PREFETCH_SIZE)) {
            throw new IllegalArgumentException(IGNITE_SQL_MERGE_TABLE_PREFETCH_SIZE + " (" + PREFETCH_SIZE +
                ") must be positive and a power of 2.");
        }

        if (PREFETCH_SIZE >= MAX_FETCH_SIZE) {
            throw new IllegalArgumentException(IGNITE_SQL_MERGE_TABLE_PREFETCH_SIZE + " (" + PREFETCH_SIZE +
                ") must be less than " + IGNITE_SQL_MERGE_TABLE_MAX_SIZE + " (" + MAX_FETCH_SIZE + ").");
        }
    }

    /** Comparator that never returns 0: on a tie the search row sorts BEFORE the list row,
     * yielding the first acceptable insertion point for a lower bound. */
    protected final Comparator<SearchRow> firstRowCmp = new Comparator<SearchRow>() {
        @Override public int compare(SearchRow rowInList, SearchRow searchRow) {
            int res = compareRows(rowInList, searchRow);

            return res == 0 ? 1 : res;
        }
    };

    /** Comparator that never returns 0: on a tie the search row sorts AFTER the list row,
     * yielding the last acceptable insertion point for an upper bound. */
    protected final Comparator<SearchRow> lastRowCmp = new Comparator<SearchRow>() {
        @Override public int compare(SearchRow rowInList, SearchRow searchRow) {
            int res = compareRows(rowInList, searchRow);

            return res == 0 ? -1 : res;
        }
    };

    /** Row source nodes. */
    private Set<UUID> sources;

    /** Rows per response page; set before the query starts streaming. */
    private int pageSize;

    /**
     * Will be r/w from query execution thread only, does not need to be threadsafe.
     */
    private final BlockList<Row> fetched;

    /** Max row of the most recently evicted block; non-null means results overflowed memory. */
    private Row lastEvictedRow;

    /** Running count of rows consumed from the stream (volatile for monitoring reads). */
    private volatile int fetchedCnt;

    /** Kernal context, used for discovery (liveness) checks and dummy pages. */
    private final GridKernalContext ctx;

    /** Per (node, segment) last-page numbers; installed lazily via lastPagesUpdater CAS. */
    private volatile ConcurrentMap<SourceKey, Integer> lastPages;

    /**
     * @param ctx  Context.
     * @param tbl  Table.
     * @param name Index name.
     * @param type Type.
     * @param cols Columns.
     */
    public GridMergeIndex(GridKernalContext ctx,
        GridMergeTable tbl,
        String name,
        IndexType type,
        IndexColumn[] cols
    ) {
        this(ctx);

        initBaseIndex(tbl, 0, name, cols, type);
    }

    /**
     * @param ctx Context.
     */
    protected GridMergeIndex(GridKernalContext ctx) {
        this.ctx = ctx;

        fetched = new BlockList<>(PREFETCH_SIZE);
    }

    /**
     * @return Return source nodes for this merge index.
     */
    public Set<UUID> sources() {
        return sources;
    }

    /**
     * Fails index if any source node is left.
     */
    private void checkSourceNodesAlive() {
        for (UUID nodeId : sources()) {
            if (!ctx.discovery().alive(nodeId)) {
                fail(nodeId, null);

                return;
            }
        }
    }

    /**
     * @param nodeId Node ID.
     * @return {@code true} If this index needs data from the given source node.
     */
    public boolean hasSource(UUID nodeId) {
        return sources.contains(nodeId);
    }

    /** {@inheritDoc} */
    @Override public long getRowCount(Session ses) {
        // Exact count requires draining a full cursor — O(n).
        Cursor c = find(ses, null, null);

        long cnt = 0;

        while (c.next())
            cnt++;

        return cnt;
    }

    /** {@inheritDoc} */
    @Override public long getRowCountApproximation() {
        return 10_000;
    }

    /**
     * Set source nodes.
     *
     * @param nodes Nodes.
     * @param segmentsCnt Index segments per table.
     */
    public void setSources(Collection<ClusterNode> nodes, int segmentsCnt) {
        assert sources == null;

        // NOTE(review): segmentsCnt is accepted but unused in this method body —
        // presumably consumed by subclasses; confirm before removing.
        sources = new HashSet<>();

        for (ClusterNode node : nodes) {
            if (!sources.add(node.id()))
                throw new IllegalStateException();
        }
    }

    /**
     * @param pageSize Page size.
     */
    public void setPageSize(int pageSize) {
        this.pageSize = pageSize;
    }

    /**
     * @param queue Queue to poll.
     * @return Next page.
     */
    private GridResultPage takeNextPage(Pollable<GridResultPage> queue) {
        GridResultPage page;

        for (;;) {
            try {
                // Bounded poll so we can periodically re-check that source nodes are alive.
                page = queue.poll(500, TimeUnit.MILLISECONDS);
            }
            catch (InterruptedException e) {
                throw new CacheException("Query execution was interrupted.", e);
            }

            if (page != null)
                break;

            // Timed out: if a source node died we inject a failure page and unblock ourselves.
            checkSourceNodesAlive();
        }

        return page;
    }

    /**
     * @param queue Queue to poll.
     * @param iter Current iterator.
     * @return The same or new iterator.
     */
    protected final Iterator<Value[]> pollNextIterator(Pollable<GridResultPage> queue, Iterator<Value[]> iter) {
        if (!iter.hasNext()) {
            GridResultPage page = takeNextPage(queue);

            if (!page.isLast())
                page.fetchNextPage(); // Failed will throw an exception here.

            iter = page.rows();

            // The received iterator must be empty in the dummy last page or on failure.
            assert iter.hasNext() || page.isDummyLast() || page.isFail();
        }

        return iter;
    }

    /**
     * Fails the whole index: every source gets a failure page.
     *
     * @param e Error.
     */
    public void fail(final CacheException e) {
        for (UUID nodeId : sources)
            fail(nodeId, e);
    }

    /**
     * @param nodeId Node ID.
     * @param e Exception.
     */
    public void fail(UUID nodeId, final CacheException e) {
        if (nodeId == null)
            nodeId = F.first(sources);

        // A synthetic page whose fetchNextPage() rethrows the failure to the consumer thread.
        addPage0(new GridResultPage(null, nodeId, null) {
            @Override public boolean isFail() {
                return true;
            }

            @Override public void fetchNextPage() {
                if (e == null)
                    super.fetchNextPage();
                else
                    throw e;
            }
        });
    }

    /**
     * @param nodeId Node ID.
     * @param res Response.
     */
    private void initLastPages(UUID nodeId, GridQueryNextPageResponse res) {
        int allRows = res.allRows();

        // If the old protocol we send all rows number in the page 0, other pages have -1.
        // In the new protocol we do not know it and always have -1, except terminating page,
        // which has -2. Thus we have to init page counters only when we receive positive value
        // in the first page.
        if (allRows < 0 || res.page() != 0)
            return;

        // Lazily install the map with a lock-free CAS; on a lost race reread the winner's map.
        ConcurrentMap<SourceKey,Integer> lp = lastPages;

        if (lp == null && !lastPagesUpdater.compareAndSet(this, null, lp = new ConcurrentHashMap<>()))
            lp = lastPages;

        assert pageSize > 0: pageSize;

        int lastPage = allRows == 0 ? 0 : (allRows - 1) / pageSize;

        assert lastPage >= 0: lastPage;

        if (lp.put(new SourceKey(nodeId, res.segmentId()), lastPage) != null)
            throw new IllegalStateException();
    }

    /**
     * Marks the page as last if either protocol indicates so (explicit -2 marker, or the
     * page number matches the precomputed last-page index for its source/segment).
     *
     * @param page Page.
     */
    private void markLastPage(GridResultPage page) {
        GridQueryNextPageResponse res = page.response();

        if (res.allRows() != -2) { // -2 means the last page.
            UUID nodeId = page.source();

            initLastPages(nodeId, res);

            ConcurrentMap<SourceKey,Integer> lp = lastPages;

            if (lp == null)
                return; // It was not initialized --> wait for -2.

            Integer lastPage = lp.get(new SourceKey(nodeId, res.segmentId()));

            if (lastPage == null)
                return; // This node may use the new protocol --> wait for -2.

            if (lastPage != res.page()) {
                assert lastPage > res.page();

                return; // This is not the last page.
            }
        }

        page.setLast(true);
    }

    /**
     * @param page Page.
     */
    public final void addPage(GridResultPage page) {
        markLastPage(page);

        addPage0(page);
    }

    /**
     * @param lastPage Real last page.
     * @return Created dummy page.
     */
    protected final GridResultPage createDummyLastPage(GridResultPage lastPage) {
        assert !lastPage.isDummyLast(); // It must be a real last page.

        return new GridResultPage(ctx, lastPage.source(), null).setLast(true);
    }

    /**
     * @param page Page.
     */
    protected abstract void addPage0(GridResultPage page);

    /** {@inheritDoc} */
    @Override public final Cursor find(Session ses, SearchRow first, SearchRow last) {
        checkBounds(lastEvictedRow, first, last);

        // Fast path once everything is in memory; otherwise stream-and-fetch.
        if (fetchedAll())
            return findAllFetched(fetched, first, last);

        return findInStream(first, last);
    }

    /**
     * @return {@code true} If we have fetched all the remote rows into a fetched list.
     */
    public abstract boolean fetchedAll();

    /**
     * @param lastEvictedRow Last evicted fetched row.
     * @param first Lower bound.
     * @param last Upper bound.
     */
    protected void checkBounds(Row lastEvictedRow, SearchRow first, SearchRow last) {
        // Any eviction means rows were dropped, so no query can be answered correctly.
        if (lastEvictedRow != null)
            throw new IgniteException("Fetched result set was too large.");
    }

    /**
     * @param first Lower bound.
     * @param last Upper bound.
     * @return Cursor. Usually it must be {@link FetchingCursor} instance.
     */
    protected abstract Cursor findInStream(@Nullable SearchRow first, @Nullable SearchRow last);

    /**
     * @param fetched Fetched rows.
     * @param first Lower bound.
     * @param last Upper bound.
     * @return Cursor.
     */
    protected abstract Cursor findAllFetched(List<Row> fetched, @Nullable SearchRow first, @Nullable SearchRow last);

    /** {@inheritDoc} */
    @Override public void checkRename() {
        throw DbException.getUnsupportedException("rename");
    }

    /** {@inheritDoc} */
    @Override public void close(Session ses) {
        // No-op.
    }

    /** {@inheritDoc} */
    @Override public void add(Session ses, Row row) {
        throw DbException.getUnsupportedException("add");
    }

    /** {@inheritDoc} */
    @Override public void remove(Session ses, Row row) {
        throw DbException.getUnsupportedException("remove row");
    }

    /** {@inheritDoc} */
    @Override public void remove(Session ses) {
        throw DbException.getUnsupportedException("remove index");
    }

    /** {@inheritDoc} */
    @Override public void truncate(Session ses) {
        throw DbException.getUnsupportedException("truncate");
    }

    /** {@inheritDoc} */
    @Override public boolean canGetFirstOrLast() {
        return false;
    }

    /** {@inheritDoc} */
    @Override public Cursor findFirstOrLast(Session ses, boolean first) {
        throw DbException.getUnsupportedException("findFirstOrLast");
    }

    /** {@inheritDoc} */
    @Override public boolean needRebuild() {
        return false;
    }

    /** {@inheritDoc} */
    @Override public long getDiskSpaceUsed() {
        return 0;
    }

    /**
     * @param rows Sorted rows list.
     * @param searchRow Search row.
     * @param cmp Comparator.
     * @param checkLast If we need to optimistically check the last row right away.
     * @return Insertion point for the search row.
     */
    protected static int binarySearchRow(
        List<Row> rows,
        SearchRow searchRow,
        Comparator<SearchRow> cmp,
        boolean checkLast
    ) {
        assert !rows.isEmpty();

        // Optimistically compare with the last row as a first step.
        if (checkLast) {
            int res = cmp.compare(last(rows), searchRow);

            assert res != 0; // Comparators must never return 0 here.

            if (res < 0)
                return rows.size(); // The search row is greater than the last row.
        }

        int res = Collections.binarySearch(rows, searchRow, cmp);

        // Because cmp never returns 0, binarySearch always reports "not found":
        // a negative encoding of the insertion point.
        assert res < 0: res; // Comparator must never return 0.

        return -res - 1;
    }

    /**
     * @param evictedBlock Evicted block.
     */
    private void onBlockEvict(List<Row> evictedBlock) {
        assert evictedBlock.size() == PREFETCH_SIZE;

        // Remember the last row (it will be max row) from the evicted block.
        lastEvictedRow = requireNonNull(last(evictedBlock));
    }

    /**
     * @param l List.
     * @return Last element.
     */
    private static <Z> Z last(List<Z> l) {
        return l.get(l.size() - 1);
    }

    /**
     * Fetching cursor: lazily pulls rows from the remote-row stream into {@code fetched}
     * while honoring optional lower/upper bounds via binary search.
     */
    protected class FetchingCursor implements Cursor {
        /** Stream of all rows arriving from remote nodes. */
        Iterator<Row> stream;

        /** Rows currently visible to the cursor (all fetched rows, later just the last block). */
        List<Row> rows;

        /** Current position within {@link #rows}. */
        int cur;

        /** Remaining lower bound; nulled once located. */
        SearchRow first;

        /** Upper bound; kept until its index is resolved into {@link #lastFound}. */
        SearchRow last;

        /** Index of the upper bound; MAX_VALUE means "not found yet / unbounded". */
        int lastFound = Integer.MAX_VALUE;

        /**
         * @param first Lower bound.
         * @param last Upper bound.
         * @param stream Stream of all the rows from remote nodes.
         */
        public FetchingCursor(SearchRow first, SearchRow last, Iterator<Row> stream) {
            assert stream != null;

            // Initially we will use all the fetched rows, after we will switch to the last block.
            rows = fetched;

            this.stream = stream;
            this.first = first;
            this.last = last;

            if (haveBounds() && !rows.isEmpty())
                cur = findBounds();

            cur--; // Set current position before the first row.
        }

        /**
         * @return {@code true} If we have bounds.
         */
        private boolean haveBounds() {
            return first != null || last != null;
        }

        /**
         * @return Lower bound.
         */
        private int findBounds() {
            assert !rows.isEmpty(): "rows";

            int firstFound = cur;

            // Find the lower bound.
            if (first != null) {
                firstFound = binarySearchRow(rows, first, firstRowCmp, true);

                assert firstFound >= cur && firstFound <= rows.size(): "firstFound";

                if (firstFound == rows.size())
                    return firstFound; // The lower bound is greater than all the rows we have.

                first = null; // We have found the lower bound, do not need it anymore.
            }

            // Find the upper bound.
            if (last != null) {
                assert lastFound == Integer.MAX_VALUE: "lastFound";

                int lastFound0 = binarySearchRow(rows, last, lastRowCmp, true);

                // If the upper bound is too large we will ignore it.
                if (lastFound0 != rows.size())
                    lastFound = lastFound0;
            }

            return firstFound;
        }

        /**
         * Fetch rows from the stream.
         */
        private void fetchRows() {
            for (;;) {
                // Take the current last block and set the position after last.
                rows = fetched.lastBlock();
                cur = rows.size();

                // Fetch stream.
                while (stream.hasNext()) {
                    fetched.add(requireNonNull(stream.next()));

                    // Evict block if we've fetched too many rows.
                    if (fetched.size() == MAX_FETCH_SIZE) {
                        onBlockEvict(fetched.evictFirstBlock());

                        assert fetched.size() < MAX_FETCH_SIZE;
                    }

                    // No bounds -> no need to do binary search, can return the fetched row right away.
                    if (!haveBounds())
                        break;

                    // When the last block changed, it means that we've filled the current last block.
                    // We have fetched the needed number of rows for binary search.
                    if (fetched.lastBlock() != rows) {
                        assert fetched.lastBlock().isEmpty(); // The last row must be added to the previous block.

                        break;
                    }
                }

                if (cur == rows.size())
                    cur = Integer.MAX_VALUE; // We were not able to fetch anything. Done.
                else {
                    // Update fetched count.
                    fetchedCnt += rows.size() - cur;

                    if (haveBounds()) {
                        cur = findBounds();

                        if (cur == rows.size())
                            continue; // The lower bound is too large, continue fetching rows.
                    }
                }

                return;
            }
        }

        /** {@inheritDoc} */
        @Override public boolean next() {
            if (++cur == rows.size())
                fetchRows();

            return cur < lastFound;
        }

        /** {@inheritDoc} */
        @Override public Row get() {
            return rows.get(cur);
        }

        /** {@inheritDoc} */
        @Override public SearchRow getSearchRow() {
            return get();
        }

        /** {@inheritDoc} */
        @Override public boolean previous() {
            // Should never be called.
            throw DbException.getUnsupportedException("previous");
        }
    }

    /** */
    enum State {
        UNINITIALIZED, INITIALIZED, FINISHED
    }

    /**
     * List of fixed-size blocks with O(1) random access (power-of-2 block size enables
     * shift/mask addressing) and O(1) eviction of the oldest block from the head.
     */
    private static final class BlockList<Z> extends AbstractList<Z> implements RandomAccess {
        /** */
        private final List<List<Z>> blocks;

        /** */
        private int size;

        /** */
        private final int maxBlockSize;

        /** Number of bits to shift an index right to get its block number. */
        private final int shift;

        /** Mask extracting the offset within a block. */
        private final int mask;

        /**
         * @param maxBlockSize Max block size.
         */
        private BlockList(int maxBlockSize) {
            assert U.isPow2(maxBlockSize);

            this.maxBlockSize = maxBlockSize;

            shift = Integer.numberOfTrailingZeros(maxBlockSize);
            mask = maxBlockSize - 1;

            blocks = new ArrayList<>();
            blocks.add(new ArrayList<Z>());
        }

        /** {@inheritDoc} */
        @Override public int size() {
            return size;
        }

        /** {@inheritDoc} */
        @Override public boolean add(Z z) {
            size++;

            List<Z> lastBlock = lastBlock();

            lastBlock.add(z);

            // Keep an empty tail block ready once the current one fills up.
            if (lastBlock.size() == maxBlockSize)
                blocks.add(new ArrayList<Z>());

            return true;
        }

        /** {@inheritDoc} */
        @Override public Z get(int idx) {
            return blocks.get(idx >>> shift).get(idx & mask);
        }

        /**
         * @return Last block.
         */
        private List<Z> lastBlock() {
            return last(blocks);
        }

        /**
         * @return Evicted block.
         */
        private List<Z> evictFirstBlock() {
            // Remove head block.
            List<Z> res = blocks.remove(0);

            size -= res.size();

            return res;
        }
    }

    /**
     * Pollable.
     */
    protected static interface Pollable<E> {
        /**
         * @param timeout Timeout.
         * @param unit Time unit.
         * @return Polled value or {@code null} if none.
         * @throws InterruptedException If interrupted.
*/ E poll(long timeout, TimeUnit unit) throws InterruptedException; } /** */ private static class SourceKey { final UUID nodeId; /** */ final int segment; /** * @param nodeId Node ID. * @param segment Segment. */ SourceKey(UUID nodeId, int segment) { this.nodeId = nodeId; this.segment = segment; } /** {@inheritDoc} */ @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; SourceKey sourceKey = (SourceKey)o; if (segment != sourceKey.segment) return false; return nodeId.equals(sourceKey.nodeId); } /** {@inheritDoc} */ @Override public int hashCode() { int result = nodeId.hashCode(); result = 31 * result + segment; return result; } } }
/*
 *  Licensed to the Apache Software Foundation (ASF) under one or more
 *  contributor license agreements.  See the NOTICE file distributed with
 *  this work for additional information regarding copyright ownership.
 *  The ASF licenses this file to You under the Apache License, Version 2.0
 *  (the "License"); you may not use this file except in compliance with
 *  the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 *
 */
package org.apache.commons.compress.archivers.zip;

import org.apache.commons.compress.parallel.ScatterGatherBackingStore;

import java.io.Closeable;
import java.io.DataOutput;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.zip.CRC32;
import java.util.zip.Deflater;
import java.util.zip.ZipEntry;

/**
 * Encapsulates a {@link Deflater} and crc calculator, handling multiple types of output streams.
 * Currently {@link java.util.zip.ZipEntry#DEFLATED} and {@link java.util.zip.ZipEntry#STORED} are the only
 * supported compression methods.
 *
 * @since 1.10
 */
public abstract class StreamCompressor implements Closeable {

    /*
     * Apparently Deflater.setInput gets slowed down a lot on Sun JVMs
     * when it gets handed a really big buffer. See
     * https://issues.apache.org/bugzilla/show_bug.cgi?id=45396
     *
     * Using a buffer size of 8 kB proved to be a good compromise
     */
    private static final int DEFLATER_BLOCK_SIZE = 8192;

    /** The deflater used for DEFLATED entries; expected to be in no-wrap (raw) mode for ZIP. */
    private final Deflater def;

    /** CRC of the uncompressed payload of the current entry. */
    private final CRC32 crc = new CRC32();

    /** Bytes emitted to the sink for the current entry (compressed size for DEFLATED). */
    private long writtenToOutputStreamForLastEntry = 0;

    /** Bytes consumed from the source for the current entry (uncompressed size). */
    private long sourcePayloadLength = 0;

    /** Bytes emitted to the sink across all entries; never reset. */
    private long totalWrittenToOutputStream = 0;

    private static final int bufferSize = 4096;
    private final byte[] outputBuffer = new byte[bufferSize];
    private final byte[] readerBuf = new byte[bufferSize];

    StreamCompressor(final Deflater deflater) {
        this.def = deflater;
    }

    /**
     * Create a stream compressor with the given deflater.
     *
     * @param os       The stream to receive output
     * @param deflater The deflater to use
     * @return A stream compressor
     */
    static StreamCompressor create(final OutputStream os, final Deflater deflater) {
        return new OutputStreamCompressor(deflater, os);
    }

    /**
     * Create a stream compressor with the default compression level.
     *
     * @param os The stream to receive output
     * @return A stream compressor
     */
    static StreamCompressor create(final OutputStream os) {
        // true = no zlib header/trailer, as required by the ZIP format.
        return create(os, new Deflater(Deflater.DEFAULT_COMPRESSION, true));
    }

    /**
     * Create a stream compressor with the given deflater.
     *
     * @param os       The DataOutput to receive output
     * @param deflater The deflater to use for the compressor
     * @return A stream compressor
     */
    static StreamCompressor create(final DataOutput os, final Deflater deflater) {
        return new DataOutputCompressor(deflater, os);
    }

    /**
     * Create a stream compressor with the given compression level.
     *
     * @param compressionLevel The {@link Deflater} compression level
     * @param bs               The ScatterGatherBackingStore to receive output
     * @return A stream compressor
     */
    public static StreamCompressor create(final int compressionLevel, final ScatterGatherBackingStore bs) {
        final Deflater deflater = new Deflater(compressionLevel, true);
        return new ScatterGatherBackingStoreCompressor(deflater, bs);
    }

    /**
     * Create a stream compressor with the default compression level.
     *
     * @param bs The ScatterGatherBackingStore to receive output
     * @return A stream compressor
     */
    public static StreamCompressor create(final ScatterGatherBackingStore bs) {
        return create(Deflater.DEFAULT_COMPRESSION, bs);
    }

    /**
     * The crc32 of the last deflated file
     *
     * @return the crc32
     */
    public long getCrc32() {
        return crc.getValue();
    }

    /**
     * Return the number of bytes read from the source stream
     *
     * @return The number of bytes read, never negative
     */
    public long getBytesRead() {
        return sourcePayloadLength;
    }

    /**
     * The number of bytes written to the output for the last entry
     *
     * @return The number of bytes, never negative
     */
    public long getBytesWrittenForLastEntry() {
        return writtenToOutputStreamForLastEntry;
    }

    /**
     * The total number of bytes written to the output for all files
     *
     * @return The number of bytes, never negative
     */
    public long getTotalBytesWritten() {
        return totalWrittenToOutputStream;
    }

    /**
     * Deflate the given source using the supplied compression method
     *
     * @param source The source to compress
     * @param method The #ZipArchiveEntry compression method
     * @throws IOException When failures happen
     */
    public void deflate(final InputStream source, final int method) throws IOException {
        // Start a fresh entry: clears CRC, deflater state and per-entry counters.
        reset();
        int length;

        while ((length = source.read(readerBuf, 0, readerBuf.length)) >= 0) {
            write(readerBuf, 0, length, method);
        }
        if (method == ZipEntry.DEFLATED) {
            // Drain whatever the deflater is still holding back.
            flushDeflater();
        }
    }

    /**
     * Writes bytes to ZIP entry.
     *
     * @param b      the byte array to write
     * @param offset the start position to write from
     * @param length the number of bytes to write
     * @param method the compression method to use
     * @return the number of bytes written to the stream this time
     * @throws IOException on error
     */
    long write(final byte[] b, final int offset, final int length, final int method) throws IOException {
        final long current = writtenToOutputStreamForLastEntry;
        // CRC is always computed over the *uncompressed* payload.
        crc.update(b, offset, length);
        if (method == ZipEntry.DEFLATED) {
            writeDeflated(b, offset, length);
        } else {
            // STORED: pass through unchanged, counting bytes.
            writeCounted(b, offset, length);
        }
        sourcePayloadLength += length;
        return writtenToOutputStreamForLastEntry - current;
    }

    /** Resets per-entry state (CRC, deflater, counters) before a new entry. */
    void reset() {
        crc.reset();
        def.reset();
        sourcePayloadLength = 0;
        writtenToOutputStreamForLastEntry = 0;
    }

    @Override
    public void close() throws IOException {
        // Releases the native deflater resources; does NOT close the underlying sink.
        def.end();
    }

    /** Finishes the deflater for the current entry and writes all pending output. */
    void flushDeflater() throws IOException {
        def.finish();
        while (!def.finished()) {
            deflate();
        }
    }

    private void writeDeflated(final byte[] b, final int offset, final int length) throws IOException {
        if (length > 0 && !def.finished()) {
            if (length <= DEFLATER_BLOCK_SIZE) {
                def.setInput(b, offset, length);
                deflateUntilInputIsNeeded();
            } else {
                // Feed the deflater in DEFLATER_BLOCK_SIZE slices; see the
                // performance note on DEFLATER_BLOCK_SIZE above.
                final int fullblocks = length / DEFLATER_BLOCK_SIZE;
                for (int i = 0; i < fullblocks; i++) {
                    def.setInput(b, offset + i * DEFLATER_BLOCK_SIZE,
                            DEFLATER_BLOCK_SIZE);
                    deflateUntilInputIsNeeded();
                }
                final int done = fullblocks * DEFLATER_BLOCK_SIZE;
                if (done < length) {
                    def.setInput(b, offset + done, length - done);
                    deflateUntilInputIsNeeded();
                }
            }
        }
    }

    private void deflateUntilInputIsNeeded() throws IOException {
        while (!def.needsInput()) {
            deflate();
        }
    }

    /** Pulls one buffer of compressed data from the deflater and writes it out. */
    void deflate() throws IOException {
        final int len = def.deflate(outputBuffer, 0, outputBuffer.length);
        if (len > 0) {
            writeCounted(outputBuffer, 0, len);
        }
    }

    public void writeCounted(final byte[] data) throws IOException {
        writeCounted(data, 0, data.length);
    }

    /** Writes raw bytes to the sink and updates the per-entry and total counters. */
    public void writeCounted(final byte[] data, final int offset, final int length) throws IOException {
        writeOut(data, offset, length);
        writtenToOutputStreamForLastEntry += length;
        totalWrittenToOutputStream += length;
    }

    /** Sink-specific raw write; implemented by the concrete subclasses below. */
    protected abstract void writeOut(byte[] data, int offset, int length) throws IOException;

    /** Writes compressed output into a ScatterGatherBackingStore. */
    private static final class ScatterGatherBackingStoreCompressor extends StreamCompressor {
        private final ScatterGatherBackingStore bs;

        public ScatterGatherBackingStoreCompressor(final Deflater deflater, final ScatterGatherBackingStore bs) {
            super(deflater);
            this.bs = bs;
        }

        @Override
        protected final void writeOut(final byte[] data, final int offset, final int length)
                throws IOException {
            bs.writeOut(data, offset, length);
        }
    }

    /** Writes compressed output into a plain OutputStream. */
    private static final class OutputStreamCompressor extends StreamCompressor {
        private final OutputStream os;

        public OutputStreamCompressor(final Deflater deflater, final OutputStream os) {
            super(deflater);
            this.os = os;
        }

        @Override
        protected final void writeOut(final byte[] data, final int offset, final int length)
                throws IOException {
            os.write(data, offset, length);
        }
    }

    /** Writes compressed output into a DataOutput (e.g. a RandomAccessFile). */
    private static final class DataOutputCompressor extends StreamCompressor {
        private final DataOutput raf;

        public DataOutputCompressor(final Deflater deflater, final DataOutput raf) {
            super(deflater);
            this.raf = raf;
        }

        @Override
        protected final void writeOut(final byte[] data, final int offset, final int length)
                throws IOException {
            raf.write(data, offset, length);
        }
    }
}
/* * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. 
*/

/*
  test
  @bug 6243382 8006070
  @summary Dragging of mouse outside of a List and Choice area don't work properly on XAWT
  @author Dmitry.Cherepanov@SUN.COM area=awt.list
  @run applet/manual=yesno MouseDraggedOutCauseScrollingTest.html
*/

import java.applet.Applet;
import java.awt.*;

/**
 * Manual test: verifies drag-outside-the-widget scrolling behavior for Choice,
 * single-selection List and multi-selection List on the XAWT toolkit. On any
 * other toolkit it only shows a "not applicable" dialog.
 */
public class MouseDraggedOutCauseScrollingTest extends Applet {

    Choice choice;
    List singleList;
    List multipleList;

    public void init() {
        this.setLayout (new GridLayout (1, 3));

        // One column each: a Choice, a single-select List and a multi-select List,
        // each filled with ~100 items so they are guaranteed to scroll.
        choice = new Choice();
        singleList = new List(3, false);
        multipleList = new List(3, true);

        choice.add("Choice");
        for (int i = 1; i < 100; i++){
            choice.add(""+i);
        }

        singleList.add("Single list");
        for (int i = 1; i < 100; i++)
            singleList.add(""+i);

        multipleList.add("Multiple list");
        for (int i = 1; i < 100; i++)
            multipleList.add(""+i);

        this.add(choice);
        this.add(singleList);
        this.add(multipleList);

        // The tested behavior is XAWT-specific; on other toolkits instruct the
        // operator to simply pass the test.
        String toolkitName = Toolkit.getDefaultToolkit().getClass().getName();
        if (!toolkitName.equals("sun.awt.X11.XToolkit")) {
            String[] instructions =
            {
                "This test is not applicable to the current platform. Press PASS"
            };
            Sysout.createDialogWithInstructions( instructions );
        } else {
            // NOTE: the instruction strings below are shown to the operator at
            // runtime and are reproduced verbatim (including their grammar).
            String[] instructions =
            {
                "0) Please note, that this is only Motif/XAWT test. At first, make the applet active",
                "1.1) Click on the choice",
                "1.2) Press the left button of the mouse and keep on any item of the choice, for example 5",
                "1.3) Drag mouse out of the area of the unfurled list, at the same time hold the X coordinate of the mouse position about the same",
                "1.4) To make sure, that when the Y coordinate of the mouse position higher of the upper bound of the list then scrolling UP of the list and selected item changes on the upper. If not, the test failed",
                "1.5) To make sure, that when the Y coordinate of the mouse position under of the lower bound of the list then scrolling DOWN of the list and selected item changes on the lower. If not, the test failed",
                "-----------------------------------",
                "2.1) Click on the single list",
                "2.2) Press the left button of the mouse and keep on any item of the list, for example 5",
                "2.3) Drag mouse out of the area of the unfurled list, at the same time hold the X coordinate of the mouse position about the same",
                "2.4) To make sure, that when the Y coordinate of the mouse position higher of the upper bound of the list then scrolling UP of the list and selected item changes on the upper. If not, the test failed",
                "2.5) To make sure, that when the Y coordinate of the mouse position under of the lower bound of the list then scrolling DOWN of the list and selected item changes on the lower. If not, the test failed",
                "-----------------------------------",
                "3.1) Click on the multiple list",
                "3.2) Press the left button of the mouse and keep on any item of the list, for example 5",
                "3.3) Drag mouse out of the area of the unfurled list, at the same time hold the X coordinate of the mouse position about the same",
                "3.4) To make sure, that when the Y coordinate of the mouse position higher of the upper bound of the list then scrolling of the list NO OCCURED and selected item NO CHANGES on the upper. If not, the test failed",
                "3.5) To make sure, that when the Y coordinate of the mouse position under of the lower bound of the list then scrolling of the list NO OCCURED and selected item NO CHANGES on the lower. If not, the test failed",
                "4) Test passed."
            };
            Sysout.createDialogWithInstructions( instructions );
        }

    }//End  init()

    public void start ()
    {
        setSize (400,100);
        setVisible(true);
        validate();
    }// start()

}// class ManualYesNoTest

/****************************************************
 Standard Test Machinery
 DO NOT modify anything below -- it's a standard
  chunk of code whose purpose is to make user
  interaction uniform, and thereby make it simpler
  to read and understand someone else's test.
****************************************************/ /** This is part of the standard test machinery. It creates a dialog (with the instructions), and is the interface for sending text messages to the user. To print the instructions, send an array of strings to Sysout.createDialog WithInstructions method. Put one line of instructions per array entry. To display a message for the tester to see, simply call Sysout.println with the string to be displayed. This mimics System.out.println but works within the test harness as well as standalone. */ class Sysout { private static TestDialog dialog; public static void createDialogWithInstructions( String[] instructions ) { dialog = new TestDialog( new Frame(), "Instructions" ); dialog.printInstructions( instructions ); dialog.setVisible(true); println( "Any messages for the tester will display here." ); } public static void createDialog( ) { dialog = new TestDialog( new Frame(), "Instructions" ); String[] defInstr = { "Instructions will appear here. ", "" } ; dialog.printInstructions( defInstr ); dialog.setVisible(true); println( "Any messages for the tester will display here." ); } public static void printInstructions( String[] instructions ) { dialog.printInstructions( instructions ); } public static void println( String messageIn ) { dialog.displayMessage( messageIn ); } }// Sysout class /** This is part of the standard test machinery. It provides a place for the test instructions to be displayed, and a place for interactive messages to the user to be displayed. To have the test instructions displayed, see Sysout. To have a message to the user be displayed, see Sysout. Do not call anything in this dialog directly. 
  */

class TestDialog extends Dialog {

    TextArea instructionsText;
    TextArea messageText;
    // Maximum characters per displayed line; long instructions are word-wrapped
    // to this width in printInstructions().
    int maxStringLength = 80;

    //DO NOT call this directly, go through Sysout
    public TestDialog( Frame frame, String name )
    {
        super( frame, name );
        int scrollBoth = TextArea.SCROLLBARS_BOTH;
        instructionsText = new TextArea( "", 15, maxStringLength, scrollBoth );
        add( "North", instructionsText );

        messageText = new TextArea( "", 5, maxStringLength, scrollBoth );
        add("Center", messageText);

        pack();

        setVisible(true);
    }// TestDialog()

    //DO NOT call this directly, go through Sysout
    public void printInstructions( String[] instructions )
    {
        //Clear out any current instructions
        instructionsText.setText( "" );

        //Go down array of instruction strings
        String printStr, remainingStr;
        for( int i=0; i < instructions.length; i++ )
        {
            //chop up each into pieces maxStringLength long
            remainingStr = instructions[ i ];
            while( remainingStr.length() > 0 )
            {
                //if longer than max then chop off first max chars to print
                if( remainingStr.length() >= maxStringLength )
                {
                    //Try to chop on a word boundary
                    int posOfSpace = remainingStr.
                        lastIndexOf( ' ', maxStringLength - 1 );

                    // No space found in range -> hard-wrap at the width limit.
                    if( posOfSpace <= 0 ) posOfSpace = maxStringLength - 1;

                    printStr = remainingStr.substring( 0, posOfSpace + 1 );
                    remainingStr = remainingStr.substring( posOfSpace + 1 );
                }
                //else just print
                else
                {
                    printStr = remainingStr;
                    remainingStr = "";
                }

                instructionsText.append( printStr + "\n" );

            }// while

        }// for

    }//printInstructions()

    //DO NOT call this directly, go through Sysout
    public void displayMessage( String messageIn )
    {
        // Echo to stdout as well so the message survives in the harness log.
        messageText.append( messageIn + "\n" );
        System.out.println(messageIn);
    }

}// TestDialog  class
// $Id: Jpdbi.java 14470 2010-11-30 22:19:21Z kianusch $

package com.agfa.db.tools;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.sql.Blob;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;

import org.dcm4che.data.Dataset;
import org.dcm4che.data.DcmEncodeParam;
import org.dcm4che.data.DcmObjectFactory;
import org.dcm4che.dict.Tags;

/**
 * Command-line PACS database inspection/update tool: builds a SELECT from the
 * parsed command line ({@code Config}), displays matching rows per level
 * (patient/study/series/instance/files) and optionally updates DB columns
 * and/or the embedded DICOM attribute blobs.
 */
public class Jpdbi {
    public final static String VERSION = "2.1.1";
    public final static String ID = "$Id: Jpdbi.java 14470 2010-11-30 22:19:21Z kianusch $";
    public final static String REVISION = "$Revision: 14470 $";

    // Table names and their DICOM-attribute blob columns, indexed by level
    // (PATIENT..FILESYSTEM below). Attrs[PATH] is null: FILES has no blob.
    //
    final static String[] Tables = { "PATIENT", "STUDY", "SERIES", "INSTANCE", "FILES", "FILESYSTEM" };
    final static String[] Attrs = { "PAT_ATTRS", "STUDY_ATTRS", "SERIES_ATTRS", "INST_ATTRS", null };

    //
    final static int PATIENT = 0;
    final static int STUDY = 1;
    final static int SERIE = 2;
    final static int INSTANCE = 3;
    final static int PATH = 4;
    final static int FILESYSTEM = 5;

    // Indexes into the SQL-portion array returned by Config.getSqlPortions().
    //
    final static int QUERY_SELECT = 0;
    final static int QUERY_FROM = 1;
    final static int QUERY_JOIN = 2;
    final static int QUERY_LINKS = 3;
    final static int QUERY_WHERE = 4;
    final static int QUERY_GROUP = 5;
    final static int QUERY_ORDER = 6;

    // DB Types
    public static final int DBTYPE_UNKNOWN = 0;
    public static final int DBTYPE_ORACLE = 1;
    public static final int DBTYPE_MYSQL = 2;

    /**
     * Flushes and closes both standard streams, then terminates the JVM.
     *
     * @param ExitCode process exit code.
     */
    //
    static void exit(int ExitCode) {
        System.out.flush();
        System.out.close();
        System.err.flush();
        System.err.close();
        System.exit(ExitCode);
    }

    /**
     * Prints an error message to stderr and terminates the JVM.
     *
     * @param ExitCode process exit code.
     * @param ErrorText message printed before exiting.
     */
    static void exit(int ExitCode, String ErrorText) {
        System.err.println(ErrorText);
        exit(ExitCode);
    }

    /**
     * @return number of occurrences of {@code c} in {@code s}.
     */
    private static int CountCharInString(char c, String s) {
        int cnt = 0;

        for (int pos = 0; pos < s.length(); pos++)
            if (s.charAt(pos) == c)
                cnt++;

        return cnt;
    }

    /**
     * Recomputes STUDY.MODS_IN_STUDY for one study as the backslash-separated
     * set of distinct SERIES.MODALITY values. In debug mode the UPDATE is only
     * printed, not executed.
     *
     * NOTE(review): SQL is built by string concatenation (pk is a long here, so
     * injection risk is limited to MODALITY values flowing back into the UPDATE).
     */
    public static void UpdateStudyModality(Connection connection, long pk, Config cfg) {
        String sql = "select distinct MODALITY from SERIES where STUDY_FK=" + pk;

        try {
            Statement stmt = connection.createStatement();
            ResultSet rs = stmt.executeQuery(sql);

            String MODALITIES = "";
            String MOD = null;

            while (rs.next()) {
                MOD = rs.getString(1);
                if (MODALITIES.length() > 0)
                    MODALITIES += "\\";
                MODALITIES += MOD;
            }
            rs.close();

            sql = "update STUDY set MODS_IN_STUDY='" + MODALITIES + "' where PK=" + pk;

            if (cfg.isDebug())
                System.err.println("DEBUG: " + sql);
            else
                stmt.executeUpdate(sql);

            stmt.close();
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }

    // Update DB Field only
    /**
     * Executes the prepared UPDATE for one row. When {@code uid} is non-null it
     * is bound as the first parameter (the generated UID column), the row PK is
     * always the last parameter. In debug mode nothing is executed.
     */
    private static void UpdateField(PreparedStatement stmt, Long pk, String uid, boolean debug) throws SQLException {
        if (debug)
            System.err.println("DEBUG: Update: < PK=" + pk + " >");
        else {
            int p = 1;
            if (uid != null)
                stmt.setString(p++, uid);
            stmt.setLong(p, pk);
            stmt.execute();
        }
    }

    // Update DicomBlob and DB Field or both
    /**
     * Reads the DICOM attribute blob from column {@code field}, applies the
     * requested tag changes from {@code update} (remove tag, set literal value,
     * or substitute the generated {@code uid} for "%UID%"), then rewrites the
     * blob (and optional UID column) for the row identified by {@code pk}.
     *
     * NOTE(review): when a tag removal finds no such tag, {@code ds} is set to
     * null and any remaining updates in the same run are silently skipped —
     * presumably intentional "no changes" signalling; confirm before reuse.
     */
    static void UpdateField(ResultSet rs, PreparedStatement stmt, Long pk, String field, String[][] update, String uid,
            boolean debug) throws SQLException, IOException {
        if (debug)
            System.err.println("DEBUG: Reading " + field + "...");

        Blob bl = rs.getBlob(field);

        if (bl != null) {
            InputStream bis = bl.getBinaryStream();
            Dataset ds = DcmObjectFactory.getInstance().newDataset();
            ds.readFile(bis, null, -1);
            bis.close();

            if (debug) {
                System.err.println("<" + field + " OLD>");
                ds.dumpDataset(System.err, null);
                System.err.println("</" + field + "OLD>");
            }

            for (int loop = 0; loop < update.length; loop++) {
                String DcmField = update[loop][1];
                String DcmValue = update[loop][2];

                if (DcmField != null) {
                    int TAG = 0;

                    // Tags may be given numerically ("x00100010") or by name.
                    if (DcmField.startsWith("x", 0))
                        TAG = Integer.parseInt(DcmField.substring(1), 16);
                    else
                        TAG = Tags.forName(DcmField);

                    if (DcmValue == null) {
                        if (ds.contains(TAG))
                            ds.remove(TAG);
                        else
                            ds = null;
                    } else {
                        if (DcmValue.equals("%UID%")) {
                            ds.putXX(TAG, uid);
                        } else {
                            ds.putXX(TAG, DcmValue);
                        }
                    }
                }
            }

            if (debug) {
                System.err.println("<" + field + " NEW>");
                if (ds == null) {
                    System.err.println("No Changes to DataSet");
                } else {
                    ds.dumpDataset(System.err, null);
                }
                System.err.println("</" + field + " NEW>");
            }

            if (ds != null) {
                if (debug)
                    System.err.println("DEBUG: Update < PK=" + pk + " >");
                else {
                    int p = 1;
                    int len = ds.calcLength(DcmEncodeParam.EVR_LE);
                    ByteArrayOutputStream bos = new ByteArrayOutputStream(len);
                    ds.writeDataset(bos, DcmEncodeParam.EVR_LE);
                    if (uid != null)
                        stmt.setString(p++, uid);
                    stmt.setBinaryStream(p++, new ByteArrayInputStream(bos.toByteArray()), len);
                    stmt.setLong(p, pk);
                    stmt.execute();
                }
            }
        } else {
            System.err.println("DEBUG: Reading " + field + " failed.");
        }
    }

    /**
     * Core driver: assembles count/select statements from the Config's SQL
     * portions, optionally builds an UPDATE statement from the requested DICOM
     * field changes, then iterates the result set, displaying each level and
     * applying the updates row by row.
     *
     * NOTE(review): the isIgnoreEmpty() branches below are currently identical;
     * presumably one branch was meant to differ — confirm against history.
     * NOTE(review): UpdValue.equals("%UID%") will NPE when a null update value
     * reaches it for a non-special column; the null case is only handled in the
     * else branch.
     * NOTE(review): update values are concatenated into SQL unquoted-escaped
     * (injection risk); a PreparedStatement placeholder per value would be safer.
     */
    private static void ParseQuery(Connection conn, Config cfg) throws SQLException, IOException {
        Statement stmt = conn.createStatement();
        ResultSet rs = null;

        boolean updUid = false; // NOTE(review): never read.
        String uidTemplate = null;

        String[] query = cfg.getSqlPortions();

        String SQLStatement = "";

        if (cfg.isIgnoreEmpty()) {
            SQLStatement += "from " + query[QUERY_FROM] + " " + query[QUERY_JOIN] + " ";
            SQLStatement += "where " + query[QUERY_LINKS] + " " + query[QUERY_WHERE];
        } else {
            SQLStatement += "from " + query[QUERY_FROM] + " " + query[QUERY_JOIN] + " ";
            SQLStatement += "where " + query[QUERY_LINKS] + " " + query[QUERY_WHERE];
        }

        // Construct Count Statement
        String CountStatement = "select COUNT(*) CNT " + SQLStatement;

        // Construct Select Statement
        String QueryStatement = "select " + query[QUERY_SELECT] + " " + SQLStatement;

        if (cfg.isDebug()) {
            System.err.println("DEBUG: Count: < " + CountStatement + " >");
            System.err.println("DEBUG: Query: < " + QueryStatement + " >");
        }

        String[][] update = cfg.getUpdDicom();
        boolean multi = false;
        boolean UpdModality = false;

        String UpdateStatement = null;
        int UpdateLevel = -1;
        boolean DoUpdate = false;
        PreparedStatement UpdStmt = null;

        if (update != null) {
            UpdateLevel = cfg.getUpdateLevel().nextSetBit(0);

            // update[0][4] == "t" flags that multi-row updates are permitted.
            if (update[0][4].equals("t"))
                multi = true;

            for (int loop = 0; loop < update.length; loop++) {
                if (update[loop][0] != null) {
                    String UpdValue = update[loop][2];

                    // Special Case Patient Name: pad to the 5 DICOM PN components.
                    if (update[loop][0].equals("PAT_NAME") && UpdateLevel == Jpdbi.PATIENT && UpdValue != null) {
                        int cnt = CountCharInString('^', UpdValue);
                        while (cnt++ < 4)
                            UpdValue += "^";
                        update[loop][2] = UpdValue;
                    }

                    // Special Case Modality: remember to refresh MODS_IN_STUDY later.
                    if (update[loop][0].equals("MODALITY"))
                        UpdModality = true;

                    if (UpdateStatement == null)
                        UpdateStatement = "";
                    else if (UpdateStatement.length() > 0)
                        UpdateStatement += ",";

                    UpdateStatement += update[loop][0].toUpperCase();

                    // "%UID%" or a "+prefix" value means: generate a UID per row
                    // and bind it as a parameter instead of a literal.
                    if (UpdValue.equals("%UID%") || UpdValue.startsWith("+")) {
                        UpdateStatement += "=?";
                        if (UpdValue.startsWith("+")) {
                            uidTemplate = UpdValue.substring(1);
                        } else {
                            uidTemplate = cfg.getUidBase();
                        }
                        if (cfg.isDebug()) {
                            System.err.println("DEBUG: Update Template: < " + uidTemplate + " >");
                        }
                    } else {
                        UpdateStatement += (UpdValue == null) ? "=null" : "='" + UpdValue + "'";
                    }
                }
            }

            // Also rewrite the DICOM attribute blob column if requested.
            if (!cfg.getUpdateDS().isEmpty()) {
                if (UpdateStatement == null)
                    UpdateStatement = "";
                else if (UpdateStatement.length() > 0)
                    UpdateStatement += ",";
                UpdateStatement += Jpdbi.Attrs[cfg.getUpdateDS().nextSetBit(0)];
                UpdateStatement += "=?";
            }

            UpdateStatement += " where PK=?";

            DoUpdate = true;

            if (cfg.isDebug()) {
                System.err.println("DEBUG: Update: < " + "update " + Jpdbi.Tables[UpdateLevel] + " set "
                        + UpdateStatement + " >");
            }
        }

        // Count first so we can refuse unexpected multi-row updates up front.
        rs = stmt.executeQuery(CountStatement);
        rs.next();
        long rows = rs.getLong(1);
        rs.close();

        if (rows > 0) {
            if (DoUpdate) {
                if (!multi && rows != 1) {
                    Jpdbi.exit(1, "Multiple Updates not allowed on this Configuration.");
                }

                // -666 is the "no --count given" sentinel: skip the safety check.
                if (cfg.getUpdateCount() == -666 || rows == cfg.getUpdateCount()) {
                    UpdStmt = conn.prepareStatement("update " + Jpdbi.Tables[UpdateLevel] + " set " + UpdateStatement);
                } else {
                    Jpdbi.exit(1, "Updating [" + rows + "] rows. Please supply correct \"--count\" option.");
                }
            }

            rs = stmt.executeQuery(QueryStatement);
            ResultSetMetaData md = rs.getMetaData();

            long PK = -1;
            long LastPK = -1;
            long cnt = 0L;

            while (rs.next()) {
                cnt++;
                for (int i = 0; i < Jpdbi.Tables.length; i++) {
                    if (cfg.isDisplayLevel(i)) {
                        switch (i) {
                        case Jpdbi.PATIENT:
                            PK = Display.Patient(rs, md, cfg);
                            break;
                        case Jpdbi.STUDY:
                            PK = Display.Study(rs, md, cfg);
                            if (PK > -1) {
                                // Study boundary crossed: refresh the previous
                                // study's modality summary before moving on.
                                if (UpdModality && PK != LastPK && LastPK > -1)
                                    UpdateStudyModality(conn, LastPK, cfg);
                                LastPK = PK;
                            }
                            break;
                        case Jpdbi.SERIE:
                            PK = Display.Serie(rs, md, cfg);
                            break;
                        case Jpdbi.INSTANCE:
                            PK = Display.Instance(rs, md, cfg);
                            break;
                        case Jpdbi.PATH:
                            Display.Path(rs, md, cfg);
                            break;
                        default:
                            PK = -1;
                            break;
                        }

                        if (cfg.isUpdateLevel(i) && PK > -1) {
                            String uid = null;
                            if (uidTemplate != null)
                                uid = Uid.Generate(uidTemplate, cnt);
                            if (cfg.getUpdateDS().isEmpty())
                                UpdateField(UpdStmt, PK, uid, cfg.isDebug());
                            else
                                UpdateField(rs, UpdStmt, PK, Jpdbi.Attrs[i], update, uid, cfg.isDebug());
                        }
                    }
                }
            }

            // Flush the modality summary for the last study seen.
            if (UpdModality && LastPK > -1)
                UpdateStudyModality(conn, LastPK, cfg);

            rs.close();
        } else {
            System.err.println("Query returns 0 rows.");
        }
        stmt.close();
    }

    /**
     * Entry point: parses the command line, connects via the configured JDBC
     * URL and runs the query/update. Any exception exits with code 1 (full
     * stack trace only in debug mode).
     */
    public static void main(String[] argv) {
        System.setProperty("java.awt.headless", "true");

        Connection conn = null;

        final Config cfg = new Config();

        cfg.ParseCommandLine(argv);

        if (cfg.isDebug()) {
            System.err.println("DEBUG: Connect Url: < " + cfg.getJdbcUrl() + " >");
        }

        try {
            conn = DriverManager.getConnection(cfg.getJdbcUrl());

            DatabaseMetaData dmd = conn.getMetaData();
            cfg.setDbType(dmd.getDatabaseProductName());

            // setPreparedStatements(conn);

            ParseQuery(conn, cfg);

            conn.close();
        } catch (Exception e) {
            if (cfg.isDebug())
                e.printStackTrace();
            else
                Jpdbi.exit(1, e.toString());
        }

        Jpdbi.exit(0);
    }
}
package prefuse.data.tuple;

import java.util.Date;

import prefuse.data.Graph;
import prefuse.data.Schema;
import prefuse.data.Table;
import prefuse.data.Tuple;

/**
 * Tuple implementation that pulls values from a backing data Table.
 * Every accessor delegates to the backing {@link Table} using the stored
 * row index; once the row is deleted the tuple is invalidated (row = -1)
 * and data access throws {@link IllegalStateException}.
 *
 * @author <a href="http://jheer.org">jeffrey heer</a>
 */
public class TableTuple implements Tuple {

    protected Table m_table; // backing table; all accessors delegate here
    protected int   m_row;   // row index in m_table, or -1 once invalidated

    /**
     * Initialize a new TableTuple for the given table and row. Tuples are
     * automatically generated by {@link TupleManager} instances, and
     * so application code should never need to invoke this method.
     * @param table the data Table
     * @param graph ignored by this class
     * @param row the table row index
     */
    protected void init(Table table, Graph graph, int row) {
        m_table = table;
        // Reject rows the table does not recognize as valid.
        m_row = m_table.isValidRow(row) ? row : -1;
    }

    /** @see prefuse.data.Tuple#getSchema() */
    public Schema getSchema() {
        return m_table.getSchema();
    }

    /** @see prefuse.data.Tuple#getTable() */
    public Table getTable() {
        return m_table;
    }

    /** @see prefuse.data.Tuple#getRow() */
    public int getRow() {
        return m_row;
    }

    // ------------------------------------------------------------------------
    // Index Checking

    /** @see prefuse.data.Tuple#isValid() */
    public boolean isValid() {
        return m_row != -1;
    }

    /**
     * Invalidates this tuple. Called by an enclosing table when a row
     * is deleted.
     */
    void invalidate() {
        m_row = -1;
    }

    /**
     * Internal validity check. Throw an exception if the tuple is not valid.
     */
    private void validityCheck() {
        if ( m_row == -1 ) {
            throw new IllegalStateException("This tuple is no longer valid. "
                + "It has been deleted from its table");
        }
    }

    // ------------------------------------------------------------------------
    // Column Methods

    /** @see prefuse.data.Tuple#getColumnType(java.lang.String) */
    public Class getColumnType(String field) {
        return m_table.getColumnType(field);
    }

    /** @see prefuse.data.Tuple#getColumnType(int) */
    public Class getColumnType(int col) {
        return m_table.getColumnType(col);
    }

    /** @see prefuse.data.Tuple#getColumnIndex(java.lang.String) */
    public int getColumnIndex(String field) {
        return m_table.getColumnNumber(field);
    }

    /** @see prefuse.data.Tuple#getColumnCount() */
    public int getColumnCount() {
        return m_table.getColumnCount();
    }

    /** @see prefuse.data.Tuple#getColumnName(int) */
    public String getColumnName(int col) {
        return m_table.getColumnName(col);
    }

    // ------------------------------------------------------------------------
    // Data Access Methods

    /** @see prefuse.data.Tuple#canGet(java.lang.String, java.lang.Class) */
    public boolean canGet(String field, Class type) {
        return m_table.canGet(field, type);
    }

    /** @see prefuse.data.Tuple#canSet(java.lang.String, java.lang.Class) */
    public boolean canSet(String field, Class type) {
        return m_table.canSet(field, type);
    }

    /** @see prefuse.data.Tuple#get(java.lang.String) */
    public final Object get(String field) {
        validityCheck();
        return m_table.get(m_row, field);
    }

    /** @see prefuse.data.Tuple#set(java.lang.String, java.lang.Object) */
    public final void set(String field, Object value) {
        validityCheck();
        m_table.set(m_row, field, value);
    }

    /** @see prefuse.data.Tuple#get(int) */
    public final Object get(int idx) {
        validityCheck();
        return m_table.get(m_row, idx);
    }

    /** @see prefuse.data.Tuple#set(int, java.lang.Object) */
    public final void set(int idx, Object value) {
        validityCheck();
        m_table.set(m_row, idx, value);
    }

    /** @see prefuse.data.Tuple#getDefault(java.lang.String) */
    public Object getDefault(String field) {
        validityCheck();
        return m_table.getDefault(field);
    }

    /** @see prefuse.data.Tuple#revertToDefault(java.lang.String) */
    public void revertToDefault(String field) {
        validityCheck();
        m_table.revertToDefault(m_row, field);
    }

    // ------------------------------------------------------------------------
    // Convenience Data Access Methods

    /** @see prefuse.data.Tuple#canGetInt(java.lang.String) */
    public final boolean canGetInt(String field) {
        return m_table.canGetInt(field);
    }

    /** @see prefuse.data.Tuple#canSetInt(java.lang.String) */
    public final boolean canSetInt(String field) {
        return m_table.canSetInt(field);
    }

    /** @see prefuse.data.Tuple#getInt(java.lang.String) */
    public final int getInt(String field) {
        validityCheck();
        return m_table.getInt(m_row, field);
    }

    /** @see prefuse.data.Tuple#setInt(java.lang.String, int) */
    public final void setInt(String field, int val) {
        validityCheck();
        m_table.setInt(m_row, field, val);
    }

    /** @see prefuse.data.Tuple#getInt(int) */
    public final int getInt(int col) {
        validityCheck();
        return m_table.getInt(m_row, col);
    }

    /** @see prefuse.data.Tuple#setInt(int, int) */
    public final void setInt(int col, int val) {
        validityCheck();
        m_table.setInt(m_row, col, val);
    }

    // --------------------------------------------------------------

    /** @see prefuse.data.Tuple#canGetLong(java.lang.String) */
    public final boolean canGetLong(String field) {
        return m_table.canGetLong(field);
    }

    /** @see prefuse.data.Tuple#canSetLong(java.lang.String) */
    public final boolean canSetLong(String field) {
        return m_table.canSetLong(field);
    }

    /** @see prefuse.data.Tuple#getLong(java.lang.String) */
    public final long getLong(String field) {
        validityCheck();
        return m_table.getLong(m_row, field);
    }

    /** @see prefuse.data.Tuple#setLong(java.lang.String, long) */
    public final void setLong(String field, long val) {
        validityCheck();
        m_table.setLong(m_row, field, val);
    }

    /** @see prefuse.data.Tuple#getLong(int) */
    public final long getLong(int col) {
        validityCheck();
        return m_table.getLong(m_row, col);
    }

    /** @see prefuse.data.Tuple#setLong(int, long) */
    public final void setLong(int col, long val) {
        validityCheck();
        m_table.setLong(m_row, col, val);
    }

    // --------------------------------------------------------------

    /** @see prefuse.data.Tuple#canGetFloat(java.lang.String) */
    public final boolean canGetFloat(String field) {
        return m_table.canGetFloat(field);
    }

    /** @see prefuse.data.Tuple#canSetFloat(java.lang.String) */
    public final boolean canSetFloat(String field) {
        return m_table.canSetFloat(field);
    }

    /** @see prefuse.data.Tuple#getFloat(java.lang.String) */
    public final float getFloat(String field) {
        validityCheck();
        return m_table.getFloat(m_row, field);
    }

    /** @see prefuse.data.Tuple#setFloat(java.lang.String, float) */
    public final void setFloat(String field, float val) {
        validityCheck();
        m_table.setFloat(m_row, field, val);
    }

    /** @see prefuse.data.Tuple#getFloat(int) */
    public final float getFloat(int col) {
        validityCheck();
        return m_table.getFloat(m_row, col);
    }

    /** @see prefuse.data.Tuple#setFloat(int, float) */
    public final void setFloat(int col, float val) {
        validityCheck();
        m_table.setFloat(m_row, col, val);
    }

    // --------------------------------------------------------------

    /** @see prefuse.data.Tuple#canGetDouble(java.lang.String) */
    public final boolean canGetDouble(String field) {
        return m_table.canGetDouble(field);
    }

    /** @see prefuse.data.Tuple#canSetDouble(java.lang.String) */
    public final boolean canSetDouble(String field) {
        return m_table.canSetDouble(field);
    }

    /** @see prefuse.data.Tuple#getDouble(java.lang.String) */
    public final double getDouble(String field) {
        validityCheck();
        return m_table.getDouble(m_row, field);
    }

    /** @see prefuse.data.Tuple#setDouble(java.lang.String, double) */
    public final void setDouble(String field, double val) {
        validityCheck();
        m_table.setDouble(m_row, field, val);
    }

    /** @see prefuse.data.Tuple#getDouble(int) */
    public final double getDouble(int col) {
        validityCheck();
        return m_table.getDouble(m_row, col);
    }

    /** @see prefuse.data.Tuple#setDouble(int, double) */
    public final void setDouble(int col, double val) {
        validityCheck();
        m_table.setDouble(m_row, col, val);
    }

    // --------------------------------------------------------------

    /** @see prefuse.data.Tuple#canGetBoolean(java.lang.String) */
    public final boolean canGetBoolean(String field) {
        return m_table.canGetBoolean(field);
    }

    /** @see prefuse.data.Tuple#canSetBoolean(java.lang.String) */
    public final boolean canSetBoolean(String field) {
        return m_table.canSetBoolean(field);
    }

    /** @see prefuse.data.Tuple#getBoolean(java.lang.String) */
    public final boolean getBoolean(String field) {
        validityCheck();
        return m_table.getBoolean(m_row, field);
    }

    /** @see prefuse.data.Tuple#setBoolean(java.lang.String, boolean) */
    public final void setBoolean(String field, boolean val) {
        validityCheck();
        m_table.setBoolean(m_row, field, val);
    }

    /** @see prefuse.data.Tuple#getBoolean(int) */
    public final boolean getBoolean(int col) {
        validityCheck();
        return m_table.getBoolean(m_row, col);
    }

    /** @see prefuse.data.Tuple#setBoolean(int, boolean) */
    public final void setBoolean(int col, boolean val) {
        validityCheck();
        m_table.setBoolean(m_row, col, val);
    }

    // --------------------------------------------------------------

    /** @see prefuse.data.Tuple#canGetString(java.lang.String) */
    public final boolean canGetString(String field) {
        return m_table.canGetString(field);
    }

    /** @see prefuse.data.Tuple#canSetString(java.lang.String) */
    public final boolean canSetString(String field) {
        return m_table.canSetString(field);
    }

    /** @see prefuse.data.Tuple#getString(java.lang.String) */
    public final String getString(String field) {
        validityCheck();
        return m_table.getString(m_row, field);
    }

    /** @see prefuse.data.Tuple#setString(java.lang.String, java.lang.String) */
    public final void setString(String field, String val) {
        validityCheck();
        m_table.setString(m_row, field, val);
    }

    /** @see prefuse.data.Tuple#getString(int) */
    public final String getString(int col) {
        validityCheck();
        return m_table.getString(m_row, col);
    }

    /** @see prefuse.data.Tuple#setString(int, java.lang.String) */
    public final void setString(int col, String val) {
        validityCheck();
        m_table.setString(m_row, col, val);
    }

    // --------------------------------------------------------------

    /** @see prefuse.data.Tuple#canGetDate(java.lang.String) */
    public final boolean canGetDate(String field) {
        return m_table.canGetDate(field);
    }

    /** @see prefuse.data.Tuple#canSetDate(java.lang.String) */
    public final boolean canSetDate(String field) {
        return m_table.canSetDate(field);
    }

    /** @see prefuse.data.Tuple#getDate(java.lang.String) */
    public final Date getDate(String field) {
        validityCheck();
        return m_table.getDate(m_row, field);
    }

    /** @see prefuse.data.Tuple#setDate(java.lang.String, java.util.Date) */
    public final void setDate(String field, Date val) {
        validityCheck();
        m_table.setDate(m_row, field, val);
    }

    /** @see prefuse.data.Tuple#getDate(int) */
    public final Date getDate(int col) {
        validityCheck();
        return m_table.getDate(m_row, col);
    }

    /** @see prefuse.data.Tuple#setDate(int, java.util.Date) */
    public final void setDate(int col, Date val) {
        validityCheck();
        m_table.setDate(m_row, col, val);
    }

    // ------------------------------------------------------------------------

    /** @see java.lang.Object#toString() */
    public String toString() {
        // StringBuilder instead of StringBuffer: local, single-threaded use
        // needs no synchronization.
        StringBuilder sb = new StringBuilder();
        sb.append("Tuple[");
        for ( int i=0; i<getColumnCount(); ++i ) {
            if ( i > 0 ) sb.append(',');
            try {
                sb.append(get(i).toString());
            } catch ( Exception e ) {
                // Render unreadable cells (e.g. null values) as "?" rather
                // than letting toString throw.
                sb.append("?");
            }
        }
        sb.append("]");
        return sb.toString();
    }

} // end of class TableTuple
/*
 * Copyright 2013-2022 Erudika. https://erudika.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * For issues and patches go to: https://github.com/erudika
 */
package com.erudika.para.core.utils;

import com.erudika.para.core.utils.Config;
import com.erudika.para.core.App;
import com.erudika.para.core.Sysprop;
import com.erudika.para.core.Tag;
import com.erudika.para.core.User;
import static com.erudika.para.core.validation.ValidationUtils.*;
import static com.erudika.para.core.validation.Constraint.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import static org.junit.Assert.*;
import org.junit.Test;

/**
 * Tests for {@code ValidationUtils} and the {@code Constraint} factory
 * methods (required/min/max/size/digits/pattern/falsy/truthy/future/past/url).
 *
 * @author Alex Bogdanovski [alex@erudika.com]
 */
public class ValidationUtilsTest {

	/** The shared validator instance must always be available. */
	@Test
	public void testGetValidator() {
		assertNotNull(getValidator());
	}

	/** Built-in (annotation-based) validation of core objects. */
	@Test
	public void testIsValidObject() {
		assertFalse(isValidObject(null));
		assertFalse(isValidObject(new Tag()));          // missing name
		assertTrue(isValidObject(new Tag("tag1")));
		Tag t = new Tag("");
		t.setName("");
		assertFalse(isValidObject(t));                  // blank name is invalid
		assertFalse(isValidObject(new User()));         // user needs id/name/email etc.
		User u = new User();
		u.setId("123");
		u.setName("asd");
		assertFalse(isValidObject(u));                  // still incomplete
	}

	/**
	 * App-defined validation constraints applied to a {@link Sysprop}.
	 *
	 * NOTE: this test is order-dependent — {@code s1} and {@code app} are
	 * mutated between assertions, so each assertion depends on the state
	 * left behind by all earlier statements.
	 */
	@Test
	public void testValidateObject() {
		assertTrue(validateObject(null).length > 0);    // null -> at least one error
		assertEquals(0, validateObject(new Tag("test")).length);

		App app = new App(Config.PARA);
		assertTrue(validateObject(app).length == 0);
		app.resetSecret();
		assertTrue(validateObject(app).length == 0);

		Sysprop s1 = new Sysprop("s1");
		assertTrue(validateObject(s1).length == 0);
		assertTrue(validateObject(app, s1).length == 0);
		s1.setType("cat");
		assertTrue(validateObject(s1).length == 0);
		assertTrue(validateObject(app, s1).length == 0);
		assertTrue(app.getValidationConstraints().isEmpty());
		// null arguments are ignored — no constraint is registered
		app.addValidationConstraint(null, null, null);
		assertTrue(app.getValidationConstraints().isEmpty());
		// required
		app.addValidationConstraint(s1.getType(), "paws", required());
		assertFalse(app.getValidationConstraints().get(s1.getType()).isEmpty());
		assertFalse(validateObject(app, s1).length == 0);
		s1.addProperty("paws", 2);
		assertTrue(validateObject(app, s1).length == 0);
		// min
		app.addValidationConstraint(s1.getType(), "paws", min(4L));
		assertFalse(validateObject(app, s1).length == 0);
		s1.addProperty("paws", 4);
		assertTrue(validateObject(app, s1).length == 0);
		// max
		app.addValidationConstraint(s1.getType(), "paws", max(5L));
		s1.addProperty("paws", 6);
		assertFalse(validateObject(app, s1).length == 0);
		s1.addProperty("paws", 5);
		assertTrue(validateObject(app, s1).length == 0);
		// size
		app.addValidationConstraint(s1.getType(), "name", size(2, 3));
		assertFalse(validateObject(app, s1).length == 0);
		s1.setName("Bob");
		assertTrue(validateObject(app, s1).length == 0);
		s1.setName("Bobsy");
		assertFalse(validateObject(app, s1).length == 0);
		s1.setName("Bob");
		// null values are allowed and treated as valid
		app.addValidationConstraint(s1.getType(), "fur", size(2, 3));
		assertTrue(validateObject(app, s1).length == 0);
		// ints are a wrong type - not valid
		s1.addProperty("fur", 3);
		assertFalse(validateObject(app, s1).length == 0);
		s1.addProperty("fur", "yes");
		assertTrue(validateObject(app, s1).length == 0);
		// size() also applies to collections, maps and arrays:
		s1.addProperty("fur", new ArrayList<String>(0));
		assertFalse(validateObject(app, s1).length == 0);
		s1.addProperty("fur", Arrays.asList(new String[]{"one", "two", "three"}));
		assertTrue(validateObject(app, s1).length == 0);
		s1.addProperty("fur", new HashMap<String, String>(0));
		assertFalse(validateObject(app, s1).length == 0);
		s1.addProperty("fur", new HashMap<String, String>() {{
			put("1", "1");
			put("2", "2");
			put("3", "3");
		}});
		assertTrue(validateObject(app, s1).length == 0);
		s1.addProperty("fur", new String[0]);
		assertFalse(validateObject(app, s1).length == 0);
		s1.addProperty("fur", new String[]{"one", "two", "three"});
		assertTrue(validateObject(app, s1).length == 0);
		// email
		app.addValidationConstraint(s1.getType(), "eemail", email());
		assertTrue(validateObject(app, s1).length == 0);
		s1.addProperty("eemail", 2);
		assertFalse(validateObject(app, s1).length == 0);
		s1.addProperty("eemail", "a@..");
		assertFalse(validateObject(app, s1).length == 0);
		s1.addProperty("eemail", "a@bob.com");
		assertTrue(validateObject(app, s1).length == 0);
		// digits: max 4 integer digits, max 2 fraction digits
		app.addValidationConstraint(s1.getType(), "specialnum", digits(4, 2));
		s1.addProperty("specialnum", "??");
		assertFalse(validateObject(app, s1).length == 0);
		s1.addProperty("specialnum", 12.34);
		assertTrue(validateObject(app, s1).length == 0);
		s1.addProperty("specialnum", 1234.567);
		assertFalse(validateObject(app, s1).length == 0);
		s1.addProperty("specialnum", 12345.67);
		assertFalse(validateObject(app, s1).length == 0);
		s1.addProperty("specialnum", "1234.5");
		assertTrue(validateObject(app, s1).length == 0);
		// pattern
		app.addValidationConstraint(s1.getType(), "regex", pattern("^test\\sok=$"));
		s1.addProperty("regex", "??");
		assertFalse(validateObject(app, s1).length == 0);
		s1.addProperty("regex", "test ok=");
		assertTrue(validateObject(app, s1).length == 0);
		// false — falsy() accepts anything that is not "true"/true
		app.addValidationConstraint(s1.getType(), "fals", falsy());
		s1.addProperty("fals", "test");
		assertTrue(validateObject(app, s1).length == 0);
		s1.addProperty("fals", "false");
		assertTrue(validateObject(app, s1).length == 0);
		s1.addProperty("fals", "NO");
		assertTrue(validateObject(app, s1).length == 0);
		s1.addProperty("fals", "0");
		assertTrue(validateObject(app, s1).length == 0);
		s1.addProperty("fals", 0);
		assertTrue(validateObject(app, s1).length == 0);
		s1.addProperty("fals", false);
		assertTrue(validateObject(app, s1).length == 0);
		s1.addProperty("fals", "true");
		assertFalse(validateObject(app, s1).length == 0);
		s1.addProperty("fals", true);
		assertFalse(validateObject(app, s1).length == 0);
		s1.addProperty("fals", false);
		// true — truthy() only accepts "true"/true
		app.addValidationConstraint(s1.getType(), "tru", truthy());
		s1.addProperty("tru", "test");
		assertFalse(validateObject(app, s1).length == 0);
		s1.addProperty("tru", "false");
		assertFalse(validateObject(app, s1).length == 0);
		s1.addProperty("tru", true);
		assertTrue(validateObject(app, s1).length == 0);
		s1.addProperty("tru", "true");
		assertTrue(validateObject(app, s1).length == 0);
		// future
		app.addValidationConstraint(s1.getType(), "future", future());
		s1.addProperty("future", 1234);
		assertFalse(validateObject(app, s1).length == 0);
		// s1.addProperty("future", System.currentTimeMillis());
		// assertFalse(validateObject(app, s1).length == 0); // might fail on some machines
		s1.addProperty("future", System.currentTimeMillis() + 10000);
		assertTrue(validateObject(app, s1).length == 0);
		s1.addProperty("future", new Date(System.currentTimeMillis() + 10000));
		assertTrue(validateObject(app, s1).length == 0);
		// past
		app.addValidationConstraint(s1.getType(), "past", past());
		s1.addProperty("past", System.currentTimeMillis() + 10000);
		assertFalse(validateObject(app, s1).length == 0);
		s1.addProperty("past", 1234);
		assertTrue(validateObject(app, s1).length == 0);
		// s1.addProperty("past", System.currentTimeMillis());
		// assertFalse(validateObject(app, s1).length == 0); // might fail on some machines
		s1.addProperty("past", new Date(System.currentTimeMillis()-1));
		assertTrue(validateObject(app, s1).length == 0);
		// url
		app.addValidationConstraint(s1.getType(), "url", url());
		s1.addProperty("url", 1234);
		assertFalse(validateObject(app, s1).length == 0);
		s1.addProperty("url", "http");
		assertFalse(validateObject(app, s1).length == 0);
		s1.addProperty("url", "http://www.a.com");
		assertTrue(validateObject(app, s1).length == 0);
	}

	/** The built-in core constraints must cover the "app" type. */
	@Test
	public void testGetCoreValidationConstraints() {
		assertTrue(getCoreValidationConstraints().containsKey("app"));
	}

	/** Direct unit tests of every Constraint factory's isValid() behavior. */
	@Test
	public void testAllConstraints() {
		// null is ok, because value might not be required
		assertTrue(email().isValid(null));
		assertTrue(email().isValid("abc@de.com"));
		assertFalse(email().isValid("abc@de."));
		assertFalse(email().isValid("abc@.c"));
		assertFalse(email().isValid(123));
		assertFalse(email().isValid(" "));

		assertTrue(falsy().isValid(null));
		assertTrue(falsy().isValid("false"));
		assertTrue(falsy().isValid("FALSE"));
		assertTrue(falsy().isValid(false));
		assertTrue(falsy().isValid("fals"));
		assertTrue(falsy().isValid(" "));
		assertFalse(falsy().isValid("true"));
		assertFalse(falsy().isValid(true));

		assertTrue(truthy().isValid(null));
		assertTrue(truthy().isValid("true"));
		assertTrue(truthy().isValid("True"));
		assertTrue(truthy().isValid(true));
		assertFalse(truthy().isValid(false));
		assertFalse(truthy().isValid("a"));
		assertFalse(truthy().isValid(" "));

		assertFalse(required().isValid(null));
		assertFalse(required().isValid(" "));   // blank counts as missing
		assertTrue(required().isValid("text"));
		assertTrue(required().isValid(1));
		assertTrue(required().isValid(true));

		long now = System.currentTimeMillis();
		assertTrue(future().isValid(null));
		assertTrue(future().isValid(new Date(now + 1000)));
		assertFalse(future().isValid(new Date(now - 1000)));

		assertTrue(past().isValid(null));
		assertTrue(past().isValid(new Date(now - 1000)));
		assertFalse(past().isValid(new Date(now + 1000)));

		assertTrue(url().isValid(null));
		assertTrue(url().isValid("http://abc.co"));
		assertFalse(url().isValid("htp://abc.co"));  // unknown scheme
		assertFalse(url().isValid("abc.com"));       // missing scheme
		assertFalse(url().isValid(" "));
		assertFalse(url().isValid(false));

		assertTrue(min(3).isValid(null));
		assertTrue(min(3).isValid(3));
		assertTrue(min(3).isValid(4));
		assertFalse(min(4).isValid(3));
		assertFalse(min(2).isValid("3"));       // strings are not numbers here
		assertFalse(min(4).isValid(true));
		assertFalse(min(null).isValid(" "));    // null bound -> invalid
		assertFalse(min(null).isValid(3));

		assertTrue(max(3).isValid(null));
		assertTrue(max(3).isValid(3));
		assertTrue(max(4).isValid(3));
		assertFalse(max(3).isValid(4));
		assertFalse(max(2).isValid("3"));
		assertFalse(max(4).isValid(true));
		assertFalse(max(null).isValid(" "));
		assertFalse(max(null).isValid(3));

		assertTrue(size(2, 3).isValid(null));
		assertTrue(size(2, 3).isValid("xx"));
		assertFalse(size(3, 2).isValid("xx"));  // inverted bounds never match
		assertFalse(size(2, 3).isValid("xxxx"));
		assertFalse(size(2, 3).isValid("x"));
		assertTrue(size(0, 0).isValid(""));
		assertTrue(size(0, 0).isValid(new String[0]));
		assertTrue(size(1, 2).isValid(new String[]{"a", "b"}));
		assertTrue(size(1, 2).isValid(Arrays.asList(new String[]{"a", "b"})));
		assertTrue(size(1, 2).isValid(Collections.singletonMap("a", "b")));

		assertTrue(digits(2, 2).isValid(null));
		assertTrue(digits(2, 2).isValid("22.22"));
		assertFalse(digits(2, 2).isValid("22.222"));
		assertFalse(digits(1, 2).isValid("2.222"));
		assertTrue(digits(1, 2).isValid("2.22"));
		assertTrue(digits(1, 2).isValid(2.22));
		assertTrue(digits(1, 2).isValid(0));
		assertFalse(digits(1, 2).isValid(12));
		assertFalse(digits(0, 2).isValid(1));

		assertTrue(pattern(null).isValid(null));
		assertTrue(pattern("").isValid(""));
		assertTrue(pattern("[ab]+").isValid("bababa"));
		assertTrue(pattern("\\.[ab]+").isValid(".babababa"));
		assertFalse(pattern("").isValid(" "));
	}
}
/*
 * Copyright 2014 The Closure Compiler Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.javascript.refactoring;

import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.javascript.jscomp.AbstractCompiler;
import com.google.javascript.jscomp.JSError;
import com.google.javascript.jscomp.NodeTraversal;
import com.google.javascript.jscomp.NodeUtil;
import com.google.javascript.rhino.JSDocInfo;
import com.google.javascript.rhino.Node;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Maps a JSError to a SuggestedFix.
 * TODO(tbreisacher): Move this into the compiler itself (i.e. into the jscomp package). This will
 * make it easier for people adding new warnings to also add fixes for them.
 */
public final class ErrorToFixMapper {
  private ErrorToFixMapper() {} // All static

  // Patterns used to pull structured data (suggested name, namespace) out of
  // free-form error description strings.
  private static final Pattern DID_YOU_MEAN = Pattern.compile(".*Did you mean (.*)\\?");
  private static final Pattern MISSING_REQUIRE =
      Pattern.compile("missing require: '([^']+)'");
  private static final Pattern EXTRA_REQUIRE =
      Pattern.compile("extra require: '([^']+)'");

  /**
   * Returns all fixes for the given error, keyed off the error type's name.
   * Errors with a single fix are delegated to {@link #getFixForJsError};
   * errors with multiple alternative fixes are handled here directly.
   * Returns an empty list when no fix is known.
   */
  public static List<SuggestedFix> getFixesForJsError(JSError error, AbstractCompiler compiler) {
    SuggestedFix fix = getFixForJsError(error, compiler);
    if (fix != null) {
      return ImmutableList.of(fix);
    }
    switch (error.getType().key) {
      case "JSC_IMPLICITLY_NULLABLE_JSDOC":
        return getFixesForImplicitlyNullableJsDoc(error);
      default:
        return ImmutableList.of();
    }
  }

  /**
   * Creates a SuggestedFix for the given error. Note that some errors have multiple fixes
   * so getFixesForJsError should often be used instead of this.
   * Returns null when the error type has no known single fix.
   */
  public static SuggestedFix getFixForJsError(JSError error, AbstractCompiler compiler) {
    switch (error.getType().key) {
      case "JSC_MISSING_SEMICOLON":
        return getFixForMissingSemicolon(error);
      case "JSC_REQUIRES_NOT_SORTED":
        return getFixForUnsortedRequiresOrProvides("goog.require", error, compiler);
      case "JSC_PROVIDES_NOT_SORTED":
        return getFixForUnsortedRequiresOrProvides("goog.provide", error, compiler);
      case "JSC_DEBUGGER_STATEMENT_PRESENT":
      case "JSC_USELESS_EMPTY_STATEMENT":
        return removeNode(error);
      case "JSC_INEXISTENT_PROPERTY":
        return getFixForInexistentProperty(error);
      case "JSC_MISSING_CALL_TO_SUPER":
        return getFixForMissingSuper(error);
      case "JSC_MISSING_REQUIRE_WARNING":
        return getFixForMissingRequire(error, compiler);
      case "JSC_DUPLICATE_REQUIRE_WARNING":
      case "JSC_EXTRA_REQUIRE_WARNING":
        return getFixForExtraRequire(error, compiler);
      case "JSC_UNNECESSARY_CAST":
        return getFixForUnnecessaryCast(error, compiler);
      default:
        return null;
    }
  }

  /**
   * An implicitly-nullable JSDoc type has two valid resolutions: make the
   * nullability explicit with "?" or force non-null with "!". Offer both.
   */
  private static List<SuggestedFix> getFixesForImplicitlyNullableJsDoc(JSError error) {
    SuggestedFix qmark =
        new SuggestedFix.Builder()
            .setOriginalMatchedNode(error.node)
            .insertBefore(error.node, "?")
            .setDescription("Make nullability explicit")
            .build();
    SuggestedFix bang =
        new SuggestedFix.Builder()
            .setOriginalMatchedNode(error.node)
            .insertBefore(error.node, "!")
            .setDescription("Make type non-nullable")
            .build();
    return ImmutableList.of(qmark, bang);
  }

  /** Fix that simply deletes the offending node (debugger stmt, empty stmt). */
  private static SuggestedFix removeNode(JSError error) {
    return new SuggestedFix.Builder()
        .setOriginalMatchedNode(error.node)
        .delete(error.node).build();
  }

  /** Fix that appends a ";" after the statement missing one. */
  private static SuggestedFix getFixForMissingSemicolon(JSError error) {
    return new SuggestedFix.Builder()
        .setOriginalMatchedNode(error.node)
        .insertAfter(error.node, ";")
        .build();
  }

  /** Inserts "super();" as the first statement of the constructor body. */
  private static SuggestedFix getFixForMissingSuper(JSError error) {
    Node body = NodeUtil.getFunctionBody(error.node);
    return new SuggestedFix.Builder()
        .setOriginalMatchedNode(error.node)
        .addChildToFront(body, "super();")
        .build();
  }

  /**
   * For "inexistent property" errors that carry a "Did you mean X?" hint,
   * renames the property to the suggested name. Returns null when the error
   * description carries no suggestion.
   */
  private static SuggestedFix getFixForInexistentProperty(JSError error) {
    Matcher m = DID_YOU_MEAN.matcher(error.description);
    if (m.matches()) {
      String suggestedPropName = m.group(1);
      return new SuggestedFix.Builder()
          .setOriginalMatchedNode(error.node)
          .rename(error.node, suggestedPropName).build();
    }
    return null;
  }

  /** Adds the goog.require() named in the error description. */
  private static SuggestedFix getFixForMissingRequire(JSError error, AbstractCompiler compiler) {
    Matcher regexMatcher = MISSING_REQUIRE.matcher(error.description);
    // The description format is produced by the compiler; a mismatch means
    // the pattern above is out of sync with the compiler's message.
    Preconditions.checkState(regexMatcher.matches(),
        "Unexpected error description: %s", error.description);
    String namespaceToRequire = regexMatcher.group(1);
    NodeMetadata metadata = new NodeMetadata(compiler);
    Match match = new Match(error.node, metadata);
    return new SuggestedFix.Builder()
        .setOriginalMatchedNode(error.node)
        .addGoogRequire(match, namespaceToRequire)
        .build();
  }

  /** Removes the duplicate/extra goog.require() named in the error description. */
  private static SuggestedFix getFixForExtraRequire(JSError error, AbstractCompiler compiler) {
    Matcher regexMatcher = EXTRA_REQUIRE.matcher(error.description);
    Preconditions.checkState(regexMatcher.matches(),
        "Unexpected error description: %s", error.description);
    String namespace = regexMatcher.group(1);
    NodeMetadata metadata = new NodeMetadata(compiler);
    Match match = new Match(error.node, metadata);
    return new SuggestedFix.Builder()
        .setOriginalMatchedNode(error.node)
        .removeGoogRequire(match, namespace)
        .build();
  }

  /** Removes a cast the compiler proved unnecessary. */
  private static SuggestedFix getFixForUnnecessaryCast(JSError error, AbstractCompiler compiler) {
    return new SuggestedFix.Builder()
        .setOriginalMatchedNode(error.node)
        .removeCast(error.node, compiler).build();
  }

  /**
   * Rewrites the whole block of goog.require()/goog.provide() calls in the
   * enclosing script in alphabetical order, preserving each call's attached
   * JSDoc comment. The replacement range spans from the first to the last
   * call as they originally appeared.
   */
  private static SuggestedFix getFixForUnsortedRequiresOrProvides(
      String closureFunction, JSError error, AbstractCompiler compiler) {
    SuggestedFix.Builder fix = new SuggestedFix.Builder();
    fix.setOriginalMatchedNode(error.node);
    Node script = NodeUtil.getEnclosingScript(error.node);
    RequireProvideSorter cb = new RequireProvideSorter(closureFunction);
    NodeTraversal.traverseEs6(compiler, script, cb);
    // Capture the original first/last calls BEFORE sorting — they anchor the
    // text range that gets replaced.
    Node first = cb.calls.get(0);
    Node last = cb.calls.get(cb.calls.size() - 1);
    cb.sortCallsAlphabetically();
    StringBuilder sb = new StringBuilder();
    for (Node n : cb.calls) {
      String statement = fix.generateCode(compiler, n);
      JSDocInfo jsDoc = NodeUtil.getBestJSDocInfo(n);
      if (jsDoc != null) {
        // Keep the comment attached above its call.
        statement = jsDoc.getOriginalCommentString() + "\n" + statement;
      }
      sb.append(statement);
    }
    // Trim to remove the newline after the last goog.require/provide.
    String newContent = sb.toString().trim();
    return fix.replaceRange(first, last, newContent).build();
  }

  /**
   * Extracts the namespace string argument from a goog.require/goog.provide
   * EXPR_RESULT node, e.g. "foo.bar" from {@code goog.require('foo.bar');}.
   */
  private static String getNamespaceFromClosureNode(Node exprResult) {
    Preconditions.checkState(exprResult.isExprResult());
    return exprResult.getFirstChild().getLastChild().getString();
  }

  /**
   * Traversal callback that collects all top-level calls to a given closure
   * function (goog.require or goog.provide) and can sort them by namespace.
   */
  private static class RequireProvideSorter extends NodeTraversal.AbstractShallowCallback
      implements Comparator<Node> {
    private final String closureFunction;
    // EXPR_RESULT parents of matching calls, in source order until sorted.
    private final List<Node> calls = new ArrayList<>();

    RequireProvideSorter(String closureFunction) {
      this.closureFunction = closureFunction;
    }

    @Override
    public final void visit(NodeTraversal nodeTraversal, Node n, Node parent) {
      // Only expression-statement calls to the target function are collected.
      if (n.isCall()
          && parent.isExprResult()
          && n.getFirstChild().matchesQualifiedName(closureFunction)) {
        calls.add(parent);
      }
    }

    public void sortCallsAlphabetically() {
      Collections.sort(calls, this);
    }

    @Override
    public int compare(Node n1, Node n2) {
      String namespace1 = getNamespaceFromClosureNode(n1);
      String namespace2 = getNamespaceFromClosureNode(n2);
      return namespace1.compareTo(namespace2);
    }
  }
}
package de.qaware.chronix.server.benchmark.configurator; import de.qaware.chronix.database.BenchmarkDataSource; import de.qaware.chronix.server.util.ChronixBoolean; import de.qaware.chronix.server.util.DockerCommandLineUtil; import de.qaware.chronix.server.util.ServerSystemUtil; import de.qaware.chronix.common.ServerConfig.TSDBInterfaceHandler; import de.qaware.chronix.common.dockerUtil.DockerBuildOptions; import de.qaware.chronix.common.dockerUtil.DockerRunOptions; import de.qaware.chronix.common.ServerConfig.ServerConfigAccessor; import de.qaware.chronix.common.ServerConfig.ServerConfigRecord; import org.apache.commons.compress.utils.IOUtils; //import org.glassfish.jersey.media.multipart.FormDataBodyPart; import org.glassfish.jersey.media.multipart.FormDataContentDisposition; //import org.glassfish.jersey.media.multipart.FormDataMultiPart; import org.glassfish.jersey.media.multipart.FormDataParam; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.ws.rs.*; import javax.ws.rs.Path; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import java.io.*; import java.util.LinkedList; import java.util.List; /** * Created by mcqueen666 on 15.06.16. 
*/ @Path("/configurator") @Produces(MediaType.APPLICATION_JSON) public class BenchmarkConfiguratorResource { private ServerConfigAccessor serverConfigAccessor = ServerConfigAccessor.getInstance(); private final Logger logger = LoggerFactory.getLogger(BenchmarkConfiguratorResource.class); // JUST FOR TESTING @GET @Path("test") public String test(@QueryParam("name") String name) { return "Sent info: " + name; } //Test json @GET @Path("getjson") @Produces(MediaType.APPLICATION_JSON) public DockerRunOptions getjson(){ DockerRunOptions op = new DockerRunOptions("chronix",8983,8983,""); return op; } //TEST @GET @Path("booleanTest") public Response test(@QueryParam("value") ChronixBoolean chronixBoolean) { String[] result = {"Result is " + chronixBoolean.getValue()}; return Response.ok().entity(result).build(); } //Obsolete @GET @Path("ping") public Response ping(){ return Response.ok().build(); } @GET @Path("which") public Response which(){ //String[] command = {"which docker"}; //String[] lcom = ServerSystemUtil.getOsSpecificCommand(command); String[] result = {DockerCommandLineUtil.getDockerInstallPath()}; return Response.ok().entity(result).build(); } @POST @Path("upload/config") @Consumes(MediaType.APPLICATION_JSON) public Response uploadServerConfig(LinkedList<ServerConfigRecord> serverConfigRecords) { for(ServerConfigRecord serverConfigRecord : serverConfigRecords){ serverConfigRecord.getTsdbDockerFilesDirectoryMap().clear(); } serverConfigAccessor.setServerConfigRecords(serverConfigRecords); return Response.ok().build(); } @GET @Path("docker/running") public Response isRunning(@QueryParam("containerName") String containerName){ List<String> result = new LinkedList<String>(); if(DockerCommandLineUtil.isDockerInstalled()){ Boolean containerRunning = DockerCommandLineUtil.isDockerContainerRunning(containerName); if(containerRunning){ result.add("true"); } else { result.add("false"); } } else { result.add("Docker not installed or running."); } return 
Response.ok().entity(result.toArray()).build(); } @GET @Path("interface/running") public Response isInterfaceRunning(@QueryParam("tsdbName") String tsdbName){ TSDBInterfaceHandler interfaceHandler = TSDBInterfaceHandler.getInstance(); BenchmarkDataSource impl = interfaceHandler.getTSDBInstance(tsdbName); if(impl != null) { return Response.ok().entity(tsdbName + " interface is up. storage directory is: " + impl.getStorageDirectoryPath()).build(); } logger.info(tsdbName + " interface not responding"); return Response.serverError().entity(tsdbName + " interface not responding").build(); } @POST @Path("upload/jar") @Consumes({MediaType.MULTIPART_FORM_DATA}) public Response uploadJarFile(@QueryParam("tsdbName") String tsdbName, @FormDataParam("file")InputStream fileInputStream, @FormDataParam("file")FormDataContentDisposition fileMetaData){ TSDBInterfaceHandler interfaceHandler = TSDBInterfaceHandler.getInstance(); if(interfaceHandler.copyTSDBInterface(fileInputStream, tsdbName)){ BenchmarkDataSource<Object> tsdb = interfaceHandler.getTSDBInstance(tsdbName); if(tsdb != null){ //create external measurement directory if impl needs one. 
(done by impl) tsdb.ableToMeasureExternalDirectory(); } return Response.ok().entity("copy successful").build(); } logger.info(tsdbName + ": copy error"); return Response.serverError().entity(tsdbName + ": copy error").build(); } @POST @Path("docker/upload/{name}") @Consumes({MediaType.MULTIPART_FORM_DATA}) public Response uploadDockerFiles(@PathParam("name") String name, @FormDataParam("file")InputStream fileInputStream, @FormDataParam("file")FormDataContentDisposition fileMetaData) { String path = ServerSystemUtil.getBenchmarkDockerDirectory(); if(path == null){ return Response.serverError().entity("Server OS Unknown").build(); } // construct directory path from name String[] paths = name.split("-"); String reconstructedFilePath = ""; for(String p : paths){ reconstructedFilePath = reconstructedFilePath + p + File.separator; } String dirPath = path + reconstructedFilePath; new File(dirPath).mkdirs(); String filename = fileMetaData.getFileName(); String filePath = dirPath + filename; try { File newFile = new File(filePath); FileOutputStream outputStream = new FileOutputStream(newFile); IOUtils.copy(fileInputStream, outputStream); outputStream.close(); } catch (IOException e) { logger.error("Could not write file <" + reconstructedFilePath + filename + ">"); return Response.serverError().entity("Server could not write file <" + reconstructedFilePath + filename + ">" ).build(); } return Response.ok("Upload file <" + reconstructedFilePath + filename + "> successfull!").build(); } /** * Starts the docker specified in dockerRunOptoins. * * @param dockerRunOptions DockerRunOptions JSON * @return the response from the server and the cli output (e.g. 
statusCode + String[]) */ @POST @Consumes(MediaType.APPLICATION_JSON) @Path("docker/start") public Response startDockerContainer(DockerRunOptions dockerRunOptions){ if(DockerCommandLineUtil.isDockerInstalled()){ String containerName = dockerRunOptions.getContainerName(); File directory = new File(ServerSystemUtil.getBenchmarkDockerDirectory() + containerName); if(directory.exists()){ if(!DockerCommandLineUtil.isDockerContainerRunning(containerName)) { // first try restarting docker container String restartCommand = dockerRunOptions.getValidRunCommand(true); String[] prepareRestartCommand = {DockerCommandLineUtil.getDockerInstallPath() + restartCommand}; String[] specificRestartCommand = ServerSystemUtil.getOsSpecificCommand(prepareRestartCommand); List<String> restartResult = ServerSystemUtil.executeCommand(specificRestartCommand); if(!DockerCommandLineUtil.isDockerContainerRunning(containerName)) { // container was not started before, so run with initial command. String command = dockerRunOptions.getValidRunCommand(false); //TODO further check command for safety reasons if (command != null) { String[] prepareCommand = {DockerCommandLineUtil.getDockerInstallPath() + command}; String[] specificCommand = ServerSystemUtil.getOsSpecificCommand(prepareCommand); List<String> startResult = ServerSystemUtil.executeCommand(specificCommand); if (DockerCommandLineUtil.isDockerContainerRunning(containerName)) { // all went good startResult.add("Docker container " + containerName + " is running"); return Response.ok().entity(startResult.toArray()).build(); } startResult.add("Docker container " + containerName + " is not running"); logger.info("Docker container " + containerName + " is not running"); return Response.serverError().entity(startResult.toArray()).build(); } String[] response = {"Wrong docker command."}; logger.info("Wrong docker command."); return Response.serverError().entity(response).build(); } // restart successful. 
String[] response = {"Docker container " + containerName + " restarted."}; logger.info("Docker container " + containerName + " restarted."); return Response.ok().entity(response).build(); } String[] response = {"docker container " + containerName + " already running."}; return Response.ok().entity(response).build(); } String[] response = {"docker files missing", "directory = " + ServerSystemUtil.getBenchmarkDockerDirectory() + containerName}; logger.error("docker files missing", "directory = " + ServerSystemUtil.getBenchmarkDockerDirectory() + containerName); return Response.serverError().entity(response).build(); } String[] response = {"docker not installed or daemon not running"}; logger.error("docker not installed or daemon not running"); return Response.serverError().entity(response).build(); } @POST @Consumes(MediaType.APPLICATION_JSON) @Path("docker/build") public Response buildDockerContainer(DockerBuildOptions dockerBuildOptions){ logger.info("Building {} ... ", dockerBuildOptions.getContainerName()); if(DockerCommandLineUtil.isDockerInstalled()){ String containerName = dockerBuildOptions.getContainerName(); File directory = new File(ServerSystemUtil.getBenchmarkDockerDirectory() + containerName); if(directory.exists()){ String command = dockerBuildOptions.getValidBuildCommand(); //TODO further check command for safety reasons if(command != null){ String[] prepareCommand = {DockerCommandLineUtil.getDockerInstallPath() + command.replace(".", directory.getPath())}; String[] specificCommand = ServerSystemUtil.getOsSpecificCommand(prepareCommand); List<String> buildResult = ServerSystemUtil.executeCommand(specificCommand); // all went good return Response.ok().entity(buildResult.toArray()).build(); } String[] response = {"Wrong docker command."}; logger.error("Wrong docker command."); return Response.serverError().entity(response).build(); } String[] response = {"docker files missing", "directory = " + ServerSystemUtil.getBenchmarkDockerDirectory() + 
containerName}; logger.error("docker files missing", "directory = " + ServerSystemUtil.getBenchmarkDockerDirectory() + containerName); return Response.serverError().entity(response).build(); } String[] response = {"docker not installed or daemon not running"}; logger.error("docker not installed or daemon not running"); return Response.serverError().entity(response).build(); } @GET @Path("docker/stop") public Response stopDockerContainer(@QueryParam("containerName") String containerName) { if (DockerCommandLineUtil.isDockerInstalled()) { List<String> stopResult = DockerCommandLineUtil.stopContainer(containerName); if (DockerCommandLineUtil.isDockerContainerRunning(containerName)) { stopResult.add("Docker container " + containerName + " is still running"); logger.error("Docker container " + containerName + " is still running"); return Response.serverError().entity(stopResult.toArray()).build(); } // all went good stopResult.add("Docker container " + containerName + " stopped"); return Response.ok().entity(stopResult.toArray()).build(); } String[] result = {"Docker is not installed or running."}; logger.error("Docker is not installed or running."); return Response.serverError().entity(result).build(); } /** * Removes all docker containers related to imageName. * If removeFiles is selected, removes the image with imageName and the related files. * * @param imageName the image name * @param removeFiles "yes", "true" or "y" to delete image and all files. * @return the server response with server cli output in entity as String[]. 
*/ @GET @Path("docker/remove") public Response removeDockerContainer(@QueryParam("imageName") String imageName, @QueryParam("removeFiles") ChronixBoolean removeFiles) { List<String> result = new LinkedList<String>(); if(DockerCommandLineUtil.isDockerInstalled()){ result.addAll(DockerCommandLineUtil.stopContainer(imageName)); List<String> containerIDs = DockerCommandLineUtil.getAllContainerIds(imageName); result.addAll(DockerCommandLineUtil.deleteContainer(containerIDs)); } if (removeFiles.getValue() == true) { File directory = new File(ServerSystemUtil.getBenchmarkDockerDirectory() + imageName); if (directory.exists()) { if(DockerCommandLineUtil.isDockerInstalled()){ String[] commandLine = {"docker rmi -f " + imageName}; String[] command = ServerSystemUtil.getOsSpecificCommand(commandLine); result.addAll(ServerSystemUtil.executeCommand(command)); } // delete directory result.add(ServerSystemUtil.deleteDirectory(directory.toPath())); } else { result.add("No directory named " + imageName + " found."); logger.warn("No directory named " + imageName + " found."); } } return Response.ok().entity(result.toArray()).build(); } }
/* * Copyright 2014, gRPC Authors All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.grpc.netty; import static com.google.common.base.Charsets.UTF_8; import static io.grpc.internal.GrpcUtil.DEFAULT_MAX_MESSAGE_SIZE; import static io.grpc.internal.GrpcUtil.DEFAULT_SERVER_KEEPALIVE_TIMEOUT_NANOS; import static io.grpc.internal.GrpcUtil.DEFAULT_SERVER_KEEPALIVE_TIME_NANOS; import static io.grpc.netty.NettyServerBuilder.MAX_CONNECTION_AGE_GRACE_NANOS_INFINITE; import static io.grpc.netty.NettyServerBuilder.MAX_CONNECTION_AGE_NANOS_DISABLED; import static io.grpc.netty.NettyServerBuilder.MAX_CONNECTION_IDLE_NANOS_DISABLED; import static io.grpc.netty.Utils.CONTENT_TYPE_GRPC; import static io.grpc.netty.Utils.CONTENT_TYPE_HEADER; import static io.grpc.netty.Utils.HTTP_METHOD; import static io.grpc.netty.Utils.TE_HEADER; import static io.grpc.netty.Utils.TE_TRAILERS; import static io.netty.buffer.Unpooled.directBuffer; import static io.netty.handler.codec.http2.Http2CodecUtil.DEFAULT_WINDOW_SIZE; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; import static org.mockito.AdditionalAnswers.delegatesTo; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyString; import static org.mockito.Matchers.eq; import static 
org.mockito.Mockito.atLeastOnce; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; import com.google.common.io.ByteStreams; import com.google.common.truth.Truth; import io.grpc.Attributes; import io.grpc.Metadata; import io.grpc.ServerStreamTracer; import io.grpc.Status; import io.grpc.Status.Code; import io.grpc.StreamTracer; import io.grpc.internal.GrpcUtil; import io.grpc.internal.KeepAliveManager; import io.grpc.internal.ServerStream; import io.grpc.internal.ServerStreamListener; import io.grpc.internal.ServerTransportListener; import io.grpc.internal.StatsTraceContext; import io.grpc.internal.StreamListener; import io.grpc.internal.testing.TestServerStreamTracer; import io.grpc.netty.GrpcHttp2HeadersUtils.GrpcHttp2ServerHeadersDecoder; import io.netty.buffer.ByteBuf; import io.netty.buffer.ByteBufUtil; import io.netty.buffer.Unpooled; import io.netty.channel.ChannelFuture; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.ChannelPromise; import io.netty.handler.codec.http2.DefaultHttp2Headers; import io.netty.handler.codec.http2.Http2CodecUtil; import io.netty.handler.codec.http2.Http2Error; import io.netty.handler.codec.http2.Http2Headers; import io.netty.handler.codec.http2.Http2LocalFlowController; import io.netty.handler.codec.http2.Http2Settings; import io.netty.handler.codec.http2.Http2Stream; import io.netty.util.AsciiString; import java.io.InputStream; import java.util.Arrays; import java.util.LinkedList; import java.util.List; import java.util.Queue; import java.util.concurrent.TimeUnit; import org.junit.Rule; import org.junit.Test; import org.junit.rules.Timeout; import org.junit.runner.RunWith; 
import org.junit.runners.JUnit4;
import org.mockito.ArgumentCaptor;
import org.mockito.Matchers;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;

/**
 * Unit tests for {@link NettyServerHandler}.
 */
@RunWith(JUnit4.class)
public class NettyServerHandlerTest extends NettyHandlerTestBase<NettyServerHandler> {

  @Rule
  public final Timeout globalTimeout = Timeout.seconds(1);

  private static final int STREAM_ID = 3;

  @Mock
  private ServerStreamListener streamListener;
  @Mock
  private ServerStreamTracer.Factory streamTracerFactory;

  private final ServerTransportListener transportListener =
      spy(new ServerTransportListenerImpl());
  private final TestServerStreamTracer streamTracer = new TestServerStreamTracer();

  private NettyServerStream stream;
  private KeepAliveManager spyKeepAliveManager;

  // Messages delivered to streamListener are drained into this queue by the
  // doAnswer stub installed in manualSetUp().
  final Queue<InputStream> streamListenerMessageQueue = new LinkedList<InputStream>();

  // Handler configuration knobs; individual tests override these BEFORE
  // calling manualSetUp().
  private int flowControlWindow = DEFAULT_WINDOW_SIZE;
  private int maxConcurrentStreams = Integer.MAX_VALUE;
  private int maxHeaderListSize = Integer.MAX_VALUE;
  private boolean permitKeepAliveWithoutCalls = true;
  private long permitKeepAliveTimeInNanos = 0;
  private long maxConnectionIdleInNanos = MAX_CONNECTION_IDLE_NANOS_DISABLED;
  private long maxConnectionAgeInNanos = MAX_CONNECTION_AGE_NANOS_DISABLED;
  private long maxConnectionAgeGraceInNanos = MAX_CONNECTION_AGE_GRACE_NANOS_INFINITE;
  private long keepAliveTimeInNanos = DEFAULT_SERVER_KEEPALIVE_TIME_NANOS;
  private long keepAliveTimeoutInNanos = DEFAULT_SERVER_KEEPALIVE_TIMEOUT_NANOS;

  /** Minimal transport listener that wires every created stream to the mock streamListener. */
  private class ServerTransportListenerImpl implements ServerTransportListener {

    @Override
    public void streamCreated(ServerStream stream, String method, Metadata headers) {
      stream.setListener(streamListener);
    }

    @Override
    public Attributes transportReady(Attributes attributes) {
      return Attributes.EMPTY;
    }

    @Override
    public void transportTerminated() {
    }
  }

  /**
   * Builds the handler and channel and replays the HTTP/2 connection preface and
   * initial settings. Called explicitly by each test (after it has adjusted the
   * configuration fields above), hence "manual" set-up.
   */
  @Override
  protected void manualSetUp() throws Exception {
    assertNull("manualSetUp should not run more than once", handler());
    MockitoAnnotations.initMocks(this);
    when(streamTracerFactory.newServerStreamTracer(anyString(), any(Metadata.class)))
        .thenReturn(streamTracer);
    // Stub messagesAvailable() to drain every produced message into
    // streamListenerMessageQueue so tests can assert on received payloads.
    doAnswer(
          new Answer<Void>() {
            @Override
            public Void answer(InvocationOnMock invocation) throws Throwable {
              StreamListener.MessageProducer producer =
                  (StreamListener.MessageProducer) invocation.getArguments()[0];
              InputStream message;
              while ((message = producer.next()) != null) {
                streamListenerMessageQueue.add(message);
              }
              return null;
            }
          })
      .when(streamListener)
      .messagesAvailable(Matchers.<StreamListener.MessageProducer>any());

    initChannel(new GrpcHttp2ServerHeadersDecoder(GrpcUtil.DEFAULT_MAX_HEADER_LIST_SIZE));

    // replace the keepAliveManager with spyKeepAliveManager
    spyKeepAliveManager =
        mock(KeepAliveManager.class, delegatesTo(handler().getKeepAliveManagerForTest()));
    handler().setKeepAliveManagerForTest(spyKeepAliveManager);

    // Simulate receipt of the connection preface
    handler().handleProtocolNegotiationCompleted(Attributes.EMPTY);
    channelRead(Http2CodecUtil.connectionPrefaceBuf());
    // Simulate receipt of initial remote settings.
    ByteBuf serializedSettings = serializeSettings(new Http2Settings());
    channelRead(serializedSettings);
  }

  @Test
  public void sendFrameShouldSucceed() throws Exception {
    manualSetUp();
    createStream();

    // Send a frame and verify that it was written.
    ChannelFuture future = enqueue(
        new SendGrpcFrameCommand(stream.transportState(), content(), false));
    assertTrue(future.isSuccess());
    verifyWrite().writeData(eq(ctx()), eq(STREAM_ID), eq(content()), eq(0), eq(false),
        any(ChannelPromise.class));
  }

  // Creating a stream must obtain its tracer from streamTracerFactory and
  // expose exactly that tracer via the stream's StatsTraceContext.
  @Test
  public void streamTracerCreated() throws Exception {
    manualSetUp();
    createStream();

    verify(streamTracerFactory).newServerStreamTracer(eq("foo/bar"), any(Metadata.class));
    StatsTraceContext statsTraceCtx = stream.statsTraceContext();
    List<StreamTracer> tracers = statsTraceCtx.getTracersForTest();
    assertEquals(1, tracers.size());
    assertSame(streamTracer, tracers.get(0));
  }

  @Test
  public void inboundDataWithEndStreamShouldForwardToStreamListener() throws Exception {
    manualSetUp();
    inboundDataShouldForwardToStreamListener(true);
  }

  @Test
  public void inboundDataShouldForwardToStreamListener() throws Exception {
    manualSetUp();
    inboundDataShouldForwardToStreamListener(false);
  }

  // Shared body for the two inbound-data tests above; endStream additionally
  // expects halfClosed() on the listener.
  private void inboundDataShouldForwardToStreamListener(boolean endStream) throws Exception {
    createStream();
    stream.request(1);

    // Create a data frame and then trigger the handler to read it.
    ByteBuf frame = grpcDataFrame(STREAM_ID, endStream, contentAsArray());
    channelRead(frame);
    verify(streamListener, atLeastOnce())
        .messagesAvailable(any(StreamListener.MessageProducer.class));
    InputStream message = streamListenerMessageQueue.poll();
    assertArrayEquals(ByteBufUtil.getBytes(content()), ByteStreams.toByteArray(message));
    message.close();
    assertNull("no additional message expected", streamListenerMessageQueue.poll());

    if (endStream) {
      verify(streamListener).halfClosed();
    }
    verify(streamListener, atLeastOnce()).onReady();
    verifyNoMoreInteractions(streamListener);
  }

  // An empty DATA frame with END_STREAM delivers an empty message and then
  // half-closes the stream from the client side.
  @Test
  public void clientHalfCloseShouldForwardToStreamListener() throws Exception {
    manualSetUp();
    createStream();
    stream.request(1);

    channelRead(emptyGrpcFrame(STREAM_ID, true));

    verify(streamListener, atLeastOnce())
        .messagesAvailable(any(StreamListener.MessageProducer.class));
    InputStream message = streamListenerMessageQueue.poll();
    assertArrayEquals(new byte[0], ByteStreams.toByteArray(message));
    assertNull("no additional message expected", streamListenerMessageQueue.poll());
    verify(streamListener).halfClosed();
    verify(streamListener, atLeastOnce()).onReady();
    verifyNoMoreInteractions(streamListener);
  }

  // A client RST_STREAM(CANCEL) must surface as a CANCELLED status on the listener.
  @Test
  public void clientCancelShouldForwardToStreamListener() throws Exception {
    manualSetUp();
    createStream();

    channelRead(rstStreamFrame(STREAM_ID, (int) Http2Error.CANCEL.code()));

    ArgumentCaptor<Status> statusCap = ArgumentCaptor.forClass(Status.class);
    verify(streamListener).closed(statusCap.capture());
    assertEquals(Code.CANCELLED, statusCap.getValue().getCode());
    Truth.assertThat(statusCap.getValue().getDescription()).contains("RST_STREAM");
    verify(streamListener, atLeastOnce()).onReady();
    assertNull("no messages expected", streamListenerMessageQueue.poll());
  }

  // A listener exception while reading DATA must close only the stream
  // (UNKNOWN status carrying the cause), never the whole channel.
  @Test
  public void streamErrorShouldNotCloseChannel() throws Exception {
    manualSetUp();
    createStream();
    stream.request(1);

    // When a DATA frame is read, throw an exception. It will be converted into an
    // Http2StreamException.
    RuntimeException e = new RuntimeException("Fake Exception");
    doThrow(e).when(streamListener).messagesAvailable(any(StreamListener.MessageProducer.class));

    // Read a DATA frame to trigger the exception.
    channelRead(emptyGrpcFrame(STREAM_ID, true));

    // Verify that the channel was NOT closed.
    assertTrue(channel().isOpen());

    // Verify the stream was closed.
    ArgumentCaptor<Status> captor = ArgumentCaptor.forClass(Status.class);
    verify(streamListener).closed(captor.capture());
    assertEquals(e, captor.getValue().asException().getCause());
    assertEquals(Code.UNKNOWN, captor.getValue().getCode());
  }

  // close() must send a graceful GOAWAY(NO_ERROR) and close the channel.
  @Test
  public void closeShouldCloseChannel() throws Exception {
    manualSetUp();
    handler().close(ctx(), newPromise());

    verifyWrite().writeGoAway(eq(ctx()), eq(0), eq(Http2Error.NO_ERROR.code()),
        eq(Unpooled.EMPTY_BUFFER), any(ChannelPromise.class));

    // Verify that the channel was closed.
    assertFalse(channel().isOpen());
  }

  @Test
  public void exceptionCaughtShouldCloseConnection() throws Exception {
    manualSetUp();
    handler().exceptionCaught(ctx(), new RuntimeException("fake exception"));

    // TODO(nmittler): EmbeddedChannel does not currently invoke the channelInactive processing,
    // so exceptionCaught() will not close streams properly in this test.
    // Once https://github.com/netty/netty/issues/4316 is resolved, we should also verify that
    // any open streams are closed properly.
    assertFalse(channel().isOpen());
  }

  // Connection teardown must close every open stream with a non-OK status.
  @Test
  public void channelInactiveShouldCloseStreams() throws Exception {
    manualSetUp();
    createStream();
    handler().channelInactive(ctx());
    ArgumentCaptor<Status> captor = ArgumentCaptor.forClass(Status.class);
    verify(streamListener).closed(captor.capture());
    assertFalse(captor.getValue().isOk());
  }

  // The configured maxConcurrentStreams must appear in the advertised SETTINGS frame.
  @Test
  public void shouldAdvertiseMaxConcurrentStreams() throws Exception {
    maxConcurrentStreams = 314;
    manualSetUp();
    ArgumentCaptor<Http2Settings> captor = ArgumentCaptor.forClass(Http2Settings.class);
    verifyWrite().writeSettings(
        any(ChannelHandlerContext.class), captor.capture(), any(ChannelPromise.class));

    assertEquals(maxConcurrentStreams, captor.getValue().maxConcurrentStreams().intValue());
  }

  // The configured maxHeaderListSize must appear in the advertised SETTINGS frame.
  @Test
  public void shouldAdvertiseMaxHeaderListSize() throws Exception {
    maxHeaderListSize = 123;
    manualSetUp();
    ArgumentCaptor<Http2Settings> captor = ArgumentCaptor.forClass(Http2Settings.class);
    verifyWrite().writeSettings(
        any(ChannelHandlerContext.class), captor.capture(), any(ChannelPromise.class));

    assertEquals(maxHeaderListSize, captor.getValue().maxHeaderListSize().intValue());
  }

  // flowControlWindow must override the connection-level local flow-control window.
  @Test
  public void connectionWindowShouldBeOverridden() throws Exception {
    flowControlWindow = 1048576; // 1MiB
    manualSetUp();
    Http2Stream connectionStream = connection().connectionStream();
    Http2LocalFlowController localFlowController = connection().local().flowController();
    int actualInitialWindowSize = localFlowController.initialWindowSize(connectionStream);
    int actualWindowSize = localFlowController.windowSize(connectionStream);
    assertEquals(flowControlWindow, actualWindowSize);
    assertEquals(flowControlWindow, actualInitialWindowSize);
  }

  // Server-side cancellation must be written out as RST_STREAM(CANCEL).
  @Test
  public void cancelShouldSendRstStream() throws Exception {
    manualSetUp();
    createStream();
    enqueue(new CancelServerStreamCommand(stream.transportState(), Status.DEADLINE_EXCEEDED));
    verifyWrite().writeRstStream(eq(ctx()), eq(stream.transportState().id()),
        eq(Http2Error.CANCEL.code()), any(ChannelPromise.class));
  }

  // A non-gRPC content-type must be refused with RST_STREAM(REFUSED_STREAM).
  @Test
  public void headersWithInvalidContentTypeShouldFail() throws Exception {
    manualSetUp();
    Http2Headers headers = new DefaultHttp2Headers()
        .method(HTTP_METHOD)
        .set(CONTENT_TYPE_HEADER, new AsciiString("application/bad", UTF_8))
        .set(TE_HEADER, TE_TRAILERS)
        .path(new AsciiString("/foo/bar"));
    ByteBuf headersFrame = headersFrame(STREAM_ID, headers);
    channelRead(headersFrame);
    verifyWrite().writeRstStream(eq(ctx()), eq(STREAM_ID), eq(Http2Error.REFUSED_STREAM.code()),
        any(ChannelPromise.class));
  }

  // "application/grpc+json" (an extension content-type) must be accepted and
  // result in a created stream.
  @Test
  public void headersSupportExtensionContentType() throws Exception {
    manualSetUp();
    Http2Headers headers = new DefaultHttp2Headers()
        .method(HTTP_METHOD)
        .set(CONTENT_TYPE_HEADER, new AsciiString("application/grpc+json", UTF_8))
        .set(TE_HEADER, TE_TRAILERS)
        .path(new AsciiString("/foo/bar"));
    ByteBuf headersFrame = headersFrame(STREAM_ID, headers);
    channelRead(headersFrame);

    ArgumentCaptor<NettyServerStream> streamCaptor =
        ArgumentCaptor.forClass(NettyServerStream.class);
    ArgumentCaptor<String> methodCaptor = ArgumentCaptor.forClass(String.class);
    verify(transportListener).streamCreated(streamCaptor.capture(), methodCaptor.capture(),
        any(Metadata.class));
    stream = streamCaptor.getValue();
  }

  // Reading a HEADERS frame must notify the keep-alive manager of data receipt.
  @Test
  public void keepAliveManagerOnDataReceived_headersRead() throws Exception {
    manualSetUp();
    ByteBuf headersFrame = headersFrame(STREAM_ID, new DefaultHttp2Headers());
    channelRead(headersFrame);

    verify(spyKeepAliveManager).onDataReceived();
    verify(spyKeepAliveManager, never()).onTransportTermination();
  }

  // Every DATA frame read must notify the keep-alive manager once.
  @Test
  public void keepAliveManagerOnDataReceived_dataRead() throws Exception {
    manualSetUp();
    createStream();
    verify(spyKeepAliveManager).onDataReceived(); // received headers

    channelRead(grpcDataFrame(STREAM_ID, false, contentAsArray()));
    verify(spyKeepAliveManager, times(2)).onDataReceived();

    channelRead(grpcDataFrame(STREAM_ID, false, contentAsArray()));
    verify(spyKeepAliveManager, times(3)).onDataReceived();
    verify(spyKeepAliveManager, never()).onTransportTermination();
} @Test public void keepAliveManagerOnDataReceived_rstStreamRead() throws Exception { manualSetUp(); createStream(); verify(spyKeepAliveManager).onDataReceived(); // received headers channelRead(rstStreamFrame(STREAM_ID, (int) Http2Error.CANCEL.code())); verify(spyKeepAliveManager, times(2)).onDataReceived(); verify(spyKeepAliveManager, never()).onTransportTermination(); } @Test public void keepAliveManagerOnDataReceived_pingRead() throws Exception { manualSetUp(); ByteBuf payload = handler().ctx().alloc().buffer(8); payload.writeLong(1234L); channelRead(pingFrame(false /* isAck */, payload)); verify(spyKeepAliveManager).onDataReceived(); verify(spyKeepAliveManager, never()).onTransportTermination(); } @Test public void keepAliveManagerOnDataReceived_pingActRead() throws Exception { manualSetUp(); ByteBuf payload = handler().ctx().alloc().buffer(8); payload.writeLong(1234L); channelRead(pingFrame(true /* isAck */, payload)); verify(spyKeepAliveManager).onDataReceived(); verify(spyKeepAliveManager, never()).onTransportTermination(); } @Test public void keepAliveManagerOnTransportTermination() throws Exception { manualSetUp(); handler().channelInactive(handler().ctx()); verify(spyKeepAliveManager).onTransportTermination(); } @Test public void keepAliveManager_pingSent() throws Exception { ByteBuf pingBuf = directBuffer(8).writeLong(0xDEADL); keepAliveTimeInNanos = TimeUnit.MILLISECONDS.toNanos(10L); keepAliveTimeoutInNanos = TimeUnit.MINUTES.toNanos(30L); manualSetUp(); fakeClock().forwardNanos(keepAliveTimeInNanos); verifyWrite().writePing(eq(ctx()), eq(false), eq(pingBuf), any(ChannelPromise.class)); spyKeepAliveManager.onDataReceived(); fakeClock().forwardTime(10L, TimeUnit.MILLISECONDS); verifyWrite(times(2)) .writePing(eq(ctx()), eq(false), eq(pingBuf), any(ChannelPromise.class)); assertTrue(channel().isOpen()); } @Test public void keepAliveManager_pingTimeout() throws Exception { keepAliveTimeInNanos = TimeUnit.NANOSECONDS.toNanos(123L); keepAliveTimeoutInNanos 
= TimeUnit.NANOSECONDS.toNanos(456L); manualSetUp(); fakeClock().forwardNanos(keepAliveTimeInNanos); assertTrue(channel().isOpen()); fakeClock().forwardNanos(keepAliveTimeoutInNanos); assertTrue(!channel().isOpen()); } @Test public void keepAliveEnforcer_enforcesPings() throws Exception { permitKeepAliveWithoutCalls = false; permitKeepAliveTimeInNanos = TimeUnit.HOURS.toNanos(1); manualSetUp(); ByteBuf payload = handler().ctx().alloc().buffer(8); payload.writeLong(1); for (int i = 0; i < KeepAliveEnforcer.MAX_PING_STRIKES + 1; i++) { channelRead(pingFrame(false /* isAck */, payload.slice())); } payload.release(); verifyWrite().writeGoAway(eq(ctx()), eq(0), eq(Http2Error.ENHANCE_YOUR_CALM.code()), any(ByteBuf.class), any(ChannelPromise.class)); assertFalse(channel().isActive()); } @Test public void keepAliveEnforcer_sendingDataResetsCounters() throws Exception { permitKeepAliveWithoutCalls = false; permitKeepAliveTimeInNanos = TimeUnit.HOURS.toNanos(1); manualSetUp(); createStream(); Http2Headers headers = Utils.convertServerHeaders(new Metadata()); ChannelFuture future = enqueue( new SendResponseHeadersCommand(stream.transportState(), headers, false)); future.get(); ByteBuf payload = handler().ctx().alloc().buffer(8); payload.writeLong(1); for (int i = 0; i < 10; i++) { future = enqueue( new SendGrpcFrameCommand(stream.transportState(), content().retainedSlice(), false)); future.get(); channel().releaseOutbound(); channelRead(pingFrame(false /* isAck */, payload.slice())); } payload.release(); verifyWrite(never()).writeGoAway(eq(ctx()), eq(STREAM_ID), eq(Http2Error.ENHANCE_YOUR_CALM.code()), any(ByteBuf.class), any(ChannelPromise.class)); } @Test public void keepAliveEnforcer_initialIdle() throws Exception { permitKeepAliveWithoutCalls = false; permitKeepAliveTimeInNanos = 0; manualSetUp(); ByteBuf payload = handler().ctx().alloc().buffer(8); payload.writeLong(1); for (int i = 0; i < KeepAliveEnforcer.MAX_PING_STRIKES + 1; i++) { channelRead(pingFrame(false /* 
isAck */, payload.slice())); } payload.release(); verifyWrite().writeGoAway(eq(ctx()), eq(0), eq(Http2Error.ENHANCE_YOUR_CALM.code()), any(ByteBuf.class), any(ChannelPromise.class)); assertFalse(channel().isActive()); } @Test public void keepAliveEnforcer_noticesActive() throws Exception { permitKeepAliveWithoutCalls = false; permitKeepAliveTimeInNanos = 0; manualSetUp(); createStream(); ByteBuf payload = handler().ctx().alloc().buffer(8); payload.writeLong(1); for (int i = 0; i < 10; i++) { channelRead(pingFrame(false /* isAck */, payload.slice())); } payload.release(); verifyWrite(never()).writeGoAway(eq(ctx()), eq(STREAM_ID), eq(Http2Error.ENHANCE_YOUR_CALM.code()), any(ByteBuf.class), any(ChannelPromise.class)); } @Test public void keepAliveEnforcer_noticesInactive() throws Exception { permitKeepAliveWithoutCalls = false; permitKeepAliveTimeInNanos = 0; manualSetUp(); createStream(); channelRead(rstStreamFrame(STREAM_ID, (int) Http2Error.CANCEL.code())); ByteBuf payload = handler().ctx().alloc().buffer(8); payload.writeLong(1); for (int i = 0; i < KeepAliveEnforcer.MAX_PING_STRIKES + 1; i++) { channelRead(pingFrame(false /* isAck */, payload.slice())); } payload.release(); verifyWrite().writeGoAway(eq(ctx()), eq(STREAM_ID), eq(Http2Error.ENHANCE_YOUR_CALM.code()), any(ByteBuf.class), any(ChannelPromise.class)); assertFalse(channel().isActive()); } @Test public void noGoAwaySentBeforeMaxConnectionIdleReached() throws Exception { maxConnectionIdleInNanos = TimeUnit.MINUTES.toNanos(30L); manualSetUp(); fakeClock().forwardTime(20, TimeUnit.MINUTES); // GO_AWAY not sent yet verifyWrite(never()).writeGoAway( any(ChannelHandlerContext.class), any(Integer.class), any(Long.class), any(ByteBuf.class), any(ChannelPromise.class)); assertTrue(channel().isOpen()); } @Test public void maxConnectionIdle_goAwaySent() throws Exception { maxConnectionIdleInNanos = TimeUnit.MILLISECONDS.toNanos(10L); manualSetUp(); assertTrue(channel().isOpen()); 
fakeClock().forwardNanos(maxConnectionIdleInNanos); // GO_AWAY sent verifyWrite().writeGoAway( eq(ctx()), eq(Integer.MAX_VALUE), eq(Http2Error.NO_ERROR.code()), any(ByteBuf.class), any(ChannelPromise.class)); // channel closed assertTrue(!channel().isOpen()); } @Test public void maxConnectionIdle_activeThenRst() throws Exception { maxConnectionIdleInNanos = TimeUnit.MILLISECONDS.toNanos(10L); manualSetUp(); createStream(); fakeClock().forwardNanos(maxConnectionIdleInNanos); // GO_AWAY not sent when active verifyWrite(never()).writeGoAway( any(ChannelHandlerContext.class), any(Integer.class), any(Long.class), any(ByteBuf.class), any(ChannelPromise.class)); assertTrue(channel().isOpen()); channelRead(rstStreamFrame(STREAM_ID, (int) Http2Error.CANCEL.code())); fakeClock().forwardNanos(maxConnectionIdleInNanos); // GO_AWAY sent verifyWrite().writeGoAway( eq(ctx()), eq(Integer.MAX_VALUE), eq(Http2Error.NO_ERROR.code()), any(ByteBuf.class), any(ChannelPromise.class)); // channel closed assertTrue(!channel().isOpen()); } @Test public void noGoAwaySentBeforeMaxConnectionAgeReached() throws Exception { maxConnectionAgeInNanos = TimeUnit.MINUTES.toNanos(30L); manualSetUp(); fakeClock().forwardTime(20, TimeUnit.MINUTES); // GO_AWAY not sent yet verifyWrite(never()).writeGoAway( any(ChannelHandlerContext.class), any(Integer.class), any(Long.class), any(ByteBuf.class), any(ChannelPromise.class)); assertTrue(channel().isOpen()); } @Test public void maxConnectionAge_goAwaySent() throws Exception { maxConnectionAgeInNanos = TimeUnit.MILLISECONDS.toNanos(10L); manualSetUp(); assertTrue(channel().isOpen()); fakeClock().forwardNanos(maxConnectionAgeInNanos); // GO_AWAY sent verifyWrite().writeGoAway( eq(ctx()), eq(Integer.MAX_VALUE), eq(Http2Error.NO_ERROR.code()), any(ByteBuf.class), any(ChannelPromise.class)); // channel closed assertTrue(!channel().isOpen()); } @Test public void maxConnectionAgeGrace_channelStillOpenDuringGracePeriod() throws Exception { maxConnectionAgeInNanos = 
TimeUnit.MILLISECONDS.toNanos(10L); maxConnectionAgeGraceInNanos = TimeUnit.MINUTES.toNanos(30L); manualSetUp(); createStream(); fakeClock().forwardNanos(maxConnectionAgeInNanos); verifyWrite().writeGoAway( eq(ctx()), eq(Integer.MAX_VALUE), eq(Http2Error.NO_ERROR.code()), any(ByteBuf.class), any(ChannelPromise.class)); fakeClock().forwardTime(20, TimeUnit.MINUTES); // channel not closed yet assertTrue(channel().isOpen()); } @Test public void maxConnectionAgeGrace_channelClosedAfterGracePeriod() throws Exception { maxConnectionAgeInNanos = TimeUnit.MILLISECONDS.toNanos(10L); maxConnectionAgeGraceInNanos = TimeUnit.MINUTES.toNanos(30L); manualSetUp(); createStream(); fakeClock().forwardNanos(maxConnectionAgeInNanos); verifyWrite().writeGoAway( eq(ctx()), eq(Integer.MAX_VALUE), eq(Http2Error.NO_ERROR.code()), any(ByteBuf.class), any(ChannelPromise.class)); assertTrue(channel().isOpen()); fakeClock().forwardNanos(maxConnectionAgeGraceInNanos); // channel closed assertTrue(!channel().isOpen()); } private void createStream() throws Exception { Http2Headers headers = new DefaultHttp2Headers() .method(HTTP_METHOD) .set(CONTENT_TYPE_HEADER, CONTENT_TYPE_GRPC) .set(TE_HEADER, TE_TRAILERS) .path(new AsciiString("/foo/bar")); ByteBuf headersFrame = headersFrame(STREAM_ID, headers); channelRead(headersFrame); ArgumentCaptor<NettyServerStream> streamCaptor = ArgumentCaptor.forClass(NettyServerStream.class); ArgumentCaptor<String> methodCaptor = ArgumentCaptor.forClass(String.class); verify(transportListener).streamCreated(streamCaptor.capture(), methodCaptor.capture(), any(Metadata.class)); stream = streamCaptor.getValue(); } private ByteBuf emptyGrpcFrame(int streamId, boolean endStream) throws Exception { ByteBuf buf = NettyTestUtil.messageFrame(""); try { return dataFrame(streamId, endStream, buf); } finally { buf.release(); } } @Override protected NettyServerHandler newHandler() { return NettyServerHandler.newHandler( frameReader(), frameWriter(), transportListener, 
Arrays.asList(streamTracerFactory), maxConcurrentStreams, flowControlWindow, maxHeaderListSize, DEFAULT_MAX_MESSAGE_SIZE, keepAliveTimeInNanos, keepAliveTimeoutInNanos, maxConnectionIdleInNanos, maxConnectionAgeInNanos, maxConnectionAgeGraceInNanos, permitKeepAliveWithoutCalls, permitKeepAliveTimeInNanos); } @Override protected WriteQueue initWriteQueue() { return handler().getWriteQueue(); } @Override protected void makeStream() throws Exception { createStream(); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.nifi.processors.standard;

import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
import org.apache.nifi.annotation.behavior.ReadsAttribute;
import org.apache.nifi.annotation.behavior.ReadsAttributes;
import org.apache.nifi.annotation.behavior.SideEffectFree;
import org.apache.nifi.annotation.behavior.TriggerWhenEmpty;
import org.apache.nifi.annotation.behavior.WritesAttribute;
import org.apache.nifi.annotation.behavior.WritesAttributes;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.SeeAlso;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.annotation.lifecycle.OnStopped;
import org.apache.nifi.avro.AvroTypeUtil;
import org.apache.nifi.components.AllowableValue;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.components.ValidationContext;
import org.apache.nifi.components.ValidationResult;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.flowfile.attributes.FragmentAttributes;
import org.apache.nifi.processor.AbstractSessionFactoryProcessor;
import org.apache.nifi.processor.DataUnit;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.ProcessSessionFactory;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.util.FlowFileFilters;
import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.processors.standard.merge.AttributeStrategyUtil;
import org.apache.nifi.processors.standard.merge.RecordBinManager;
import org.apache.nifi.schema.access.SchemaNotFoundException;
import org.apache.nifi.serialization.MalformedRecordException;
import org.apache.nifi.serialization.RecordReader;
import org.apache.nifi.serialization.RecordReaderFactory;
import org.apache.nifi.serialization.RecordSetWriterFactory;
import org.apache.nifi.serialization.record.RecordSchema;

import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;

@SideEffectFree
@TriggerWhenEmpty
@InputRequirement(Requirement.INPUT_REQUIRED)
@Tags({"merge", "record", "content", "correlation", "stream", "event"})
@CapabilityDescription("This Processor merges together multiple record-oriented FlowFiles into a single FlowFile that contains all of the Records of the input FlowFiles. " +
    "This Processor works by creating 'bins' and then adding FlowFiles to these bins until they are full. Once a bin is full, all of the FlowFiles will be combined into " +
    "a single output FlowFile, and that FlowFile will be routed to the 'merged' Relationship. A bin will consist of potentially many 'like FlowFiles'. In order for two " +
    "FlowFiles to be considered 'like FlowFiles', they must have the same Schema (as identified by the Record Reader) and, if the <Correlation Attribute Name> property " +
    "is set, the same value for the specified attribute. See Processor Usage and Additional Details for more information.")
@ReadsAttributes({
    @ReadsAttribute(attribute = "fragment.identifier", description = "Applicable only if the <Merge Strategy> property is set to Defragment. " +
        "All FlowFiles with the same value for this attribute will be bundled together."),
    @ReadsAttribute(attribute = "fragment.count", description = "Applicable only if the <Merge Strategy> property is set to Defragment. This " +
        "attribute must be present on all FlowFiles with the same value for the fragment.identifier attribute. All FlowFiles in the same " +
        "bundle must have the same value for this attribute. The value of this attribute indicates how many FlowFiles should be expected " +
        "in the given bundle."),
})
@WritesAttributes({
    @WritesAttribute(attribute = "record.count", description = "The merged FlowFile will have a 'record.count' attribute indicating the number of records " +
        "that were written to the FlowFile."),
    @WritesAttribute(attribute = "mime.type", description = "The MIME Type indicated by the Record Writer"),
    @WritesAttribute(attribute = "merge.count", description = "The number of FlowFiles that were merged into this bundle"),
    @WritesAttribute(attribute = "merge.bin.age", description = "The age of the bin, in milliseconds, when it was merged and output. Effectively " +
        "this is the greatest amount of time that any FlowFile in this bundle remained waiting in this processor before it was output"),
    @WritesAttribute(attribute = "merge.uuid", description = "UUID of the merged FlowFile that will be added to the original FlowFiles attributes"),
    @WritesAttribute(attribute = "<Attributes from Record Writer>", description = "Any Attribute that the configured Record Writer returns will be added to the FlowFile.")
})
@SeeAlso({MergeContent.class, SplitRecord.class, PartitionRecord.class})
public class MergeRecord extends AbstractSessionFactoryProcessor {
    // attributes for defragmentation
    public static final String FRAGMENT_ID_ATTRIBUTE = FragmentAttributes.FRAGMENT_ID.key();
    public static final String FRAGMENT_INDEX_ATTRIBUTE = FragmentAttributes.FRAGMENT_INDEX.key();
    public static final String FRAGMENT_COUNT_ATTRIBUTE = FragmentAttributes.FRAGMENT_COUNT.key();

    public static final String MERGE_COUNT_ATTRIBUTE = "merge.count";
    public static final String MERGE_BIN_AGE_ATTRIBUTE = "merge.bin.age";
    public static final String MERGE_UUID_ATTRIBUTE = "merge.uuid";

    public static final AllowableValue MERGE_STRATEGY_BIN_PACK = new AllowableValue(
        "Bin-Packing Algorithm",
        "Bin-Packing Algorithm",
        "Generates 'bins' of FlowFiles and fills each bin as full as possible. FlowFiles are placed into a bin based on their size and optionally "
            + "their attributes (if the <Correlation Attribute> property is set)");
    public static final AllowableValue MERGE_STRATEGY_DEFRAGMENT = new AllowableValue(
        "Defragment",
        "Defragment",
        "Combines fragments that are associated by attributes back into a single cohesive FlowFile. If using this strategy, all FlowFiles must "
            + "have the attributes <fragment.identifier> and <fragment.count>. All FlowFiles with the same value for \"fragment.identifier\" "
            + "will be grouped together. All FlowFiles in this group must have the same value for the \"fragment.count\" attribute. The ordering of "
            + "the Records that are output is not guaranteed.");

    public static final PropertyDescriptor RECORD_READER = new PropertyDescriptor.Builder()
        .name("record-reader")
        .displayName("Record Reader")
        .description("Specifies the Controller Service to use for reading incoming data")
        .identifiesControllerService(RecordReaderFactory.class)
        .required(true)
        .build();
    public static final PropertyDescriptor RECORD_WRITER = new PropertyDescriptor.Builder()
        .name("record-writer")
        .displayName("Record Writer")
        .description("Specifies the Controller Service to use for writing out the records")
        .identifiesControllerService(RecordSetWriterFactory.class)
        .required(true)
        .build();

    public static final PropertyDescriptor MERGE_STRATEGY = new PropertyDescriptor.Builder()
        .name("merge-strategy")
        .displayName("Merge Strategy")
        .description("Specifies the algorithm used to merge records. The 'Defragment' algorithm combines fragments that are associated by "
            + "attributes back into a single cohesive FlowFile. The 'Bin-Packing Algorithm' generates a FlowFile populated by arbitrarily "
            + "chosen FlowFiles")
        .required(true)
        .allowableValues(MERGE_STRATEGY_BIN_PACK, MERGE_STRATEGY_DEFRAGMENT)
        .defaultValue(MERGE_STRATEGY_BIN_PACK.getValue())
        .build();

    public static final PropertyDescriptor CORRELATION_ATTRIBUTE_NAME = new PropertyDescriptor.Builder()
        .name("correlation-attribute-name")
        .displayName("Correlation Attribute Name")
        .description("If specified, two FlowFiles will be binned together only if they have the same value for "
            + "this Attribute. If not specified, FlowFiles are bundled by the order in which they are pulled from the queue.")
        .required(false)
        .expressionLanguageSupported(ExpressionLanguageScope.NONE)
        .addValidator(StandardValidators.ATTRIBUTE_KEY_VALIDATOR)
        .defaultValue(null)
        .build();

    public static final PropertyDescriptor MIN_SIZE = new PropertyDescriptor.Builder()
        .name("min-bin-size")
        .displayName("Minimum Bin Size")
        // typo fix: was "The minimum size of for the bin"
        .description("The minimum size for the bin")
        .required(true)
        .defaultValue("0 B")
        .addValidator(StandardValidators.DATA_SIZE_VALIDATOR)
        .build();

    public static final PropertyDescriptor MAX_SIZE = new PropertyDescriptor.Builder()
        .name("max-bin-size")
        .displayName("Maximum Bin Size")
        .description("The maximum size for the bundle. If not specified, there is no maximum. This is a 'soft limit' in that if a FlowFile is added to a bin, "
            + "all records in that FlowFile will be added, so this limit may be exceeded by up to the number of bytes in last input FlowFile.")
        .required(false)
        .addValidator(StandardValidators.DATA_SIZE_VALIDATOR)
        .build();

    public static final PropertyDescriptor MIN_RECORDS = new PropertyDescriptor.Builder()
        .name("min-records")
        .displayName("Minimum Number of Records")
        .description("The minimum number of records to include in a bin")
        .required(true)
        .defaultValue("1")
        .addValidator(StandardValidators.POSITIVE_INTEGER_VALIDATOR)
        .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
        .build();

    public static final PropertyDescriptor MAX_RECORDS = new PropertyDescriptor.Builder()
        .name("max-records")
        .displayName("Maximum Number of Records")
        // typo fix: was "FlowFIle"
        .description("The maximum number of Records to include in a bin. This is a 'soft limit' in that if a FlowFile is added to a bin, all records in that FlowFile will be added, "
            + "so this limit may be exceeded by up to the number of records in the last input FlowFile.")
        .required(false)
        .defaultValue("1000")
        .addValidator(StandardValidators.POSITIVE_INTEGER_VALIDATOR)
        .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
        .build();

    public static final PropertyDescriptor MAX_BIN_COUNT = new PropertyDescriptor.Builder()
        // NOTE(review): property name uses dots while the others use dashes; kept as-is
        // because renaming a property breaks existing flow configurations.
        .name("max.bin.count")
        .displayName("Maximum Number of Bins")
        // typo fix: was "conurrent"
        .description("Specifies the maximum number of bins that can be held in memory at any one time. "
            + "This number should not be smaller than the maximum number of concurrent threads for this Processor, "
            + "or the bins that are created will often consist only of a single incoming FlowFile.")
        .defaultValue("10")
        .required(true)
        .addValidator(StandardValidators.POSITIVE_INTEGER_VALIDATOR)
        .build();

    public static final PropertyDescriptor MAX_BIN_AGE = new PropertyDescriptor.Builder()
        .name("max-bin-age")
        .displayName("Max Bin Age")
        .description("The maximum age of a Bin that will trigger a Bin to be complete. Expected format is <duration> <time unit> "
            + "where <duration> is a positive integer and time unit is one of seconds, minutes, hours")
        .required(false)
        .addValidator(StandardValidators.TIME_PERIOD_VALIDATOR)
        .build();

    public static final Relationship REL_MERGED = new Relationship.Builder()
        .name("merged")
        .description("The FlowFile containing the merged records")
        .build();
    public static final Relationship REL_ORIGINAL = new Relationship.Builder()
        .name("original")
        .description("The FlowFiles that were used to create the bundle")
        .build();
    public static final Relationship REL_FAILURE = new Relationship.Builder()
        .name("failure")
        // typo fix: was "used to created the bundle"
        .description("If the bundle cannot be created, all FlowFiles that would have been used to create the bundle will be transferred to failure")
        .build();

    // Shared across concurrent onTrigger threads; created lazily on first trigger and
    // cleared (after purging) when the processor is stopped.
    private final AtomicReference<RecordBinManager> binManager = new AtomicReference<>();

    @Override
    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
        final List<PropertyDescriptor> properties = new ArrayList<>();
        properties.add(RECORD_READER);
        properties.add(RECORD_WRITER);
        properties.add(MERGE_STRATEGY);
        properties.add(CORRELATION_ATTRIBUTE_NAME);
        properties.add(AttributeStrategyUtil.ATTRIBUTE_STRATEGY);
        properties.add(MIN_RECORDS);
        properties.add(MAX_RECORDS);
        properties.add(MIN_SIZE);
        properties.add(MAX_SIZE);
        properties.add(MAX_BIN_AGE);
        properties.add(MAX_BIN_COUNT);
        return properties;
    }

    @Override
    public Set<Relationship> getRelationships() {
        final Set<Relationship> relationships = new HashSet<>();
        relationships.add(REL_ORIGINAL);
        relationships.add(REL_FAILURE);
        relationships.add(REL_MERGED);
        return relationships;
    }

    /**
     * Discards any partially-filled bins when the processor is stopped so that no
     * stale state survives a restart.
     */
    @OnStopped
    public final void resetState() {
        final RecordBinManager manager = binManager.get();
        if (manager != null) {
            manager.purge();
        }
        binManager.set(null);
    }

    /**
     * Cross-property validation: min/max record counts and min/max bin sizes must be
     * consistent with one another. The per-property validators cannot catch these
     * because Expression Language values are only resolved here.
     */
    @Override
    protected Collection<ValidationResult> customValidate(final ValidationContext validationContext) {
        final List<ValidationResult> results = new ArrayList<>();

        final Integer minRecords = validationContext.getProperty(MIN_RECORDS).evaluateAttributeExpressions().asInteger();
        final Integer maxRecords = validationContext.getProperty(MAX_RECORDS).evaluateAttributeExpressions().asInteger();

        if (minRecords != null && maxRecords != null && maxRecords < minRecords) {
            results.add(new ValidationResult.Builder()
                .subject("Max Records")
                .input(String.valueOf(maxRecords))
                .valid(false)
                .explanation("<Maximum Number of Records> property cannot be smaller than <Minimum Number of Records> property")
                .build());
        }
        if (minRecords != null && minRecords <= 0) {
            results.add(new ValidationResult.Builder()
                .subject("Min Records")
                .input(String.valueOf(minRecords))
                .valid(false)
                .explanation("<Minimum Number of Records> property cannot be negative or zero")
                .build());
        }
        if (maxRecords != null && maxRecords <= 0) {
            results.add(new ValidationResult.Builder()
                .subject("Max Records")
                .input(String.valueOf(maxRecords))
                .valid(false)
                .explanation("<Maximum Number of Records> property cannot be negative or zero")
                .build());
        }

        final Double minSize = validationContext.getProperty(MIN_SIZE).asDataSize(DataUnit.B);
        final Double maxSize = validationContext.getProperty(MAX_SIZE).asDataSize(DataUnit.B);
        if (minSize != null && maxSize != null && maxSize < minSize) {
            results.add(new ValidationResult.Builder()
                .subject("Max Size")
                .input(validationContext.getProperty(MAX_SIZE).getValue())
                .valid(false)
                .explanation("<Maximum Bin Size> property cannot be smaller than <Minimum Bin Size> property")
                .build());
        }

        return results;
    }

    /**
     * Pulls a batch of FlowFiles, routes each into the appropriate bin, and then
     * completes any bins that are full enough or have expired. Yields when there is
     * no work to do.
     */
    @Override
    public void onTrigger(final ProcessContext context, final ProcessSessionFactory sessionFactory) throws ProcessException {
        // Lazily create the shared bin manager; CAS loop so concurrent threads agree on one instance.
        RecordBinManager manager = binManager.get();
        while (manager == null) {
            manager = new RecordBinManager(context, sessionFactory, getLogger());
            manager.setMaxBinAge(context.getProperty(MAX_BIN_AGE).asTimePeriod(TimeUnit.NANOSECONDS), TimeUnit.NANOSECONDS);
            final boolean updated = binManager.compareAndSet(null, manager);
            if (!updated) {
                manager = binManager.get();
            }
        }

        final ProcessSession session = sessionFactory.createSession();
        final List<FlowFile> flowFiles = session.get(FlowFileFilters.newSizeBasedFilter(250, DataUnit.KB, 250));
        if (getLogger().isDebugEnabled()) {
            final List<String> ids = flowFiles.stream().map(ff -> "id=" + ff.getId()).collect(Collectors.toList());
            getLogger().debug("Pulled {} FlowFiles from queue: {}", new Object[] {ids.size(), ids});
        }

        // Defragment and correlation-based merging must block until a bin can accept the
        // FlowFile, because the FlowFile can only go into one specific bin.
        final String mergeStrategy = context.getProperty(MERGE_STRATEGY).getValue();
        final boolean block;
        if (MERGE_STRATEGY_DEFRAGMENT.getValue().equals(mergeStrategy)) {
            block = true;
        } else if (context.getProperty(CORRELATION_ATTRIBUTE_NAME).isSet()) {
            block = true;
        } else {
            block = false;
        }

        try {
            for (final FlowFile flowFile : flowFiles) {
                try {
                    binFlowFile(context, flowFile, session, manager, block);
                } catch (final Exception e) {
                    getLogger().error("Failed to bin {} due to {}", new Object[] {flowFile, e});
                    session.transfer(flowFile, REL_FAILURE);
                }
            }
        } finally {
            session.commit();
        }

        // If there is no more data queued up, complete any bin that meets our minimum threshold
        int completedBins = 0;
        if (flowFiles.isEmpty()) {
            try {
                completedBins += manager.completeFullEnoughBins();
            } catch (final Exception e) {
                getLogger().error("Failed to merge FlowFiles to create new bin due to " + e, e);
            }
        }

        // Complete any bins that have reached their expiration date
        try {
            completedBins += manager.completeExpiredBins();
        } catch (final Exception e) {
            getLogger().error("Failed to merge FlowFiles to create new bin due to " + e, e);
        }

        if (completedBins == 0 && flowFiles.isEmpty()) {
            getLogger().debug("No FlowFiles to bin; will yield");
            context.yield();
        }
    }

    /**
     * Opens the FlowFile's content with the configured Record Reader and hands it to the
     * bin manager under the group id computed by {@link #getGroupId}.
     */
    private void binFlowFile(final ProcessContext context, final FlowFile flowFile, final ProcessSession session, final RecordBinManager binManager, final boolean block) {
        final RecordReaderFactory readerFactory = context.getProperty(RECORD_READER).asControllerService(RecordReaderFactory.class);
        try (final InputStream in = session.read(flowFile);
            final RecordReader reader = readerFactory.createRecordReader(flowFile, in, getLogger())) {

            final RecordSchema schema = reader.getSchema();

            final String groupId = getGroupId(context, flowFile, schema, session);
            getLogger().debug("Got Group ID {} for {}", new Object[] {groupId, flowFile});

            binManager.add(groupId, flowFile, reader, session, block);
        } catch (MalformedRecordException | IOException | SchemaNotFoundException e) {
            throw new ProcessException(e);
        }
    }

    /**
     * Determines the bin group for a FlowFile: the fragment id when defragmenting,
     * otherwise the schema text optionally concatenated with the correlation attribute value.
     */
    protected String getGroupId(final ProcessContext context, final FlowFile flowFile, final RecordSchema schema, final ProcessSession session) {
        final String mergeStrategy = context.getProperty(MERGE_STRATEGY).getValue();
        if (MERGE_STRATEGY_DEFRAGMENT.getValue().equals(mergeStrategy)) {
            return flowFile.getAttribute(FRAGMENT_ID_ATTRIBUTE);
        }

        // Fall back to deriving the schema text from Avro when the schema carries no text of its own.
        final Optional<String> optionalText = schema.getSchemaText();
        final String schemaText = optionalText.orElseGet(() -> AvroTypeUtil.extractAvroSchema(schema).toString());

        final String groupId;
        final String correlationAttributeName = context.getProperty(CORRELATION_ATTRIBUTE_NAME).getValue();
        if (correlationAttributeName != null) {
            final String correlationAttr = flowFile.getAttribute(correlationAttributeName);
            groupId = correlationAttr == null ? schemaText : schemaText + correlationAttr;
        } else {
            groupId = schemaText;
        }

        return groupId;
    }

    // visible for testing
    int getBinCount() {
        return binManager.get().getBinCount();
    }
}
/**
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for
 * license information.
 *
 * Code generated by Microsoft (R) AutoRest Code Generator.
 */

package com.microsoft.azure.management.network.v2020_03_01.implementation;

import com.microsoft.azure.management.network.v2020_03_01.ApplicationGatewaySku;
import com.microsoft.azure.management.network.v2020_03_01.ApplicationGatewaySslPolicy;
import com.microsoft.azure.management.network.v2020_03_01.ApplicationGatewayOperationalState;
import java.util.List;
import com.microsoft.azure.management.network.v2020_03_01.ApplicationGatewayIPConfiguration;
import com.microsoft.azure.management.network.v2020_03_01.ApplicationGatewayAuthenticationCertificate;
import com.microsoft.azure.management.network.v2020_03_01.ApplicationGatewayTrustedRootCertificate;
import com.microsoft.azure.management.network.v2020_03_01.ApplicationGatewaySslCertificate;
import com.microsoft.azure.management.network.v2020_03_01.ApplicationGatewayFrontendIPConfiguration;
import com.microsoft.azure.management.network.v2020_03_01.ApplicationGatewayFrontendPort;
import com.microsoft.azure.management.network.v2020_03_01.ApplicationGatewayProbe;
import com.microsoft.azure.management.network.v2020_03_01.ApplicationGatewayBackendAddressPool;
import com.microsoft.azure.management.network.v2020_03_01.ApplicationGatewayBackendHttpSettings;
import com.microsoft.azure.management.network.v2020_03_01.ApplicationGatewayHttpListener;
import com.microsoft.azure.management.network.v2020_03_01.ApplicationGatewayUrlPathMap;
import com.microsoft.azure.management.network.v2020_03_01.ApplicationGatewayRequestRoutingRule;
import com.microsoft.azure.management.network.v2020_03_01.ApplicationGatewayRewriteRuleSet;
import com.microsoft.azure.management.network.v2020_03_01.ApplicationGatewayRedirectConfiguration;
import com.microsoft.azure.management.network.v2020_03_01.ApplicationGatewayWebApplicationFirewallConfiguration;
import com.microsoft.azure.SubResource;
import com.microsoft.azure.management.network.v2020_03_01.ApplicationGatewayAutoscaleConfiguration;
import com.microsoft.azure.management.network.v2020_03_01.ProvisioningState;
import com.microsoft.azure.management.network.v2020_03_01.ApplicationGatewayCustomError;
import com.microsoft.azure.management.network.v2020_03_01.ManagedServiceIdentity;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.microsoft.rest.serializer.JsonFlatten;
import com.microsoft.rest.SkipParentValidation;
import com.microsoft.azure.Resource;

/**
 * Application gateway resource.
 *
 * <p>NOTE(review): this class is generated by AutoRest — do not hand-edit
 * behavior; changes will be lost on regeneration. It is a flattened JSON
 * model ({@code @JsonFlatten} maps {@code properties.*} paths onto flat
 * fields) with fluent {@code withXxx} setters. Fields annotated with
 * {@code access = JsonProperty.Access.WRITE_ONLY} are server-populated and
 * expose no setter.
 */
@JsonFlatten
@SkipParentValidation
public class ApplicationGatewayInner extends Resource {
    /**
     * SKU of the application gateway resource.
     */
    @JsonProperty(value = "properties.sku")
    private ApplicationGatewaySku sku;

    /**
     * SSL policy of the application gateway resource.
     */
    @JsonProperty(value = "properties.sslPolicy")
    private ApplicationGatewaySslPolicy sslPolicy;

    /**
     * Operational state of the application gateway resource. Possible values
     * include: 'Stopped', 'Starting', 'Running', 'Stopping'.
     */
    @JsonProperty(value = "properties.operationalState", access = JsonProperty.Access.WRITE_ONLY)
    private ApplicationGatewayOperationalState operationalState;

    /**
     * Subnets of the application gateway resource. For default limits, see
     * [Application Gateway
     * limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits).
     */
    @JsonProperty(value = "properties.gatewayIPConfigurations")
    private List<ApplicationGatewayIPConfiguration> gatewayIPConfigurations;

    /**
     * Authentication certificates of the application gateway resource. For
     * default limits, see [Application Gateway
     * limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits).
     */
    @JsonProperty(value = "properties.authenticationCertificates")
    private List<ApplicationGatewayAuthenticationCertificate> authenticationCertificates;

    /**
     * Trusted Root certificates of the application gateway resource. For
     * default limits, see [Application Gateway
     * limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits).
     */
    @JsonProperty(value = "properties.trustedRootCertificates")
    private List<ApplicationGatewayTrustedRootCertificate> trustedRootCertificates;

    /**
     * SSL certificates of the application gateway resource. For default
     * limits, see [Application Gateway
     * limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits).
     */
    @JsonProperty(value = "properties.sslCertificates")
    private List<ApplicationGatewaySslCertificate> sslCertificates;

    /**
     * Frontend IP addresses of the application gateway resource. For default
     * limits, see [Application Gateway
     * limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits).
     */
    @JsonProperty(value = "properties.frontendIPConfigurations")
    private List<ApplicationGatewayFrontendIPConfiguration> frontendIPConfigurations;

    /**
     * Frontend ports of the application gateway resource. For default limits,
     * see [Application Gateway
     * limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits).
     */
    @JsonProperty(value = "properties.frontendPorts")
    private List<ApplicationGatewayFrontendPort> frontendPorts;

    /**
     * Probes of the application gateway resource.
     */
    @JsonProperty(value = "properties.probes")
    private List<ApplicationGatewayProbe> probes;

    /**
     * Backend address pool of the application gateway resource. For default
     * limits, see [Application Gateway
     * limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits).
     */
    @JsonProperty(value = "properties.backendAddressPools")
    private List<ApplicationGatewayBackendAddressPool> backendAddressPools;

    /**
     * Backend http settings of the application gateway resource. For default
     * limits, see [Application Gateway
     * limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits).
     */
    @JsonProperty(value = "properties.backendHttpSettingsCollection")
    private List<ApplicationGatewayBackendHttpSettings> backendHttpSettingsCollection;

    /**
     * Http listeners of the application gateway resource. For default limits,
     * see [Application Gateway
     * limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits).
     */
    @JsonProperty(value = "properties.httpListeners")
    private List<ApplicationGatewayHttpListener> httpListeners;

    /**
     * URL path map of the application gateway resource. For default limits,
     * see [Application Gateway
     * limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits).
     */
    @JsonProperty(value = "properties.urlPathMaps")
    private List<ApplicationGatewayUrlPathMap> urlPathMaps;

    /**
     * Request routing rules of the application gateway resource.
     */
    @JsonProperty(value = "properties.requestRoutingRules")
    private List<ApplicationGatewayRequestRoutingRule> requestRoutingRules;

    /**
     * Rewrite rules for the application gateway resource.
     */
    @JsonProperty(value = "properties.rewriteRuleSets")
    private List<ApplicationGatewayRewriteRuleSet> rewriteRuleSets;

    /**
     * Redirect configurations of the application gateway resource. For default
     * limits, see [Application Gateway
     * limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits).
     */
    @JsonProperty(value = "properties.redirectConfigurations")
    private List<ApplicationGatewayRedirectConfiguration> redirectConfigurations;

    /**
     * Web application firewall configuration.
     */
    @JsonProperty(value = "properties.webApplicationFirewallConfiguration")
    private ApplicationGatewayWebApplicationFirewallConfiguration webApplicationFirewallConfiguration;

    /**
     * Reference to the FirewallPolicy resource.
     */
    @JsonProperty(value = "properties.firewallPolicy")
    private SubResource firewallPolicy;

    /**
     * Whether HTTP2 is enabled on the application gateway resource.
     */
    @JsonProperty(value = "properties.enableHttp2")
    private Boolean enableHttp2;

    /**
     * Whether FIPS is enabled on the application gateway resource.
     */
    @JsonProperty(value = "properties.enableFips")
    private Boolean enableFips;

    /**
     * Autoscale Configuration.
     */
    @JsonProperty(value = "properties.autoscaleConfiguration")
    private ApplicationGatewayAutoscaleConfiguration autoscaleConfiguration;

    /**
     * The resource GUID property of the application gateway resource.
     */
    @JsonProperty(value = "properties.resourceGuid", access = JsonProperty.Access.WRITE_ONLY)
    private String resourceGuid;

    /**
     * The provisioning state of the application gateway resource. Possible
     * values include: 'Succeeded', 'Updating', 'Deleting', 'Failed'.
     */
    @JsonProperty(value = "properties.provisioningState", access = JsonProperty.Access.WRITE_ONLY)
    private ProvisioningState provisioningState;

    /**
     * Custom error configurations of the application gateway resource.
     */
    @JsonProperty(value = "properties.customErrorConfigurations")
    private List<ApplicationGatewayCustomError> customErrorConfigurations;

    /**
     * If true, associates a firewall policy with an application gateway
     * regardless whether the policy differs from the WAF Config.
     */
    @JsonProperty(value = "properties.forceFirewallPolicyAssociation")
    private Boolean forceFirewallPolicyAssociation;

    /**
     * A unique read-only string that changes whenever the resource is updated.
     */
    @JsonProperty(value = "etag", access = JsonProperty.Access.WRITE_ONLY)
    private String etag;

    /**
     * A list of availability zones denoting where the resource needs to come
     * from.
     */
    @JsonProperty(value = "zones")
    private List<String> zones;

    /**
     * The identity of the application gateway, if configured.
     */
    @JsonProperty(value = "identity")
    private ManagedServiceIdentity identity;

    /**
     * Resource ID.
     */
    @JsonProperty(value = "id")
    private String id;

    /**
     * Get SKU of the application gateway resource.
     *
     * @return the sku value
     */
    public ApplicationGatewaySku sku() {
        return this.sku;
    }

    /**
     * Set SKU of the application gateway resource.
     *
     * @param sku the sku value to set
     * @return the ApplicationGatewayInner object itself.
     */
    public ApplicationGatewayInner withSku(ApplicationGatewaySku sku) {
        this.sku = sku;
        return this;
    }

    /**
     * Get SSL policy of the application gateway resource.
     *
     * @return the sslPolicy value
     */
    public ApplicationGatewaySslPolicy sslPolicy() {
        return this.sslPolicy;
    }

    /**
     * Set SSL policy of the application gateway resource.
     *
     * @param sslPolicy the sslPolicy value to set
     * @return the ApplicationGatewayInner object itself.
     */
    public ApplicationGatewayInner withSslPolicy(ApplicationGatewaySslPolicy sslPolicy) {
        this.sslPolicy = sslPolicy;
        return this;
    }

    /**
     * Get operational state of the application gateway resource. Possible values include: 'Stopped', 'Starting', 'Running', 'Stopping'.
     * (Read-only; populated by the service.)
     *
     * @return the operationalState value
     */
    public ApplicationGatewayOperationalState operationalState() {
        return this.operationalState;
    }

    /**
     * Get subnets of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits).
     *
     * @return the gatewayIPConfigurations value
     */
    public List<ApplicationGatewayIPConfiguration> gatewayIPConfigurations() {
        return this.gatewayIPConfigurations;
    }

    /**
     * Set subnets of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits).
     *
     * @param gatewayIPConfigurations the gatewayIPConfigurations value to set
     * @return the ApplicationGatewayInner object itself.
     */
    public ApplicationGatewayInner withGatewayIPConfigurations(List<ApplicationGatewayIPConfiguration> gatewayIPConfigurations) {
        this.gatewayIPConfigurations = gatewayIPConfigurations;
        return this;
    }

    /**
     * Get authentication certificates of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits).
     *
     * @return the authenticationCertificates value
     */
    public List<ApplicationGatewayAuthenticationCertificate> authenticationCertificates() {
        return this.authenticationCertificates;
    }

    /**
     * Set authentication certificates of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits).
     *
     * @param authenticationCertificates the authenticationCertificates value to set
     * @return the ApplicationGatewayInner object itself.
     */
    public ApplicationGatewayInner withAuthenticationCertificates(List<ApplicationGatewayAuthenticationCertificate> authenticationCertificates) {
        this.authenticationCertificates = authenticationCertificates;
        return this;
    }

    /**
     * Get trusted Root certificates of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits).
     *
     * @return the trustedRootCertificates value
     */
    public List<ApplicationGatewayTrustedRootCertificate> trustedRootCertificates() {
        return this.trustedRootCertificates;
    }

    /**
     * Set trusted Root certificates of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits).
     *
     * @param trustedRootCertificates the trustedRootCertificates value to set
     * @return the ApplicationGatewayInner object itself.
     */
    public ApplicationGatewayInner withTrustedRootCertificates(List<ApplicationGatewayTrustedRootCertificate> trustedRootCertificates) {
        this.trustedRootCertificates = trustedRootCertificates;
        return this;
    }

    /**
     * Get SSL certificates of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits).
     *
     * @return the sslCertificates value
     */
    public List<ApplicationGatewaySslCertificate> sslCertificates() {
        return this.sslCertificates;
    }

    /**
     * Set SSL certificates of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits).
     *
     * @param sslCertificates the sslCertificates value to set
     * @return the ApplicationGatewayInner object itself.
     */
    public ApplicationGatewayInner withSslCertificates(List<ApplicationGatewaySslCertificate> sslCertificates) {
        this.sslCertificates = sslCertificates;
        return this;
    }

    /**
     * Get frontend IP addresses of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits).
     *
     * @return the frontendIPConfigurations value
     */
    public List<ApplicationGatewayFrontendIPConfiguration> frontendIPConfigurations() {
        return this.frontendIPConfigurations;
    }

    /**
     * Set frontend IP addresses of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits).
     *
     * @param frontendIPConfigurations the frontendIPConfigurations value to set
     * @return the ApplicationGatewayInner object itself.
     */
    public ApplicationGatewayInner withFrontendIPConfigurations(List<ApplicationGatewayFrontendIPConfiguration> frontendIPConfigurations) {
        this.frontendIPConfigurations = frontendIPConfigurations;
        return this;
    }

    /**
     * Get frontend ports of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits).
     *
     * @return the frontendPorts value
     */
    public List<ApplicationGatewayFrontendPort> frontendPorts() {
        return this.frontendPorts;
    }

    /**
     * Set frontend ports of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits).
     *
     * @param frontendPorts the frontendPorts value to set
     * @return the ApplicationGatewayInner object itself.
     */
    public ApplicationGatewayInner withFrontendPorts(List<ApplicationGatewayFrontendPort> frontendPorts) {
        this.frontendPorts = frontendPorts;
        return this;
    }

    /**
     * Get probes of the application gateway resource.
     *
     * @return the probes value
     */
    public List<ApplicationGatewayProbe> probes() {
        return this.probes;
    }

    /**
     * Set probes of the application gateway resource.
     *
     * @param probes the probes value to set
     * @return the ApplicationGatewayInner object itself.
     */
    public ApplicationGatewayInner withProbes(List<ApplicationGatewayProbe> probes) {
        this.probes = probes;
        return this;
    }

    /**
     * Get backend address pool of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits).
     *
     * @return the backendAddressPools value
     */
    public List<ApplicationGatewayBackendAddressPool> backendAddressPools() {
        return this.backendAddressPools;
    }

    /**
     * Set backend address pool of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits).
     *
     * @param backendAddressPools the backendAddressPools value to set
     * @return the ApplicationGatewayInner object itself.
     */
    public ApplicationGatewayInner withBackendAddressPools(List<ApplicationGatewayBackendAddressPool> backendAddressPools) {
        this.backendAddressPools = backendAddressPools;
        return this;
    }

    /**
     * Get backend http settings of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits).
     *
     * @return the backendHttpSettingsCollection value
     */
    public List<ApplicationGatewayBackendHttpSettings> backendHttpSettingsCollection() {
        return this.backendHttpSettingsCollection;
    }

    /**
     * Set backend http settings of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits).
     *
     * @param backendHttpSettingsCollection the backendHttpSettingsCollection value to set
     * @return the ApplicationGatewayInner object itself.
     */
    public ApplicationGatewayInner withBackendHttpSettingsCollection(List<ApplicationGatewayBackendHttpSettings> backendHttpSettingsCollection) {
        this.backendHttpSettingsCollection = backendHttpSettingsCollection;
        return this;
    }

    /**
     * Get http listeners of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits).
     *
     * @return the httpListeners value
     */
    public List<ApplicationGatewayHttpListener> httpListeners() {
        return this.httpListeners;
    }

    /**
     * Set http listeners of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits).
     *
     * @param httpListeners the httpListeners value to set
     * @return the ApplicationGatewayInner object itself.
     */
    public ApplicationGatewayInner withHttpListeners(List<ApplicationGatewayHttpListener> httpListeners) {
        this.httpListeners = httpListeners;
        return this;
    }

    /**
     * Get URL path map of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits).
     *
     * @return the urlPathMaps value
     */
    public List<ApplicationGatewayUrlPathMap> urlPathMaps() {
        return this.urlPathMaps;
    }

    /**
     * Set URL path map of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits).
     *
     * @param urlPathMaps the urlPathMaps value to set
     * @return the ApplicationGatewayInner object itself.
     */
    public ApplicationGatewayInner withUrlPathMaps(List<ApplicationGatewayUrlPathMap> urlPathMaps) {
        this.urlPathMaps = urlPathMaps;
        return this;
    }

    /**
     * Get request routing rules of the application gateway resource.
     *
     * @return the requestRoutingRules value
     */
    public List<ApplicationGatewayRequestRoutingRule> requestRoutingRules() {
        return this.requestRoutingRules;
    }

    /**
     * Set request routing rules of the application gateway resource.
     *
     * @param requestRoutingRules the requestRoutingRules value to set
     * @return the ApplicationGatewayInner object itself.
     */
    public ApplicationGatewayInner withRequestRoutingRules(List<ApplicationGatewayRequestRoutingRule> requestRoutingRules) {
        this.requestRoutingRules = requestRoutingRules;
        return this;
    }

    /**
     * Get rewrite rules for the application gateway resource.
     *
     * @return the rewriteRuleSets value
     */
    public List<ApplicationGatewayRewriteRuleSet> rewriteRuleSets() {
        return this.rewriteRuleSets;
    }

    /**
     * Set rewrite rules for the application gateway resource.
     *
     * @param rewriteRuleSets the rewriteRuleSets value to set
     * @return the ApplicationGatewayInner object itself.
     */
    public ApplicationGatewayInner withRewriteRuleSets(List<ApplicationGatewayRewriteRuleSet> rewriteRuleSets) {
        this.rewriteRuleSets = rewriteRuleSets;
        return this;
    }

    /**
     * Get redirect configurations of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits).
     *
     * @return the redirectConfigurations value
     */
    public List<ApplicationGatewayRedirectConfiguration> redirectConfigurations() {
        return this.redirectConfigurations;
    }

    /**
     * Set redirect configurations of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits).
     *
     * @param redirectConfigurations the redirectConfigurations value to set
     * @return the ApplicationGatewayInner object itself.
     */
    public ApplicationGatewayInner withRedirectConfigurations(List<ApplicationGatewayRedirectConfiguration> redirectConfigurations) {
        this.redirectConfigurations = redirectConfigurations;
        return this;
    }

    /**
     * Get web application firewall configuration.
     *
     * @return the webApplicationFirewallConfiguration value
     */
    public ApplicationGatewayWebApplicationFirewallConfiguration webApplicationFirewallConfiguration() {
        return this.webApplicationFirewallConfiguration;
    }

    /**
     * Set web application firewall configuration.
     *
     * @param webApplicationFirewallConfiguration the webApplicationFirewallConfiguration value to set
     * @return the ApplicationGatewayInner object itself.
     */
    public ApplicationGatewayInner withWebApplicationFirewallConfiguration(ApplicationGatewayWebApplicationFirewallConfiguration webApplicationFirewallConfiguration) {
        this.webApplicationFirewallConfiguration = webApplicationFirewallConfiguration;
        return this;
    }

    /**
     * Get reference to the FirewallPolicy resource.
     *
     * @return the firewallPolicy value
     */
    public SubResource firewallPolicy() {
        return this.firewallPolicy;
    }

    /**
     * Set reference to the FirewallPolicy resource.
     *
     * @param firewallPolicy the firewallPolicy value to set
     * @return the ApplicationGatewayInner object itself.
     */
    public ApplicationGatewayInner withFirewallPolicy(SubResource firewallPolicy) {
        this.firewallPolicy = firewallPolicy;
        return this;
    }

    /**
     * Get whether HTTP2 is enabled on the application gateway resource.
     *
     * @return the enableHttp2 value
     */
    public Boolean enableHttp2() {
        return this.enableHttp2;
    }

    /**
     * Set whether HTTP2 is enabled on the application gateway resource.
     *
     * @param enableHttp2 the enableHttp2 value to set
     * @return the ApplicationGatewayInner object itself.
     */
    public ApplicationGatewayInner withEnableHttp2(Boolean enableHttp2) {
        this.enableHttp2 = enableHttp2;
        return this;
    }

    /**
     * Get whether FIPS is enabled on the application gateway resource.
     *
     * @return the enableFips value
     */
    public Boolean enableFips() {
        return this.enableFips;
    }

    /**
     * Set whether FIPS is enabled on the application gateway resource.
     *
     * @param enableFips the enableFips value to set
     * @return the ApplicationGatewayInner object itself.
     */
    public ApplicationGatewayInner withEnableFips(Boolean enableFips) {
        this.enableFips = enableFips;
        return this;
    }

    /**
     * Get autoscale Configuration.
     *
     * @return the autoscaleConfiguration value
     */
    public ApplicationGatewayAutoscaleConfiguration autoscaleConfiguration() {
        return this.autoscaleConfiguration;
    }

    /**
     * Set autoscale Configuration.
     *
     * @param autoscaleConfiguration the autoscaleConfiguration value to set
     * @return the ApplicationGatewayInner object itself.
     */
    public ApplicationGatewayInner withAutoscaleConfiguration(ApplicationGatewayAutoscaleConfiguration autoscaleConfiguration) {
        this.autoscaleConfiguration = autoscaleConfiguration;
        return this;
    }

    /**
     * Get the resource GUID property of the application gateway resource.
     * (Read-only; populated by the service.)
     *
     * @return the resourceGuid value
     */
    public String resourceGuid() {
        return this.resourceGuid;
    }

    /**
     * Get the provisioning state of the application gateway resource. Possible values include: 'Succeeded', 'Updating', 'Deleting', 'Failed'.
     * (Read-only; populated by the service.)
     *
     * @return the provisioningState value
     */
    public ProvisioningState provisioningState() {
        return this.provisioningState;
    }

    /**
     * Get custom error configurations of the application gateway resource.
     *
     * @return the customErrorConfigurations value
     */
    public List<ApplicationGatewayCustomError> customErrorConfigurations() {
        return this.customErrorConfigurations;
    }

    /**
     * Set custom error configurations of the application gateway resource.
     *
     * @param customErrorConfigurations the customErrorConfigurations value to set
     * @return the ApplicationGatewayInner object itself.
     */
    public ApplicationGatewayInner withCustomErrorConfigurations(List<ApplicationGatewayCustomError> customErrorConfigurations) {
        this.customErrorConfigurations = customErrorConfigurations;
        return this;
    }

    /**
     * Get if true, associates a firewall policy with an application gateway regardless whether the policy differs from the WAF Config.
     *
     * @return the forceFirewallPolicyAssociation value
     */
    public Boolean forceFirewallPolicyAssociation() {
        return this.forceFirewallPolicyAssociation;
    }

    /**
     * Set if true, associates a firewall policy with an application gateway regardless whether the policy differs from the WAF Config.
     *
     * @param forceFirewallPolicyAssociation the forceFirewallPolicyAssociation value to set
     * @return the ApplicationGatewayInner object itself.
     */
    public ApplicationGatewayInner withForceFirewallPolicyAssociation(Boolean forceFirewallPolicyAssociation) {
        this.forceFirewallPolicyAssociation = forceFirewallPolicyAssociation;
        return this;
    }

    /**
     * Get a unique read-only string that changes whenever the resource is updated.
     *
     * @return the etag value
     */
    public String etag() {
        return this.etag;
    }

    /**
     * Get a list of availability zones denoting where the resource needs to come from.
     *
     * @return the zones value
     */
    public List<String> zones() {
        return this.zones;
    }

    /**
     * Set a list of availability zones denoting where the resource needs to come from.
     *
     * @param zones the zones value to set
     * @return the ApplicationGatewayInner object itself.
     */
    public ApplicationGatewayInner withZones(List<String> zones) {
        this.zones = zones;
        return this;
    }

    /**
     * Get the identity of the application gateway, if configured.
     *
     * @return the identity value
     */
    public ManagedServiceIdentity identity() {
        return this.identity;
    }

    /**
     * Set the identity of the application gateway, if configured.
     *
     * @param identity the identity value to set
     * @return the ApplicationGatewayInner object itself.
     */
    public ApplicationGatewayInner withIdentity(ManagedServiceIdentity identity) {
        this.identity = identity;
        return this;
    }

    /**
     * Get resource ID.
     *
     * @return the id value
     */
    public String id() {
        return this.id;
    }

    /**
     * Set resource ID.
     *
     * @param id the id value to set
     * @return the ApplicationGatewayInner object itself.
     */
    public ApplicationGatewayInner withId(String id) {
        this.id = id;
        return this;
    }

}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.druid.segment.filter; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import org.apache.druid.collections.spatial.search.RadiusBound; import org.apache.druid.collections.spatial.search.RectangularBound; import org.apache.druid.data.input.MapBasedInputRow; import org.apache.druid.data.input.impl.DimensionsSpec; import org.apache.druid.data.input.impl.SpatialDimensionSchema; import org.apache.druid.java.util.common.DateTimes; import org.apache.druid.java.util.common.Intervals; import org.apache.druid.java.util.common.granularity.Granularities; import org.apache.druid.query.Druids; import org.apache.druid.query.FinalizeResultsQueryRunner; import org.apache.druid.query.QueryPlus; import org.apache.druid.query.QueryRunner; import org.apache.druid.query.QueryRunnerTestHelper; import org.apache.druid.query.Result; import org.apache.druid.query.aggregation.AggregatorFactory; import org.apache.druid.query.aggregation.CountAggregatorFactory; import org.apache.druid.query.aggregation.LongSumAggregatorFactory; import org.apache.druid.query.filter.SpatialDimFilter; import org.apache.druid.query.timeseries.TimeseriesQuery; import 
org.apache.druid.query.timeseries.TimeseriesQueryEngine;
import org.apache.druid.query.timeseries.TimeseriesQueryQueryToolChest;
import org.apache.druid.query.timeseries.TimeseriesQueryRunnerFactory;
import org.apache.druid.query.timeseries.TimeseriesResultValue;
import org.apache.druid.segment.IncrementalIndexSegment;
import org.apache.druid.segment.IndexIO;
import org.apache.druid.segment.IndexMerger;
import org.apache.druid.segment.IndexSpec;
import org.apache.druid.segment.QueryableIndex;
import org.apache.druid.segment.QueryableIndexSegment;
import org.apache.druid.segment.Segment;
import org.apache.druid.segment.TestHelper;
import org.apache.druid.segment.incremental.IncrementalIndex;
import org.apache.druid.segment.incremental.IncrementalIndexSchema;
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
import org.joda.time.Interval;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Random;
import java.util.concurrent.ThreadLocalRandom;

/**
 * Tests spatial dimension filtering ({@code SpatialDimFilter}) against the same fixture data
 * indexed three ways: an in-memory incremental index, a single persisted segment, and a merge
 * of three persisted segments. Each variant must return identical query results.
 */
@RunWith(Parameterized.class)
public class SpatialFilterTest
{
  // These fixtures are initialized once and never reassigned, so they are declared final.
  private static final IndexMerger INDEX_MERGER =
      TestHelper.getTestIndexMergerV9(OffHeapMemorySegmentWriteOutMediumFactory.instance());
  private static final IndexIO INDEX_IO = TestHelper.getTestIndexIO();

  public static final int NUM_POINTS = 5000;
  private static final Interval DATA_INTERVAL = Intervals.of("2013-01-01/2013-01-07");

  private static final AggregatorFactory[] METRIC_AGGS = new AggregatorFactory[]{
      new CountAggregatorFactory("rows"),
      new LongSumAggregatorFactory("val", "val")
  };

  // Dimension order for every input row; "dim.geo" is a derived spatial dim over (lat, long)
  // and "spatialIsRad" over (lat2, long2).
  private static final List<String> DIMS = Lists.newArrayList("dim", "lat", "long", "lat2", "long2");

  /**
   * Supplies the three segment variants (realtime, persisted, merged) to the parameterized runner.
   */
  @Parameterized.Parameters
  public static Collection<?> constructorFeeder() throws IOException
  {
    final IndexSpec indexSpec = new IndexSpec();
    final IncrementalIndex rtIndex = makeIncrementalIndex();
    final QueryableIndex mMappedTestIndex = makeQueryableIndex(indexSpec);
    final QueryableIndex mergedRealtimeIndex = makeMergedQueryableIndex(indexSpec);
    return Arrays.asList(
        new Object[][]{
            {new IncrementalIndexSegment(rtIndex, null)},
            {new QueryableIndexSegment(mMappedTestIndex, null)},
            {new QueryableIndexSegment(mergedRealtimeIndex, null)}
        }
    );
  }

  /**
   * Builds an in-memory index with eight hand-picked rows plus (NUM_POINTS - 8) random points.
   * The hand-picked rows include three points within radius 5 of the origin (val 17, 29, 13),
   * two rows with unparseable "_mmx.unknown" coordinates (val 101, 501), and one row on the
   * secondary spatial dimension (val 13). Random points land in [10, 20) x [10, 20), outside
   * every spatial bound the tests use.
   */
  private static IncrementalIndex makeIncrementalIndex() throws IOException
  {
    IncrementalIndex theIndex = new IncrementalIndex.Builder()
        .setIndexSchema(
            new IncrementalIndexSchema.Builder()
                .withMinTimestamp(DATA_INTERVAL.getStartMillis())
                .withQueryGranularity(Granularities.DAY)
                .withMetrics(METRIC_AGGS)
                .withDimensionsSpec(
                    new DimensionsSpec(
                        null,
                        null,
                        Arrays.asList(
                            new SpatialDimensionSchema("dim.geo", Arrays.asList("lat", "long")),
                            new SpatialDimensionSchema("spatialIsRad", Arrays.asList("lat2", "long2"))
                        )
                    )
                ).build()
        )
        // The "_mmx.unknown" rows would otherwise throw; parse errors are intentionally tolerated.
        .setReportParseExceptions(false)
        .setMaxRowCount(NUM_POINTS)
        .buildOnheap();

    theIndex.add(
        new MapBasedInputRow(
            DateTimes.of("2013-01-01").getMillis(),
            DIMS,
            ImmutableMap.of(
                "timestamp", DateTimes.of("2013-01-01").toString(),
                "dim", "foo",
                "lat", 0.0f,
                "long", 0.0f,
                "val", 17L
            )
        )
    );
    theIndex.add(
        new MapBasedInputRow(
            DateTimes.of("2013-01-02").getMillis(),
            DIMS,
            ImmutableMap.of(
                "timestamp", DateTimes.of("2013-01-02").toString(),
                "dim", "foo",
                "lat", 1.0f,
                "long", 3.0f,
                "val", 29L
            )
        )
    );
    theIndex.add(
        new MapBasedInputRow(
            DateTimes.of("2013-01-03").getMillis(),
            DIMS,
            ImmutableMap.of(
                "timestamp", DateTimes.of("2013-01-03").toString(),
                "dim", "foo",
                "lat", 4.0f,
                "long", 2.0f,
                "val", 13L
            )
        )
    );
    theIndex.add(
        new MapBasedInputRow(
            DateTimes.of("2013-01-04").getMillis(),
            DIMS,
            ImmutableMap.of(
                "timestamp", DateTimes.of("2013-01-04").toString(),
                "dim", "foo",
                "lat", 7.0f,
                "long", 3.0f,
                "val", 91L
            )
        )
    );
    theIndex.add(
        new MapBasedInputRow(
            DateTimes.of("2013-01-05").getMillis(),
            DIMS,
            ImmutableMap.of(
                "timestamp", DateTimes.of("2013-01-05").toString(),
                "dim", "foo",
                "lat", 8.0f,
                "long", 6.0f,
                "val", 47L
            )
        )
    );
    // Unparseable coordinate values: this row must never match a spatial filter.
    theIndex.add(
        new MapBasedInputRow(
            DateTimes.of("2013-01-05").getMillis(),
            DIMS,
            ImmutableMap.of(
                "timestamp", DateTimes.of("2013-01-05").toString(),
                "dim", "foo",
                "lat", "_mmx.unknown",
                "long", "_mmx.unknown",
                "val", 101L
            )
        )
    );
    // Unparseable value supplied directly on the derived spatial dimension.
    theIndex.add(
        new MapBasedInputRow(
            DateTimes.of("2013-01-05").getMillis(),
            DIMS,
            ImmutableMap.of(
                "timestamp", DateTimes.of("2013-01-05").toString(),
                "dim", "foo",
                "dim.geo", "_mmx.unknown",
                "val", 501L
            )
        )
    );
    // Only row carrying the secondary spatial dimension (spatialIsRad).
    theIndex.add(
        new MapBasedInputRow(
            DateTimes.of("2013-01-05").getMillis(),
            DIMS,
            ImmutableMap.of(
                "timestamp", DateTimes.of("2013-01-05").toString(),
                "lat2", 0.0f,
                "long2", 0.0f,
                "val", 13L
            )
        )
    );

    // Add a bunch of random points, all safely outside the query bounds used by the tests.
    Random rand = ThreadLocalRandom.current();
    for (int i = 8; i < NUM_POINTS; i++) {
      theIndex.add(
          new MapBasedInputRow(
              DateTimes.of("2013-01-01").getMillis(),
              DIMS,
              ImmutableMap.of(
                  "timestamp", DateTimes.of("2013-01-01").toString(),
                  "dim", "boo",
                  "lat", (float) (rand.nextFloat() * 10 + 10.0),
                  "long", (float) (rand.nextFloat() * 10 + 10.0),
                  "val", i
              )
          )
      );
    }

    return theIndex;
  }

  /**
   * Persists the full incremental index into a temp directory and loads it back as a single
   * memory-mapped segment.
   */
  private static QueryableIndex makeQueryableIndex(IndexSpec indexSpec) throws IOException
  {
    IncrementalIndex theIndex = makeIncrementalIndex();

    // createTempFile yields a file; replace it with a directory of the same name for persist().
    File tmpFile = File.createTempFile("billy", "yay");
    tmpFile.delete();
    tmpFile.mkdirs();
    tmpFile.deleteOnExit();

    INDEX_MERGER.persist(theIndex, tmpFile, indexSpec, null);
    return INDEX_IO.loadIndex(tmpFile);
  }

  /**
   * Splits the same fixture rows across three incremental indexes, persists each, and merges
   * them into one queryable segment — exercising spatial indexing through the merge path.
   */
  private static QueryableIndex makeMergedQueryableIndex(IndexSpec indexSpec)
  {
    try {
      IncrementalIndex first = new IncrementalIndex.Builder()
          .setIndexSchema(
              new IncrementalIndexSchema.Builder()
                  .withMinTimestamp(DATA_INTERVAL.getStartMillis())
                  .withQueryGranularity(Granularities.DAY)
                  .withMetrics(METRIC_AGGS)
                  .withDimensionsSpec(
                      new DimensionsSpec(
                          null,
                          null,
                          Arrays.asList(
                              new SpatialDimensionSchema("dim.geo", Arrays.asList("lat", "long")),
                              new SpatialDimensionSchema("spatialIsRad", Arrays.asList("lat2", "long2"))
                          )
                      )
                  ).build()
          )
          .setReportParseExceptions(false)
          .setMaxRowCount(1000)
          .buildOnheap();

      IncrementalIndex second = new IncrementalIndex.Builder()
          .setIndexSchema(
              new IncrementalIndexSchema.Builder()
                  .withMinTimestamp(DATA_INTERVAL.getStartMillis())
                  .withQueryGranularity(Granularities.DAY)
                  .withMetrics(METRIC_AGGS)
                  .withDimensionsSpec(
                      new DimensionsSpec(
                          null,
                          null,
                          Arrays.asList(
                              new SpatialDimensionSchema("dim.geo", Arrays.asList("lat", "long")),
                              new SpatialDimensionSchema("spatialIsRad", Arrays.asList("lat2", "long2"))
                          )
                      )
                  ).build()
          )
          .setReportParseExceptions(false)
          .setMaxRowCount(1000)
          .buildOnheap();

      // The third index holds the bulk random rows, so it gets the large row limit.
      IncrementalIndex third = new IncrementalIndex.Builder()
          .setIndexSchema(
              new IncrementalIndexSchema.Builder()
                  .withMinTimestamp(DATA_INTERVAL.getStartMillis())
                  .withQueryGranularity(Granularities.DAY)
                  .withMetrics(METRIC_AGGS)
                  .withDimensionsSpec(
                      new DimensionsSpec(
                          null,
                          null,
                          Arrays.asList(
                              new SpatialDimensionSchema("dim.geo", Arrays.asList("lat", "long")),
                              new SpatialDimensionSchema("spatialIsRad", Arrays.asList("lat2", "long2"))
                          )
                      )
                  ).build()
          )
          .setReportParseExceptions(false)
          .setMaxRowCount(NUM_POINTS)
          .buildOnheap();

      first.add(
          new MapBasedInputRow(
              DateTimes.of("2013-01-01").getMillis(),
              DIMS,
              ImmutableMap.of(
                  "timestamp", DateTimes.of("2013-01-01").toString(),
                  "dim", "foo",
                  "lat", 0.0f,
                  "long", 0.0f,
                  "val", 17L
              )
          )
      );
      first.add(
          new MapBasedInputRow(
              DateTimes.of("2013-01-02").getMillis(),
              DIMS,
              ImmutableMap.of(
                  "timestamp", DateTimes.of("2013-01-02").toString(),
                  "dim", "foo",
                  "lat", 1.0f,
                  "long", 3.0f,
                  "val", 29L
              )
          )
      );
      first.add(
          new MapBasedInputRow(
              DateTimes.of("2013-01-03").getMillis(),
              DIMS,
              ImmutableMap.of(
                  "timestamp", DateTimes.of("2013-01-03").toString(),
                  "dim", "foo",
                  "lat", 4.0f,
                  "long", 2.0f,
                  "val", 13L
              )
          )
      );
      first.add(
          new MapBasedInputRow(
              DateTimes.of("2013-01-05").getMillis(),
              DIMS,
              ImmutableMap.of(
                  "timestamp", DateTimes.of("2013-01-05").toString(),
                  "dim", "foo",
                  "lat", "_mmx.unknown",
                  "long", "_mmx.unknown",
                  "val", 101L
              )
          )
      );
      first.add(
          new MapBasedInputRow(
              DateTimes.of("2013-01-05").getMillis(),
              DIMS,
              ImmutableMap.of(
                  "timestamp", DateTimes.of("2013-01-05").toString(),
                  "dim", "foo",
                  "dim.geo", "_mmx.unknown",
                  "val", 501L
              )
          )
      );
      second.add(
          new MapBasedInputRow(
              DateTimes.of("2013-01-04").getMillis(),
              DIMS,
              ImmutableMap.of(
                  "timestamp", DateTimes.of("2013-01-04").toString(),
                  "dim", "foo",
                  "lat", 7.0f,
                  "long", 3.0f,
                  "val", 91L
              )
          )
      );
      second.add(
          new MapBasedInputRow(
              DateTimes.of("2013-01-05").getMillis(),
              DIMS,
              ImmutableMap.of(
                  "timestamp", DateTimes.of("2013-01-05").toString(),
                  "dim", "foo",
                  "lat", 8.0f,
                  "long", 6.0f,
                  "val", 47L
              )
          )
      );
      second.add(
          new MapBasedInputRow(
              DateTimes.of("2013-01-05").getMillis(),
              DIMS,
              ImmutableMap.of(
                  "timestamp", DateTimes.of("2013-01-05").toString(),
                  "lat2", 0.0f,
                  "long2", 0.0f,
                  "val", 13L
              )
          )
      );

      // Add a bunch of random points
      Random rand = ThreadLocalRandom.current();
      for (int i = 8; i < NUM_POINTS; i++) {
        third.add(
            new MapBasedInputRow(
                DateTimes.of("2013-01-01").getMillis(),
                DIMS,
                ImmutableMap.of(
                    "timestamp", DateTimes.of("2013-01-01").toString(),
                    "dim", "boo",
                    "lat", (float) (rand.nextFloat() * 10 + 10.0),
                    "long", (float) (rand.nextFloat() * 10 + 10.0),
                    "val", i
                )
            )
        );
      }

      File tmpFile = File.createTempFile("yay", "who");
      tmpFile.delete();

      File firstFile = new File(tmpFile, "first");
      File secondFile = new File(tmpFile, "second");
      File thirdFile = new File(tmpFile, "third");
      File mergedFile = new File(tmpFile, "merged");

      firstFile.mkdirs();
      firstFile.deleteOnExit();
      secondFile.mkdirs();
      secondFile.deleteOnExit();
      thirdFile.mkdirs();
      thirdFile.deleteOnExit();
      mergedFile.mkdirs();
      mergedFile.deleteOnExit();

      INDEX_MERGER.persist(first, DATA_INTERVAL, firstFile, indexSpec, null);
      INDEX_MERGER.persist(second, DATA_INTERVAL, secondFile, indexSpec, null);
      INDEX_MERGER.persist(third, DATA_INTERVAL, thirdFile, indexSpec, null);

      QueryableIndex mergedRealtime = INDEX_IO.loadIndex(
          INDEX_MERGER.mergeQueryableIndex(
              Arrays.asList(INDEX_IO.loadIndex(firstFile), INDEX_IO.loadIndex(secondFile), INDEX_IO.loadIndex(thirdFile)),
              true,
              METRIC_AGGS,
              mergedFile,
              indexSpec,
              null
          )
      );

      return mergedRealtime;
    }
    catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  private final Segment segment;

  public SpatialFilterTest(Segment segment)
  {
    this.segment = segment;
  }

  /**
   * Radius-5 circle around the origin on "dim.geo" should match exactly the three points
   * (0,0), (1,3), (4,2): rows = 3, val = 17 + 29 + 13 = 59.
   */
  @Test
  public void testSpatialQuery()
  {
    TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
                                  .dataSource("test")
                                  .granularity(Granularities.ALL)
                                  .intervals(Collections.singletonList(Intervals.of("2013-01-01/2013-01-07")))
                                  .filters(
                                      new SpatialDimFilter(
                                          "dim.geo",
                                          new RadiusBound(new float[]{0.0f, 0.0f}, 5)
                                      )
                                  )
                                  .aggregators(
                                      Arrays.asList(
                                          new CountAggregatorFactory("rows"),
                                          new LongSumAggregatorFactory("val", "val")
                                      )
                                  )
                                  .build();

    List<Result<TimeseriesResultValue>> expectedResults = Collections.singletonList(
        new Result<TimeseriesResultValue>(
            DateTimes.of("2013-01-01T00:00:00.000Z"),
            new TimeseriesResultValue(
                ImmutableMap.<String, Object>builder()
                            .put("rows", 3L)
                            .put("val", 59L)
                            .build()
            )
        )
    );

    try {
      TimeseriesQueryRunnerFactory factory = new TimeseriesQueryRunnerFactory(
          new TimeseriesQueryQueryToolChest(QueryRunnerTestHelper.noopIntervalChunkingQueryRunnerDecorator()),
          new TimeseriesQueryEngine(),
          QueryRunnerTestHelper.NOOP_QUERYWATCHER
      );
      QueryRunner runner = new FinalizeResultsQueryRunner(
          factory.createRunner(segment),
          factory.getToolchest()
      );
      TestHelper.assertExpectedResults(expectedResults, runner.run(QueryPlus.wrap(query), new HashMap<>()));
    }
    catch (Exception e) {
      throw new RuntimeException(e);
    }
  }

  /**
   * Same radius-5 bound applied to the secondary spatial dimension "spatialIsRad" should match
   * only the single (lat2, long2) = (0, 0) row: rows = 1, val = 13.
   */
  @Test
  public void testSpatialQueryWithOtherSpatialDim()
  {
    TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
                                  .dataSource("test")
                                  .granularity(Granularities.ALL)
                                  .intervals(Collections.singletonList(Intervals.of("2013-01-01/2013-01-07")))
                                  .filters(
                                      new SpatialDimFilter(
                                          "spatialIsRad",
                                          new RadiusBound(new float[]{0.0f, 0.0f}, 5)
                                      )
                                  )
                                  .aggregators(
                                      Arrays.asList(
                                          new CountAggregatorFactory("rows"),
                                          new LongSumAggregatorFactory("val", "val")
                                      )
                                  )
                                  .build();

    List<Result<TimeseriesResultValue>> expectedResults = Collections.singletonList(
        new Result<TimeseriesResultValue>(
            DateTimes.of("2013-01-01T00:00:00.000Z"),
            new TimeseriesResultValue(
                ImmutableMap.<String, Object>builder()
                            .put("rows", 1L)
                            .put("val", 13L)
                            .build()
            )
        )
    );

    try {
      TimeseriesQueryRunnerFactory factory = new TimeseriesQueryRunnerFactory(
          new TimeseriesQueryQueryToolChest(QueryRunnerTestHelper.noopIntervalChunkingQueryRunnerDecorator()),
          new TimeseriesQueryEngine(),
          QueryRunnerTestHelper.NOOP_QUERYWATCHER
      );
      QueryRunner runner = new FinalizeResultsQueryRunner(
          factory.createRunner(segment),
          factory.getToolchest()
      );
      TestHelper.assertExpectedResults(expectedResults, runner.run(QueryPlus.wrap(query), new HashMap<>()));
    }
    catch (Exception e) {
      throw new RuntimeException(e);
    }
  }

  /**
   * Rectangular bound [0,9] x [0,9] at DAY granularity matches one fixed point per day
   * (2013-01-01 .. 2013-01-05); the random points lie in [10,20) and are excluded.
   */
  @Test
  public void testSpatialQueryMorePoints()
  {
    TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
                                  .dataSource("test")
                                  .granularity(Granularities.DAY)
                                  .intervals(Collections.singletonList(Intervals.of("2013-01-01/2013-01-07")))
                                  .filters(
                                      new SpatialDimFilter(
                                          "dim.geo",
                                          new RectangularBound(new float[]{0.0f, 0.0f}, new float[]{9.0f, 9.0f})
                                      )
                                  )
                                  .aggregators(
                                      Arrays.asList(
                                          new CountAggregatorFactory("rows"),
                                          new LongSumAggregatorFactory("val", "val")
                                      )
                                  )
                                  .build();

    List<Result<TimeseriesResultValue>> expectedResults = Arrays.asList(
        new Result<TimeseriesResultValue>(
            DateTimes.of("2013-01-01T00:00:00.000Z"),
            new TimeseriesResultValue(
                ImmutableMap.<String, Object>builder()
                            .put("rows", 1L)
                            .put("val", 17L)
                            .build()
            )
        ),
        new Result<TimeseriesResultValue>(
            DateTimes.of("2013-01-02T00:00:00.000Z"),
            new TimeseriesResultValue(
                ImmutableMap.<String, Object>builder()
                            .put("rows", 1L)
                            .put("val", 29L)
                            .build()
            )
        ),
        new Result<TimeseriesResultValue>(
            DateTimes.of("2013-01-03T00:00:00.000Z"),
            new TimeseriesResultValue(
                ImmutableMap.<String, Object>builder()
                            .put("rows", 1L)
                            .put("val", 13L)
                            .build()
            )
        ),
        new Result<TimeseriesResultValue>(
            DateTimes.of("2013-01-04T00:00:00.000Z"),
            new TimeseriesResultValue(
                ImmutableMap.<String, Object>builder()
                            .put("rows", 1L)
                            .put("val", 91L)
                            .build()
            )
        ),
        new Result<TimeseriesResultValue>(
            DateTimes.of("2013-01-05T00:00:00.000Z"),
            new TimeseriesResultValue(
                ImmutableMap.<String, Object>builder()
                            .put("rows", 1L)
                            .put("val", 47L)
                            .build()
            )
        )
    );

    try {
      TimeseriesQueryRunnerFactory factory = new TimeseriesQueryRunnerFactory(
          new TimeseriesQueryQueryToolChest(QueryRunnerTestHelper.noopIntervalChunkingQueryRunnerDecorator()),
          new TimeseriesQueryEngine(),
          QueryRunnerTestHelper.NOOP_QUERYWATCHER
      );
      QueryRunner runner = new FinalizeResultsQueryRunner(
          factory.createRunner(segment),
          factory.getToolchest()
      );
      TestHelper.assertExpectedResults(expectedResults, runner.run(QueryPlus.wrap(query), new HashMap<>()));
    }
    catch (Exception e) {
      throw new RuntimeException(e);
    }
  }
}
/*
 * Copyright 2008 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package com.google.gwt.emultest.java.util;

import com.google.gwt.junit.client.GWTTestCase;
import com.google.gwt.testing.TestUtils;

import java.util.ArrayList;
import java.util.Date;

/**
 * Tests for GWT's emulation of the JRE Date class.
 *
 * <p>Most tests build fixture dates via {@code create()}/{@code create(String)} and
 * {@code createString(String)}, which are defined elsewhere in this file; PAST appears to map
 * to Jan 5 1890 00:00:00 and FUTURE to Dec 30 2030 03:04:05 (see testToGMTString's expected
 * strings). Several expectations shift by the local timezone offset — see needsToChangeDate.
 */
@SuppressWarnings("deprecation")
public class DateTest extends GWTTestCase {
  public static final String CURRENT = "CURRENT";
  // Expected shape of Date.toString(): "EEE MMM dd HH:mm:ss (zone)? yyyy"; the zone group is
  // optional because some environments omit it.
  public static final String TO_STRING_PATTERN = "\\w{3} \\w{3} \\d{2} \\d{2}:\\d{2}:\\d{2}( .+)? \\d{4}";
  public static final long DAY_MILLISECONDS_SHIFT = 27;
  public static final String FUTURE = "FUTURE";
  public static final String PAST = "PAST";
  public static final long SECOND_MILLISECONDS_SHIFT = 10;

  // NOTE(review): not referenced by the tests visible here — presumably used further down or vestigial.
  Date theDate = new Date();

  /**
   * Sets module name so that javascript compiler can operate.
   */
  @Override
  public String getModuleName() {
    return "com.google.gwt.emultest.EmulSuite";
  }

  /** Testing for public boolean java.util.Date.after(java.util.Date). */
  public void testAfter() {
    // Current: a date is not after an equal date.
    Date accum0 = create();
    Date arg10 = create();
    boolean a0 = accum0.after(arg10);
    assertFalse(a0);

    // Past
    Date accum1 = create(PAST);
    Date arg20 = create();
    boolean a1 = accum1.after(arg20);
    assertFalse(a1);

    // Future
    Date accum2 = create(FUTURE);
    Date arg30 = create();
    boolean a2 = accum2.after(arg30);
    assertTrue(a2);
  }

  /** Testing for public boolean java.util.Date.before(java.util.Date). */
  public void testBefore() {
    // Current: a date is not before an equal date.
    Date accum0 = create();
    Date arg10 = create();
    boolean a0 = accum0.before(arg10);
    assertFalse(a0);

    // Past
    Date accum1 = create(PAST);
    Date arg20 = create();
    boolean a1 = accum1.before(arg20);
    assertTrue(a1);

    // Future
    Date accum2 = create(FUTURE);
    Date arg30 = create();
    boolean a2 = accum2.before(arg30);
    assertFalse(a2);
  }

  /**
   * Tests that if daylight savings time occurs tomorrow, the current date isn't
   * affected.
   */
  public void testClockForwardNextDay() {
    int[] monthDayHour = new int[3];
    if (!findClockForwardTime(2009, monthDayHour)) {
      // No DST transition in this timezone; nothing to test.
      return;
    }

    int month = monthDayHour[0];
    int day = monthDayHour[1] - 1; // Day before.
    int hour = monthDayHour[2];

    Date d = new Date(2009 - 1900, month, day, hour, 0, 0);
    assertEquals(day, d.getDate());
    assertEquals(hour, d.getHours());

    // Change the minutes, which triggers fixDaylightSavings.
    d.setMinutes(10);
    assertEquals(day, d.getDate());
    assertEquals(hour, d.getHours());

    // Change the seconds, which triggers fixDaylightSavings.
    d.setSeconds(10);
    assertEquals(day, d.getDate());
    assertEquals(hour, d.getHours());

    // Change the minutes by more than an hour.
    d.setMinutes(80);
    assertEquals(day, d.getDate());
    assertEquals(hour + 1, d.getHours());
  }

  /** Testing for public java.lang.Object java.util.Date.clone(). */
  public void testClone() {
    // Current: clone is a distinct object that compares equal.
    Date accum0 = create();
    Object a0 = accum0.clone();
    assertFalse(a0 == accum0);
    assertEquals(a0, accum0);

    // Past
    Date accum1 = create(PAST);
    Object a1 = accum1.clone();
    assertFalse(a1 == accum1);
    assertEquals(a1, accum1);

    // Future
    Date accum2 = create(FUTURE);
    Object a2 = accum2.clone();
    assertFalse(a2 == accum2);
    assertEquals(a2, accum2);
  }

  /** Testing for public int java.util.Date.compareTo(java.util.Date). */
  public void testCompareTo() {
    // Current vs equal -> 0
    Date accum0 = create();
    Date arg10 = create();
    int a0 = accum0.compareTo(arg10);
    assertEquals(0, a0);

    // Current vs past -> positive
    Date accum1 = create();
    Date arg20 = create(PAST);
    int a1 = accum1.compareTo(arg20);
    assertEquals(1, a1);

    // Current vs future -> negative
    Date accum2 = create();
    Date arg30 = create(FUTURE);
    int a2 = accum2.compareTo(arg30);
    assertEquals(-1, a2);
  }

  /** Testing for public int java.util.Date.getDate(). */
  public void testGetDate() {
    // Past: expected day-of-month shifts back one day in timezones west of GMT.
    Date accum1 = create(PAST);
    int a1 = accum1.getDate();
    int expectedValue = 5;
    if (needsToChangeDate(accum1, 0)) {
      expectedValue--;
    }
    assertEquals(expectedValue, a1);

    // Future: the fixture time is 03:04, so only offsets beyond 3h04m roll the date back.
    Date accum2 = create(FUTURE);
    int a2 = accum2.getDate();
    expectedValue = 30;
    if (needsToChangeDate(accum2, 3 * 60 + 4)) {
      expectedValue--;
    }
    assertEquals(expectedValue, a2);
  }

  // True when the local timezone offset is large enough to push the fixture's local
  // calendar date onto the previous day.
  private boolean needsToChangeDate(Date d, int minuteOffset) {
    int timezoneOffset = d.getTimezoneOffset();
    return timezoneOffset > minuteOffset;
  }

  /** Testing for public int java.util.Date.getDay(). */
  public void testGetDay() {
    // Smoke test only: the day-of-week values are not asserted because they depend
    // on the local timezone.
    Date accum0 = create();
    int a0 = accum0.getDay();

    Date accum1 = create(PAST);
    int a1 = accum1.getDay();

    Date accum2 = create(FUTURE);
    int a2 = accum2.getDay();
  }

  /** Testing for public int java.util.Date.getHours(). */
  public void testGetHours() {
    // Cannot be done because each time zone will give a different
    // answer
  }

  /** Testing for public int java.util.Date.getMinutes(). */
  public void testGetMinutes() {
    // Past
    Date accum1 = create(PAST);
    int a1 = accum1.getMinutes();
    assertEquals(0, a1);

    // Future
    Date accum2 = create(FUTURE);
    int a2 = accum2.getMinutes();
    assertEquals(4, a2);
  }

  /** Testing for public int java.util.Date.getMonth(). */
  public void testGetMonth() {
    // Past: January (months are 0-based)
    Date accum1 = create(PAST);
    int a1 = accum1.getMonth();
    assertEquals(0, a1);

    // Future: December
    Date accum2 = create(FUTURE);
    int a2 = accum2.getMonth();
    assertEquals(11, a2);
  }

  /** Testing for public int java.util.Date.getSeconds(). */
  public void testGetSeconds() {
    // Past
    Date accum1 = create(PAST);
    int a1 = accum1.getSeconds();
    assertEquals(0, a1);

    // Future
    Date accum2 = create(FUTURE);
    int a2 = accum2.getSeconds();
    assertEquals(5, a2);
  }

  /** Testing for public long java.util.Date.getTime(). */
  public void testGetTime() {
    // Past: epoch millis are negative for pre-1970 dates.
    Date accum1 = create(PAST);
    long a1 = accum1.getTime();
    assertEquals(-2524176000000L, a1);

    // Future
    Date accum2 = create(FUTURE);
    long a2 = accum2.getTime();
    assertEquals(1924830245000L, a2);
  }

  /** Testing for public int java.util.Date.getTimezoneOffset(). */
  public void testGetTimezoneOffset() {
    // Smoke test only: the offset is environment-dependent, so no value is asserted.
    Date accum0 = create();
    int a0 = accum0.getTimezoneOffset();

    Date accum1 = create(PAST);
    int a1 = accum1.getTimezoneOffset();

    Date accum2 = create(FUTURE);
    int a2 = accum2.getTimezoneOffset();
  }

  /** Testing for public int java.util.Date.getYear(). */
  public void testGetYear() {
    // Past: getYear() is year - 1900, so 1890 -> -10.
    Date accum1 = create(PAST);
    int a1 = accum1.getYear();
    assertEquals(-10, a1);

    // Future: 2030 -> 130.
    Date accum2 = create(FUTURE);
    int a2 = accum2.getYear();
    assertEquals(130, a2);
  }

  /**
   * Testing to that if we set the day number to 31 for a month that only has 30
   * days in it, that the date rolls over to the first day of the next month in
   * sequence.
   */
  public void testInvalidDateForMonth() {
    int monthNum = 3; // April
    int numDaysInOldMonth = 30;
    int newDayNum = 31;
    Date dateWithThirtyDays = new Date(2006, monthNum, 30);
    dateWithThirtyDays.setDate(newDayNum);
    assertEquals(dateWithThirtyDays.getMonth(), monthNum + 1);
    assertEquals(dateWithThirtyDays.getDate(), newDayNum - numDaysInOldMonth);
  }

  /** Testing for public static long java.util.Date.parse(java.lang.String). */
  public void testParse() {
    try {
      Date.parse(null);
      fail("Should have thrown exception");
    } catch (IllegalArgumentException e) {
      // Expected
    }

    // NOTE(review): unlike the null case above, this block does not fail() when no
    // exception is thrown — it only tolerates one. Possibly intentional; confirm.
    try {
      Date.parse("");
    } catch (IllegalArgumentException e) {
      // Expected
    }

    // Current
    // TODO(b/146498060): Reenable the roundtrip once toLocaleString is fixed for ie11.
    if (false) {
      Date accum0 = create();
      String arg10 = createString(CURRENT);
      long a0 = Date.parse(arg10);
      assertEquals(roundToDay(accum0.getTime()), roundToDay(a0));
    }

    // Past
    Date accum1 = create(PAST);
    String arg20 = createString(PAST);
    long a1 = Date.parse(arg20);
    assertEquals(-2524521600000L, a1);

    // Future
    Date accum2 = create(FUTURE);
    String arg30 = createString(FUTURE);
    long a2 = Date.parse(arg30);
    assertEquals(1924830245000L, a2);
  }

  /** Testing for public void java.util.Date.setDate(int). */
  public void testSetDate() {
    // We only go through dates from 0-28 here. There are some months that do
    // not
    // have 29, 30, or 31 days - so our assertion would be wrong in the cases
    // where
    // the current month did not have 29,30,or 31 days
    for (int i = 1; i < 29; i++) {
      Date accum0 = create();
      accum0.setDate(i);
      assertEquals(accum0.getDate(), i);
    }
  }

  /** Testing for public void java.util.Date.setHours(int). */
  public void testSetHours() {
    for (int i = 0; i < 24; i++) {
      Date accum0 = create();
      if (isDst(accum0)) {
        // This test fails on the day of DST, so skip it.
        return;
      }
      accum0.setHours(i);
      assertEquals(accum0.getHours(), i);
    }
  }

  /**
   * We want to test to see that if we are currently in a month with 31 days and
   * we set the month to one which has less than 31 days, that the month
   * returned by the date class will be one higher than the month that we
   * originally set (according to the spec of java.util.date).
   */
  public void testSetInvalidMonthForDate() {
    int dayNum = 31;
    int newMonthNum = 1;
    int numDaysInNewMonth = 28;

    Date dateWithThirtyOneDays = new Date(2006, 12, dayNum);
    dateWithThirtyOneDays.setMonth(newMonthNum);
    assertEquals(dateWithThirtyOneDays.getMonth(), newMonthNum + 1);
    assertEquals(dateWithThirtyOneDays.getDate(), dayNum - numDaysInNewMonth);
  }

  /**
   * We want to test to see that if the date is Feb 29th (in a leap year) and we
   * set the year to a non-leap year, that the month and day will roll over to
   * March 1st.
   */
  public void testSetInvalidYearForDate() {
    int dayNum = 29;
    int monthNum = 1; // February
    int newYearNum = 2005;
    int numDaysInFebInNewYear = 28;

    Date leapYearDate = new Date(2004, monthNum, dayNum);
    leapYearDate.setYear(newYearNum);
    assertEquals(leapYearDate.getYear(), newYearNum);
    assertEquals(leapYearDate.getMonth(), monthNum + 1);
    assertEquals(leapYearDate.getDate(), dayNum - numDaysInFebInNewYear);
  }

  /** Testing for public void java.util.Date.setMinutes(int). */
  public void testSetMinutes() {
    for (int i = 0; i < 24; i++) {
      Date accum0 = create();
      accum0.setMinutes(i);
      assertEquals(accum0.getMinutes(), i);
    }
  }

  /** Testing for public void java.util.Date.setMonth(int). */
  public void testSetMonth() {
    for (int i = 0; i < 12; i++) {
      // We want to use a fixed date here. If we use the current date, the
      // assertion may fail
      // when the date is the 29th, 30th, or 31st, and we set the month to one
      // which does
      // not have 29, 30, or 31 days in it, respectively.
      Date accum0 = new Date(2006, 12, 1);
      accum0.setMonth(i);
      assertEquals(accum0.getMonth(), i);
    }
  }

  /** Testing for public void java.util.Date.setSeconds(int). */
  public void testSetSeconds() {
    for (int i = 0; i < 24; i++) {
      Date accum0 = create();
      accum0.setSeconds(i);
      assertEquals(accum0.getSeconds(), i);
    }
  }

  /** Testing for public void java.util.Date.setTime(long). */
  public void testSetTime() {
    long[] values = new long[] {-100000000000L, -100L, 0, 100L, 1000000000L};
    for (int i = 0; i < values.length; i++) {
      Date accum0 = create();
      accum0.setTime(values[i]);
      assertEquals(accum0.getTime(), values[i]);
    }
  }

  /**
   * We want to test to see that if the date is Feb 29th (in a leap year) and we
   * set the year to another leap year, that the month and day will be retained.
   */
  public void testSetValidLeapYearForDate() {
    int dayNum = 29;
    int monthNum = 1; // February
    int yearNum = 2004;
    int newYearNum = yearNum + 4;

    Date leapYearDate = new Date(yearNum, monthNum, dayNum);
    leapYearDate.setYear(newYearNum);
    assertEquals(leapYearDate.getYear(), newYearNum);
    assertEquals(leapYearDate.getMonth(), monthNum);
    assertEquals(leapYearDate.getDate(), dayNum);
  }

  /** Testing for public void java.util.Date.setYear(int). */
  public void testSetYear() {
    for (int i = 1880; i < 2030; i++) {
      // We want to use a fixed date here. If we use the current date, the
      // assertion may fail
      // when the date is February 29th, and we set the year to a non-leap year
      Date accum0 = new Date(2006, 12, 01);
      accum0.setYear(i);
      assertEquals(accum0.getYear(), i);
    }
  }

  /** Testing for public java.lang.String java.util.Date.toGMTString(). */
  public void testToGMTString() {
    // We can't rely on the JRE's toString, as it is an implementation detail.
    if (!TestUtils.isJvm()) {
      // Past
      Date accum1 = create(PAST);
      String a1 = accum1.toGMTString();
      assertEquals("5 Jan 1890 00:00:00 GMT", a1);

      // Future
      Date accum2 = create(FUTURE);
      String a2 = accum2.toGMTString();
      assertEquals("30 Dec 2030 03:04:05 GMT", a2);
    }
  }

  /** Testing for public java.lang.String java.util.Date.toLocaleString(). */
  public void testToLocaleString() {
    // We can't rely on the JRE's toString, as it is an implementation detail.
    if (!TestUtils.isJvm()) {
      // Past: only the year is checked, since the rest is locale-dependent.
      Date accum1 = create(PAST);
      String a1 = accum1.toLocaleString();
      assertTrue(a1.indexOf("1890") != -1);

      // Future
      Date accum2 = create(FUTURE);
      String a2 = accum2.toLocaleString();
      assertTrue(a2.indexOf("2030") != -1);
    }
  }

  /** Date docs specify an exact format for toString(). */
  public void testToString() {
    // We can't rely on the JRE's toString, as it is an implementation detail.
    if (!TestUtils.isJvm()) {
      // Past: format matches TO_STRING_PATTERN and round-trips through Date.parse.
      Date d = create(PAST);
      String s = d.toString();
      assertTrue("Bad format " + s, s.matches(TO_STRING_PATTERN));
      assertEquals("Parsing returned unequal dates from " + s, d, new Date(
          Date.parse(s)));

      // Future
      d = create(FUTURE);
      s = d.toString();
      assertTrue("Bad format " + s, s.matches(TO_STRING_PATTERN));
      assertEquals("Parsing returned unequal dates from " + s, d, new Date(
          Date.parse(s)));
    }
  }

  /** Testing for public static long java.util.Date.UTC(int,int,int,int,int,int). */
  public void testUTC() {
    // Smoke test only: UTC is a static method invoked through an instance here,
    // and the results are not asserted.
    Date accum0 = create();
    int arg10 = 0;
    int arg11 = 0;
    int arg12 = 0;
    int arg13 = 0;
    int arg14 = 0;
    int arg15 = 0;
    long a0 = accum0.UTC(arg10, arg11, arg12, arg13, arg14, arg15);

    Date accum1 = create(PAST);
    int arg20 = 0;
    int arg21 = 0;
    int arg22 = 0;
    int arg23 = 0;
    int arg24 = 0;
    int arg25 = 0;
    long a1 = accum1.UTC(arg20, arg21, arg22, arg23, arg24, arg25);

    Date accum2 = create(FUTURE);
    int arg30 = 0;
    int arg31 = 0;
    int arg32 = 0;
    int arg33 = 0;
    int arg34 = 0;
    int arg35 = 0;
    long a2 = accum2.UTC(arg30, arg31, arg32, arg33, arg34, arg35);
  }

  // Month and date of days with time shifts (parallel lists filled by findTimeShifts).
  private ArrayList<Integer> timeShiftMonth = new ArrayList<Integer>();
  private ArrayList<Integer> timeShiftDate = new ArrayList<Integer>();

  // True when the wall-clock span [start, start + days) is not exactly days * 24h long,
  // i.e. a DST (or other UTC-offset) shift occurs inside it.
  private boolean containsTimeShift(Date start, int days) {
    long startTime = start.getTime();
    Date end = new Date();
    end.setTime(startTime);
    end.setDate(start.getDate() + days);
    long endTime = end.getTime();
    return (endTime - startTime) != ((long) days * 24 * 60 * 60 * 1000);
  }

  // Binary search over [start, start + days): records every single day containing a shift
  // into timeShiftMonth/timeShiftDate.
  private void findTimeShift(Date start, int days) {
    assertTrue(days != 0);
    // Found a shift day
    if (days == 1) {
      timeShiftMonth.add(start.getMonth());
      timeShiftDate.add(start.getDate());
      return;
    }
    // Recurse over the first half of the period
    if (containsTimeShift(start, days / 2)) {
      findTimeShift(start, days / 2);
    }
    // Recurse over the second half of the period
    Date mid = new Date();
    mid.setTime(start.getTime());
    mid.setDate(start.getDate() + days / 2);
    if (containsTimeShift(mid, days - days / 2)) {
      findTimeShift(mid, days - days / 2);
    }
  }

  // Scans one whole calendar year (noon-to-noon to avoid edge effects) for shift days.
  private void findTimeShifts(int year) {
    timeShiftMonth.clear();
    timeShiftDate.clear();
    Date start = new Date(year - 1900, 0, 1, 12, 0, 0);
    Date end = new Date(year + 1 - 1900, 0, 1, 12, 0, 0);
    int days = (int) ((end.getTime() - start.getTime()) / (24 * 60 * 60 * 1000));
    findTimeShift(start, days);
  }

  // Finds an hour that occurs twice (clocks rolled back) in the given year.
  // Writes {month, day, hour} into monthDayHour and returns true on success.
  private boolean findClockBackwardTime(int year, int[] monthDayHour) {
    findTimeShifts(year);
    int numShifts = timeShiftMonth.size();
    for (int i = 0; i < numShifts; i++) {
      int month = timeShiftMonth.get(i);
      int day = timeShiftDate.get(i);
      long start = new Date(year - 1900, month, day, 0, 30, 0).getTime();
      long end = new Date(year - 1900, month, day + 1, 23, 30, 0).getTime();
      int lastHour = -1;
      // Step hour by hour; a repeated wall-clock hour marks the backward shift.
      for (long time = start; time < end; time += 60 * 60 * 1000) {
        Date d = new Date();
        d.setTime(time);
        int hour = d.getHours();
        if (hour == lastHour) {
          monthDayHour[0] = d.getMonth();
          monthDayHour[1] = d.getDate();
          monthDayHour[2] = d.getHours();
          return true;
        }
        lastHour = hour;
      }
    }
    return false;
  }

  // Finds an hour that is skipped (clocks rolled forward) in the given year.
  // Writes {month, day, hour} into monthDayHour and returns true on success.
  private boolean findClockForwardTime(int year, int[] monthDayHour) {
    findTimeShifts(year);
    int numShifts = timeShiftMonth.size();
    for (int i = 0; i < numShifts; i++) {
      int month = timeShiftMonth.get(i);
      int startDay = timeShiftDate.get(i);
      // Check the shift day and the following day, hour by hour.
      for (int day = startDay; day <= startDay + 1; day++) {
        for (int hour = 0; hour < 24; hour++) {
          Date d = new Date(year - 1900, month, day, hour, 0, 0);
          int h = d.getHours();
          // Constructing the skipped hour lands on the next hour instead.
          if ((h % 24) == ((hour + 1) % 24)) {
            monthDayHour[0] = month;
            monthDayHour[1] = day;
            monthDayHour[2] = hour;
            return true;
          }
        }
      }
    }
    return false;
  }
/**
 * Check if daylight saving time occurs on the date.
 *
 * @param date the date to check
 * @return true if DST occurs on the date, false if not
 */
private boolean isDst(Date date) {
  int[] monthDayHour = new int[3];
  if (!findClockForwardTime(date.getYear() + 1900, monthDayHour)) {
    return false;
  }
  return monthDayHour[0] == date.getMonth()
      && monthDayHour[1] == date.getDate();
}

/**
 * Verifies that a wall-clock time in the repeated hour of a clock-backward (fall-back)
 * transition resolves to the later of the two instants: subtracting one hour of epoch
 * time must yield the same displayed hour. Skips silently when the local timezone has
 * no backward transition in 2009.
 */
public void testClockBackwardTime() {
  int[] monthDayHour = new int[3];
  if (!findClockBackwardTime(2009, monthDayHour)) {
    return;
  }

  Date d;
  int month = monthDayHour[0];
  int day = monthDayHour[1];
  int hour = monthDayHour[2];

  // Check that this is the later of the two times having the
  // same hour:minute:second
  d = new Date(2009 - 1900, month, day, hour, 30, 0);
  assertEquals(hour, d.getHours());
  d.setTime(d.getTime() - 60 * 60 * 1000);
  assertEquals(hour, d.getHours());
}

/**
 * Verifies that landing on the skipped hour of a clock-forward (spring-forward)
 * transition — whether by construction or via setYear/setMonth/setDate/setHours/
 * setMinutes/setSeconds — normalizes to the following hour. Skips silently when
 * the local timezone has no forward transition in 2009.
 */
public void testClockForwardTime() {
  int[] monthDayHour = new int[3];
  if (!findClockForwardTime(2009, monthDayHour)) {
    return;
  }

  Date d;
  int month = monthDayHour[0];
  int day = monthDayHour[1];
  int hour = monthDayHour[2];

  // Constructing the skipped hour directly lands on the next hour.
  d = new Date(2009 - 1900, month, day, hour, 0, 0);
  assertEquals(normalizeHour(hour + 1), d.getHours());

  // Test year change -- assume the previous year changes on a different day
  d = new Date(2008 - 1900, month, day, hour, 0, 0);
  assertEquals(hour, d.getHours());
  d.setYear(2009 - 1900);
  assertEquals(normalizeHour(hour + 1), d.getHours());

  // Test month change
  d = new Date(2009 - 1900, month + 1, day, hour, 0, 0);
  assertEquals(hour, d.getHours());
  d.setMonth(month);
  assertEquals(normalizeHour(hour + 1), d.getHours());

  // Test day change
  d = new Date(2009 - 1900, month, day + 1, hour, 0, 0);
  assertEquals(hour, d.getHours());
  d.setDate(day);
  assertEquals(normalizeHour(hour + 1), d.getHours());

  // Test hour setting
  d = new Date(2009 - 1900, month, day, hour + 2, 0, 0);
  assertEquals(normalizeHour(hour + 2), d.getHours());
  d.setHours(hour);
  assertEquals(normalizeHour(hour + 1), d.getHours());

  // Test changing hour by minutes = +- 60
  d = new Date(2009 - 1900, month, day, hour + 2, 0, 0);
  assertEquals(normalizeHour(hour + 2), d.getHours());
  d.setMinutes(-60);
  assertEquals(normalizeHour(hour + 1), d.getHours());

  d = new Date(2009 - 1900, month, day, hour - 1, 0, 0);
  assertEquals(normalizeHour(hour - 1), d.getHours());
  d.setMinutes(60);
  assertEquals(normalizeHour(hour + 1), d.getHours());

  // Test changing hour by minutes = +- 120
  d = new Date(2009 - 1900, month, day, hour + 2, 0, 0);
  assertEquals(normalizeHour(hour + 2), d.getHours());
  d.setMinutes(-120);
  assertEquals(normalizeHour(hour + 1), d.getHours());

  d = new Date(2009 - 1900, month, day, hour - 2, 0, 0);
  assertEquals(normalizeHour(hour - 2), d.getHours());
  d.setMinutes(120);
  assertEquals(normalizeHour(hour + 1), d.getHours());

  // Test changing hour by seconds = +- 3600
  d = new Date(2009 - 1900, month, day, hour + 2, 0, 0);
  assertEquals(normalizeHour(hour + 2), d.getHours());
  d.setSeconds(-3600);
  assertEquals(normalizeHour(hour + 1), d.getHours());

  d = new Date(2009 - 1900, month, day, hour - 1, 0, 0);
  assertEquals(normalizeHour(hour - 1), d.getHours());
  d.setSeconds(3600);
  assertEquals(normalizeHour(hour + 1), d.getHours());

  // Test changing hour by seconds = +- 7200
  d = new Date(2009 - 1900, month, day, hour + 2, 0, 0);
  assertEquals(normalizeHour(hour + 2), d.getHours());
  d.setSeconds(-7200);
  assertEquals(normalizeHour(hour + 1), d.getHours());

  d = new Date(2009 - 1900, month, day, hour - 2, 0, 0);
  assertEquals(normalizeHour(hour - 2), d.getHours());
  d.setSeconds(7200);
  assertEquals(normalizeHour(hour + 1), d.getHours());

  // Setter order must not matter: hours then minutes...
  d = new Date(2009 - 1900, month, day, hour + 2, 0, 0);
  d.setHours(hour);
  d.setMinutes(30);
  assertEquals(normalizeHour(hour + 1), d.getHours());
  assertEquals(30, d.getMinutes());

  // ...and minutes then hours.
  d = new Date(2009 - 1900, month, day, hour + 2, 0, 0);
  d.setMinutes(30);
  d.setHours(hour);
  assertEquals(normalizeHour(hour + 1), d.getHours());
  assertEquals(30, d.getMinutes());
}

// Wraps an hour value into [0, 24); correct for inputs >= -24, which covers all call
// sites above (hour - 2 at minimum).
int normalizeHour(int i) {
  return (i + 24) % 24;
}
/** Returns a fresh copy of the shared test date, so callers may mutate it freely. */
Date create() {
  return (Date) theDate.clone();
}

/**
 * Returns a {@link Date} for the given symbolic name.
 *
 * @param which {@code FUTURE} or {@code PAST} map to fixed GMT timestamps;
 *        anything else yields a copy of the shared test date
 * @return a Date the caller owns (never a shared reference)
 */
Date create(String which) {
  if (which.equals(FUTURE)) {
    return new Date("12/30/2030 3:4:5 GMT");
  }
  if (which.equals(PAST)) {
    return new Date("1/5/1890 GMT");
  }
  return (Date) theDate.clone();
}

/**
 * Returns the string representation paired with the given symbolic name;
 * defaults to the locale rendering of the shared test date.
 * NOTE(review): the PAST string here ("1/1/1890 GMT") differs from the PAST
 * date built by {@code create(String)} ("1/5/1890 GMT") — confirm whether
 * that asymmetry is intentional.
 */
private String createString(String which) {
  if (which.equals(FUTURE)) {
    return "12/30/2030 3:4:5 GMT";
  }
  if (which.equals(PAST)) {
    return "1/1/1890 GMT";
  }
  return theDate.toLocaleString();
}

/**
 * Truncates a millisecond timestamp down to its day-sized bucket boundary by
 * clearing the low {@code DAY_MILLISECONDS_SHIFT} bits.
 */
private long roundToDay(long millis) {
  return (millis >> DAY_MILLISECONDS_SHIFT) << DAY_MILLISECONDS_SHIFT;
}
}