gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.bwcompat; import com.carrotsearch.randomizedtesting.generators.RandomPicks; import org.apache.lucene.index.Fields; import org.apache.lucene.util.English; import org.elasticsearch.ElasticsearchIllegalArgumentException; import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.health.ClusterHealthStatus; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse; import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.count.CountResponse; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.deletebyquery.DeleteByQueryResponse; import org.elasticsearch.action.deletebyquery.IndexDeleteByQueryResponse; import org.elasticsearch.action.explain.ExplainResponse; import org.elasticsearch.action.get.*; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import 
org.elasticsearch.action.search.SearchType; import org.elasticsearch.action.termvector.TermVectorResponse; import org.elasticsearch.action.update.UpdateRequestBuilder; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.ImmutableSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.internal.FieldNamesFieldMapper; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ElasticsearchBackwardsCompatIntegrationTest; import org.junit.Test; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.concurrent.ExecutionException; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.FilterBuilders.existsFilter; import static org.elasticsearch.index.query.FilterBuilders.missingFilter; import static org.elasticsearch.index.query.QueryBuilders.*; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*; import static org.hamcrest.Matchers.*; /** */ public class BasicBackwardsCompatibilityTest extends 
ElasticsearchBackwardsCompatIntegrationTest { /** * Basic test using Index & Realtime Get with external versioning. This test ensures routing works correctly across versions. */ @Test public void testExternalVersion() throws Exception { createIndex("test"); final boolean routing = randomBoolean(); int numDocs = randomIntBetween(10, 20); for (int i = 0; i < numDocs; i++) { String id = Integer.toString(i); String routingKey = routing ? randomRealisticUnicodeOfLength(10) : null; final long version = randomIntBetween(0, Integer.MAX_VALUE); client().prepareIndex("test", "type1", id).setRouting(routingKey).setVersion(version).setVersionType(VersionType.EXTERNAL).setSource("field1", English.intToEnglish(i)).get(); GetResponse get = client().prepareGet("test", "type1", id).setRouting(routingKey).setVersion(version).get(); assertThat("Document with ID " +id + " should exist but doesn't", get.isExists(), is(true)); assertThat(get.getVersion(), equalTo(version)); final long nextVersion = version + randomIntBetween(0, Integer.MAX_VALUE); client().prepareIndex("test", "type1", id).setRouting(routingKey).setVersion(nextVersion).setVersionType(VersionType.EXTERNAL).setSource("field1", English.intToEnglish(i)).get(); get = client().prepareGet("test", "type1", id).setRouting(routingKey).setVersion(nextVersion).get(); assertThat("Document with ID " +id + " should exist but doesn't", get.isExists(), is(true)); assertThat(get.getVersion(), equalTo(nextVersion)); } } /** * Basic test using Index & Realtime Get with internal versioning. This test ensures routing works correctly across versions. */ @Test public void testInternalVersion() throws Exception { createIndex("test"); final boolean routing = randomBoolean(); int numDocs = randomIntBetween(10, 20); for (int i = 0; i < numDocs; i++) { String routingKey = routing ? 
randomRealisticUnicodeOfLength(10) : null; String id = Integer.toString(i); assertThat(id, client().prepareIndex("test", "type1", id).setRouting(routingKey).setSource("field1", English.intToEnglish(i)).get().isCreated(), is(true)); GetResponse get = client().prepareGet("test", "type1", id).setRouting(routingKey).setVersion(1).get(); assertThat("Document with ID " +id + " should exist but doesn't", get.isExists(), is(true)); assertThat(get.getVersion(), equalTo(1l)); client().prepareIndex("test", "type1", id).setRouting(routingKey).setSource("field1", English.intToEnglish(i)).execute().actionGet(); get = client().prepareGet("test", "type1", id).setRouting(routingKey).setVersion(2).get(); assertThat("Document with ID " +id + " should exist but doesn't", get.isExists(), is(true)); assertThat(get.getVersion(), equalTo(2l)); } assertVersionCreated(compatibilityVersion(), "test"); } /** * Very basic bw compat test with a mixed version cluster random indexing and lookup by ID via term query */ @Test public void testIndexAndSearch() throws Exception { createIndex("test"); int numDocs = randomIntBetween(10, 20); List<IndexRequestBuilder> builder = new ArrayList<>(); for (int i = 0; i < numDocs; i++) { String id = Integer.toString(i); builder.add(client().prepareIndex("test", "type1", id).setSource("field1", English.intToEnglish(i), "the_id", id)); } indexRandom(true, builder); for (int i = 0; i < numDocs; i++) { String id = Integer.toString(i); assertHitCount(client().prepareSearch().setQuery(QueryBuilders.termQuery("the_id", id)).get(), 1); } assertVersionCreated(compatibilityVersion(), "test"); } @Test public void testRecoverFromPreviousVersion() throws ExecutionException, InterruptedException { if (backwardsCluster().numNewDataNodes() == 0) { backwardsCluster().startNewNode(); } assertAcked(prepareCreate("test").setSettings(ImmutableSettings.builder().put("index.routing.allocation.exclude._name", backwardsCluster().newNodePattern()).put(indexSettings()))); 
ensureYellow(); assertAllShardsOnNodes("test", backwardsCluster().backwardsNodePattern()); int numDocs = randomIntBetween(100, 150); logger.info(" --> indexing [{}] docs", numDocs); IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; i++) { docs[i] = client().prepareIndex("test", "type1", randomRealisticUnicodeOfLength(10) + String.valueOf(i)).setSource("field1", English.intToEnglish(i)); } indexRandom(true, docs); CountResponse countResponse = client().prepareCount().get(); assertHitCount(countResponse, numDocs); if (randomBoolean()) { logger.info(" --> moving index to new nodes"); backwardsCluster().allowOnlyNewNodes("test"); } else { logger.info(" --> allow index to on all nodes"); backwardsCluster().allowOnAllNodes("test"); } logger.info(" --> indexing [{}] more docs", numDocs); // sometimes index while relocating if (randomBoolean()) { for (int i = 0; i < numDocs; i++) { docs[i] = client().prepareIndex("test", "type1", randomRealisticUnicodeOfLength(10) + String.valueOf(numDocs + i)).setSource("field1", English.intToEnglish(numDocs + i)); } indexRandom(true, docs); numDocs *= 2; } logger.info(" --> waiting for relocation to complete", numDocs); ensureYellow("test");// move all shards to the new node (it waits on relocation) final int numIters = randomIntBetween(10, 20); for (int i = 0; i < numIters; i++) { countResponse = client().prepareCount().get(); assertHitCount(countResponse, numDocs); } assertVersionCreated(compatibilityVersion(), "test"); } /** * Test that ensures that we will never recover from a newer to an older version (we are not forward compatible) */ @Test public void testNoRecoveryFromNewNodes() throws ExecutionException, InterruptedException { assertAcked(prepareCreate("test").setSettings(ImmutableSettings.builder().put("index.routing.allocation.exclude._name", backwardsCluster().backwardsNodePattern()).put(indexSettings()))); if (backwardsCluster().numNewDataNodes() == 0) { 
backwardsCluster().startNewNode(); } ensureYellow(); assertAllShardsOnNodes("test", backwardsCluster().newNodePattern()); if (randomBoolean()) { backwardsCluster().allowOnAllNodes("test"); } int numDocs = randomIntBetween(100, 150); IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; i++) { docs[i] = client().prepareIndex("test", "type1", randomRealisticUnicodeOfLength(10) + String.valueOf(i)).setSource("field1", English.intToEnglish(i), "num_int", randomInt(), "num_double", randomDouble()); } indexRandom(true, docs); backwardsCluster().allowOnAllNodes("test"); while(ensureYellow() != ClusterHealthStatus.GREEN) { backwardsCluster().startNewNode(); } assertAllShardsOnNodes("test", backwardsCluster().newNodePattern()); CountResponse countResponse = client().prepareCount().get(); assertHitCount(countResponse, numDocs); final int numIters = randomIntBetween(10, 20); for (int i = 0; i < numIters; i++) { countResponse = client().prepareCount().get(); assertHitCount(countResponse, numDocs); assertSimpleSort("num_double", "num_int"); } assertVersionCreated(compatibilityVersion(), "test"); } public void assertSimpleSort(String... 
numericFields) { for(String field : numericFields) { SearchResponse searchResponse = client().prepareSearch().addSort(field, SortOrder.ASC).get(); SearchHit[] hits = searchResponse.getHits().getHits(); assertThat(hits.length, greaterThan(0)); Number previous = null; for (SearchHit hit : hits) { assertNotNull(hit.getSource().get(field)); if (previous != null) { assertThat(previous.doubleValue(), lessThanOrEqualTo(((Number) hit.getSource().get(field)).doubleValue())); } previous = (Number) hit.getSource().get(field); } } } public void assertAllShardsOnNodes(String index, String pattern) { ClusterState clusterState = client().admin().cluster().prepareState().execute().actionGet().getState(); for (IndexRoutingTable indexRoutingTable : clusterState.routingTable()) { for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) { for (ShardRouting shardRouting : indexShardRoutingTable) { if (shardRouting.currentNodeId() != null && index.equals(shardRouting.getIndex())) { String name = clusterState.nodes().get(shardRouting.currentNodeId()).name(); assertThat("Allocated on new node: " + name, Regex.simpleMatch(pattern, name), is(true)); } } } } } /** * Upgrades a single node to the current version */ @Test public void testIndexUpgradeSingleNode() throws Exception { assertAcked(prepareCreate("test").setSettings(ImmutableSettings.builder().put("index.routing.allocation.exclude._name", backwardsCluster().newNodePattern()).put(indexSettings()))); ensureYellow(); int numDocs = randomIntBetween(100, 150); IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; i++) { docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource("field1", English.intToEnglish(i), "num_int", randomInt(), "num_double", randomDouble()); } indexRandom(true, docs); assertAllShardsOnNodes("test", backwardsCluster().backwardsNodePattern()); 
client().admin().indices().prepareUpdateSettings("test").setSettings(ImmutableSettings.builder().put(EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE, "none")).get(); backwardsCluster().allowOnAllNodes("test"); CountResponse countResponse = client().prepareCount().get(); assertHitCount(countResponse, numDocs); backwardsCluster().upgradeOneNode(); ensureYellow(); if (randomBoolean()) { for (int i = 0; i < numDocs; i++) { docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource("field1", English.intToEnglish(i), "num_int", randomInt(), "num_double", randomDouble()); } indexRandom(true, docs); } client().admin().indices().prepareUpdateSettings("test").setSettings(ImmutableSettings.builder().put(EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE, "all")).get(); ensureYellow(); final int numIters = randomIntBetween(1, 20); for (int i = 0; i < numIters; i++) { assertHitCount(client().prepareCount().get(), numDocs); assertSimpleSort("num_double", "num_int"); } assertVersionCreated(compatibilityVersion(), "test"); } /** * Test that allocates an index on one or more old nodes and then do a rolling upgrade * one node after another is shut down and restarted from a newer version and we verify * that all documents are still around after each nodes upgrade. 
*/ @Test public void testIndexRollingUpgrade() throws Exception { String[] indices = new String[randomIntBetween(1,3)]; for (int i = 0; i < indices.length; i++) { indices[i] = "test" + i; assertAcked(prepareCreate(indices[i]).setSettings(ImmutableSettings.builder().put("index.routing.allocation.exclude._name", backwardsCluster().newNodePattern()).put(indexSettings()))); } int numDocs = randomIntBetween(100, 150); IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; String[] indexForDoc = new String[docs.length]; for (int i = 0; i < numDocs; i++) { docs[i] = client().prepareIndex(indexForDoc[i] = RandomPicks.randomFrom(getRandom(), indices), "type1", String.valueOf(i)).setSource("field1", English.intToEnglish(i), "num_int", randomInt(), "num_double", randomDouble()); } indexRandom(true, docs); for (String index : indices) { assertAllShardsOnNodes(index, backwardsCluster().backwardsNodePattern()); } client().admin().indices().prepareUpdateSettings(indices).setSettings(ImmutableSettings.builder().put(EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE, "none")).get(); backwardsCluster().allowOnAllNodes(indices); logClusterState(); boolean upgraded; do { logClusterState(); CountResponse countResponse = client().prepareCount().get(); assertHitCount(countResponse, numDocs); assertSimpleSort("num_double", "num_int"); upgraded = backwardsCluster().upgradeOneNode(); ensureYellow(); countResponse = client().prepareCount().get(); assertHitCount(countResponse, numDocs); for (int i = 0; i < numDocs; i++) { docs[i] = client().prepareIndex(indexForDoc[i], "type1", String.valueOf(i)).setSource("field1", English.intToEnglish(i), "num_int", randomInt(), "num_double", randomDouble()); } indexRandom(true, docs); } while (upgraded); client().admin().indices().prepareUpdateSettings(indices).setSettings(ImmutableSettings.builder().put(EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE, "all")).get(); ensureYellow(); CountResponse countResponse = 
client().prepareCount().get(); assertHitCount(countResponse, numDocs); assertSimpleSort("num_double", "num_int"); String[] newIndices = new String[randomIntBetween(1,3)]; for (int i = 0; i < newIndices.length; i++) { newIndices[i] = "new_index" + i; createIndex(newIndices[i]); } assertVersionCreated(Version.CURRENT, newIndices); // new indices are all created with the new version assertVersionCreated(compatibilityVersion(), indices); } public void assertVersionCreated(Version version, String... indices) { GetSettingsResponse getSettingsResponse = client().admin().indices().prepareGetSettings(indices).get(); ImmutableOpenMap<String,Settings> indexToSettings = getSettingsResponse.getIndexToSettings(); for (String index : indices) { Settings settings = indexToSettings.get(index); assertThat(settings.getAsVersion(IndexMetaData.SETTING_VERSION_CREATED, null), notNullValue()); assertThat(settings.getAsVersion(IndexMetaData.SETTING_VERSION_CREATED, null), equalTo(version)); } } @Test public void testUnsupportedFeatures() throws IOException { if (compatibilityVersion().before(Version.V_1_3_0)) { XContentBuilder mapping = XContentBuilder.builder(JsonXContent.jsonXContent) .startObject() .startObject("type") .startObject(FieldNamesFieldMapper.NAME) // by setting randomly index to no we also test the pre-1.3 behavior .field("index", randomFrom("no", "not_analyzed")) .field("store", randomFrom("no", "yes")) .endObject() .endObject() .endObject(); try { assertAcked(prepareCreate("test"). 
setSettings(ImmutableSettings.builder().put("index.routing.allocation.exclude._name", backwardsCluster().newNodePattern()).put(indexSettings())) .addMapping("type", mapping)); } catch (MapperParsingException ex) { if (getMasterVersion().onOrAfter(Version.V_1_3_0)) { assertThat(ex.getCause(), instanceOf(ElasticsearchIllegalArgumentException.class)); assertThat(ex.getCause().getMessage(), equalTo("type=_field_names is not supported on indices created before version 1.3.0 is your cluster running multiple datanode versions?")); } else { assertThat(ex.getCause(), instanceOf(MapperParsingException.class)); assertThat(ex.getCause().getMessage(), startsWith("Root type mapping not empty after parsing!")); } } } } /** * This filter had a major upgrade in 1.3 where we started to index the field names. Lets see if they still work as expected... * this test is basically copied from SimpleQueryTests... */ @Test public void testExistsFilter() throws IOException, ExecutionException, InterruptedException { assumeTrue("this test fails often with 1.0.3 skipping for now....",compatibilityVersion().onOrAfter(Version.V_1_1_0)); for (;;) { createIndex("test"); ensureYellow(); indexRandom(true, client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject().startObject("obj1").field("obj1_val", "1").endObject().field("x1", "x_1").field("field1", "value1_1").field("field2", "value2_1").endObject()), client().prepareIndex("test", "type1", "2").setSource(jsonBuilder().startObject().startObject("obj1").field("obj1_val", "1").endObject().field("x2", "x_2").field("field1", "value1_2").endObject()), client().prepareIndex("test", "type1", "3").setSource(jsonBuilder().startObject().startObject("obj2").field("obj2_val", "1").endObject().field("y1", "y_1").field("field2", "value2_3").endObject()), client().prepareIndex("test", "type1", "4").setSource(jsonBuilder().startObject().startObject("obj2").field("obj2_val", "1").endObject().field("y2", "y_2").field("field3", 
"value3_4").endObject())); CountResponse countResponse = client().prepareCount().setQuery(filteredQuery(matchAllQuery(), existsFilter("field1"))).get(); assertHitCount(countResponse, 2l); countResponse = client().prepareCount().setQuery(constantScoreQuery(existsFilter("field1"))).get(); assertHitCount(countResponse, 2l); countResponse = client().prepareCount().setQuery(queryString("_exists_:field1")).get(); assertHitCount(countResponse, 2l); countResponse = client().prepareCount().setQuery(filteredQuery(matchAllQuery(), existsFilter("field2"))).get(); assertHitCount(countResponse, 2l); countResponse = client().prepareCount().setQuery(filteredQuery(matchAllQuery(), existsFilter("field3"))).get(); assertHitCount(countResponse, 1l); // wildcard check countResponse = client().prepareCount().setQuery(filteredQuery(matchAllQuery(), existsFilter("x*"))).get(); assertHitCount(countResponse, 2l); // object check countResponse = client().prepareCount().setQuery(filteredQuery(matchAllQuery(), existsFilter("obj1"))).get(); assertHitCount(countResponse, 2l); countResponse = client().prepareCount().setQuery(filteredQuery(matchAllQuery(), missingFilter("field1"))).get(); assertHitCount(countResponse, 2l); countResponse = client().prepareCount().setQuery(filteredQuery(matchAllQuery(), missingFilter("field1"))).get(); assertHitCount(countResponse, 2l); countResponse = client().prepareCount().setQuery(constantScoreQuery(missingFilter("field1"))).get(); assertHitCount(countResponse, 2l); countResponse = client().prepareCount().setQuery(queryString("_missing_:field1")).get(); assertHitCount(countResponse, 2l); // wildcard check countResponse = client().prepareCount().setQuery(filteredQuery(matchAllQuery(), missingFilter("x*"))).get(); assertHitCount(countResponse, 2l); // object check countResponse = client().prepareCount().setQuery(filteredQuery(matchAllQuery(), missingFilter("obj1"))).get(); assertHitCount(countResponse, 2l); if (!backwardsCluster().upgradeOneNode()) { break; } 
ensureYellow(); assertVersionCreated(compatibilityVersion(), "test"); // we had an old node in the cluster so we have to be on the compat version assertAcked(client().admin().indices().prepareDelete("test")); } assertVersionCreated(Version.CURRENT, "test"); // after upgrade we have current version } public Version getMasterVersion() { return client().admin().cluster().prepareState().get().getState().nodes().masterNode().getVersion(); } @Test public void testDeleteByQuery() throws ExecutionException, InterruptedException { createIndex("test"); ensureYellow("test"); int numDocs = iterations(10, 50); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[numDocs + 1]; for (int i = 0; i < numDocs; i++) { indexRequestBuilders[i] = client().prepareIndex("test", "test", Integer.toString(i)).setSource("field", "value"); } indexRequestBuilders[numDocs] = client().prepareIndex("test", "test", Integer.toString(numDocs)).setSource("field", "other_value"); indexRandom(true, indexRequestBuilders); SearchResponse searchResponse = client().prepareSearch("test").get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().totalHits(), equalTo((long)numDocs + 1)); DeleteByQueryResponse deleteByQueryResponse = client().prepareDeleteByQuery("test").setQuery(QueryBuilders.termQuery("field", "value")).get(); assertThat(deleteByQueryResponse.getIndices().size(), equalTo(1)); for (IndexDeleteByQueryResponse indexDeleteByQueryResponse : deleteByQueryResponse) { assertThat(indexDeleteByQueryResponse.getIndex(), equalTo("test")); assertThat(indexDeleteByQueryResponse.getFailures().length, equalTo(0)); } refresh(); searchResponse = client().prepareSearch("test").get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); } @Test public void testDeleteRoutingRequired() throws ExecutionException, InterruptedException, IOException { createIndexWithAlias(); 
assertAcked(client().admin().indices().preparePutMapping("test").setType("test").setSource( XContentFactory.jsonBuilder().startObject().startObject("test").startObject("_routing").field("required", true).endObject().endObject().endObject())); ensureYellow("test"); int numDocs = iterations(10, 50); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs - 2; i++) { indexRequestBuilders[i] = client().prepareIndex("test", "test", Integer.toString(i)) .setRouting(randomAsciiOfLength(randomIntBetween(1, 10))).setSource("field", "value"); } String firstDocId = Integer.toString(numDocs - 2); indexRequestBuilders[numDocs - 2] = client().prepareIndex("test", "test", firstDocId) .setRouting("routing").setSource("field", "value"); String secondDocId = Integer.toString(numDocs - 1); String secondRouting = randomAsciiOfLength(randomIntBetween(1, 10)); indexRequestBuilders[numDocs - 1] = client().prepareIndex("test", "test", secondDocId) .setRouting(secondRouting).setSource("field", "value"); indexRandom(true, indexRequestBuilders); SearchResponse searchResponse = client().prepareSearch("test").get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().totalHits(), equalTo((long) numDocs)); //use routing DeleteResponse deleteResponse = client().prepareDelete("test", "test", firstDocId).setRouting("routing").get(); assertThat(deleteResponse.isFound(), equalTo(true)); GetResponse getResponse = client().prepareGet("test", "test", firstDocId).setRouting("routing").get(); assertThat(getResponse.isExists(), equalTo(false)); refresh(); searchResponse = client().prepareSearch("test").get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().totalHits(), equalTo((long) numDocs - 1)); //don't use routing and trigger a broadcast delete deleteResponse = client().prepareDelete("test", "test", secondDocId).get(); assertThat(deleteResponse.isFound(), equalTo(true)); getResponse = client().prepareGet("test", 
"test", secondDocId).setRouting(secondRouting).get(); assertThat(getResponse.isExists(), equalTo(false)); refresh(); searchResponse = client().prepareSearch("test").setSize(numDocs).get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().totalHits(), equalTo((long) numDocs - 2)); } @Test public void testIndexGetAndDelete() throws ExecutionException, InterruptedException { createIndexWithAlias(); ensureYellow("test"); int numDocs = iterations(10, 50); for (int i = 0; i < numDocs; i++) { IndexResponse indexResponse = client().prepareIndex(indexOrAlias(), "type", Integer.toString(i)).setSource("field", "value-" + i).get(); assertThat(indexResponse.isCreated(), equalTo(true)); assertThat(indexResponse.getIndex(), equalTo("test")); assertThat(indexResponse.getType(), equalTo("type")); assertThat(indexResponse.getId(), equalTo(Integer.toString(i))); } refresh(); String docId = Integer.toString(randomIntBetween(0, numDocs - 1)); GetResponse getResponse = client().prepareGet(indexOrAlias(), "type", docId).get(); assertThat(getResponse.isExists(), equalTo(true)); assertThat(getResponse.getIndex(), equalTo("test")); assertThat(getResponse.getType(), equalTo("type")); assertThat(getResponse.getId(), equalTo(docId)); DeleteResponse deleteResponse = client().prepareDelete(indexOrAlias(), "type", docId).get(); assertThat(deleteResponse.isFound(), equalTo(true)); assertThat(deleteResponse.getIndex(), equalTo("test")); assertThat(deleteResponse.getType(), equalTo("type")); assertThat(deleteResponse.getId(), equalTo(docId)); getResponse = client().prepareGet(indexOrAlias(), "type", docId).get(); assertThat(getResponse.isExists(), equalTo(false)); refresh(); SearchResponse searchResponse = client().prepareSearch(indexOrAlias()).get(); assertThat(searchResponse.getHits().totalHits(), equalTo((long)numDocs - 1)); } @Test public void testUpdate() { createIndexWithAlias(); ensureYellow("test"); UpdateRequestBuilder updateRequestBuilder = 
client().prepareUpdate(indexOrAlias(), "type1", "1") .setUpsert("field1", "value1").setDoc("field2", "value2"); UpdateResponse updateResponse = updateRequestBuilder.get(); assertThat(updateResponse.getIndex(), equalTo("test")); assertThat(updateResponse.getType(), equalTo("type1")); assertThat(updateResponse.getId(), equalTo("1")); assertThat(updateResponse.isCreated(), equalTo(true)); GetResponse getResponse = client().prepareGet("test", "type1", "1").get(); assertThat(getResponse.isExists(), equalTo(true)); assertThat(getResponse.getSourceAsMap().containsKey("field1"), equalTo(true)); assertThat(getResponse.getSourceAsMap().containsKey("field2"), equalTo(false)); updateResponse = updateRequestBuilder.get(); assertThat(updateResponse.getIndex(), equalTo("test")); assertThat(updateResponse.getType(), equalTo("type1")); assertThat(updateResponse.getId(), equalTo("1")); assertThat(updateResponse.isCreated(), equalTo(false)); getResponse = client().prepareGet("test", "type1", "1").get(); assertThat(getResponse.isExists(), equalTo(true)); assertThat(getResponse.getSourceAsMap().containsKey("field1"), equalTo(true)); assertThat(getResponse.getSourceAsMap().containsKey("field2"), equalTo(true)); } @Test public void testAnalyze() { createIndexWithAlias(); assertAcked(client().admin().indices().preparePutMapping("test").setType("test").setSource("field", "type=string,analyzer=keyword")); ensureYellow("test"); AnalyzeResponse analyzeResponse = client().admin().indices().prepareAnalyze("this is a test").setIndex(indexOrAlias()).setField("field").get(); assertThat(analyzeResponse.getTokens().size(), equalTo(1)); assertThat(analyzeResponse.getTokens().get(0).getTerm(), equalTo("this is a test")); } @Test public void testExplain() { createIndexWithAlias(); ensureYellow("test"); client().prepareIndex(indexOrAlias(), "test", "1").setSource("field", "value1").get(); refresh(); ExplainResponse response = client().prepareExplain(indexOrAlias(), "test", "1") 
.setQuery(QueryBuilders.termQuery("field", "value1")).get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.isMatch(), equalTo(true)); assertThat(response.getExplanation(), notNullValue()); assertThat(response.getExplanation().isMatch(), equalTo(true)); assertThat(response.getExplanation().getDetails().length, equalTo(1)); } @Test public void testGetTermVector() throws IOException { createIndexWithAlias(); assertAcked(client().admin().indices().preparePutMapping("test").setType("type1").setSource("field", "type=string,term_vector=with_positions_offsets_payloads").get()); ensureYellow("test"); client().prepareIndex(indexOrAlias(), "type1", "1") .setSource("field", "the quick brown fox jumps over the lazy dog").get(); refresh(); TermVectorResponse termVectorResponse = client().prepareTermVector(indexOrAlias(), "type1", "1").get(); assertThat(termVectorResponse.getIndex(), equalTo("test")); assertThat(termVectorResponse.isExists(), equalTo(true)); Fields fields = termVectorResponse.getFields(); assertThat(fields.size(), equalTo(1)); assertThat(fields.terms("field").size(), equalTo(8l)); } @Test public void testIndicesStats() { createIndex("test"); ensureYellow("test"); IndicesStatsResponse indicesStatsResponse = client().admin().indices().prepareStats().all().get(); assertThat(indicesStatsResponse.getIndices().size(), equalTo(1)); assertThat(indicesStatsResponse.getIndices().containsKey("test"), equalTo(true)); } @Test public void testMultiGet() throws ExecutionException, InterruptedException { createIndexWithAlias(); ensureYellow("test"); int numDocs = iterations(10, 50); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; i++) { indexRequestBuilders[i] = client().prepareIndex("test", "type", Integer.toString(i)).setSource("field", "value" + Integer.toString(i)); } indexRandom(false, indexRequestBuilders); int iterations = iterations(1, numDocs); MultiGetRequestBuilder multiGetRequestBuilder 
= client().prepareMultiGet(); for (int i = 0; i < iterations; i++) { multiGetRequestBuilder.add(new MultiGetRequest.Item(indexOrAlias(), "type", Integer.toString(randomInt(numDocs - 1)))); } MultiGetResponse multiGetResponse = multiGetRequestBuilder.get(); assertThat(multiGetResponse.getResponses().length, equalTo(iterations)); for (int i = 0; i < multiGetResponse.getResponses().length; i++) { MultiGetItemResponse multiGetItemResponse = multiGetResponse.getResponses()[i]; assertThat(multiGetItemResponse.isFailed(), equalTo(false)); assertThat(multiGetItemResponse.getIndex(), equalTo("test")); assertThat(multiGetItemResponse.getType(), equalTo("type")); assertThat(multiGetItemResponse.getId(), equalTo(multiGetRequestBuilder.request().getItems().get(i).id())); assertThat(multiGetItemResponse.getResponse().isExists(), equalTo(true)); assertThat(multiGetItemResponse.getResponse().getIndex(), equalTo("test")); assertThat(multiGetItemResponse.getResponse().getType(), equalTo("type")); assertThat(multiGetItemResponse.getResponse().getId(), equalTo(multiGetRequestBuilder.request().getItems().get(i).id())); } } @Test public void testScroll() throws ExecutionException, InterruptedException { createIndex("test"); ensureYellow("test"); int numDocs = iterations(10, 100); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; i++) { indexRequestBuilders[i] = client().prepareIndex("test", "type", Integer.toString(i)).setSource("field", "value" + Integer.toString(i)); } indexRandom(true, indexRequestBuilders); int size = randomIntBetween(1, 10); SearchRequestBuilder searchRequestBuilder = client().prepareSearch("test").setScroll("1m").setSize(size); boolean scan = randomBoolean(); if (scan) { searchRequestBuilder.setSearchType(SearchType.SCAN); } SearchResponse searchResponse = searchRequestBuilder.get(); assertThat(searchResponse.getScrollId(), notNullValue()); assertHitCount(searchResponse, numDocs); int hits = 0; if (scan) { 
assertThat(searchResponse.getHits().getHits().length, equalTo(0)); } else { assertThat(searchResponse.getHits().getHits().length, greaterThan(0)); hits += searchResponse.getHits().getHits().length; } try { do { searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll("1m").get(); assertThat(searchResponse.getScrollId(), notNullValue()); assertHitCount(searchResponse, numDocs); hits += searchResponse.getHits().getHits().length; } while (searchResponse.getHits().getHits().length > 0); assertThat(hits, equalTo(numDocs)); } finally { clearScroll(searchResponse.getScrollId()); } } private static String indexOrAlias() { return randomBoolean() ? "test" : "alias"; } private void createIndexWithAlias() { if (compatibilityVersion().onOrAfter(Version.V_1_1_0)) { assertAcked(prepareCreate("test").addAlias(new Alias("alias"))); } else { assertAcked(prepareCreate("test")); assertAcked(client().admin().indices().prepareAliases().addAlias("test", "alias")); } } }
package com.insidecoding.sos.io; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.util.ArrayList; import java.util.Enumeration; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.MissingResourceException; import java.util.Properties; import java.util.PropertyResourceBundle; import java.util.ResourceBundle; import org.apache.log4j.Logger; import org.apache.poi.hssf.usermodel.HSSFWorkbook; import org.apache.poi.poifs.filesystem.OfficeXmlFileException; import org.apache.poi.ss.usermodel.Cell; import org.apache.poi.ss.usermodel.Row; import org.apache.poi.ss.usermodel.Sheet; import org.apache.poi.ss.usermodel.Workbook; import org.apache.poi.xssf.usermodel.XSSFWorkbook; /** * This class offers helper methods for I/O operations. <br/> * It allows you to read/write/append to/from text files, properties files, CSV * files and Microsoft Excel files. <br/> * This is very helpful for data driven testing. <br/> * * @author ludovicianul * */ public final class FileUtils { /** * Simple cache that holds all the workbooks that will be used within the * current run. */ private Map<String, Workbook> workbooks = new HashMap<String, Workbook>(); /** * Holds all the resources that were opened so that they can be released at * the end of the execution cycle. */ private List<InputStream> fis = new ArrayList<InputStream>(); /** * Simple cache that holds all the bunldes used within in the application. */ private Map<String, ResourceBundle> bundles = new HashMap<String, ResourceBundle>(); /** * Logger for this class. */ private static final Logger LOG = Logger.getLogger(FileUtils.class); /** * This will call {@code buildupPropertiesBundles("./src/test/resources")}. 
*/ public FileUtils() { try { buildupPropertiesBundles(new File("./src/test/resources")); } catch (IOException e) { LOG.warn("No bundle was loaded!!"); } } /** * Loads all the properties files from this location. * * @param resourcesLocation * the path from where the properties files will be loaded. This * must be a folder otherwise a {@link IOException} will be * thrown * @throws IOException * if something goes wrong while reading the resources files */ public FileUtils(final String resourcesLocation) throws IOException { LOG.info("Loading properties files from: " + resourcesLocation); File file = new File(resourcesLocation); if (!file.exists()) { throw new IOException("Invalid path: " + resourcesLocation); } if (!file.isDirectory()) { throw new IOException("The path supplied must be a folder: " + resourcesLocation); } buildupPropertiesBundles(file); } /** * Loads all properties files into a bundle cache. * * @param file * the folder where the properties files can be found * @throws IOException * if something goes wrong while reading the file */ private void buildupPropertiesBundles(final File file) throws IOException { File[] files = file.listFiles(); for (File f : files) { if (f.getName().endsWith("properties")) { String bundleName = f.getName().substring(0, f.getName().indexOf("properties") - 1); LOG.info("Loading: " + bundleName); ResourceBundle bundle = new PropertyResourceBundle( new FileInputStream(f)); bundles.put(bundleName, bundle); } } } /** * Loads a locale specific bundle. * * @param bundleName * Name of the bundle to be loaded. This name must be fully * qualified. * @param locale * Locale for which the resource bundle will be loaded. */ public void loadPropertiesBundle(final String bundleName, final Locale locale) { LOG.info("Loading properties bundle: " + bundleName); ResourceBundle bundle = ResourceBundle.getBundle(bundleName, locale); bundles.put(bundleName, bundle); } /** * Loads a properties bundle with the default locale. 
* * @param bundleName * Name of the bundle to be loaded. This name must be fully * qualified. */ public void loadPropertiesBundle(final String bundleName) { LOG.info("Loading properties bundle: " + bundleName); ResourceBundle bundle = ResourceBundle.getBundle(bundleName); bundles.put(bundleName, bundle); } /** * Loads the properties from a file specified as a parameter. * * @param propertiesFile * Path to the properties file. * @throws IOException * is something goes wrong while reading the file */ public void loadPropertiesFromFile(final String propertiesFile) throws IOException { LOG.info("Loading properties from file: " + propertiesFile); File file = new File(propertiesFile); String bundleName = file.getPath().substring(0, file.getPath().indexOf("properties") - 1); FileInputStream inputStream = null; try { inputStream = new FileInputStream(file); ResourceBundle bundle = new PropertyResourceBundle(inputStream); LOG.info("Adding to bunlde: " + bundleName); bundles.put(bundleName, bundle); } finally { if (inputStream != null) { inputStream.close(); } } } /** * Returns a single bundle from the bundles map. * * @param bundleName * The name of the bundle to be retrieved. * @return returns the specified bundle from the cache */ public ResourceBundle getBundle(final String bundleName) { LOG.info("Getting bundle: " + bundleName); return bundles.get(bundleName); } /** * Gets the value from the resource bundles corresponding to the supplied * key. 
<br/> * If the property exists in multiple bundles only the first occurrence will * be returned * * @param key * the key to search for * @return {@code Boolean.FALSE} if the property is not found; * {@code Boolean.TRUE} if the property is true or * {@code Boolean.FALSE} otherwise */ public Boolean getPropertyAsBoolean(final String key) { LOG.info("Getting value for key: " + key); for (ResourceBundle bundle : bundles.values()) { try { return Boolean.valueOf(bundle.getString(key)); } catch (MissingResourceException e) { LOG.info("Resource: " + key + " not found!"); } } return Boolean.FALSE; } /** * Gets the value from the resource bundles corresponding to the supplied * key. <br/> * * * @param bundleName * the name of the bundle to search in * @param key * the key to search for * @return {@code Boolean.FALSE} if the property is not found; * {@code Boolean.TRUE} if the property is true or * {@code Boolean.FALSE} otherwise */ public Boolean getPropertyAsBoolean(final String bundleName, final String key) { LOG.info("Getting value for key: " + key + " from bundle name: " + bundleName); ResourceBundle bundle = bundles.get(bundleName); Boolean result = Boolean.FALSE; try { result = Boolean.valueOf(bundle.getString(key)); } catch (MissingResourceException e) { LOG.info("Resource: " + key + " not found!"); } return result; } /** * Gets the value from the resource bundles corresponding to the supplied * key. 
<br/> * If the property exists in multiple bundles only the first occurrence will * be returned * * @param key * the key to search for * @return {@code null} if the property is not found or the corresponding * value otherwise */ public String getPropertyAsString(final String key) { LOG.info("Getting value for key: " + key); for (ResourceBundle bundle : bundles.values()) { try { return bundle.getString(key); } catch (MissingResourceException e) { LOG.info("Resource: " + key + " not found!"); } } return null; } /** * Gets the value from the resource bundles corresponding to the supplied * key. <br/> * * * @param bundleName * the name of the bundle to search in * @param key * the key to search for * @return {@code null} if the property is not found or the corresponding * value otherwise */ public String getPropertyAsString(final String bundleName, final String key) { LOG.info("Getting value for key: " + key + " bundleName:" + bundleName); ResourceBundle bundle = bundles.get(bundleName); String result = null; try { result = bundle.getString(key); } catch (MissingResourceException e) { LOG.info("Resource: " + key + " not found!"); } return result; } /** * Gets the value as int from the resource bundles corresponding to the * supplied key. <br/> * * If the property exists in multiple bundles only the first occurrence will * be returned * * @param key * the key to search for * @return {@code -1} if the property is not found or the value is not a * number; the corresponding value otherwise */ public int getPropertyAsInteger(final String key) { LOG.info("Getting value for key: " + key); int result = -1; for (ResourceBundle bundle : bundles.values()) { try { result = Integer.parseInt(bundle.getString(key)); break; } catch (MissingResourceException e) { LOG.info("Resource: " + key + " not found!"); } catch (NumberFormatException e) { result = -1; } } return result; } /** * Gets the value as int from the resource bundles corresponding to the * supplied key. 
<br/> * * * @param bundleName * the name of the bunlde to search in * @param key * the key to search for * @return {@code -1} if the property is not found or the value is not a * number; the corresponding value otherwise */ public int getPropertyAsInteger(final String bundleName, final String key) { LOG.info("Getting value for key: " + key + " from bundle: " + bundleName); ResourceBundle bundle = bundles.get(bundleName); try { return Integer.parseInt(bundle.getString(key)); } catch (MissingResourceException e) { LOG.info("Resource: " + key + " not found!"); } catch (NumberFormatException e) { return -1; } return -1; } /** * Gets the value as double from the resource bundles corresponding to the * supplied key. <br/> * * If the property exists in multiple bundles only the first occurrence will * be returned * * @param key * the key to search for * @return {@code -1} if the property is not found or the value is not a * number; the corresponding value otherwise */ public double getPropertyAsDouble(final String key) { LOG.info("Getting value for key: " + key); double result = -1d; for (ResourceBundle bundle : bundles.values()) { try { result = Double.parseDouble(bundle.getString(key)); break; } catch (MissingResourceException e) { LOG.info("Resource: " + key + " not found!"); } catch (NumberFormatException e) { result = -1d; } } return result; } /** * Gets the value as double from the resource bundles corresponding to the * supplied key. 
<br/> * * * @param bundleName * the name of the bundle to search in * @param key * the key to search for * @return {@code -1} if the property is not found or the value is not a * number; the corresponding value otherwise */ public double getPropertyAsDouble(final String bundleName, final String key) { LOG.info("Getting value for key: " + key + " from bundle: " + bundleName); ResourceBundle bundle = bundles.get(bundleName); try { return Double.parseDouble(bundle.getString(key)); } catch (MissingResourceException e) { LOG.info("Resource: " + key + " not found!"); } catch (NumberFormatException e) { return -1d; } return -1d; } /** * Reads the value of a cell from an Excel workbook corresponding to the * supplied coordinates. <br/> * * * @param fileName * the name of the file * @param sheetName * the name of the sheet within the file/workbook * @param rowNumber * the row number; starts from 0 * @param cellNumber * the cell number; starts from 0 * @return the cell value from the excel file * @throws IOException * if something goes wrong while reading the file */ public String readFromExcel(final String fileName, final String sheetName, final int rowNumber, final int cellNumber) throws IOException { LOG.info("Reading from file: " + fileName + " sheet: " + sheetName + " rowNumber: " + rowNumber + " cellNumber:" + cellNumber); Workbook workbook = getWorkbook(fileName); Sheet rows = workbook.getSheet(sheetName); Row row = rows.getRow(rowNumber); Cell cell = row.getCell(cellNumber); String result = null; switch (cell.getCellType()) { case Cell.CELL_TYPE_BOOLEAN: result = String.valueOf(cell.getBooleanCellValue()); break; case Cell.CELL_TYPE_STRING: result = cell.getStringCellValue(); break; case Cell.CELL_TYPE_NUMERIC: result = String.valueOf(cell.getNumericCellValue()); break; default: result = cell.getStringCellValue(); } LOG.info(" Returning: " + result); return result; } /** * Tries to load the workbook from the current cache. 
If it doesn't find it * it will load it from the disk. If it doesn't find it on the disk a * {@link IOException} will be thrown * * @param fileName * the name of file * @return the workbook corresponding to this file * @throws IOException * if something goes wrong while reading the file */ private Workbook getWorkbook(final String fileName) throws IOException { if (workbooks.get(fileName) == null) { FileInputStream fiStream = new FileInputStream(new File(fileName)); fis.add(fiStream); Workbook workbook = null; try { workbook = new HSSFWorkbook(fiStream); } catch (OfficeXmlFileException e) { fiStream.close(); fiStream = new FileInputStream(new File(fileName)); fis.add(fiStream); workbook = new XSSFWorkbook(fiStream); } workbooks.put(fileName, workbook); } return workbooks.get(fileName); } /** * Gets the number of rows populated within the supplied file and sheet * name. * * @param fileName * the name of the file * @param sheetName * the sheet name * @return the number of rows * @throws IOException * if something goes wrong while reading the file */ public int getNumberOfRows(final String fileName, final String sheetName) throws IOException { LOG.info("Getting the number of rows from:" + fileName + " sheet: " + sheetName); Workbook workbook = getWorkbook(fileName); Sheet sheets = workbook.getSheet(sheetName); return sheets.getPhysicalNumberOfRows(); } /** * You MUST call this method after you finish running your tests. <br/> * Usually you can do this in a @After method (tearDown()). <br/> * If you just extend the provided {@code AbstractSoSBase} you won't need to * worry about it */ public void releaseResources() { for (InputStream str : fis) { try { str.close(); } catch (Exception e) { LOG.info("Exception while trying to close streams", e); } } } /** * Returns the contents of a file into a String object. <br/> * Please be careful when using this with large files * * @param file * the file * @param encoding * the file encoding. 
Example: "UTF-8", "UTF-16" * @return a String object with the contents of the file * @throws IOException * if something goes wrong while reading the file */ public String getFileContentsAsString(final File file, final String encoding) throws IOException { LOG.info("Getting files contents as string: " + file); BufferedReader br = null; try { br = new BufferedReader(new InputStreamReader(new FileInputStream( file), encoding)); StringBuilder builder = new StringBuilder(); String line = null; while ((line = br.readLine()) != null) { builder.append(line); } LOG.debug("File contents: " + builder); return builder.toString(); } finally { if (br != null) { br.close(); } } } /** * Returns the contents of a file into a String object. <br/> * Please be careful when using this with large files * * @param fileName * the name of the file * @param encoding * the file encoding. Examples: "UTF-8", "UTF-16" * @return a String object with the contents of the file * @throws IOException * if something goes wrong while reading the file */ public String getFileContentsAsString(final String fileName, final String encoding) throws IOException { return this.getFileContentsAsString(new File(fileName), encoding); } /** * Parses a CSV file based on the header received. * * @param headers * a comma separated list of headers * @param file * the file to read from * @param separator * the separator used in the CSV file * @param encoding * the file encoding. Examples: "UTF-8", "UTF-16". 
* @return a Map having the Headers as keys and a corresponding list for * each value * @throws IOException * if something goes wrong while reading the file */ public Map<String, List<String>> parseCSV(final String headers, final String file, final String separator, final String encoding) throws IOException { LOG.info("Parsing CSVs from file: " + file + " with headers: " + headers + " separator: " + separator); Map<String, List<String>> result = new HashMap<String, List<String>>(); BufferedReader reader = null; try { String[] headersArr = headers.split(","); reader = new BufferedReader(new InputStreamReader( new FileInputStream(file), encoding)); String line = null; while ((line = reader.readLine()) != null) { String[] lines = line.split(separator); if (lines.length != headersArr.length) { throw new IOException("Too many or too little theaders!"); } for (int i = 0; i < lines.length; i++) { List<String> currentHeader = result.get(headersArr[i] .trim()); if (currentHeader == null) { currentHeader = new ArrayList<String>(); result.put(headersArr[i].trim(), currentHeader); } currentHeader.add(lines[i].trim()); } } } finally { if (reader != null) { reader.close(); } } LOG.info("Result of parsing CSV: " + result); return result; } /** * Converts a ResourceBundle object to a Properties object. * * @param resource * Name of the bundle to be converted. * @return a Properties object corresponding to the supplied bundle object * */ public Properties convertResourceBundleToProperties( final ResourceBundle resource) { Properties properties = new Properties(); Enumeration<String> keys = resource.getKeys(); while (keys.hasMoreElements()) { String key = keys.nextElement(); properties.put(key, resource.getString(key)); } return properties; } /** * This method returns the content of a file into a list of strings. Each * file line will correspond to an element in the list. 
Please be careful * when using this method as it is not intended to be used with large files * * @param fileName * the name of the file * @param encoding * the file encoding. Examples: "UTF-8", "UTF-16". * @return a list of Strings contains the lines of the file * @throws IOException * if something goes wrong while reading the file */ public List<String> getFileAsList(final String fileName, final String encoding) throws IOException { LOG.info("Get file as list. file: " + fileName); List<String> result = new ArrayList<String>(); BufferedReader reader = null; try { reader = new BufferedReader(new InputStreamReader( new FileInputStream(fileName), encoding)); String line = null; while ((line = reader.readLine()) != null) { result.add(line); } } finally { if (reader != null) { reader.close(); } } LOG.info("Returning: " + result); return result; } /** * Writes the specific string content {@code toWrite} to the specified file * {@code filePath}. If the file doesn't exists one will be created and the * content will be written. If the file exists and {@code overwrite} is true * the content of the file will be overwritten otherwise the content will be * appended to the existing file * * @param filePath * the path to the file * @param toWrite * the string to be written * @param overwrite * true if you want to overwrite an existing file or false * otherwise * @param fileEncoding * the file encoding. Examples: "UTF-8", "UTF-16". 
* @throws IOException * if something goes wrong while writing to the file */ public void writeToFile(final String filePath, final String toWrite, final boolean overwrite, final String fileEncoding) throws IOException { File file = new File(filePath); if (!file.exists()) { boolean created = file.createNewFile(); LOG.info("File successfully created: " + created); } BufferedWriter writer = null; try { writer = new BufferedWriter(new OutputStreamWriter( new FileOutputStream(file, overwrite), fileEncoding)); writer.write(toWrite); writer.flush(); } finally { if (writer != null) { writer.close(); } } } /** * Writes the specific content {@code toWrite} to the specified file * {@code filePath}. If the file doesn't exists one will be created and the * content will be written. If the file exists and {@code overwrite} is true * the content of the file will be overwritten otherwise the content will be * appended to the existing file. Each item in the list will be written on a * new line * * @param filePath * the path to the file * @param toWrite * the string to be written * @param overwrite * true if you want to overwrite an existing file or false * otherwise * @param fileEncoding * the file encoding. Examples: "UTF-8", "UTF-16". * @throws IOException * if something goes wrong while writing to file */ public void writeToFile(final String filePath, final List<String> toWrite, final boolean overwrite, final String fileEncoding) throws IOException { File file = new File(filePath); if (!file.exists()) { boolean created = file.createNewFile(); LOG.info("File successfully created: " + created); } BufferedWriter writer = null; try { writer = new BufferedWriter(new OutputStreamWriter( new FileOutputStream(file, overwrite), fileEncoding)); for (String s : toWrite) { writer.write(s); writer.newLine(); } writer.flush(); } finally { if (writer != null) { writer.close(); } } } /** * Reads the content of the file between the specific line numbers. 
* * @param filePath * the path to the file * @param lineToStart * the line number to start with * @param lineToEnd * the line number to end with * @param encoding * the file encoding. Examples: "UTF-8", "UTF-16". * @return a list of strings for each line between {@code LineToStart} and * {@code lineToEnd} * @throws IOException * if something goes wrong while reading the file */ public List<String> readFromFile(final String filePath, final int lineToStart, final int lineToEnd, final String encoding) throws IOException { if (lineToStart > lineToEnd) { throw new IllegalArgumentException( "Line to start must be lower than line to end"); } LOG.info("Reading from file: " + filePath); List<String> result = new ArrayList<String>(); BufferedReader reader = null; int i = 0; try { reader = new BufferedReader(new InputStreamReader( new FileInputStream(filePath), encoding)); String line = reader.readLine(); while (line != null && i >= lineToStart && i <= lineToEnd) { result.add(line); i++; line = reader.readLine(); } } finally { if (reader != null) { reader.close(); } } LOG.info("Returning: " + result); return result; } }
package de.tum.in.www1.artemis.domain; import java.io.Serializable; import java.time.ZonedDateTime; import java.util.HashSet; import java.util.Objects; import java.util.Set; import javax.persistence.*; import org.hibernate.annotations.Cache; import org.hibernate.annotations.CacheConcurrencyStrategy; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonInclude; /** * A Lecture. */ @Entity @Table(name = "lecture") @Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE) @JsonInclude(JsonInclude.Include.NON_EMPTY) public class Lecture implements Serializable { private static final long serialVersionUID = 1L; @Id @GeneratedValue(strategy = GenerationType.IDENTITY) private Long id; @Column(name = "title") private String title; @Lob @Column(name = "description") private String description; @Column(name = "start_date") private ZonedDateTime startDate; @Column(name = "end_date") private ZonedDateTime endDate; @OneToMany(mappedBy = "lecture", cascade = CascadeType.REMOVE, orphanRemoval = true, fetch = FetchType.EAGER) @JsonIgnoreProperties(value = "lecture", allowSetters = true) private Set<Attachment> attachments = new HashSet<>(); @OneToMany(mappedBy = "lecture", cascade = CascadeType.REMOVE, orphanRemoval = true, fetch = FetchType.LAZY) @Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE) @JsonIgnoreProperties("lecture") private Set<StudentQuestion> studentQuestions = new HashSet<>(); @ManyToOne @JsonIgnoreProperties("lectures") private Course course; // jhipster-needle-entity-add-field - JHipster will add fields here, do not remove public Long getId() { return id; } public void setId(Long id) { this.id = id; } public String getTitle() { return title; } public Lecture title(String title) { this.title = title; return this; } public void setTitle(String title) { this.title = title; } public String getDescription() { return description; } public Lecture description(String description) { this.description = 
description; return this; } public void setDescription(String description) { this.description = description; } public ZonedDateTime getStartDate() { return startDate; } public Lecture startDate(ZonedDateTime startDate) { this.startDate = startDate; return this; } public void setStartDate(ZonedDateTime startDate) { this.startDate = startDate; } public ZonedDateTime getEndDate() { return endDate; } public Lecture endDate(ZonedDateTime endDate) { this.endDate = endDate; return this; } public void setEndDate(ZonedDateTime endDate) { this.endDate = endDate; } public Set<Attachment> getAttachments() { return attachments; } public Lecture attachments(Set<Attachment> attachments) { this.attachments = attachments; return this; } public Lecture addAttachments(Attachment attachment) { this.attachments.add(attachment); attachment.setLecture(this); return this; } public Lecture removeAttachments(Attachment attachment) { this.attachments.remove(attachment); attachment.setLecture(null); return this; } public void setAttachments(Set<Attachment> attachments) { this.attachments = attachments; } public Set<StudentQuestion> getStudentQuestions() { return studentQuestions; } public Lecture studentQuestions(Set<StudentQuestion> studentQuestions) { this.studentQuestions = studentQuestions; return this; } public Lecture addStudentQuestions(StudentQuestion studentQuestion) { this.studentQuestions.add(studentQuestion); studentQuestion.setLecture(this); return this; } public Lecture removeStudentQuestions(StudentQuestion studentQuestion) { this.studentQuestions.remove(studentQuestion); studentQuestion.setLecture(null); return this; } public void setStudentQuestions(Set<StudentQuestion> studentQuestions) { this.studentQuestions = studentQuestions; } public Course getCourse() { return course; } public Lecture course(Course course) { this.course = course; return this; } public void setCourse(Course course) { this.course = course; } // jhipster-needle-entity-add-getters-setters - JHipster will 
add getters and setters here, do not remove @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } Lecture lecture = (Lecture) o; if (lecture.getId() == null || getId() == null) { return false; } return Objects.equals(getId(), lecture.getId()); } @Override public int hashCode() { return Objects.hashCode(getId()); } @Override public String toString() { return "Lecture{" + "id=" + getId() + ", title='" + getTitle() + "'" + ", description='" + getDescription() + "'" + ", startDate='" + getStartDate() + "'" + ", endDate='" + getEndDate() + "'" + "}"; } }
/*
 * Copyright 2016 Red Hat, Inc. and/or its affiliates
 * and other contributors as indicated by the @author tags.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.keycloak.testsuite.admin.client;

import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import javax.ws.rs.BadRequestException;
import javax.ws.rs.NotFoundException;
import javax.ws.rs.core.Response;

import org.junit.Assert;
import org.junit.Test;
import org.keycloak.admin.client.resource.ClientTemplatesResource;
import org.keycloak.admin.client.resource.RoleMappingResource;
import org.keycloak.events.admin.OperationType;
import org.keycloak.events.admin.ResourceType;
import org.keycloak.models.AccountRoles;
import org.keycloak.models.Constants;
import org.keycloak.protocol.oidc.OIDCLoginProtocol;
import org.keycloak.protocol.saml.SamlProtocol;
import org.keycloak.representations.idm.ClientRepresentation;
import org.keycloak.representations.idm.ClientTemplateRepresentation;
import org.keycloak.representations.idm.ErrorRepresentation;
import org.keycloak.representations.idm.MappingsRepresentation;
import org.keycloak.representations.idm.RoleRepresentation;
import org.keycloak.testsuite.admin.ApiUtil;
import org.keycloak.testsuite.util.AdminEventPaths;

import static org.junit.Assert.assertEquals;

/**
 * @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
 */
public class ClientTemplateTest extends AbstractClientTest {

    /**
     * Creating a second template with the same name must be rejected with a
     * 409 Conflict and a descriptive error message.
     */
    @Test
    public void testAddDuplicatedTemplate() {
        ClientTemplateRepresentation templateRep = new ClientTemplateRepresentation();
        templateRep.setName("template1");
        String templateId = createTemplate(templateRep);

        templateRep = new ClientTemplateRepresentation();
        templateRep.setName("template1");
        Response response = clientTemplates().create(templateRep);
        assertEquals(409, response.getStatus());

        ErrorRepresentation error = response.readEntity(ErrorRepresentation.class);
        Assert.assertEquals("Client Template template1 already exists", error.getErrorMessage());

        // Cleanup
        removeTemplate(templateId);
    }

    /** Looking up a non-existent template id must raise NotFoundException. */
    @Test (expected = NotFoundException.class)
    public void testGetUnknownTemplate() {
        clientTemplates().get("unknown-id").toRepresentation();
    }

    /**
     * Creates two templates, then removes them one at a time, asserting the
     * listing shrinks accordingly after each removal.
     */
    @Test
    public void testRemoveTemplate() {
        // Create template1
        ClientTemplateRepresentation templateRep = new ClientTemplateRepresentation();
        templateRep.setName("template1");
        String template1Id = createTemplate(templateRep);

        List<ClientTemplateRepresentation> clientTemplates = clientTemplates().findAll();
        Assert.assertEquals(1, clientTemplates.size());
        Assert.assertEquals("template1", clientTemplates.get(0).getName());

        // Create template2
        templateRep = new ClientTemplateRepresentation();
        templateRep.setName("template2");
        String template2Id = createTemplate(templateRep);

        clientTemplates = clientTemplates().findAll();
        Assert.assertEquals(2, clientTemplates.size());

        // Remove template1
        removeTemplate(template1Id);

        clientTemplates = clientTemplates().findAll();
        Assert.assertEquals(1, clientTemplates.size());
        Assert.assertEquals("template2", clientTemplates.get(0).getName());

        // Remove template2
        removeTemplate(template2Id);

        clientTemplates = clientTemplates().findAll();
        Assert.assertEquals(0, clientTemplates.size());
    }

    /**
     * Round-trips a template through create and update, asserting every
     * attribute (name, description, protocol, full-scope flag) is persisted,
     * and that the update emits the expected admin event.
     */
    @Test
    public void testUpdateTemplate() {
        // Test creating
        ClientTemplateRepresentation templateRep = new ClientTemplateRepresentation();
        templateRep.setName("template1");
        templateRep.setDescription("template1-desc");
        templateRep.setProtocol(OIDCLoginProtocol.LOGIN_PROTOCOL);
        templateRep.setFullScopeAllowed(true);
        String template1Id = createTemplate(templateRep);

        // Assert created attributes
        templateRep = clientTemplates().get(template1Id).toRepresentation();
        Assert.assertEquals("template1", templateRep.getName());
        Assert.assertEquals("template1-desc", templateRep.getDescription());
        Assert.assertEquals(OIDCLoginProtocol.LOGIN_PROTOCOL, templateRep.getProtocol());
        Assert.assertEquals(true, templateRep.isFullScopeAllowed());

        // Test updating
        templateRep.setName("template1-updated");
        templateRep.setDescription("template1-desc-updated");
        templateRep.setProtocol(SamlProtocol.LOGIN_PROTOCOL);
        templateRep.setFullScopeAllowed(false);

        clientTemplates().get(template1Id).update(templateRep);

        assertAdminEvents.assertEvent(getRealmId(), OperationType.UPDATE, AdminEventPaths.clientTemplateResourcePath(template1Id), templateRep, ResourceType.CLIENT_TEMPLATE);

        // Assert updated attributes
        templateRep = clientTemplates().get(template1Id).toRepresentation();
        Assert.assertEquals("template1-updated", templateRep.getName());
        Assert.assertEquals("template1-desc-updated", templateRep.getDescription());
        Assert.assertEquals(SamlProtocol.LOGIN_PROTOCOL, templateRep.getProtocol());
        Assert.assertEquals(false, templateRep.isFullScopeAllowed());

        // Remove template1
        clientTemplates().get(template1Id).remove();
    }

    /**
     * Exercises scope mappings on a template: adds a realm role (with a
     * composite) and a client role, asserts they show up in the all/available/
     * effective listings, then removes them and asserts the reverse.
     */
    @Test
    public void testScopes() {
        // Add realm role1
        RoleRepresentation roleRep1 = createRealmRole("role1");

        // Add realm role2
        RoleRepresentation roleRep2 = createRealmRole("role2");

        // Add role2 as composite to role1
        testRealmResource().roles().get("role1").addComposites(Collections.singletonList(roleRep2));
        assertAdminEvents.assertEvent(getRealmId(), OperationType.CREATE, AdminEventPaths.roleResourceCompositesPath("role1"), Collections.singletonList(roleRep2), ResourceType.REALM_ROLE);

        // create client template
        ClientTemplateRepresentation templateRep = new ClientTemplateRepresentation();
        templateRep.setName("bar-template");
        templateRep.setFullScopeAllowed(false);
        String templateId = createTemplate(templateRep);

        // update with some scopes
        String accountMgmtId = testRealmResource().clients().findByClientId(Constants.ACCOUNT_MANAGEMENT_CLIENT_ID).get(0).getId();
        RoleRepresentation viewAccountRoleRep = testRealmResource().clients().get(accountMgmtId).roles().get(AccountRoles.VIEW_PROFILE).toRepresentation();
        RoleMappingResource scopesResource = clientTemplates().get(templateId).getScopeMappings();

        scopesResource.realmLevel().add(Collections.singletonList(roleRep1));
        assertAdminEvents.assertEvent(getRealmId(), OperationType.CREATE, AdminEventPaths.clientTemplateScopeMappingsRealmLevelPath(templateId), Collections.singletonList(roleRep1), ResourceType.REALM_SCOPE_MAPPING);

        scopesResource.clientLevel(accountMgmtId).add(Collections.singletonList(viewAccountRoleRep));
        assertAdminEvents.assertEvent(getRealmId(), OperationType.CREATE, AdminEventPaths.clientTemplateScopeMappingsClientLevelPath(templateId, accountMgmtId), Collections.singletonList(viewAccountRoleRep), ResourceType.CLIENT_SCOPE_MAPPING);

        // test that scopes are available (also through composite role)
        List<RoleRepresentation> allRealm = scopesResource.realmLevel().listAll();
        List<RoleRepresentation> availableRealm = scopesResource.realmLevel().listAvailable();
        List<RoleRepresentation> effectiveRealm = scopesResource.realmLevel().listEffective();
        List<RoleRepresentation> accountRoles = scopesResource.clientLevel(accountMgmtId).listAll();

        assertRolesPresent(allRealm, "role1");
        assertRolesNotPresent(availableRealm, "role1", "role2");
        assertRolesPresent(effectiveRealm, "role1", "role2");
        assertRolesPresent(accountRoles, AccountRoles.VIEW_PROFILE);

        MappingsRepresentation mappingsRep = clientTemplates().get(templateId).getScopeMappings().getAll();
        assertRolesPresent(mappingsRep.getRealmMappings(), "role1");
        assertRolesPresent(mappingsRep.getClientMappings().get(Constants.ACCOUNT_MANAGEMENT_CLIENT_ID).getMappings(), AccountRoles.VIEW_PROFILE);

        // remove scopes
        scopesResource.realmLevel().remove(Collections.singletonList(roleRep1));
        assertAdminEvents.assertEvent(getRealmId(), OperationType.DELETE, AdminEventPaths.clientTemplateScopeMappingsRealmLevelPath(templateId), Collections.singletonList(roleRep1), ResourceType.REALM_SCOPE_MAPPING);

        scopesResource.clientLevel(accountMgmtId).remove(Collections.singletonList(viewAccountRoleRep));
        assertAdminEvents.assertEvent(getRealmId(), OperationType.DELETE, AdminEventPaths.clientTemplateScopeMappingsClientLevelPath(templateId, accountMgmtId), Collections.singletonList(viewAccountRoleRep), ResourceType.CLIENT_SCOPE_MAPPING);

        // assert scopes are removed
        allRealm = scopesResource.realmLevel().listAll();
        availableRealm = scopesResource.realmLevel().listAvailable();
        effectiveRealm = scopesResource.realmLevel().listEffective();
        accountRoles = scopesResource.clientLevel(accountMgmtId).listAll();

        assertRolesNotPresent(allRealm, "role1");
        assertRolesPresent(availableRealm, "role1", "role2");
        assertRolesNotPresent(effectiveRealm, "role1", "role2");
        assertRolesNotPresent(accountRoles, AccountRoles.VIEW_PROFILE);

        // remove template
        removeTemplate(templateId);
    }

    /** Fails unless every expected role name appears in the given list. */
    private void assertRolesPresent(List<RoleRepresentation> roles, String... expectedRoleNames) {
        List<String> expectedList = Arrays.asList(expectedRoleNames);

        Set<String> presentRoles = new HashSet<>();
        for (RoleRepresentation roleRep : roles) {
            presentRoles.add(roleRep.getName());
        }

        for (String expected : expectedList) {
            if (!presentRoles.contains(expected)) {
                Assert.fail("Expected role " + expected + " not available");
            }
        }
    }

    // NOTE(review): this helper is truncated here — the rest of its signature
    // and body continue beyond the end of this chunk.
    private void assertRolesNotPresent(List<RoleRepresentation> roles, String...
notExpectedRoleNames) { List<String> notExpectedList = Arrays.asList(notExpectedRoleNames); for (RoleRepresentation roleRep : roles) { if (notExpectedList.contains(roleRep.getName())) { Assert.fail("Role " + roleRep.getName() + " wasn't expected to be available"); } } } // KEYCLOAK-2809 @Test public void testRemoveScopedRole() { // Add realm role RoleRepresentation roleRep = createRealmRole("foo-role"); // Add client template ClientTemplateRepresentation templateRep = new ClientTemplateRepresentation(); templateRep.setName("bar-template"); templateRep.setFullScopeAllowed(false); String templateId = createTemplate(templateRep); // Add realm role to scopes of clientTemplate clientTemplates().get(templateId).getScopeMappings().realmLevel().add(Collections.singletonList(roleRep)); assertAdminEvents.assertEvent(getRealmId(), OperationType.CREATE, AdminEventPaths.clientTemplateScopeMappingsRealmLevelPath(templateId), Collections.singletonList(roleRep), ResourceType.REALM_SCOPE_MAPPING); List<RoleRepresentation> roleReps = clientTemplates().get(templateId).getScopeMappings().realmLevel().listAll(); Assert.assertEquals(1, roleReps.size()); Assert.assertEquals("foo-role", roleReps.get(0).getName()); // Remove realm role testRealmResource().roles().deleteRole("foo-role"); assertAdminEvents.assertEvent(getRealmId(), OperationType.DELETE, AdminEventPaths.roleResourcePath("foo-role"), ResourceType.REALM_ROLE); // Get scope mappings roleReps = clientTemplates().get(templateId).getScopeMappings().realmLevel().listAll(); Assert.assertEquals(0, roleReps.size()); // Cleanup removeTemplate(templateId); } private RoleRepresentation createRealmRole(String roleName) { RoleRepresentation roleRep = new RoleRepresentation(); roleRep.setName(roleName); testRealmResource().roles().create(roleRep); assertAdminEvents.assertEvent(getRealmId(), OperationType.CREATE, AdminEventPaths.roleResourcePath(roleName), roleRep, ResourceType.REALM_ROLE); return 
testRealmResource().roles().get(roleName).toRepresentation(); } // KEYCLOAK-2844 @Test public void testRemoveTemplateInUse() { // Add client template ClientTemplateRepresentation templateRep = new ClientTemplateRepresentation(); templateRep.setName("foo-template"); templateRep.setFullScopeAllowed(false); String templateId = createTemplate(templateRep); // Add client with the clientTemplate ClientRepresentation clientRep = new ClientRepresentation(); clientRep.setClientId("bar-client"); clientRep.setName("bar-client"); clientRep.setRootUrl("foo"); clientRep.setProtocol("openid-connect"); clientRep.setClientTemplate("foo-template"); String clientDbId = createClient(clientRep); // Can't remove clientTemplate try { clientTemplates().get(templateId).remove(); } catch (BadRequestException bre) { ErrorRepresentation error = bre.getResponse().readEntity(ErrorRepresentation.class); Assert.assertEquals("Cannot remove client template, it is currently in use", error.getErrorMessage()); assertAdminEvents.assertEmpty(); } // Remove client removeClient(clientDbId); // Can remove clientTemplate now removeTemplate(templateId); } private ClientTemplatesResource clientTemplates() { return testRealmResource().clientTemplates(); } private String createTemplate(ClientTemplateRepresentation templateRep) { Response resp = clientTemplates().create(templateRep); Assert.assertEquals(201, resp.getStatus()); resp.close(); String templateId = ApiUtil.getCreatedId(resp); assertAdminEvents.assertEvent(getRealmId(), OperationType.CREATE, AdminEventPaths.clientTemplateResourcePath(templateId), templateRep, ResourceType.CLIENT_TEMPLATE); return templateId; } private void removeTemplate(String templateId) { clientTemplates().get(templateId).remove(); assertAdminEvents.assertEvent(getRealmId(), OperationType.DELETE, AdminEventPaths.clientTemplateResourcePath(templateId), ResourceType.CLIENT_TEMPLATE); } }
/*
 * Zorbage: an algebraic data hierarchy for use in numeric processing.
 *
 * Copyright (c) 2016-2021 Barry DeZonia All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted provided that the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this list
 * of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice, this
 * list of conditions and the following disclaimer in the documentation and/or other
 * materials provided with the distribution.
 *
 * Neither the name of the <copyright holder> nor the names of its contributors may
 * be used to endorse or promote products derived from this software without specific
 * prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
 * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 */
package nom.bdezonia.zorbage.type.quaternion.float64;

import java.math.BigDecimal;
import java.math.BigInteger;

import nom.bdezonia.zorbage.algebra.*;
import nom.bdezonia.zorbage.misc.Hasher;
import nom.bdezonia.zorbage.sampling.IntegerIndex;
import nom.bdezonia.zorbage.storage.coder.DoubleCoder;
import nom.bdezonia.zorbage.type.real.float64.Float64Member;
import nom.bdezonia.zorbage.type.universal.OctonionRepresentation;
import nom.bdezonia.zorbage.type.universal.PrimitiveConversion;
import nom.bdezonia.zorbage.type.universal.PrimitiveRepresentation;
import nom.bdezonia.zorbage.type.universal.TensorOctonionRepresentation;
import nom.bdezonia.zorbage.type.universal.TensorStringRepresentation;
import nom.bdezonia.zorbage.type.universal.UniversalRepresentation;

/**
 * A quaternion backed by four double components (r, i, j, k).
 * <p>
 * The primComponent* accessors address components by index:
 * 0 = r, 1 = i, 2 = j, 3 = k. The "Safe" variants additionally treat
 * out-of-bounds indices as zero-valued (reads return zero; writes of a
 * nonzero value throw).
 *
 * @author Barry DeZonia
 *
 */
public final class QuaternionFloat64Member
    implements
        NumberMember<QuaternionFloat64Member>, DoubleCoder,
        Allocatable<QuaternionFloat64Member>, Duplicatable<QuaternionFloat64Member>,
        Settable<QuaternionFloat64Member>, Gettable<QuaternionFloat64Member>,
        PrimitiveConversion, UniversalRepresentation,
        SetQuaternion<Float64Member>, GetQuaternion<Float64Member>,
        SetFromDouble, GetAsDoubleArray
{
    private double r, i, j, k;

    public QuaternionFloat64Member() {
        primitiveInit();
    }

    public QuaternionFloat64Member(double r, double i, double j, double k) {
        setR(r);
        setI(i);
        setJ(j);
        setK(k);
    }

    public QuaternionFloat64Member(QuaternionFloat64Member value) {
        set(value);
    }

    public QuaternionFloat64Member(String value) {
        TensorStringRepresentation rep = new TensorStringRepresentation(value);
        OctonionRepresentation val = rep.firstValue();
        setR(val.r().doubleValue());
        setI(val.i().doubleValue());
        setJ(val.j().doubleValue());
        setK(val.k().doubleValue());
    }

    public QuaternionFloat64Member(double... vals) {
        setFromDouble(vals);
    }

    public double r() { return r; }

    public double i() { return i; }

    public double j() { return j; }

    public double k() { return k; }

    public void setR(double val) { r = val; }

    public void setI(double val) { i = val; }

    public void setJ(double val) { j = val; }

    public void setK(double val) { k = val; }

    @Override
    public void set(QuaternionFloat64Member other) {
        r = other.r;
        i = other.i;
        j = other.j;
        k = other.k;
    }

    @Override
    public void get(QuaternionFloat64Member other) {
        other.r = r;
        other.i = i;
        other.j = j;
        other.k = k;
    }

    @Override
    public String toString() {
        // e.g. {1.0,2.0,3.0,4.0}
        StringBuilder builder = new StringBuilder();
        builder.append('{');
        builder.append(r());
        builder.append(',');
        builder.append(i());
        builder.append(',');
        builder.append(j());
        builder.append(',');
        builder.append(k());
        builder.append('}');
        return builder.toString();
    }

    @Override
    public int doubleCount() {
        return 4;
    }

    @Override
    public void fromDoubleArray(double[] arr, int index) {
        r = arr[index];
        i = arr[index+1];
        j = arr[index+2];
        k = arr[index+3];
    }

    @Override
    public void toDoubleArray(double[] arr, int index) {
        arr[index] = r;
        arr[index+1] = i;
        arr[index+2] = j;
        arr[index+3] = k;
    }

    @Override
    public QuaternionFloat64Member allocate() {
        return new QuaternionFloat64Member();
    }

    @Override
    public QuaternionFloat64Member duplicate() {
        return new QuaternionFloat64Member(this);
    }

    @Override
    public int numDimensions() {
        return 0; // a quaternion is a number, not a tensor
    }

    @Override
    public void getV(QuaternionFloat64Member value) {
        get(value);
    }

    @Override
    public void setV(QuaternionFloat64Member value) {
        set(value);
    }

    @Override
    public void toRep(TensorOctonionRepresentation rep) {
        rep.setValue(
            new OctonionRepresentation(
                BigDecimal.valueOf(r()),
                BigDecimal.valueOf(i()),
                BigDecimal.valueOf(j()),
                BigDecimal.valueOf(k())
            )
        );
    }

    @Override
    public void fromRep(TensorOctonionRepresentation rep) {
        OctonionRepresentation v = rep.getValue();
        setR(v.r().doubleValue());
        setI(v.i().doubleValue());
        setJ(v.j().doubleValue());
        setK(v.k().doubleValue());
    }

    @Override
    public PrimitiveRepresentation preferredRepresentation() {
        return PrimitiveRepresentation.DOUBLE;
    }

    @Override
    public long dimension(int i) {
        return 0;
    }

    @Override
    public int componentCount() {
        return 4;
    }

    // ------------------------------------------------------------------
    // FIX: the component dispatch and the out-of-bounds scan below were
    // previously duplicated verbatim across all ~32 primComponent* methods.
    // They are factored into four private helpers that reproduce the
    // original branch structure exactly (including the original behavior
    // of the non-safe setters for negative indices, which fell through to
    // the i component).
    // ------------------------------------------------------------------

    /** Dispatches a write to the component selected by index: 0=r, 1=i, 2=j, 3=k. */
    private void setComponent(int component, double v) {
        if (component < 2) {
            // 0 <= component <= 1
            if (component == 0)
                this.setR(v);
            else // component == 1
                this.setI(v);
        }
        else {
            // 2 <= component <= 3
            if (component == 2)
                this.setJ(v);
            else // component == 3
                this.setK(v);
        }
    }

    /** Dispatches a read of the component selected by index: 0=r, 1=i, 2=j, 3=k. */
    private double getComponent(int component) {
        if (component < 2) {
            // 0 <= component <= 1
            if (component == 0)
                return r();
            else // component == 1
                return i();
        }
        else {
            // 2 <= component <= 3
            if (component == 2)
                return j();
            else // component == 3
                return k();
        }
    }

    /** Throws if the component index is negative (precondition shared by many accessors). */
    private static void checkComponent(int component) {
        if (component < 0)
            throw new IllegalArgumentException(
                    "negative component index error");
    }

    /**
     * True when (component, index) addresses a location outside this value's
     * extents. Since numDimensions() == 0 the index loop never trips, but it is
     * kept for structural parity with the other zorbage member types.
     */
    private boolean outOfBounds(IntegerIndex index, int component) {
        if (component > 3)
            return true;
        for (int i = 0; i < numDimensions(); i++) {
            if (index.get(i) != 0)
                return true;
        }
        return false;
    }

    @Override
    public void primComponentSetByte(IntegerIndex index, int component, byte v) {
        setComponent(component, v);
    }

    @Override
    public void primComponentSetShort(IntegerIndex index, int component, short v) {
        setComponent(component, v);
    }

    @Override
    public void primComponentSetInt(IntegerIndex index, int component, int v) {
        setComponent(component, v);
    }

    @Override
    public void primComponentSetLong(IntegerIndex index, int component, long v) {
        setComponent(component, v);
    }

    @Override
    public void primComponentSetFloat(IntegerIndex index, int component, float v) {
        setComponent(component, v);
    }

    @Override
    public void primComponentSetDouble(IntegerIndex index, int component, double v) {
        setComponent(component, v);
    }

    @Override
    public void primComponentSetBigInteger(IntegerIndex index, int component, BigInteger v) {
        setComponent(component, v.doubleValue());
    }

    @Override
    public void primComponentSetBigDecimal(IntegerIndex index, int component, BigDecimal v) {
        setComponent(component, v.doubleValue());
    }

    @Override
    public void primComponentSetByteSafe(IntegerIndex index, int component, byte v) {
        checkComponent(component);
        if (outOfBounds(index, component)) {
            if (v != 0)
                throw new IllegalArgumentException(
                        "cannot set nonzero value outside extents");
        }
        else {
            setComponent(component, v);
        }
    }

    @Override
    public void primComponentSetShortSafe(IntegerIndex index, int component, short v) {
        checkComponent(component);
        if (outOfBounds(index, component)) {
            if (v != 0)
                throw new IllegalArgumentException(
                        "cannot set nonzero value outside extents");
        }
        else {
            setComponent(component, v);
        }
    }

    @Override
    public void primComponentSetIntSafe(IntegerIndex index, int component, int v) {
        checkComponent(component);
        if (outOfBounds(index, component)) {
            if (v != 0)
                throw new IllegalArgumentException(
                        "cannot set nonzero value outside extents");
        }
        else {
            setComponent(component, v);
        }
    }

    @Override
    public void primComponentSetLongSafe(IntegerIndex index, int component, long v) {
        checkComponent(component);
        if (outOfBounds(index, component)) {
            if (v != 0)
                throw new IllegalArgumentException(
                        "cannot set nonzero value outside extents");
        }
        else {
            setComponent(component, v);
        }
    }

    @Override
    public void primComponentSetFloatSafe(IntegerIndex index, int component, float v) {
        checkComponent(component);
        if (outOfBounds(index, component)) {
            if (v != 0)
                throw new IllegalArgumentException(
                        "cannot set nonzero value outside extents");
        }
        else {
            setComponent(component, v);
        }
    }

    @Override
    public void primComponentSetDoubleSafe(IntegerIndex index, int component, double v) {
        checkComponent(component);
        if (outOfBounds(index, component)) {
            if (v != 0)
                throw new IllegalArgumentException(
                        "cannot set nonzero value outside extents");
        }
        else {
            setComponent(component, v);
        }
    }

    @Override
    public void primComponentSetBigIntegerSafe(IntegerIndex index, int component, BigInteger v) {
        checkComponent(component);
        if (outOfBounds(index, component)) {
            if (v.signum() != 0)
                throw new IllegalArgumentException(
                        "cannot set nonzero value outside extents");
        }
        else {
            setComponent(component, v.doubleValue());
        }
    }

    @Override
    public void primComponentSetBigDecimalSafe(IntegerIndex index, int component, BigDecimal v) {
        checkComponent(component);
        if (outOfBounds(index, component)) {
            if (v.signum() != 0)
                throw new IllegalArgumentException(
                        "cannot set nonzero value outside extents");
        }
        else {
            setComponent(component, v.doubleValue());
        }
    }

    @Override
    public byte primComponentGetAsByte(IntegerIndex index, int component) {
        checkComponent(component);
        return (byte) getComponent(component);
    }

    @Override
    public short primComponentGetAsShort(IntegerIndex index, int component) {
        checkComponent(component);
        return (short) getComponent(component);
    }

    @Override
    public int primComponentGetAsInt(IntegerIndex index, int component) {
        checkComponent(component);
        return (int) getComponent(component);
    }

    @Override
    public long primComponentGetAsLong(IntegerIndex index, int component) {
        checkComponent(component);
        return (long) getComponent(component);
    }

    @Override
    public float primComponentGetAsFloat(IntegerIndex index, int component) {
        checkComponent(component);
        return (float) getComponent(component);
    }

    @Override
    public double primComponentGetAsDouble(IntegerIndex index, int component) {
        checkComponent(component);
        return getComponent(component);
    }

    @Override
    public BigInteger primComponentGetAsBigInteger(IntegerIndex index, int component) {
        checkComponent(component);
        return BigDecimal.valueOf(getComponent(component)).toBigInteger();
    }

    @Override
    public BigDecimal primComponentGetAsBigDecimal(IntegerIndex index, int component) {
        checkComponent(component);
        return BigDecimal.valueOf(getComponent(component));
    }

    @Override
    public byte primComponentGetAsByteSafe(IntegerIndex index, int component) {
        checkComponent(component);
        if (outOfBounds(index, component))
            return 0;
        return (byte) getComponent(component);
    }

    @Override
    public short primComponentGetAsShortSafe(IntegerIndex index, int component) {
        checkComponent(component);
        if (outOfBounds(index, component))
            return 0;
        return (short) getComponent(component);
    }

    @Override
    public int primComponentGetAsIntSafe(IntegerIndex index, int component) {
        checkComponent(component);
        if (outOfBounds(index, component))
            return 0;
        return (int) getComponent(component);
    }

    @Override
    public long primComponentGetAsLongSafe(IntegerIndex index, int component) {
        checkComponent(component);
        if (outOfBounds(index, component))
            return 0;
        return (long) getComponent(component);
    }

    @Override
    public float primComponentGetAsFloatSafe(IntegerIndex index, int component) {
        checkComponent(component);
        if (outOfBounds(index, component))
            return 0;
        return (float) getComponent(component);
    }

    @Override
    public double primComponentGetAsDoubleSafe(IntegerIndex index, int component) {
        checkComponent(component);
        if (outOfBounds(index, component))
            return 0;
        return getComponent(component);
    }

    @Override
    public BigInteger primComponentGetAsBigIntegerSafe(IntegerIndex index, int component) {
        checkComponent(component);
        if (outOfBounds(index, component))
            return BigInteger.ZERO;
        return BigDecimal.valueOf(getComponent(component)).toBigInteger();
    }

    @Override
    public BigDecimal primComponentGetAsBigDecimalSafe(IntegerIndex index, int component) {
        checkComponent(component);
        if (outOfBounds(index, component))
            return BigDecimal.ZERO;
        return BigDecimal.valueOf(getComponent(component));
    }

    @Override
    public void primitiveInit() {
        r = i = j = k = 0;
    }

    @Override
    public void setR(Float64Member val) {
        setR(val.v());
    }

    @Override
    public void setI(Float64Member val) {
        setI(val.v());
    }

    @Override
    public void setJ(Float64Member val) {
        setJ(val.v());
    }

    @Override
    public void setK(Float64Member val) {
        setK(val.v());
    }

    @Override
    public void getR(Float64Member v) {
        v.setV(r);
    }

    @Override
    public void getI(Float64Member v) {
        v.setV(i);
    }

    @Override
    public void getJ(Float64Member v) {
        v.setV(j);
    }

    @Override
    public void getK(Float64Member v) {
        v.setV(k);
    }

    @Override
    public int hashCode() {
        int v = 1;
        v = Hasher.PRIME * v + Hasher.hashCode(r);
        v = Hasher.PRIME * v + Hasher.hashCode(i);
        v = Hasher.PRIME * v + Hasher.hashCode(j);
        v = Hasher.PRIME * v + Hasher.hashCode(k);
        return v;
    }

    @Override
    public boolean equals(Object o) {
        if (o instanceof QuaternionFloat64Member) {
            return G.QDBL.isEqual().call(this, (QuaternionFloat64Member) o);
        }
        return false;
    }

    @Override
    public void setFromDouble(double... vals) {
        setR(vals[0]);
        setI(vals[1]);
        setJ(vals[2]);
        setK(vals[3]);
    }

    @Override
    public double[] getAsDoubleArray() {
        return new double[] {r(), i(), j(), k()};
    }
}
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInspection;

import com.intellij.codeInsight.ExceptionUtil;
import com.intellij.codeInsight.daemon.impl.quickfix.DeleteCatchFix;
import com.intellij.codeInsight.daemon.impl.quickfix.DeleteMultiCatchFix;
import com.intellij.codeInspection.java15api.Java15APIUsageInspection;
import com.intellij.java.JavaBundle;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.pom.java.LanguageLevel;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.JavaCodeStyleManager;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.util.ObjectUtils;
import com.intellij.util.containers.ContainerUtil;
import com.siyeh.ig.callMatcher.CallMatcher;
import com.siyeh.ig.psiutils.CommentTracker;
import com.siyeh.ig.psiutils.ExpressionUtils;
import one.util.streamex.StreamEx;
import org.jetbrains.annotations.Contract;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.function.Function;

import static com.intellij.psi.CommonClassNames.*;
import static java.util.Map.entry;

/**
 * Inspection that reports string charset arguments ("UTF-8", etc.) passed to
 * JDK/Apache-IO calls that have an overload accepting {@code java.nio.charset.Charset},
 * and offers a quick-fix replacing the string with the matching
 * {@code StandardCharsets} constant (also cleaning up now-impossible
 * {@code UnsupportedEncodingException} catch blocks).
 */
public class CharsetObjectCanBeUsedInspection extends AbstractBaseJavaLocalInspectionTool implements CleanupLocalInspectionTool {
  // Calls known to have a Charset-taking overload. The single empty string in
  // each parameter list marks the position of the charset-name argument.
  private static final CharsetCallMatcher[] MATCHERS = {
    new CharsetConstructorMatcher("java.io.InputStreamReader", "java.io.InputStream", ""),
    new CharsetConstructorMatcher("java.io.OutputStreamWriter", "java.io.OutputStream", ""),
    new CharsetConstructorMatcher(JAVA_LANG_STRING, "byte[]", "int", "int", ""),
    new CharsetConstructorMatcher(JAVA_LANG_STRING, "byte[]", ""),
    new CharsetMethodMatcher(JAVA_LANG_STRING, "getBytes", ""),
    // Java 10
    new CharsetConstructorMatcher("java.util.Scanner", "java.io.InputStream", ""),
    new CharsetConstructorMatcher("java.util.Scanner", JAVA_IO_FILE, ""),
    new CharsetConstructorMatcher("java.util.Scanner", "java.nio.file.Path", ""),
    new CharsetConstructorMatcher("java.util.Scanner", "java.nio.channels.ReadableByteChannel", ""),
    new CharsetConstructorMatcher("java.io.PrintStream", "java.io.OutputStream", "boolean", ""),
    new CharsetConstructorMatcher("java.io.PrintStream", JAVA_LANG_STRING, ""),
    new CharsetConstructorMatcher("java.io.PrintStream", JAVA_IO_FILE, ""),
    new CharsetConstructorMatcher("java.io.PrintWriter", "java.io.OutputStream", "boolean", ""),
    new CharsetConstructorMatcher("java.io.PrintWriter", JAVA_LANG_STRING, ""),
    new CharsetConstructorMatcher("java.io.PrintWriter", JAVA_IO_FILE, ""),
    new CharsetMethodMatcher("java.io.ByteArrayOutputStream", "toString", ""),
    new CharsetMethodMatcher("java.net.URLDecoder", "decode", JAVA_LANG_STRING, ""),
    new CharsetMethodMatcher("java.net.URLEncoder", "encode", JAVA_LANG_STRING, ""),
    new CharsetMethodMatcher("java.nio.channels.Channels", "newReader", "java.nio.channels.ReadableByteChannel", ""),
    new CharsetMethodMatcher("java.nio.channels.Channels", "newWriter", "java.nio.channels.WritableByteChannel", ""),
    new CharsetMethodMatcher(JAVA_UTIL_PROPERTIES, "storeToXML", "java.io.OutputStream", JAVA_LANG_STRING, ""),
    // Apache IO
    new CharsetMethodMatcher("org.apache.commons.io.IOUtils", "toString", "java.io.InputStream", ""),
    new CharsetMethodMatcher("org.apache.commons.io.IOUtils", "toString", "java.net.URI", ""),
    new CharsetMethodMatcher("org.apache.commons.io.IOUtils", "toString", "java.net.URL", ""),
  };

  private static final CallMatcher FOR_NAME_MATCHER = CallMatcher.staticCall("java.nio.charset.Charset", "forName").parameterTypes(JAVA_LANG_STRING);

  // Upper-cased charset names (including aliases) -> StandardCharsets constant name.
  private static final Map<String, String> SUPPORTED_CHARSETS = Map.ofEntries(
    entry("US-ASCII", "US_ASCII"),
    entry("ASCII", "US_ASCII"),
    entry("ISO646-US", "US_ASCII"),
    entry("ISO-8859-1", "ISO_8859_1"),
    entry("UTF-8", "UTF_8"),
    entry("UTF8", "UTF_8"),
    entry("UTF-16BE", "UTF_16BE"),
    entry("UTF16BE", "UTF_16BE"),
    entry("UTF-16LE", "UTF_16LE"),
    entry("UTF16LE", "UTF_16LE"),
    entry("UTF-16", "UTF_16"),
    entry("UTF16", "UTF_16"));

  @NotNull
  @Override
  public PsiElementVisitor buildVisitor(@NotNull ProblemsHolder holder, boolean isOnTheFly) {
    // StandardCharsets is a Java 7 API; skip older language levels entirely.
    LanguageLevel languageLevel = PsiUtil.getLanguageLevel(holder.getFile());
    if (!languageLevel.isAtLeast(LanguageLevel.JDK_1_7)) return PsiElementVisitor.EMPTY_VISITOR;
    return new JavaElementVisitor() {
      @Override
      public void visitCallExpression(PsiCallExpression call) {
        // NOTE(review): no super call here, unlike visitMethodCallExpression
        // below — method calls reach this override via that super chain;
        // confirm the asymmetry is intentional.
        CharsetMatch match = StreamEx.of(MATCHERS)
          .map(matcher -> matcher.extractCharsetMatch(languageLevel, call))
          .nonNull()
          .findFirst().orElse(null);
        if (match == null) return;
        // Report on the charset argument itself, not the whole call.
        addCharsetReplacement(match.myStringCharset, match.myStringCharset);
      }

      @Override
      public void visitMethodCallExpression(PsiMethodCallExpression call) {
        super.visitMethodCallExpression(call);
        // Charset.forName("UTF-8") -> StandardCharsets.UTF_8: here the whole
        // call is replaced, so the problem is registered on the call.
        if (!FOR_NAME_MATCHER.matches(call)) return;
        PsiExpressionList arguments = call.getArgumentList();
        PsiExpression charset = arguments.getExpressions()[0];
        addCharsetReplacement(call, charset);
      }

      /**
       * Registers a problem on {@code place} if {@code charset} is a constant
       * string naming one of the SUPPORTED_CHARSETS.
       */
      private void addCharsetReplacement(@NotNull PsiElement place, @NotNull PsiExpression charset) {
        String charsetString = getCharsetString(charset);
        if (charsetString == null) return;

        String constantName = "StandardCharsets." + SUPPORTED_CHARSETS.get(charsetString);
        holder.registerProblem(place,
                               JavaBundle.message("inspection.charset.object.can.be.used.message", constantName),
                               new CharsetObjectCanBeUsedFix(constantName));
      }

      /**
       * Returns the upper-cased supported charset name denoted by the
       * expression, or null when it is not a compile-time constant, not a
       * supported charset, or a constant reference whose name does not echo
       * the charset (to avoid misleading replacements).
       */
      @Nullable
      private String getCharsetString(PsiExpression charsetExpression) {
        charsetExpression = PsiUtil.skipParenthesizedExprDown(charsetExpression);
        String charsetString = ObjectUtils.tryCast(ExpressionUtils.computeConstantExpression(charsetExpression), String.class);
        if (charsetString == null) return null;
        charsetString = StringUtil.toUpperCase(charsetString);
        if (!SUPPORTED_CHARSETS.containsKey(charsetString)) return null;
        if (charsetExpression instanceof PsiLiteralExpression) return charsetString;
        if (charsetExpression instanceof PsiReferenceExpression) {
          String name = ((PsiReferenceExpression)charsetExpression).getReferenceName();
          if (name == null) return null;
          String baseName = StringUtil.toLowerCase(name.replaceAll("[^A-Z0-9]", ""));
          String baseCharset = StringUtil.toLowerCase(charsetString.replaceAll("[^A-Z0-9]", ""));
          // Do not report constants which name is not based on charset name (like "ENCODING", "DEFAULT_ENCODING", etc.)
          // because replacement might not be well-suitable
          if (!baseName.contains(baseCharset)) return null;
          return charsetString;
        }
        return null;
      }
    };
  }

  /**
   * Matches one call signature (constructor or method) in which exactly one
   * parameter position — the empty string in {@code myParameters} — holds the
   * charset, and can locate the sibling overload taking a Charset there.
   */
  abstract static class CharsetCallMatcher {
    @NotNull final String myClassName;
    final String @NotNull [] myParameters;
    final int myCharsetParameterIndex;

    CharsetCallMatcher(@NotNull String className, String @NotNull ... parameters) {
      myClassName = className;
      myParameters = parameters;
      int index = -1;
      for (int i = 0; i < parameters.length; i++) {
        if (parameters[i].isEmpty()) {
          if (index == -1) {
            index = i;
          } else {
            throw new IllegalArgumentException("Empty parameter type must be specified exactly once");
          }
        }
      }
      if (index == -1) {
        throw new IllegalArgumentException("No empty parameter type is specified");
      }
      myCharsetParameterIndex = index;
    }

    /**
     * True when {@code method} belongs to {@code myClassName} and its
     * signature equals {@code myParameters} with {@code charsetType}
     * substituted at the charset position.
     */
    @Contract("null,_ -> false")
    final boolean checkMethod(PsiMethod method, @NotNull String charsetType) {
      if (method == null) return false;
      PsiClass containingClass = method.getContainingClass();
      if (containingClass == null || !myClassName.equals(containingClass.getQualifiedName())) return false;
      PsiParameterList list = method.getParameterList();
      if (list.getParametersCount() != myParameters.length) return false;
      PsiParameter[] parameters = list.getParameters();
      for (int i = 0; i < myParameters.length; i++) {
        PsiType parameterType = parameters[i].getType();
        if (!parameterType.equalsToText(myParameters[i].isEmpty() ? charsetType : myParameters[i])) return false;
      }
      return true;
    }

    /**
     * Builds a CharsetMatch if a Charset-taking overload of {@code method}
     * exists and is available at {@code languageLevel}; otherwise null.
     */
    @Nullable
    final CharsetMatch createMatch(LanguageLevel languageLevel, PsiMethod method, PsiExpressionList arguments) {
      PsiExpression argument = arguments.getExpressions()[myCharsetParameterIndex];
      PsiClass aClass = method.getContainingClass();
      if (aClass == null) return null;
      PsiMethod[] candidates = method.isConstructor() ? aClass.getConstructors() : aClass.findMethodsByName(method.getName(), false);
      PsiMethod charsetMethod = Arrays.stream(candidates)
        .filter(psiMethod -> checkMethod(psiMethod, "java.nio.charset.Charset"))
        .filter(psiMethod -> Java15APIUsageInspection.getLastIncompatibleLanguageLevel(psiMethod, languageLevel) == null)
        .findFirst().orElse(null);
      if (charsetMethod == null) return null;
      return new CharsetMatch(argument, method, charsetMethod);
    }

    @Nullable
    abstract CharsetMatch extractCharsetMatch(LanguageLevel languageLevel, PsiCallExpression call);
  }

  /** Matcher for constructor calls ({@code new X(..., "UTF-8", ...)}). */
  static class CharsetConstructorMatcher extends CharsetCallMatcher {
    CharsetConstructorMatcher(@NotNull String className, String @NotNull ... parameters) {
      super(className, parameters);
    }

    @Override
    CharsetMatch extractCharsetMatch(LanguageLevel languageLevel, PsiCallExpression call) {
      if (!(call instanceof PsiNewExpression)) return null;
      PsiNewExpression newExpression = (PsiNewExpression)call;
      PsiExpressionList argumentList = newExpression.getArgumentList();
      // Cheap argument-count check before resolving the constructor.
      if (argumentList == null || argumentList.getExpressionCount() != myParameters.length) return null;
      PsiMethod method = call.resolveMethod();
      if (!checkMethod(method, JAVA_LANG_STRING) || !method.isConstructor()) return null;
      return createMatch(languageLevel, method, argumentList);
    }
  }

  /** Matcher for (static or instance) method calls. */
  static class CharsetMethodMatcher extends CharsetCallMatcher {
    @NotNull private final String myMethodName;

    CharsetMethodMatcher(@NotNull String className, @NotNull String methodName, String @NotNull ... parameters) {
      super(className, parameters);
      myMethodName = methodName;
    }

    @Override
    CharsetMatch extractCharsetMatch(LanguageLevel languageLevel, PsiCallExpression call) {
      if (!(call instanceof PsiMethodCallExpression)) return null;
      PsiMethodCallExpression methodCallExpression = (PsiMethodCallExpression)call;
      // Cheap name/arity checks before resolving.
      if (!myMethodName.equals(methodCallExpression.getMethodExpression().getReferenceName())) return null;
      PsiExpressionList argumentList = methodCallExpression.getArgumentList();
      if (argumentList.getExpressionCount() != myParameters.length) return null;
      PsiMethod method = call.resolveMethod();
      if (!checkMethod(method, JAVA_LANG_STRING)) return null;
      return createMatch(languageLevel, method, argumentList);
    }
  }

  /** The charset argument plus the String- and Charset-taking overloads it links. */
  static class CharsetMatch {
    @NotNull final PsiExpression myStringCharset;
    @NotNull final PsiMethod myStringMethod;
    @NotNull final PsiMethod myCharsetMethod;

    CharsetMatch(@NotNull PsiExpression charset, @NotNull PsiMethod stringMethod, @NotNull PsiMethod charsetMethod) {
      myStringCharset = charset;
      myStringMethod = stringMethod;
      myCharsetMethod = charsetMethod;
    }
  }

  /**
   * Quick-fix: replaces the reported expression with the StandardCharsets
   * constant, then removes catch blocks for UnsupportedEncodingException/
   * IOException that the replacement made unreachable.
   */
  static class CharsetObjectCanBeUsedFix implements LocalQuickFix {
    private final String myConstantName;

    CharsetObjectCanBeUsedFix(String constantName) {
      myConstantName = constantName;
    }

    @Nls
    @NotNull
    @Override
    public String getName() {
      return JavaBundle.message("inspection.charset.object.can.be.used.fix.name", myConstantName);
    }

    @Nls
    @NotNull
    @Override
    public String getFamilyName() {
      return JavaBundle.message("inspection.charset.object.can.be.used.fix.family.name");
    }

    @Override
    public void applyFix(@NotNull Project project, @NotNull ProblemDescriptor descriptor) {
      PsiExpression expression = ObjectUtils.tryCast(descriptor.getStartElement(), PsiExpression.class);
      if (expression == null) return;
      // For Charset.forName the call no longer throws; for the other matches
      // the enclosing call's UnsupportedEncodingException goes away — anchor
      // marks where to start looking for dead catch sections (null = skip).
      PsiElement anchor = FOR_NAME_MATCHER.matches(expression) ? null : PsiTreeUtil.getParentOfType(expression, PsiCallExpression.class);
      CommentTracker ct = new CommentTracker();
      String replacement = "java.nio.charset." + myConstantName;
      PsiReferenceExpression ref = (PsiReferenceExpression)ct.replaceAndRestoreComments(expression, replacement);
      JavaCodeStyleManager.getInstance(project).shortenClassReferences(ref);
      if (anchor == null) return;
      // Walk outward through enclosing try statements (stopping at member /
      // lambda boundaries) and delete catch clauses made unreachable.
      while (true) {
        PsiTryStatement tryStatement = PsiTreeUtil.getParentOfType(anchor, PsiTryStatement.class, true, PsiMember.class, PsiLambdaExpression.class);
        if (tryStatement == null) break;
        PsiCodeBlock tryBlock = tryStatement.getTryBlock();
        if (PsiTreeUtil.isAncestor(tryBlock, anchor, true)) {
          for (PsiParameter parameter : tryStatement.getCatchBlockParameters()) {
            List<PsiTypeElement> typeElements = PsiUtil.getParameterTypeElements(parameter);
            for (PsiTypeElement element : typeElements) {
              PsiType type = element.getType();
              if (type.equalsToText("java.io.UnsupportedEncodingException") || type.equalsToText("java.io.IOException")) {
                // Only delete if nothing else in the try (or its resource
                // list) can still throw a matching exception.
                Collection<PsiClassType> unhandledExceptions = ExceptionUtil.collectUnhandledExceptions(tryBlock, tryBlock);
                PsiResourceList resourceList = tryStatement.getResourceList();
                if (resourceList != null) {
                  Collection<PsiClassType> resourceExceptions = ExceptionUtil.collectUnhandledExceptions(resourceList, resourceList);
                  unhandledExceptions = StreamEx.of(unhandledExceptions, resourceExceptions).toFlatList(Function.identity());
                }
                if (!ContainerUtil.exists(unhandledExceptions, ue -> ue.isAssignableFrom(type) || type.isAssignableFrom(ue))) {
                  if (parameter.getType() instanceof PsiDisjunctionType) {
                    DeleteMultiCatchFix.deleteCaughtExceptionType(element);
                  } else {
                    DeleteCatchFix.deleteCatch(parameter);
                  }
                }
                return;
              }
              // A broader catch (Exception/Throwable) may be intentional;
              // leave it alone and stop.
              if (type.equalsToText(JAVA_LANG_EXCEPTION) || type.equalsToText(JAVA_LANG_THROWABLE)) {
                return;
              }
            }
          }
        }
        anchor = tryStatement;
      }
    }
  }
}
package org.commcare.resources.model.installers; import org.commcare.resources.ResourceInstallContext; import org.commcare.resources.model.Resource; import org.commcare.resources.model.ResourceLocation; import org.commcare.resources.model.ResourceTable; import org.commcare.resources.model.UnreliableSourceException; import org.commcare.resources.model.UnresolvedResourceException; import org.commcare.suite.model.Profile; import org.commcare.util.CommCarePlatform; import org.commcare.util.LogTypes; import org.commcare.xml.ProfileParser; import org.javarosa.core.reference.InvalidReferenceException; import org.javarosa.core.reference.Reference; import org.javarosa.core.services.Logger; import org.javarosa.core.util.externalizable.DeserializationException; import org.javarosa.core.util.externalizable.ExtUtil; import org.javarosa.core.util.externalizable.PrototypeFactory; import org.javarosa.xml.util.InvalidStructureException; import org.javarosa.xml.util.UnfullfilledRequirementsException; import org.xmlpull.v1.XmlPullParserException; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; import java.io.InputStream; import java.util.Hashtable; /** * @author ctsims */ public class ProfileInstaller extends CacheInstaller<Profile> { private static Hashtable<String, Profile> localTable; private boolean forceVersion; @SuppressWarnings("unused") public ProfileInstaller() { forceVersion = false; } public ProfileInstaller(boolean forceVersion) { this.forceVersion = forceVersion; } private Hashtable<String, Profile> getlocal() { if (localTable == null) { localTable = new Hashtable<>(); } return localTable; } @Override public boolean initialize(CommCarePlatform platform, boolean isUpgrade) throws IOException, InvalidReferenceException, InvalidStructureException, XmlPullParserException, UnfullfilledRequirementsException { //Certain properties may not have been able to set during install, so we'll make sure they're //set here. 
Profile p = storage(platform).read(cacheLocation); p.initializeProperties(platform, false); platform.setProfile(p); return true; } @Override public boolean requiresRuntimeInitialization() { return true; } @Override protected String getCacheKey() { return Profile.STORAGE_KEY; } @Override public boolean install(Resource r, ResourceLocation location, Reference ref, ResourceTable table, CommCarePlatform platform, boolean upgrade, ResourceInstallContext resourceInstallContext) throws UnresolvedResourceException, UnfullfilledRequirementsException { //Install for the profile installer is a two step process. Step one is to parse the file and read the relevant data. //Step two is to actually install the resource if it needs to be (whether or not it should will be handled //by the resource table). InputStream incoming = null; //If we've already got the local copy, and the installer is marked as such, install and roll out. try { if (getlocal().containsKey(r.getRecordGuid()) && r.getStatus() == Resource.RESOURCE_STATUS_LOCAL) { Profile local = getlocal().get(r.getRecordGuid()); installInternal(local, platform); table.commitCompoundResource(r, Resource.RESOURCE_STATUS_UPGRADE); localTable.remove(r.getRecordGuid()); for (Resource child : table.getResourcesForParent(r.getRecordGuid())) { table.commitCompoundResource(child, Resource.RESOURCE_STATUS_UNINITIALIZED); } return true; } //Otherwise we need to get the profile from its location, parse it out, and //set the relevant parameters. 
if (location.getAuthority() == Resource.RESOURCE_AUTHORITY_CACHE) { //If it's in the cache, we should just get it from there return false; } else { Profile p; try { incoming = ref.getStream(); ProfileParser parser = new ProfileParser(incoming, platform, table, r.getRecordGuid(), Resource.RESOURCE_STATUS_UNINITIALIZED, forceVersion); if (Resource.RESOURCE_AUTHORITY_REMOTE == location.getAuthority()) { parser.setMaximumAuthority(Resource.RESOURCE_AUTHORITY_REMOTE); } p = parser.parse(); } catch (IOException e) { if (e.getMessage() != null) { Logger.log(LogTypes.TYPE_RESOURCES, "IO Exception fetching profile: " + e.getMessage()); } UnreliableSourceException exception = new UnreliableSourceException(r, e.getMessage()); exception.initCause(e); throw exception; } //If we're upgrading we need to come back and see if the statuses need to change if (upgrade) { getlocal().put(r.getRecordGuid(), p); table.commitCompoundResource(r, Resource.RESOURCE_STATUS_LOCAL, p.getVersion()); } else { p.initializeProperties(platform, true); installInternal(p, platform); //TODO: What if this fails? Maybe we should be throwing exceptions... table.commitCompoundResource(r, Resource.RESOURCE_STATUS_INSTALLED, p.getVersion()); } return true; } } catch (InvalidStructureException e) { if (e.getMessage() != null) { Logger.log(LogTypes.TYPE_RESOURCES, "Invalid profile structure: " + e.getMessage()); } e.printStackTrace(); return false; } catch (XmlPullParserException e) { if (e.getMessage() != null) { Logger.log(LogTypes.TYPE_RESOURCES, "XML Parse exception fetching profile: " + e.getMessage()); } return false; } finally { try { if (incoming != null) { incoming.close(); } } catch (IOException e) { } } } private void installInternal(Profile profile, CommCarePlatform platform) { storage(platform).write(profile); cacheLocation = profile.getID(); } @Override public boolean upgrade(Resource r, CommCarePlatform platform) throws UnresolvedResourceException { //TODO: Hm... 
how to do this property setting for reverting? Profile p; if (getlocal().containsKey(r.getRecordGuid())) { p = getlocal().get(r.getRecordGuid()); } else { p = storage(platform).read(cacheLocation); } p.initializeProperties(platform, true); storage(platform).write(p); return true; } @Override public boolean unstage(Resource r, int newStatus, CommCarePlatform platform) { //Nothing to do. Cache location is clear. return true; } @Override public boolean revert(Resource r, ResourceTable table, CommCarePlatform platform) { //Possibly re-set this profile's default property setters. return true; } @Override public void cleanup() { super.cleanup(); if (localTable != null) { localTable.clear(); localTable = null; } } @Override public void readExternal(DataInputStream in, PrototypeFactory pf) throws IOException, DeserializationException { super.readExternal(in, pf); forceVersion = ExtUtil.readBool(in); } @Override public void writeExternal(DataOutputStream out) throws IOException { super.writeExternal(out); ExtUtil.writeBool(out, forceVersion); } }
/* * Copyright &copy 2014-2016 NetApp, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* * DO NOT EDIT THIS CODE BY HAND! It has been generated with jsvcgen. */ package com.solidfire.element.api; import com.solidfire.gson.Gson; import com.solidfire.core.client.Attributes; import com.solidfire.gson.annotations.SerializedName; import com.solidfire.core.annotation.Since; import com.solidfire.core.javautil.Optional; import java.io.Serializable; import java.util.Arrays; import java.util.HashMap; import java.util.Objects; /** * ListEventsRequest * ListEvents returns events detected on the cluster, sorted from oldest to newest. 
 *
 * NOTE: generated by jsvcgen — fix the generator/template rather than
 * hand-editing this class.
 **/
public class ListEventsRequest implements Serializable {

    public static final long serialVersionUID = 829864358201180124L;

    // All parameters are optional request filters; absent values are encoded
    // as Optional.empty() (never null) by the constructors and setters.
    @SerializedName("maxEvents") private Optional<Long> maxEvents;
    @SerializedName("startEventID") private Optional<Long> startEventID;
    @SerializedName("endEventID") private Optional<Long> endEventID;
    @SerializedName("eventType") private Optional<String> eventType;
    @SerializedName("serviceID") private Optional<Long> serviceID;
    @SerializedName("nodeID") private Optional<Long> nodeID;
    @SerializedName("driveID") private Optional<Long> driveID;
    @SerializedName("startReportTime") private Optional<String> startReportTime;
    @SerializedName("endReportTime") private Optional<String> endReportTime;
    @SerializedName("startPublishTime") private Optional<String> startPublishTime;
    @SerializedName("endPublishTime") private Optional<String> endPublishTime;

    // empty constructor
    @Since("7.0")
    public ListEventsRequest() {}

    // parameterized constructor (API 7.0 — predates the eventType filter,
    // which is left null here and normalized to empty by its setter/ctor)
    @Since("7.0")
    public ListEventsRequest(
        Optional<Long> maxEvents,
        Optional<Long> startEventID,
        Optional<Long> endEventID,
        Optional<Long> serviceID,
        Optional<Long> nodeID,
        Optional<Long> driveID,
        Optional<String> startReportTime,
        Optional<String> endReportTime,
        Optional<String> startPublishTime,
        Optional<String> endPublishTime
    )
    {
        this.maxEvents = (maxEvents == null) ? Optional.<Long>empty() : maxEvents;
        this.startEventID = (startEventID == null) ? Optional.<Long>empty() : startEventID;
        this.endEventID = (endEventID == null) ? Optional.<Long>empty() : endEventID;
        this.serviceID = (serviceID == null) ? Optional.<Long>empty() : serviceID;
        this.nodeID = (nodeID == null) ? Optional.<Long>empty() : nodeID;
        this.driveID = (driveID == null) ? Optional.<Long>empty() : driveID;
        this.startReportTime = (startReportTime == null) ? Optional.<String>empty() : startReportTime;
        this.endReportTime = (endReportTime == null) ? Optional.<String>empty() : endReportTime;
        this.startPublishTime = (startPublishTime == null) ? Optional.<String>empty() : startPublishTime;
        this.endPublishTime = (endPublishTime == null) ? Optional.<String>empty() : endPublishTime;
    }

    // parameterized constructor (API 10.0 — adds eventType)
    @Since("10.0")
    public ListEventsRequest(
        Optional<Long> maxEvents,
        Optional<Long> startEventID,
        Optional<Long> endEventID,
        Optional<String> eventType,
        Optional<Long> serviceID,
        Optional<Long> nodeID,
        Optional<Long> driveID,
        Optional<String> startReportTime,
        Optional<String> endReportTime,
        Optional<String> startPublishTime,
        Optional<String> endPublishTime
    )
    {
        this.maxEvents = (maxEvents == null) ? Optional.<Long>empty() : maxEvents;
        this.startEventID = (startEventID == null) ? Optional.<Long>empty() : startEventID;
        this.endEventID = (endEventID == null) ? Optional.<Long>empty() : endEventID;
        this.eventType = (eventType == null) ? Optional.<String>empty() : eventType;
        this.serviceID = (serviceID == null) ? Optional.<Long>empty() : serviceID;
        this.nodeID = (nodeID == null) ? Optional.<Long>empty() : nodeID;
        this.driveID = (driveID == null) ? Optional.<Long>empty() : driveID;
        this.startReportTime = (startReportTime == null) ? Optional.<String>empty() : startReportTime;
        this.endReportTime = (endReportTime == null) ? Optional.<String>empty() : endReportTime;
        this.startPublishTime = (startPublishTime == null) ? Optional.<String>empty() : startPublishTime;
        this.endPublishTime = (endPublishTime == null) ? Optional.<String>empty() : endPublishTime;
    }

    /**
     * Specifies the maximum number of events to return.
     **/
    public Optional<Long> getMaxEvents() { return this.maxEvents; }

    public void setMaxEvents(Optional<Long> maxEvents) {
        this.maxEvents = (maxEvents == null) ? Optional.<Long>empty() : maxEvents;
    }

    /**
     * Specifies the beginning of a range of events to return.
     **/
    public Optional<Long> getStartEventID() { return this.startEventID; }

    public void setStartEventID(Optional<Long> startEventID) {
        this.startEventID = (startEventID == null) ? Optional.<Long>empty() : startEventID;
    }

    /**
     * Specifies the end of a range of events to return.
     **/
    public Optional<Long> getEndEventID() { return this.endEventID; }

    public void setEndEventID(Optional<Long> endEventID) {
        this.endEventID = (endEventID == null) ? Optional.<Long>empty() : endEventID;
    }

    /**
     * Specifies the type of events to return.
     **/
    public Optional<String> getEventType() { return this.eventType; }

    public void setEventType(Optional<String> eventType) {
        this.eventType = (eventType == null) ? Optional.<String>empty() : eventType;
    }

    /**
     * Specifies that only events with this ServiceID will be returned.
     **/
    public Optional<Long> getServiceID() { return this.serviceID; }

    public void setServiceID(Optional<Long> serviceID) {
        this.serviceID = (serviceID == null) ? Optional.<Long>empty() : serviceID;
    }

    /**
     * Specifies that only events with this NodeID will be returned.
     **/
    public Optional<Long> getNodeID() { return this.nodeID; }

    public void setNodeID(Optional<Long> nodeID) {
        this.nodeID = (nodeID == null) ? Optional.<Long>empty() : nodeID;
    }

    /**
     * Specifies that only events with this DriveID will be returned.
     **/
    public Optional<Long> getDriveID() { return this.driveID; }

    public void setDriveID(Optional<Long> driveID) {
        this.driveID = (driveID == null) ? Optional.<Long>empty() : driveID;
    }

    /**
     * Specifies that only events reported after this time will be returned.
     **/
    public Optional<String> getStartReportTime() { return this.startReportTime; }

    public void setStartReportTime(Optional<String> startReportTime) {
        this.startReportTime = (startReportTime == null) ? Optional.<String>empty() : startReportTime;
    }

    /**
     * Specifies that only events reported earlier than this time will be returned.
     **/
    public Optional<String> getEndReportTime() { return this.endReportTime; }

    public void setEndReportTime(Optional<String> endReportTime) {
        this.endReportTime = (endReportTime == null) ? Optional.<String>empty() : endReportTime;
    }

    /**
     * Specifies that only events published after this time will be returned.
     **/
    public Optional<String> getStartPublishTime() { return this.startPublishTime; }

    public void setStartPublishTime(Optional<String> startPublishTime) {
        this.startPublishTime = (startPublishTime == null) ? Optional.<String>empty() : startPublishTime;
    }

    /**
     * Specifies that only events published earlier than this time will be returned.
     **/
    public Optional<String> getEndPublishTime() { return this.endPublishTime; }

    public void setEndPublishTime(Optional<String> endPublishTime) {
        this.endPublishTime = (endPublishTime == null) ? Optional.<String>empty() : endPublishTime;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        ListEventsRequest that = (ListEventsRequest) o;

        return Objects.equals(maxEvents, that.maxEvents) &&
            Objects.equals(startEventID, that.startEventID) &&
            Objects.equals(endEventID, that.endEventID) &&
            Objects.equals(eventType, that.eventType) &&
            Objects.equals(serviceID, that.serviceID) &&
            Objects.equals(nodeID, that.nodeID) &&
            Objects.equals(driveID, that.driveID) &&
            Objects.equals(startReportTime, that.startReportTime) &&
            Objects.equals(endReportTime, that.endReportTime) &&
            Objects.equals(startPublishTime, that.startPublishTime) &&
            Objects.equals(endPublishTime, that.endPublishTime);
    }

    @Override
    public int hashCode() {
        return Objects.hash( maxEvents,startEventID,endEventID,eventType,serviceID,nodeID,driveID,startReportTime,endReportTime,startPublishTime,endPublishTime );
    }

    // Map view used by the request serialization layer.
    public java.util.Map<String, Object> toMap() {
        java.util.Map<String, Object> map = new HashMap<>();
        map.put("maxEvents", maxEvents);
        map.put("startEventID", startEventID);
        map.put("endEventID", endEventID);
        map.put("eventType", eventType);
        map.put("serviceID", serviceID);
        map.put("nodeID", nodeID);
        map.put("driveID", driveID);
        map.put("startReportTime", startReportTime);
        map.put("endReportTime", endReportTime);
        map.put("startPublishTime", startPublishTime);
        map.put("endPublishTime", endPublishTime);
        return map;
    }

    // Debug representation: JSON-ish listing of present fields; the final
    // lastIndexOf/deleteCharAt pass strips the trailing comma.
    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder();
        Gson gson = new Gson();
        sb.append( "{ " );

        if(null != maxEvents && maxEvents.isPresent()){
            sb.append(" maxEvents : ").append(gson.toJson(maxEvents)).append(",");
        }
        else{
            sb.append(" maxEvents : ").append("null").append(",");
        }
        if(null != startEventID && startEventID.isPresent()){
            sb.append(" startEventID : ").append(gson.toJson(startEventID)).append(",");
        }
        else{
            sb.append(" startEventID : ").append("null").append(",");
        }
        if(null != endEventID && endEventID.isPresent()){
            sb.append(" endEventID : ").append(gson.toJson(endEventID)).append(",");
        }
        else{
            sb.append(" endEventID : ").append("null").append(",");
        }
        if(null != eventType && eventType.isPresent()){
            sb.append(" eventType : ").append(gson.toJson(eventType)).append(",");
        }
        else{
            sb.append(" eventType : ").append("null").append(",");
        }
        if(null != serviceID && serviceID.isPresent()){
            sb.append(" serviceID : ").append(gson.toJson(serviceID)).append(",");
        }
        else{
            sb.append(" serviceID : ").append("null").append(",");
        }
        if(null != nodeID && nodeID.isPresent()){
            sb.append(" nodeID : ").append(gson.toJson(nodeID)).append(",");
        }
        else{
            sb.append(" nodeID : ").append("null").append(",");
        }
        if(null != driveID && driveID.isPresent()){
            sb.append(" driveID : ").append(gson.toJson(driveID)).append(",");
        }
        else{
            sb.append(" driveID : ").append("null").append(",");
        }
        if(null != startReportTime && startReportTime.isPresent()){
            sb.append(" startReportTime : ").append(gson.toJson(startReportTime)).append(",");
        }
        else{
            sb.append(" startReportTime : ").append("null").append(",");
        }
        if(null != endReportTime && endReportTime.isPresent()){
            sb.append(" endReportTime : ").append(gson.toJson(endReportTime)).append(",");
        }
        else{
            sb.append(" endReportTime : ").append("null").append(",");
        }
        if(null != startPublishTime && startPublishTime.isPresent()){
            sb.append(" startPublishTime : ").append(gson.toJson(startPublishTime)).append(",");
        }
        else{
            sb.append(" startPublishTime : ").append("null").append(",");
        }
        if(null != endPublishTime && endPublishTime.isPresent()){
            sb.append(" endPublishTime : ").append(gson.toJson(endPublishTime)).append(",");
        }
        else{
            sb.append(" endPublishTime : ").append("null").append(",");
        }
        sb.append( " }" );

        if(sb.lastIndexOf(", }") != -1)
            sb.deleteCharAt(sb.lastIndexOf(", }"));

        return sb.toString();
    }

    public static Builder builder() {
        return new Builder();
    }

    public final Builder asBuilder() {
        return new Builder().buildFrom(this);
    }

    // Fluent builder; build() always routes through the 10.0 constructor.
    public static class Builder {
        private Optional<Long> maxEvents;
        private Optional<Long> startEventID;
        private Optional<Long> endEventID;
        private Optional<String> eventType;
        private Optional<Long> serviceID;
        private Optional<Long> nodeID;
        private Optional<Long> driveID;
        private Optional<String> startReportTime;
        private Optional<String> endReportTime;
        private Optional<String> startPublishTime;
        private Optional<String> endPublishTime;

        private Builder() { }

        public ListEventsRequest build() {
            return new ListEventsRequest (
                         this.maxEvents,
                         this.startEventID,
                         this.endEventID,
                         this.eventType,
                         this.serviceID,
                         this.nodeID,
                         this.driveID,
                         this.startReportTime,
                         this.endReportTime,
                         this.startPublishTime,
                         this.endPublishTime);
        }

        private ListEventsRequest.Builder buildFrom(final ListEventsRequest req) {
            this.maxEvents = req.maxEvents;
            this.startEventID = req.startEventID;
            this.endEventID = req.endEventID;
            this.eventType = req.eventType;
            this.serviceID = req.serviceID;
            this.nodeID = req.nodeID;
            this.driveID = req.driveID;
            this.startReportTime = req.startReportTime;
            this.endReportTime = req.endReportTime;
            this.startPublishTime = req.startPublishTime;
            this.endPublishTime = req.endPublishTime;

            return this;
        }

        public ListEventsRequest.Builder optionalMaxEvents(final Long maxEvents) {
            this.maxEvents = (maxEvents == null) ? Optional.<Long>empty() : Optional.of(maxEvents);
            return this;
        }

        public ListEventsRequest.Builder optionalStartEventID(final Long startEventID) {
            this.startEventID = (startEventID == null) ? Optional.<Long>empty() : Optional.of(startEventID);
            return this;
        }

        public ListEventsRequest.Builder optionalEndEventID(final Long endEventID) {
            this.endEventID = (endEventID == null) ? Optional.<Long>empty() : Optional.of(endEventID);
            return this;
        }

        public ListEventsRequest.Builder optionalEventType(final String eventType) {
            this.eventType = (eventType == null) ? Optional.<String>empty() : Optional.of(eventType);
            return this;
        }

        public ListEventsRequest.Builder optionalServiceID(final Long serviceID) {
            this.serviceID = (serviceID == null) ? Optional.<Long>empty() : Optional.of(serviceID);
            return this;
        }

        public ListEventsRequest.Builder optionalNodeID(final Long nodeID) {
            this.nodeID = (nodeID == null) ? Optional.<Long>empty() : Optional.of(nodeID);
            return this;
        }

        public ListEventsRequest.Builder optionalDriveID(final Long driveID) {
            this.driveID = (driveID == null) ? Optional.<Long>empty() : Optional.of(driveID);
            return this;
        }

        public ListEventsRequest.Builder optionalStartReportTime(final String startReportTime) {
            this.startReportTime = (startReportTime == null) ? Optional.<String>empty() : Optional.of(startReportTime);
            return this;
        }

        public ListEventsRequest.Builder optionalEndReportTime(final String endReportTime) {
            this.endReportTime = (endReportTime == null) ? Optional.<String>empty() : Optional.of(endReportTime);
            return this;
        }

        public ListEventsRequest.Builder optionalStartPublishTime(final String startPublishTime) {
            this.startPublishTime = (startPublishTime == null) ? Optional.<String>empty() : Optional.of(startPublishTime);
            return this;
        }

        public ListEventsRequest.Builder optionalEndPublishTime(final String endPublishTime) {
            this.endPublishTime = (endPublishTime == null) ? Optional.<String>empty() : Optional.of(endPublishTime);
            return this;
        }
    }
}
/* * Copyright (c) 2008-2012, Stephen Colebourne & Michael Nascimento Santos * * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * * Neither the name of JSR-310 nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package javax.time; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertFalse; import static org.testng.Assert.assertNotNull; import static org.testng.Assert.assertSame; import static org.testng.Assert.assertTrue; import java.io.IOException; import java.lang.reflect.Field; import java.lang.reflect.Modifier; import java.time.DateTimeException; import java.time.Instant; import java.time.LocalDateTime; import java.time.LocalTime; import java.time.ZoneId; import java.time.ZoneOffset; import java.time.ZonedDateTime; import java.time.calendrical.DateTimeAccessor; import java.time.format.TextStyle; import java.time.zone.ZoneOffsetTransition; import java.time.zone.ZoneRules; import java.time.zone.ZoneRulesException; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.SimpleTimeZone; import java.util.TimeZone; import org.testng.annotations.DataProvider; import org.testng.annotations.Test; /** * Test ZoneId. 
*/
@Test
public class TestZoneId extends AbstractTest {

    private static final ZoneId ZONE_PARIS = ZoneId.of("Europe/Paris");
    public static final String LATEST_TZDB = "2010i";
    // Expected validity count passed to checkOffset(): 1 = normal local time,
    // OVERLAP (2) = autumn overlap (two valid offsets), GAP (0) = spring gap.
    private static final int OVERLAP = 2;
    private static final int GAP = 0;

    //-----------------------------------------------------------------------
    // Basics
    //-----------------------------------------------------------------------
    // Verifies ZoneId's fields are private and final (or volatile+transient caches).
    public void test_immutable() {
        Class<ZoneId> cls = ZoneId.class;
        assertTrue(Modifier.isPublic(cls.getModifiers()));
        Field[] fields = cls.getDeclaredFields();
        for (Field field : fields) {
            if (Modifier.isStatic(field.getModifiers()) == false) {
                assertTrue(Modifier.isPrivate(field.getModifiers()));
                assertTrue(Modifier.isFinal(field.getModifiers()) ||
                        (Modifier.isVolatile(field.getModifiers()) && Modifier.isTransient(field.getModifiers())));
            }
        }
    }

    public void test_serialization_UTC() throws Exception {
        ZoneId test = ZoneOffset.UTC;
        assertSerializableAndSame(test);
    }

    public void test_serialization_fixed() throws Exception {
        ZoneId test = ZoneId.of("UTC+01:30");
        assertSerializable(test);
    }

    public void test_serialization_Europe() throws Exception {
        ZoneId test = ZoneId.of("Europe/London");
        assertSerializable(test);
    }

    public void test_serialization_America() throws Exception {
        ZoneId test = ZoneId.of("America/Chicago");
        assertSerializable(test);
    }

    @Test(groups = { "tck" })
    public void test_serialization_format() throws ClassNotFoundException, IOException {
        assertEqualsSerialisedForm(ZoneId.of("Europe/London"), ZoneId.class);
    }

    //-----------------------------------------------------------------------
    // UTC
    //-----------------------------------------------------------------------
    public void test_constant_UTC() {
        ZoneId test = ZoneOffset.UTC;
        assertEquals(test.getId(), "Z");
        assertEquals(test.getText(TextStyle.FULL, Locale.UK), "Z");
        assertEquals(test.getRules().isFixedOffset(), true);
        assertEquals(test.getRules().getOffset(Instant.ofEpochSecond(0L)), ZoneOffset.UTC);
        checkOffset(test.getRules(), createLDT(2008, 6, 30), ZoneOffset.UTC, 1);
        assertSame(test, ZoneId.of("UTC+00"));
    }

    //-----------------------------------------------------------------------
    // OLD_IDS_PRE_2005
    //-----------------------------------------------------------------------
    // Legacy three-letter IDs mapped to their pre-2005 regional zone equivalents.
    public void test_constant_OLD_IDS_PRE_2005() {
        Map<String, String> ids = ZoneId.OLD_IDS_PRE_2005;
        assertEquals(ids.get("EST"), "America/Indianapolis");
        assertEquals(ids.get("MST"), "America/Phoenix");
        assertEquals(ids.get("HST"), "Pacific/Honolulu");
        assertEquals(ids.get("ACT"), "Australia/Darwin");
        assertEquals(ids.get("AET"), "Australia/Sydney");
        assertEquals(ids.get("AGT"), "America/Argentina/Buenos_Aires");
        assertEquals(ids.get("ART"), "Africa/Cairo");
        assertEquals(ids.get("AST"), "America/Anchorage");
        assertEquals(ids.get("BET"), "America/Sao_Paulo");
        assertEquals(ids.get("BST"), "Asia/Dhaka");
        assertEquals(ids.get("CAT"), "Africa/Harare");
        assertEquals(ids.get("CNT"), "America/St_Johns");
        assertEquals(ids.get("CST"), "America/Chicago");
        assertEquals(ids.get("CTT"), "Asia/Shanghai");
        assertEquals(ids.get("EAT"), "Africa/Addis_Ababa");
        assertEquals(ids.get("ECT"), "Europe/Paris");
        assertEquals(ids.get("IET"), "America/Indiana/Indianapolis");
        assertEquals(ids.get("IST"), "Asia/Kolkata");
        assertEquals(ids.get("JST"), "Asia/Tokyo");
        assertEquals(ids.get("MIT"), "Pacific/Apia");
        assertEquals(ids.get("NET"), "Asia/Yerevan");
        assertEquals(ids.get("NST"), "Pacific/Auckland");
        assertEquals(ids.get("PLT"), "Asia/Karachi");
        assertEquals(ids.get("PNT"), "America/Phoenix");
        assertEquals(ids.get("PRT"), "America/Puerto_Rico");
        assertEquals(ids.get("PST"), "America/Los_Angeles");
        assertEquals(ids.get("SST"), "Pacific/Guadalcanal");
        assertEquals(ids.get("VST"), "Asia/Ho_Chi_Minh");
    }

    @Test(expectedExceptions = UnsupportedOperationException.class)
    public void test_constant_OLD_IDS_PRE_2005_immutable() {
        Map<String, String> ids = ZoneId.OLD_IDS_PRE_2005;
        ids.clear();
    }

    //-----------------------------------------------------------------------
    // OLD_IDS_POST_2005
    //-----------------------------------------------------------------------
    // Post-2005 mapping: EST/MST/HST became fixed offsets; the rest are unchanged.
    public void test_constant_OLD_IDS_POST_2005() {
        Map<String, String> ids = ZoneId.OLD_IDS_POST_2005;
        assertEquals(ids.get("EST"), "-05:00");
        assertEquals(ids.get("MST"), "-07:00");
        assertEquals(ids.get("HST"), "-10:00");
        assertEquals(ids.get("ACT"), "Australia/Darwin");
        assertEquals(ids.get("AET"), "Australia/Sydney");
        assertEquals(ids.get("AGT"), "America/Argentina/Buenos_Aires");
        assertEquals(ids.get("ART"), "Africa/Cairo");
        assertEquals(ids.get("AST"), "America/Anchorage");
        assertEquals(ids.get("BET"), "America/Sao_Paulo");
        assertEquals(ids.get("BST"), "Asia/Dhaka");
        assertEquals(ids.get("CAT"), "Africa/Harare");
        assertEquals(ids.get("CNT"), "America/St_Johns");
        assertEquals(ids.get("CST"), "America/Chicago");
        assertEquals(ids.get("CTT"), "Asia/Shanghai");
        assertEquals(ids.get("EAT"), "Africa/Addis_Ababa");
        assertEquals(ids.get("ECT"), "Europe/Paris");
        assertEquals(ids.get("IET"), "America/Indiana/Indianapolis");
        assertEquals(ids.get("IST"), "Asia/Kolkata");
        assertEquals(ids.get("JST"), "Asia/Tokyo");
        assertEquals(ids.get("MIT"), "Pacific/Apia");
        assertEquals(ids.get("NET"), "Asia/Yerevan");
        assertEquals(ids.get("NST"), "Pacific/Auckland");
        assertEquals(ids.get("PLT"), "Asia/Karachi");
        assertEquals(ids.get("PNT"), "America/Phoenix");
        assertEquals(ids.get("PRT"), "America/Puerto_Rico");
        assertEquals(ids.get("PST"), "America/Los_Angeles");
        assertEquals(ids.get("SST"), "Pacific/Guadalcanal");
        assertEquals(ids.get("VST"), "Asia/Ho_Chi_Minh");
    }

    @Test(expectedExceptions = UnsupportedOperationException.class)
    public void test_constant_OLD_IDS_POST_2005_immutable() {
        Map<String, String> ids = ZoneId.OLD_IDS_POST_2005;
        ids.clear();
    }

    //-----------------------------------------------------------------------
    // system default
    //-----------------------------------------------------------------------
    public void test_systemDefault() {
        ZoneId test = ZoneId.systemDefault();
        assertEquals(test.getId(), TimeZone.getDefault().getID());
    }

    // ID containing a space is syntactically invalid -> DateTimeException.
    @Test(expectedExceptions = DateTimeException.class)
    public void test_systemDefault_unableToConvert_badFormat() {
        TimeZone current = TimeZone.getDefault();
        try {
            TimeZone.setDefault(new SimpleTimeZone(127, "Something Weird"));
            ZoneId.systemDefault();
        } finally {
            TimeZone.setDefault(current);
        }
    }

    // Well-formed but unknown ID -> ZoneRulesException.
    @Test(expectedExceptions = ZoneRulesException.class)
    public void test_systemDefault_unableToConvert_unknownId() {
        TimeZone current = TimeZone.getDefault();
        try {
            TimeZone.setDefault(new SimpleTimeZone(127, "SomethingWeird"));
            ZoneId.systemDefault();
        } finally {
            TimeZone.setDefault(current);
        }
    }

    //-----------------------------------------------------------------------
    // mapped factory
    //-----------------------------------------------------------------------
    public void test_of_string_Map() {
        Map<String, String> map = new HashMap<String, String>();
        map.put("LONDON", "Europe/London");
        map.put("PARIS", "Europe/Paris");
        ZoneId test = ZoneId.of("LONDON", map);
        assertEquals(test.getId(), "Europe/London");
    }

    // IDs absent from the alias map fall through to the normal lookup.
    public void test_of_string_Map_lookThrough() {
        Map<String, String> map = new HashMap<String, String>();
        map.put("LONDON", "Europe/London");
        map.put("PARIS", "Europe/Paris");
        ZoneId test = ZoneId.of("Europe/Madrid", map);
        assertEquals(test.getId(), "Europe/Madrid");
    }

    public void test_of_string_Map_emptyMap() {
        Map<String, String> map = new HashMap<String, String>();
        ZoneId test = ZoneId.of("Europe/Madrid", map);
        assertEquals(test.getId(), "Europe/Madrid");
    }

    @Test(expectedExceptions = DateTimeException.class)
    public void test_of_string_Map_badFormat() {
        Map<String, String> map = new HashMap<String, String>();
        ZoneId.of("Not kknown", map);
    }

    @Test(expectedExceptions = ZoneRulesException.class)
    public void test_of_string_Map_unknown() {
        Map<String, String> map = new HashMap<String, String>();
        ZoneId.of("Unknown", map);
    }

    //-----------------------------------------------------------------------
    // regular factory
    //-----------------------------------------------------------------------
    // Suffixes that, appended to "UTC"/"GMT", still denote the zero offset.
    @DataProvider(name = "String_UTC")
    Object[][] data_of_string_UTC() {
        return new Object[][] {
            { "" }, { "Z" },
            { "+00" }, { "+0000" }, { "+00:00" }, { "+000000" }, { "+00:00:00" },
            { "-00" }, { "-0000" }, { "-00:00" }, { "-000000" }, { "-00:00:00" },
        };
    }

    @Test(dataProvider = "String_UTC")
    public void test_of_string_UTC(String id) {
        ZoneId test = ZoneId.of("UTC" + id);
        assertSame(test, ZoneOffset.UTC);
    }

    @Test(dataProvider = "String_UTC")
    public void test_of_string_GMT(String id) {
        ZoneId test = ZoneId.of("GMT" + id);
        assertSame(test, ZoneOffset.UTC);
    }

    //-----------------------------------------------------------------------
    // Pairs of {input offset string, normalized ID}.
    @DataProvider(name = "String_Fixed")
    Object[][] data_of_string_Fixed() {
        return new Object[][] {
            { "Z", "Z" }, { "+0", "Z" },
            { "+5", "+05:00" },
            { "+01", "+01:00" }, { "+0100", "+01:00" }, { "+01:00", "+01:00" },
            { "+010000", "+01:00" }, { "+01:00:00", "+01:00" },
            { "+12", "+12:00" }, { "+1234", "+12:34" }, { "+12:34", "+12:34" },
            { "+123456", "+12:34:56" }, { "+12:34:56", "+12:34:56" },
            { "-02", "-02:00" },
            { "-5", "-05:00" },
            { "-0200", "-02:00" }, { "-02:00", "-02:00" },
            { "-020000", "-02:00" }, { "-02:00:00", "-02:00" },
        };
    }

    @Test(dataProvider = "String_Fixed")
    public void test_of_string_offset(String input, String id) {
        ZoneId test = ZoneId.of(input);
        assertEquals(test.getId(), id);
        assertEquals(test.getText(TextStyle.FULL, Locale.UK), id);
        assertEquals(test.getRules().isFixedOffset(), true);
        ZoneOffset offset = ZoneOffset.of(id);
        assertEquals(test.getRules().getOffset(Instant.ofEpochSecond(0L)), offset);
        checkOffset(test.getRules(), createLDT(2008, 6, 30), offset, 1);
    }

    @Test(dataProvider = "String_Fixed")
    public void test_of_string_FixedUTC(String input, String id) {
        ZoneId test = ZoneId.of("UTC" + input);
        assertEquals(test.getId(), id);
        assertEquals(test.getText(TextStyle.FULL, Locale.UK), id);
        assertEquals(test.getRules().isFixedOffset(), true);
        ZoneOffset offset = ZoneOffset.of(id);
        assertEquals(test.getRules().getOffset(Instant.ofEpochSecond(0L)), offset);
        checkOffset(test.getRules(), createLDT(2008, 6, 30), offset, 1);
    }

    @Test(dataProvider = "String_Fixed")
    public void test_of_string_FixedGMT(String input, String id) {
        ZoneId test = ZoneId.of("GMT" + input);
        assertEquals(test.getId(), id);
        assertEquals(test.getText(TextStyle.FULL, Locale.UK), id);
        assertEquals(test.getRules().isFixedOffset(), true);
        ZoneOffset offset = ZoneOffset.of(id);
        assertEquals(test.getRules().getOffset(Instant.ofEpochSecond(0L)), offset);
        checkOffset(test.getRules(), createLDT(2008, 6, 30), offset, 1);
    }

    //-----------------------------------------------------------------------
    // Suffixes that make "UTC"/"GMT" prefixed IDs invalid (bad letters,
    // malformed fields, or offsets beyond +/-18:00).
    @DataProvider(name = "String_UTC_Invalid")
    Object[][] data_of_string_UTC_invalid() {
        return new Object[][] {
            { "A" }, { "B" }, { "C" }, { "D" }, { "E" }, { "F" }, { "G" }, { "H" }, { "I" },
            { "J" }, { "K" }, { "L" }, { "M" }, { "N" }, { "O" }, { "P" }, { "Q" }, { "R" },
            { "S" }, { "T" }, { "U" }, { "V" }, { "W" }, { "X" }, { "Y" },
            { "+0:00" }, { "+00:0" }, { "+0:0" },
            { "+000" }, { "+00000" },
            { "+0:00:00" }, { "+00:0:00" }, { "+00:00:0" }, { "+0:0:0" }, { "+0:0:00" },
            { "+00:0:0" }, { "+0:00:0" },
            { "+01_00" }, { "+01;00" }, { "+01@00" }, { "+01:AA" },
            { "+19" }, { "+19:00" }, { "+18:01" }, { "+18:00:01" }, { "+1801" }, { "+180001" },
            { "-0:00" }, { "-00:0" }, { "-0:0" },
            { "-000" }, { "-00000" },
            { "-0:00:00" }, { "-00:0:00" }, { "-00:00:0" }, { "-0:0:0" }, { "-0:0:00" },
            { "-00:0:0" }, { "-0:00:0" },
            { "-19" }, { "-19:00" }, { "-18:01" }, { "-18:00:01" }, { "-1801" }, { "-180001" },
            { "-01_00" }, { "-01;00" }, { "-01@00" }, { "-01:AA" },
            { "@01:00" },
        };
    }

    @Test(dataProvider = "String_UTC_Invalid", expectedExceptions = DateTimeException.class)
    public void test_of_string_UTC_invalid(String id) {
        ZoneId.of("UTC" + id);
    }

    @Test(dataProvider = "String_UTC_Invalid", expectedExceptions = DateTimeException.class)
    public void test_of_string_GMT_invalid(String id) {
        ZoneId.of("GMT" + id);
    }

    //-----------------------------------------------------------------------
    @DataProvider(name = "String_Invalid")
    Object[][] data_of_string_invalid() {
        // \u00ef is a random unicode character
        return new Object[][] {
            { "" }, { ":" }, { "#" },
            { "\u00ef" }, { "`" }, { "!" }, { "\"" }, { "\u00ef" }, { "$" }, { "^" }, { "&" },
            { "*" }, { "(" }, { ")" }, { "=" }, { "\\" }, { "|" }, { "," }, { "<" }, { ">" },
            { "?" }, { ";" }, { "'" }, { "[" }, { "]" }, { "{" }, { "}" },
            { "\u00ef:A" }, { "`:A" }, { "!:A" }, { "\":A" }, { "\u00ef:A" }, { "$:A" },
            { "^:A" }, { "&:A" }, { "*:A" }, { "(:A" }, { "):A" }, { "=:A" }, { "+:A" },
            { "\\:A" }, { "|:A" }, { ",:A" }, { "<:A" }, { ">:A" }, { "?:A" }, { ";:A" },
            { "::A" }, { "':A" }, { "@:A" }, { "~:A" }, { "[:A" }, { "]:A" }, { "{:A" },
            { "}:A" },
            { "A:B#\u00ef" }, { "A:B#`" }, { "A:B#!" }, { "A:B#\"" }, { "A:B#\u00ef" },
            { "A:B#$" }, { "A:B#^" }, { "A:B#&" }, { "A:B#*" }, { "A:B#(" }, { "A:B#)" },
            { "A:B#=" }, { "A:B#+" }, { "A:B#\\" }, { "A:B#|" }, { "A:B#," }, { "A:B#<" },
            { "A:B#>" }, { "A:B#?" }, { "A:B#;" }, { "A:B#:" }, { "A:B#'" }, { "A:B#@" },
            { "A:B#~" }, { "A:B#[" }, { "A:B#]" }, { "A:B#{" }, { "A:B#}" },
        };
    }

    @Test(dataProvider = "String_Invalid", expectedExceptions = DateTimeException.class)
    public void test_of_string_invalid(String id) {
        ZoneId.of(id);
    }

    //-----------------------------------------------------------------------
    public void test_of_string_GMT0() {
        ZoneId test = ZoneId.of("GMT0");
        assertEquals(test.getId(), "Z");
        assertEquals(test.getRules().isFixedOffset(), true);
    }

    //-----------------------------------------------------------------------
    public void test_of_string_London() {
        ZoneId test = ZoneId.of("Europe/London");
        assertEquals(test.getId(), "Europe/London");
        assertEquals(test.getRules().isFixedOffset(), false);
    }

    //-----------------------------------------------------------------------
    @Test(expectedExceptions = NullPointerException.class)
    public void test_of_string_null() {
        ZoneId.of((String) null);
    }

    @Test(expectedExceptions = ZoneRulesException.class)
    public void test_of_string_unknown_simple() {
        ZoneId.of("Unknown");
    }

    //-------------------------------------------------------------------------
    // TODO: test by deserialization
//    public void test_ofUnchecked_string_invalidNotChecked() {
//        ZoneRegion test = ZoneRegion.ofLenient("Unknown");
//        assertEquals(test.getId(), "Unknown");
//    }
//
//    public void test_ofUnchecked_string_invalidNotChecked_unusualCharacters() {
//        ZoneRegion test = ZoneRegion.ofLenient("QWERTYUIOPASDFGHJKLZXCVBNM~/._+-");
//        assertEquals(test.getId(), "QWERTYUIOPASDFGHJKLZXCVBNM~/._+-");
//    }

    //-----------------------------------------------------------------------
    // from()
    //-----------------------------------------------------------------------
    public void test_factory_CalendricalObject() {
        assertEquals(ZoneId.from(createZDT(2007, 7, 15, 17, 30, 0, 0, ZONE_PARIS)), ZONE_PARIS);
    }

    @Test(expectedExceptions = DateTimeException.class)
    public void test_factory_CalendricalObject_invalid_noDerive() {
        ZoneId.from(LocalTime.of(12, 30));
    }

    @Test(expectedExceptions = NullPointerException.class)
    public void test_factory_CalendricalObject_null() {
        ZoneId.from((DateTimeAccessor) null);
    }

    //-----------------------------------------------------------------------
    // Europe/London
    //-----------------------------------------------------------------------
    public void test_London() {
        ZoneId test = ZoneId.of("Europe/London");
        assertEquals(test.getId(), "Europe/London");
        assertEquals(test.getRules().isFixedOffset(), false);
    }

    // Offset by month in 2008: +01:00 during British Summer Time (Apr-Oct).
    public void test_London_getOffset() {
        ZoneId test = ZoneId.of("Europe/London");
        assertEquals(test.getRules().getOffset(createInstant(2008, 1, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(0));
        assertEquals(test.getRules().getOffset(createInstant(2008, 2, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(0));
        assertEquals(test.getRules().getOffset(createInstant(2008, 3, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(0));
        assertEquals(test.getRules().getOffset(createInstant(2008, 4, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(1));
        assertEquals(test.getRules().getOffset(createInstant(2008, 5, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(1));
        assertEquals(test.getRules().getOffset(createInstant(2008, 6, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(1));
        assertEquals(test.getRules().getOffset(createInstant(2008, 7, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(1));
        assertEquals(test.getRules().getOffset(createInstant(2008, 8, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(1));
        assertEquals(test.getRules().getOffset(createInstant(2008, 9, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(1));
        assertEquals(test.getRules().getOffset(createInstant(2008, 10, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(1));
        assertEquals(test.getRules().getOffset(createInstant(2008, 11, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(0));
        assertEquals(test.getRules().getOffset(createInstant(2008, 12, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(0));
    }

    public void test_London_getOffset_toDST() {
        ZoneId test = ZoneId.of("Europe/London");
        assertEquals(test.getRules().getOffset(createInstant(2008, 3, 24, ZoneOffset.UTC)), ZoneOffset.ofHours(0));
        assertEquals(test.getRules().getOffset(createInstant(2008, 3, 25, ZoneOffset.UTC)), ZoneOffset.ofHours(0));
        assertEquals(test.getRules().getOffset(createInstant(2008, 3, 26, ZoneOffset.UTC)), ZoneOffset.ofHours(0));
        assertEquals(test.getRules().getOffset(createInstant(2008, 3, 27, ZoneOffset.UTC)), ZoneOffset.ofHours(0));
        assertEquals(test.getRules().getOffset(createInstant(2008, 3, 28, ZoneOffset.UTC)), ZoneOffset.ofHours(0));
        assertEquals(test.getRules().getOffset(createInstant(2008, 3, 29, ZoneOffset.UTC)), ZoneOffset.ofHours(0));
        assertEquals(test.getRules().getOffset(createInstant(2008, 3, 30, ZoneOffset.UTC)), ZoneOffset.ofHours(0));
        assertEquals(test.getRules().getOffset(createInstant(2008, 3, 31, ZoneOffset.UTC)), ZoneOffset.ofHours(1));
        // cutover at 01:00Z
        assertEquals(test.getRules().getOffset(createInstant(2008, 3, 30, 0, 59, 59, 999999999, ZoneOffset.UTC)), ZoneOffset.ofHours(0));
        assertEquals(test.getRules().getOffset(createInstant(2008, 3, 30, 1, 0, 0, 0, ZoneOffset.UTC)), ZoneOffset.ofHours(1));
    }

    public void test_London_getOffset_fromDST() {
        ZoneId test = ZoneId.of("Europe/London");
        assertEquals(test.getRules().getOffset(createInstant(2008, 10, 24, ZoneOffset.UTC)), ZoneOffset.ofHours(1));
        assertEquals(test.getRules().getOffset(createInstant(2008, 10, 25, ZoneOffset.UTC)), ZoneOffset.ofHours(1));
        assertEquals(test.getRules().getOffset(createInstant(2008, 10, 26, ZoneOffset.UTC)), ZoneOffset.ofHours(1));
        assertEquals(test.getRules().getOffset(createInstant(2008, 10, 27, ZoneOffset.UTC)), ZoneOffset.ofHours(0));
        assertEquals(test.getRules().getOffset(createInstant(2008, 10, 28, ZoneOffset.UTC)), ZoneOffset.ofHours(0));
        assertEquals(test.getRules().getOffset(createInstant(2008, 10, 29, ZoneOffset.UTC)), ZoneOffset.ofHours(0));
        assertEquals(test.getRules().getOffset(createInstant(2008, 10, 30, ZoneOffset.UTC)), ZoneOffset.ofHours(0));
        assertEquals(test.getRules().getOffset(createInstant(2008, 10, 31, ZoneOffset.UTC)), ZoneOffset.ofHours(0));
        // cutover at 01:00Z
        assertEquals(test.getRules().getOffset(createInstant(2008, 10, 26, 0, 59, 59, 999999999, ZoneOffset.UTC)), ZoneOffset.ofHours(1));
        assertEquals(test.getRules().getOffset(createInstant(2008, 10, 26, 1, 0, 0, 0, ZoneOffset.UTC)), ZoneOffset.ofHours(0));
    }

    public void test_London_getOffsetInfo() {
        ZoneId test = ZoneId.of("Europe/London");
        checkOffset(test.getRules(), createLDT(2008, 1, 1), ZoneOffset.ofHours(0), 1);
        checkOffset(test.getRules(), createLDT(2008, 2, 1), ZoneOffset.ofHours(0), 1);
        checkOffset(test.getRules(), createLDT(2008, 3, 1), ZoneOffset.ofHours(0), 1);
        checkOffset(test.getRules(), createLDT(2008, 4, 1), ZoneOffset.ofHours(1), 1);
        checkOffset(test.getRules(), createLDT(2008, 5, 1), ZoneOffset.ofHours(1), 1);
        checkOffset(test.getRules(), createLDT(2008, 6, 1), ZoneOffset.ofHours(1), 1);
        checkOffset(test.getRules(), createLDT(2008, 7, 1), ZoneOffset.ofHours(1), 1);
        checkOffset(test.getRules(), createLDT(2008, 8, 1), ZoneOffset.ofHours(1), 1);
        checkOffset(test.getRules(), createLDT(2008, 9, 1), ZoneOffset.ofHours(1), 1);
        checkOffset(test.getRules(), createLDT(2008, 10, 1), ZoneOffset.ofHours(1), 1);
        checkOffset(test.getRules(), createLDT(2008, 11, 1), ZoneOffset.ofHours(0), 1);
        checkOffset(test.getRules(), createLDT(2008, 12, 1), ZoneOffset.ofHours(0), 1);
    }

    public void test_London_getOffsetInfo_toDST() {
        ZoneId test = ZoneId.of("Europe/London");
        checkOffset(test.getRules(), createLDT(2008, 3, 24), ZoneOffset.ofHours(0), 1);
        checkOffset(test.getRules(), createLDT(2008, 3, 25), ZoneOffset.ofHours(0), 1);
        checkOffset(test.getRules(), createLDT(2008, 3, 26), ZoneOffset.ofHours(0), 1);
        checkOffset(test.getRules(), createLDT(2008, 3, 27), ZoneOffset.ofHours(0), 1);
        checkOffset(test.getRules(), createLDT(2008, 3, 28), ZoneOffset.ofHours(0), 1);
        checkOffset(test.getRules(), createLDT(2008, 3, 29), ZoneOffset.ofHours(0), 1);
        checkOffset(test.getRules(), createLDT(2008, 3, 30), ZoneOffset.ofHours(0), 1);
        checkOffset(test.getRules(), createLDT(2008, 3, 31), ZoneOffset.ofHours(1), 1);
        // cutover at 01:00Z
        checkOffset(test.getRules(), LocalDateTime.of(2008, 3, 30, 0, 59, 59, 999999999), ZoneOffset.ofHours(0), 1);
        checkOffset(test.getRules(), LocalDateTime.of(2008, 3, 30, 1, 30, 0, 0), ZoneOffset.ofHours(0), GAP);
        checkOffset(test.getRules(), LocalDateTime.of(2008, 3, 30, 2, 0, 0, 0), ZoneOffset.ofHours(1), 1);
    }

    public void test_London_getOffsetInfo_fromDST() {
        ZoneId test = ZoneId.of("Europe/London");
        checkOffset(test.getRules(), createLDT(2008, 10, 24), ZoneOffset.ofHours(1), 1);
        checkOffset(test.getRules(), createLDT(2008, 10, 25), ZoneOffset.ofHours(1), 1);
        checkOffset(test.getRules(), createLDT(2008, 10, 26), ZoneOffset.ofHours(1), 1);
        checkOffset(test.getRules(), createLDT(2008, 10, 27), ZoneOffset.ofHours(0), 1);
        checkOffset(test.getRules(), createLDT(2008, 10, 28), ZoneOffset.ofHours(0), 1);
        checkOffset(test.getRules(), createLDT(2008, 10, 29), ZoneOffset.ofHours(0), 1);
        checkOffset(test.getRules(), createLDT(2008, 10, 30), ZoneOffset.ofHours(0), 1);
        checkOffset(test.getRules(), createLDT(2008, 10, 31), ZoneOffset.ofHours(0), 1);
        // cutover at 01:00Z
        checkOffset(test.getRules(), LocalDateTime.of(2008, 10, 26, 0, 59, 59, 999999999), ZoneOffset.ofHours(1), 1);
        checkOffset(test.getRules(), LocalDateTime.of(2008, 10, 26, 1, 30, 0, 0), ZoneOffset.ofHours(1), OVERLAP);
        checkOffset(test.getRules(), LocalDateTime.of(2008, 10, 26, 2, 0, 0, 0), ZoneOffset.ofHours(0), 1);
    }

    // Spring-forward gap: 01:00 local does not exist; transition contract checks.
    public void test_London_getOffsetInfo_gap() {
        ZoneId test = ZoneId.of("Europe/London");
        final LocalDateTime dateTime = LocalDateTime.of(2008, 3, 30, 1, 0, 0, 0);
        ZoneOffsetTransition trans = checkOffset(test.getRules(), dateTime, ZoneOffset.ofHours(0), GAP);
        assertEquals(trans.isGap(), true);
        assertEquals(trans.isOverlap(), false);
        assertEquals(trans.getOffsetBefore(), ZoneOffset.ofHours(0));
        assertEquals(trans.getOffsetAfter(), ZoneOffset.ofHours(1));
        assertEquals(trans.getInstant(), dateTime.toInstant(ZoneOffset.UTC));
        assertEquals(trans.getDateTimeBefore(), LocalDateTime.of(2008, 3, 30, 1, 0));
        assertEquals(trans.getDateTimeAfter(), LocalDateTime.of(2008, 3, 30, 2, 0));
        assertEquals(trans.isValidOffset(ZoneOffset.ofHours(-1)), false);
        assertEquals(trans.isValidOffset(ZoneOffset.ofHours(0)), false);
        assertEquals(trans.isValidOffset(ZoneOffset.ofHours(1)), false);
        assertEquals(trans.isValidOffset(ZoneOffset.ofHours(2)), false);
        assertEquals(trans.toString(), "Transition[Gap at 2008-03-30T01:00Z to +01:00]");
        assertFalse(trans.equals(null));
        assertFalse(trans.equals(ZoneOffset.ofHours(0)));
        assertTrue(trans.equals(trans));
        final ZoneOffsetTransition otherTrans = test.getRules().getTransition(dateTime);
        assertTrue(trans.equals(otherTrans));
        assertEquals(trans.hashCode(), otherTrans.hashCode());
    }

    // Autumn-back overlap: 01:00 local occurs twice; both offsets are valid.
    public void test_London_getOffsetInfo_overlap() {
        ZoneId test = ZoneId.of("Europe/London");
        final LocalDateTime dateTime = LocalDateTime.of(2008, 10, 26, 1, 0, 0, 0);
        ZoneOffsetTransition trans = checkOffset(test.getRules(), dateTime, ZoneOffset.ofHours(1), OVERLAP);
        assertEquals(trans.isGap(), false);
        assertEquals(trans.isOverlap(), true);
        assertEquals(trans.getOffsetBefore(), ZoneOffset.ofHours(1));
        assertEquals(trans.getOffsetAfter(), ZoneOffset.ofHours(0));
        assertEquals(trans.getInstant(), dateTime.toInstant(ZoneOffset.UTC));
        assertEquals(trans.getDateTimeBefore(), LocalDateTime.of(2008, 10, 26, 2, 0));
        assertEquals(trans.getDateTimeAfter(), LocalDateTime.of(2008, 10, 26, 1, 0));
        assertEquals(trans.isValidOffset(ZoneOffset.ofHours(-1)), false);
        assertEquals(trans.isValidOffset(ZoneOffset.ofHours(0)), true);
        assertEquals(trans.isValidOffset(ZoneOffset.ofHours(1)), true);
        assertEquals(trans.isValidOffset(ZoneOffset.ofHours(2)), false);
        assertEquals(trans.toString(), "Transition[Overlap at 2008-10-26T02:00+01:00 to Z]");
        assertFalse(trans.equals(null));
        assertFalse(trans.equals(ZoneOffset.ofHours(1)));
        assertTrue(trans.equals(trans));
        final ZoneOffsetTransition otherTrans = test.getRules().getTransition(dateTime);
        assertTrue(trans.equals(otherTrans));
        assertEquals(trans.hashCode(), otherTrans.hashCode());
    }

    //-----------------------------------------------------------------------
    // Europe/Paris
    //-----------------------------------------------------------------------
    public void test_Paris() {
        ZoneId test = ZoneId.of("Europe/Paris");
        assertEquals(test.getId(), "Europe/Paris");
        assertEquals(test.getRules().isFixedOffset(), false);
    }

    // Offset by month in 2008: +02:00 during Central European Summer Time.
    public void test_Paris_getOffset() {
        ZoneId test = ZoneId.of("Europe/Paris");
        assertEquals(test.getRules().getOffset(createInstant(2008, 1, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(1));
        assertEquals(test.getRules().getOffset(createInstant(2008, 2, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(1));
        assertEquals(test.getRules().getOffset(createInstant(2008, 3, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(1));
        assertEquals(test.getRules().getOffset(createInstant(2008, 4, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(2));
        assertEquals(test.getRules().getOffset(createInstant(2008, 5, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(2));
        assertEquals(test.getRules().getOffset(createInstant(2008, 6, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(2));
        assertEquals(test.getRules().getOffset(createInstant(2008, 7, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(2));
        assertEquals(test.getRules().getOffset(createInstant(2008, 8, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(2));
        assertEquals(test.getRules().getOffset(createInstant(2008, 9, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(2));
        assertEquals(test.getRules().getOffset(createInstant(2008, 10, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(2));
        assertEquals(test.getRules().getOffset(createInstant(2008, 11, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(1));
        assertEquals(test.getRules().getOffset(createInstant(2008, 12, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(1));
    }

    public void test_Paris_getOffset_toDST() {
        ZoneId test = ZoneId.of("Europe/Paris");
        assertEquals(test.getRules().getOffset(createInstant(2008, 3, 24, ZoneOffset.UTC)), ZoneOffset.ofHours(1));
        assertEquals(test.getRules().getOffset(createInstant(2008, 3, 25, ZoneOffset.UTC)), ZoneOffset.ofHours(1));
        assertEquals(test.getRules().getOffset(createInstant(2008, 3, 26, ZoneOffset.UTC)), ZoneOffset.ofHours(1));
        assertEquals(test.getRules().getOffset(createInstant(2008, 3, 27, ZoneOffset.UTC)), ZoneOffset.ofHours(1));
        assertEquals(test.getRules().getOffset(createInstant(2008, 3, 28, ZoneOffset.UTC)), ZoneOffset.ofHours(1));
        assertEquals(test.getRules().getOffset(createInstant(2008, 3, 29, ZoneOffset.UTC)), ZoneOffset.ofHours(1));
        assertEquals(test.getRules().getOffset(createInstant(2008, 3, 30, ZoneOffset.UTC)), ZoneOffset.ofHours(1));
        assertEquals(test.getRules().getOffset(createInstant(2008, 3, 31, ZoneOffset.UTC)), ZoneOffset.ofHours(2));
        // cutover at 01:00Z
        assertEquals(test.getRules().getOffset(createInstant(2008, 3, 30, 0, 59, 59, 999999999, ZoneOffset.UTC)), ZoneOffset.ofHours(1));
        assertEquals(test.getRules().getOffset(createInstant(2008, 3, 30, 1, 0, 0, 0, ZoneOffset.UTC)), ZoneOffset.ofHours(2));
    }

    public void test_Paris_getOffset_fromDST() {
        ZoneId test = ZoneId.of("Europe/Paris");
        assertEquals(test.getRules().getOffset(createInstant(2008, 10, 24, ZoneOffset.UTC)), ZoneOffset.ofHours(2));
        assertEquals(test.getRules().getOffset(createInstant(2008, 10, 25, ZoneOffset.UTC)), ZoneOffset.ofHours(2));
        assertEquals(test.getRules().getOffset(createInstant(2008, 10, 26, ZoneOffset.UTC)), ZoneOffset.ofHours(2));
        assertEquals(test.getRules().getOffset(createInstant(2008, 10, 27, ZoneOffset.UTC)), ZoneOffset.ofHours(1));
        assertEquals(test.getRules().getOffset(createInstant(2008, 10, 28, ZoneOffset.UTC)), ZoneOffset.ofHours(1));
        assertEquals(test.getRules().getOffset(createInstant(2008, 10, 29, ZoneOffset.UTC)), ZoneOffset.ofHours(1));
        assertEquals(test.getRules().getOffset(createInstant(2008, 10, 30, ZoneOffset.UTC)), ZoneOffset.ofHours(1));
        assertEquals(test.getRules().getOffset(createInstant(2008, 10, 31, ZoneOffset.UTC)), ZoneOffset.ofHours(1));
        // cutover at 01:00Z
        assertEquals(test.getRules().getOffset(createInstant(2008, 10, 26, 0, 59, 59, 999999999, ZoneOffset.UTC)), ZoneOffset.ofHours(2));
        assertEquals(test.getRules().getOffset(createInstant(2008, 10, 26, 1, 0, 0, 0, ZoneOffset.UTC)), ZoneOffset.ofHours(1));
    }

    public void test_Paris_getOffsetInfo() {
        ZoneId test = ZoneId.of("Europe/Paris");
        checkOffset(test.getRules(), createLDT(2008, 1, 1), ZoneOffset.ofHours(1), 1);
        checkOffset(test.getRules(), createLDT(2008, 2, 1), ZoneOffset.ofHours(1), 1);
        checkOffset(test.getRules(), createLDT(2008, 3, 1), ZoneOffset.ofHours(1), 1);
        checkOffset(test.getRules(), createLDT(2008, 4, 1), ZoneOffset.ofHours(2), 1);
        checkOffset(test.getRules(), createLDT(2008, 5, 1), ZoneOffset.ofHours(2), 1);
        checkOffset(test.getRules(), createLDT(2008, 6, 1), ZoneOffset.ofHours(2), 1);
        checkOffset(test.getRules(), createLDT(2008, 7, 1), ZoneOffset.ofHours(2), 1);
        checkOffset(test.getRules(), createLDT(2008, 8, 1), ZoneOffset.ofHours(2), 1);
        checkOffset(test.getRules(), createLDT(2008, 9, 1), ZoneOffset.ofHours(2), 1);
        checkOffset(test.getRules(), createLDT(2008, 10, 1), ZoneOffset.ofHours(2), 1);
        checkOffset(test.getRules(), createLDT(2008, 11, 1), ZoneOffset.ofHours(1), 1);
        checkOffset(test.getRules(), createLDT(2008, 12, 1), ZoneOffset.ofHours(1), 1);
    }

    public void test_Paris_getOffsetInfo_toDST() {
        ZoneId test = ZoneId.of("Europe/Paris");
        checkOffset(test.getRules(), createLDT(2008, 3, 24), ZoneOffset.ofHours(1), 1);
        checkOffset(test.getRules(), createLDT(2008, 3, 25), ZoneOffset.ofHours(1), 1);
        checkOffset(test.getRules(), createLDT(2008, 3, 26), ZoneOffset.ofHours(1), 1);
        checkOffset(test.getRules(), createLDT(2008, 3, 27), ZoneOffset.ofHours(1), 1);
        checkOffset(test.getRules(), createLDT(2008, 3, 28), ZoneOffset.ofHours(1), 1);
        checkOffset(test.getRules(), createLDT(2008, 3, 29), ZoneOffset.ofHours(1), 1);
        checkOffset(test.getRules(), createLDT(2008, 3, 30), ZoneOffset.ofHours(1), 1);
        checkOffset(test.getRules(), createLDT(2008, 3, 31), ZoneOffset.ofHours(2), 1);
        // cutover at 01:00Z which is 02:00+01:00(local Paris time)
        checkOffset(test.getRules(), LocalDateTime.of(2008, 3, 30, 1, 59, 59, 999999999), ZoneOffset.ofHours(1), 1);
        checkOffset(test.getRules(), LocalDateTime.of(2008, 3, 30, 2, 30, 0, 0), ZoneOffset.ofHours(1), GAP);
        checkOffset(test.getRules(), LocalDateTime.of(2008, 3, 30, 3, 0, 0, 0), ZoneOffset.ofHours(2), 1);
    }

    public void test_Paris_getOffsetInfo_fromDST() {
        ZoneId test = ZoneId.of("Europe/Paris");
        checkOffset(test.getRules(), createLDT(2008, 10, 24), ZoneOffset.ofHours(2), 1);
        checkOffset(test.getRules(), createLDT(2008, 10, 25), ZoneOffset.ofHours(2), 1);
        checkOffset(test.getRules(), createLDT(2008, 10, 26), ZoneOffset.ofHours(2), 1);
        checkOffset(test.getRules(), createLDT(2008, 10, 27), ZoneOffset.ofHours(1), 1);
        checkOffset(test.getRules(), createLDT(2008, 10, 28), ZoneOffset.ofHours(1), 1);
        checkOffset(test.getRules(), createLDT(2008, 10, 29), ZoneOffset.ofHours(1), 1);
        checkOffset(test.getRules(), createLDT(2008, 10, 30), ZoneOffset.ofHours(1), 1);
        checkOffset(test.getRules(), createLDT(2008, 10, 31), ZoneOffset.ofHours(1), 1);
        // cutover at 01:00Z which is 02:00+01:00(local Paris time)
        checkOffset(test.getRules(), LocalDateTime.of(2008, 10, 26, 1, 59, 59, 999999999), ZoneOffset.ofHours(2), 1);
        checkOffset(test.getRules(), LocalDateTime.of(2008, 10, 26, 2, 30, 0, 0), ZoneOffset.ofHours(2), OVERLAP);
        checkOffset(test.getRules(), LocalDateTime.of(2008, 10, 26, 3, 0, 0, 0), ZoneOffset.ofHours(1), 1);
    }

    public void test_Paris_getOffsetInfo_gap() {
        ZoneId test = ZoneId.of("Europe/Paris");
        final LocalDateTime dateTime = LocalDateTime.of(2008, 3, 30, 2, 0, 0, 0);
        ZoneOffsetTransition trans = checkOffset(test.getRules(), dateTime,
ZoneOffset.ofHours(1), GAP); assertEquals(trans.isGap(), true); assertEquals(trans.isOverlap(), false); assertEquals(trans.getOffsetBefore(), ZoneOffset.ofHours(1)); assertEquals(trans.getOffsetAfter(), ZoneOffset.ofHours(2)); assertEquals(trans.getInstant(), createInstant(2008, 3, 30, 1, 0, 0, 0, ZoneOffset.UTC)); assertEquals(trans.isValidOffset(ZoneOffset.ofHours(0)), false); assertEquals(trans.isValidOffset(ZoneOffset.ofHours(1)), false); assertEquals(trans.isValidOffset(ZoneOffset.ofHours(2)), false); assertEquals(trans.isValidOffset(ZoneOffset.ofHours(3)), false); assertEquals(trans.toString(), "Transition[Gap at 2008-03-30T02:00+01:00 to +02:00]"); assertFalse(trans.equals(null)); assertFalse(trans.equals(ZoneOffset.ofHours(1))); assertTrue(trans.equals(trans)); final ZoneOffsetTransition otherDis = test.getRules().getTransition(dateTime); assertTrue(trans.equals(otherDis)); assertEquals(trans.hashCode(), otherDis.hashCode()); } public void test_Paris_getOffsetInfo_overlap() { ZoneId test = ZoneId.of("Europe/Paris"); final LocalDateTime dateTime = LocalDateTime.of(2008, 10, 26, 2, 0, 0, 0); ZoneOffsetTransition trans = checkOffset(test.getRules(), dateTime, ZoneOffset.ofHours(2), OVERLAP); assertEquals(trans.isGap(), false); assertEquals(trans.isOverlap(), true); assertEquals(trans.getOffsetBefore(), ZoneOffset.ofHours(2)); assertEquals(trans.getOffsetAfter(), ZoneOffset.ofHours(1)); assertEquals(trans.getInstant(), createInstant(2008, 10, 26, 1, 0, 0, 0, ZoneOffset.UTC)); assertEquals(trans.isValidOffset(ZoneOffset.ofHours(0)), false); assertEquals(trans.isValidOffset(ZoneOffset.ofHours(1)), true); assertEquals(trans.isValidOffset(ZoneOffset.ofHours(2)), true); assertEquals(trans.isValidOffset(ZoneOffset.ofHours(3)), false); assertEquals(trans.toString(), "Transition[Overlap at 2008-10-26T03:00+02:00 to +01:00]"); assertFalse(trans.equals(null)); assertFalse(trans.equals(ZoneOffset.ofHours(2))); assertTrue(trans.equals(trans)); final ZoneOffsetTransition 
otherDis = test.getRules().getTransition(dateTime); assertTrue(trans.equals(otherDis)); assertEquals(trans.hashCode(), otherDis.hashCode()); } // ----------------------------------------------------------------------- // America/New_York // ----------------------------------------------------------------------- public void test_NewYork() { ZoneId test = ZoneId.of("America/New_York"); assertEquals(test.getId(), "America/New_York"); assertEquals(test.getRules().isFixedOffset(), false); } public void test_NewYork_getOffset() { ZoneId test = ZoneId.of("America/New_York"); ZoneOffset offset = ZoneOffset.ofHours(-5); assertEquals(test.getRules().getOffset(createInstant(2008, 1, 1, offset)), ZoneOffset.ofHours(-5)); assertEquals(test.getRules().getOffset(createInstant(2008, 2, 1, offset)), ZoneOffset.ofHours(-5)); assertEquals(test.getRules().getOffset(createInstant(2008, 3, 1, offset)), ZoneOffset.ofHours(-5)); assertEquals(test.getRules().getOffset(createInstant(2008, 4, 1, offset)), ZoneOffset.ofHours(-4)); assertEquals(test.getRules().getOffset(createInstant(2008, 5, 1, offset)), ZoneOffset.ofHours(-4)); assertEquals(test.getRules().getOffset(createInstant(2008, 6, 1, offset)), ZoneOffset.ofHours(-4)); assertEquals(test.getRules().getOffset(createInstant(2008, 7, 1, offset)), ZoneOffset.ofHours(-4)); assertEquals(test.getRules().getOffset(createInstant(2008, 8, 1, offset)), ZoneOffset.ofHours(-4)); assertEquals(test.getRules().getOffset(createInstant(2008, 9, 1, offset)), ZoneOffset.ofHours(-4)); assertEquals(test.getRules().getOffset(createInstant(2008, 10, 1, offset)), ZoneOffset.ofHours(-4)); assertEquals(test.getRules().getOffset(createInstant(2008, 11, 1, offset)), ZoneOffset.ofHours(-4)); assertEquals(test.getRules().getOffset(createInstant(2008, 12, 1, offset)), ZoneOffset.ofHours(-5)); assertEquals(test.getRules().getOffset(createInstant(2008, 1, 28, offset)), ZoneOffset.ofHours(-5)); assertEquals(test.getRules().getOffset(createInstant(2008, 2, 28, offset)), 
ZoneOffset.ofHours(-5)); assertEquals(test.getRules().getOffset(createInstant(2008, 3, 28, offset)), ZoneOffset.ofHours(-4)); assertEquals(test.getRules().getOffset(createInstant(2008, 4, 28, offset)), ZoneOffset.ofHours(-4)); assertEquals(test.getRules().getOffset(createInstant(2008, 5, 28, offset)), ZoneOffset.ofHours(-4)); assertEquals(test.getRules().getOffset(createInstant(2008, 6, 28, offset)), ZoneOffset.ofHours(-4)); assertEquals(test.getRules().getOffset(createInstant(2008, 7, 28, offset)), ZoneOffset.ofHours(-4)); assertEquals(test.getRules().getOffset(createInstant(2008, 8, 28, offset)), ZoneOffset.ofHours(-4)); assertEquals(test.getRules().getOffset(createInstant(2008, 9, 28, offset)), ZoneOffset.ofHours(-4)); assertEquals(test.getRules().getOffset(createInstant(2008, 10, 28, offset)), ZoneOffset.ofHours(-4)); assertEquals(test.getRules().getOffset(createInstant(2008, 11, 28, offset)), ZoneOffset.ofHours(-5)); assertEquals(test.getRules().getOffset(createInstant(2008, 12, 28, offset)), ZoneOffset.ofHours(-5)); } public void test_NewYork_getOffset_toDST() { ZoneId test = ZoneId.of("America/New_York"); ZoneOffset offset = ZoneOffset.ofHours(-5); assertEquals(test.getRules().getOffset(createInstant(2008, 3, 8, offset)), ZoneOffset.ofHours(-5)); assertEquals(test.getRules().getOffset(createInstant(2008, 3, 9, offset)), ZoneOffset.ofHours(-5)); assertEquals(test.getRules().getOffset(createInstant(2008, 3, 10, offset)), ZoneOffset.ofHours(-4)); assertEquals(test.getRules().getOffset(createInstant(2008, 3, 11, offset)), ZoneOffset.ofHours(-4)); assertEquals(test.getRules().getOffset(createInstant(2008, 3, 12, offset)), ZoneOffset.ofHours(-4)); assertEquals(test.getRules().getOffset(createInstant(2008, 3, 13, offset)), ZoneOffset.ofHours(-4)); assertEquals(test.getRules().getOffset(createInstant(2008, 3, 14, offset)), ZoneOffset.ofHours(-4)); // cutover at 02:00 local assertEquals(test.getRules().getOffset(createInstant(2008, 3, 9, 1, 59, 59, 999999999, 
offset)), ZoneOffset.ofHours(-5)); assertEquals(test.getRules().getOffset(createInstant(2008, 3, 9, 2, 0, 0, 0, offset)), ZoneOffset.ofHours(-4)); } public void test_NewYork_getOffset_fromDST() { ZoneId test = ZoneId.of("America/New_York"); ZoneOffset offset = ZoneOffset.ofHours(-4); assertEquals(test.getRules().getOffset(createInstant(2008, 11, 1, offset)), ZoneOffset.ofHours(-4)); assertEquals(test.getRules().getOffset(createInstant(2008, 11, 2, offset)), ZoneOffset.ofHours(-4)); assertEquals(test.getRules().getOffset(createInstant(2008, 11, 3, offset)), ZoneOffset.ofHours(-5)); assertEquals(test.getRules().getOffset(createInstant(2008, 11, 4, offset)), ZoneOffset.ofHours(-5)); assertEquals(test.getRules().getOffset(createInstant(2008, 11, 5, offset)), ZoneOffset.ofHours(-5)); assertEquals(test.getRules().getOffset(createInstant(2008, 11, 6, offset)), ZoneOffset.ofHours(-5)); assertEquals(test.getRules().getOffset(createInstant(2008, 11, 7, offset)), ZoneOffset.ofHours(-5)); // cutover at 02:00 local assertEquals(test.getRules().getOffset(createInstant(2008, 11, 2, 1, 59, 59, 999999999, offset)), ZoneOffset.ofHours(-4)); assertEquals(test.getRules().getOffset(createInstant(2008, 11, 2, 2, 0, 0, 0, offset)), ZoneOffset.ofHours(-5)); } public void test_NewYork_getOffsetInfo() { ZoneId test = ZoneId.of("America/New_York"); checkOffset(test.getRules(), createLDT(2008, 1, 1), ZoneOffset.ofHours(-5), 1); checkOffset(test.getRules(), createLDT(2008, 2, 1), ZoneOffset.ofHours(-5), 1); checkOffset(test.getRules(), createLDT(2008, 3, 1), ZoneOffset.ofHours(-5), 1); checkOffset(test.getRules(), createLDT(2008, 4, 1), ZoneOffset.ofHours(-4), 1); checkOffset(test.getRules(), createLDT(2008, 5, 1), ZoneOffset.ofHours(-4), 1); checkOffset(test.getRules(), createLDT(2008, 6, 1), ZoneOffset.ofHours(-4), 1); checkOffset(test.getRules(), createLDT(2008, 7, 1), ZoneOffset.ofHours(-4), 1); checkOffset(test.getRules(), createLDT(2008, 8, 1), ZoneOffset.ofHours(-4), 1); 
checkOffset(test.getRules(), createLDT(2008, 9, 1), ZoneOffset.ofHours(-4), 1); checkOffset(test.getRules(), createLDT(2008, 10, 1), ZoneOffset.ofHours(-4), 1); checkOffset(test.getRules(), createLDT(2008, 11, 1), ZoneOffset.ofHours(-4), 1); checkOffset(test.getRules(), createLDT(2008, 12, 1), ZoneOffset.ofHours(-5), 1); checkOffset(test.getRules(), createLDT(2008, 1, 28), ZoneOffset.ofHours(-5), 1); checkOffset(test.getRules(), createLDT(2008, 2, 28), ZoneOffset.ofHours(-5), 1); checkOffset(test.getRules(), createLDT(2008, 3, 28), ZoneOffset.ofHours(-4), 1); checkOffset(test.getRules(), createLDT(2008, 4, 28), ZoneOffset.ofHours(-4), 1); checkOffset(test.getRules(), createLDT(2008, 5, 28), ZoneOffset.ofHours(-4), 1); checkOffset(test.getRules(), createLDT(2008, 6, 28), ZoneOffset.ofHours(-4), 1); checkOffset(test.getRules(), createLDT(2008, 7, 28), ZoneOffset.ofHours(-4), 1); checkOffset(test.getRules(), createLDT(2008, 8, 28), ZoneOffset.ofHours(-4), 1); checkOffset(test.getRules(), createLDT(2008, 9, 28), ZoneOffset.ofHours(-4), 1); checkOffset(test.getRules(), createLDT(2008, 10, 28), ZoneOffset.ofHours(-4), 1); checkOffset(test.getRules(), createLDT(2008, 11, 28), ZoneOffset.ofHours(-5), 1); checkOffset(test.getRules(), createLDT(2008, 12, 28), ZoneOffset.ofHours(-5), 1); } public void test_NewYork_getOffsetInfo_toDST() { ZoneId test = ZoneId.of("America/New_York"); checkOffset(test.getRules(), createLDT(2008, 3, 8), ZoneOffset.ofHours(-5), 1); checkOffset(test.getRules(), createLDT(2008, 3, 9), ZoneOffset.ofHours(-5), 1); checkOffset(test.getRules(), createLDT(2008, 3, 10), ZoneOffset.ofHours(-4), 1); checkOffset(test.getRules(), createLDT(2008, 3, 11), ZoneOffset.ofHours(-4), 1); checkOffset(test.getRules(), createLDT(2008, 3, 12), ZoneOffset.ofHours(-4), 1); checkOffset(test.getRules(), createLDT(2008, 3, 13), ZoneOffset.ofHours(-4), 1); checkOffset(test.getRules(), createLDT(2008, 3, 14), ZoneOffset.ofHours(-4), 1); // cutover at 02:00 local 
checkOffset(test.getRules(), LocalDateTime.of(2008, 3, 9, 1, 59, 59, 999999999), ZoneOffset.ofHours(-5), 1); checkOffset(test.getRules(), LocalDateTime.of(2008, 3, 9, 2, 30, 0, 0), ZoneOffset.ofHours(-5), GAP); checkOffset(test.getRules(), LocalDateTime.of(2008, 3, 9, 3, 0, 0, 0), ZoneOffset.ofHours(-4), 1); } public void test_NewYork_getOffsetInfo_fromDST() { ZoneId test = ZoneId.of("America/New_York"); checkOffset(test.getRules(), createLDT(2008, 11, 1), ZoneOffset.ofHours(-4), 1); checkOffset(test.getRules(), createLDT(2008, 11, 2), ZoneOffset.ofHours(-4), 1); checkOffset(test.getRules(), createLDT(2008, 11, 3), ZoneOffset.ofHours(-5), 1); checkOffset(test.getRules(), createLDT(2008, 11, 4), ZoneOffset.ofHours(-5), 1); checkOffset(test.getRules(), createLDT(2008, 11, 5), ZoneOffset.ofHours(-5), 1); checkOffset(test.getRules(), createLDT(2008, 11, 6), ZoneOffset.ofHours(-5), 1); checkOffset(test.getRules(), createLDT(2008, 11, 7), ZoneOffset.ofHours(-5), 1); // cutover at 02:00 local checkOffset(test.getRules(), LocalDateTime.of(2008, 11, 2, 0, 59, 59, 999999999), ZoneOffset.ofHours(-4), 1); checkOffset(test.getRules(), LocalDateTime.of(2008, 11, 2, 1, 30, 0, 0), ZoneOffset.ofHours(-4), OVERLAP); checkOffset(test.getRules(), LocalDateTime.of(2008, 11, 2, 2, 0, 0, 0), ZoneOffset.ofHours(-5), 1); } public void test_NewYork_getOffsetInfo_gap() { ZoneId test = ZoneId.of("America/New_York"); final LocalDateTime dateTime = LocalDateTime.of(2008, 3, 9, 2, 0, 0, 0); ZoneOffsetTransition trans = checkOffset(test.getRules(), dateTime, ZoneOffset.ofHours(-5), GAP); assertEquals(trans.getOffsetBefore(), ZoneOffset.ofHours(-5)); assertEquals(trans.getOffsetAfter(), ZoneOffset.ofHours(-4)); assertEquals(trans.getInstant(), createInstant(2008, 3, 9, 2, 0, 0, 0, ZoneOffset.ofHours(-5))); assertEquals(trans.isValidOffset(ZoneOffset.ofHours(-6)), false); assertEquals(trans.isValidOffset(ZoneOffset.ofHours(-5)), false); assertEquals(trans.isValidOffset(ZoneOffset.ofHours(-4)), 
false); assertEquals(trans.isValidOffset(ZoneOffset.ofHours(-3)), false); assertEquals(trans.toString(), "Transition[Gap at 2008-03-09T02:00-05:00 to -04:00]"); assertFalse(trans.equals(null)); assertFalse(trans.equals(ZoneOffset.ofHours(-5))); assertTrue(trans.equals(trans)); final ZoneOffsetTransition otherTrans = test.getRules().getTransition(dateTime); assertTrue(trans.equals(otherTrans)); assertEquals(trans.hashCode(), otherTrans.hashCode()); } public void test_NewYork_getOffsetInfo_overlap() { ZoneId test = ZoneId.of("America/New_York"); final LocalDateTime dateTime = LocalDateTime.of(2008, 11, 2, 1, 0, 0, 0); ZoneOffsetTransition trans = checkOffset(test.getRules(), dateTime, ZoneOffset.ofHours(-4), OVERLAP); assertEquals(trans.getOffsetBefore(), ZoneOffset.ofHours(-4)); assertEquals(trans.getOffsetAfter(), ZoneOffset.ofHours(-5)); assertEquals(trans.getInstant(), createInstant(2008, 11, 2, 2, 0, 0, 0, ZoneOffset.ofHours(-4))); assertEquals(trans.isValidOffset(ZoneOffset.ofHours(-1)), false); assertEquals(trans.isValidOffset(ZoneOffset.ofHours(-5)), true); assertEquals(trans.isValidOffset(ZoneOffset.ofHours(-4)), true); assertEquals(trans.isValidOffset(ZoneOffset.ofHours(2)), false); assertEquals(trans.toString(), "Transition[Overlap at 2008-11-02T02:00-04:00 to -05:00]"); assertFalse(trans.equals(null)); assertFalse(trans.equals(ZoneOffset.ofHours(-4))); assertTrue(trans.equals(trans)); final ZoneOffsetTransition otherTrans = test.getRules().getTransition(dateTime); assertTrue(trans.equals(otherTrans)); assertEquals(trans.hashCode(), otherTrans.hashCode()); } // ----------------------------------------------------------------------- // getXxx() isXxx() // ----------------------------------------------------------------------- public void test_get_Tzdb() { ZoneId test = ZoneId.of("Europe/London"); assertEquals(test.getId(), "Europe/London"); assertEquals(test.getRules().isFixedOffset(), false); } public void test_get_TzdbFixed() { ZoneId test = 
ZoneId.of("+01:30"); assertEquals(test.getId(), "+01:30"); assertEquals(test.getRules().isFixedOffset(), true); } // ----------------------------------------------------------------------- // equals() / hashCode() // ----------------------------------------------------------------------- public void test_equals() { ZoneId test1 = ZoneId.of("Europe/London"); ZoneId test2 = ZoneId.of("Europe/Paris"); ZoneId test2b = ZoneId.of("Europe/Paris"); assertEquals(test1.equals(test2), false); assertEquals(test2.equals(test1), false); assertEquals(test1.equals(test1), true); assertEquals(test2.equals(test2), true); assertEquals(test2.equals(test2b), true); assertEquals(test1.hashCode() == test1.hashCode(), true); assertEquals(test2.hashCode() == test2.hashCode(), true); assertEquals(test2.hashCode() == test2b.hashCode(), true); } public void test_equals_null() { assertEquals(ZoneId.of("Europe/London").equals(null), false); } public void test_equals_notTimeZone() { assertEquals(ZoneId.of("Europe/London").equals("Europe/London"), false); } // ----------------------------------------------------------------------- // toString() // ----------------------------------------------------------------------- @DataProvider(name = "ToString") Object[][] data_toString() { return new Object[][] { { "Europe/London", "Europe/London" }, { "Europe/Paris", "Europe/Paris" }, { "Europe/Berlin", "Europe/Berlin" }, { "UTC", "Z" }, { "UTC+01:00", "+01:00" }, }; } @Test(dataProvider = "ToString") public void test_toString(String id, String expected) { ZoneId test = ZoneId.of(id); assertEquals(test.toString(), expected); } // ----------------------------------------------------------------------- // ----------------------------------------------------------------------- // ----------------------------------------------------------------------- private Instant createInstant(int year, int month, int day, ZoneOffset offset) { return LocalDateTime.of(year, month, day, 0, 0).toInstant(offset); } private 
Instant createInstant(int year, int month, int day, int hour, int min, int sec, int nano, ZoneOffset offset) { return LocalDateTime.of(year, month, day, hour, min, sec, nano).toInstant(offset); } private ZonedDateTime createZDT(int year, int month, int day, int hour, int min, int sec, int nano, ZoneId zone) { return LocalDateTime.of(year, month, day, hour, min, sec, nano).atZone(zone); } private LocalDateTime createLDT(int year, int month, int day) { return LocalDateTime.of(year, month, day, 0, 0); } private ZoneOffsetTransition checkOffset(ZoneRules rules, LocalDateTime dateTime, ZoneOffset offset, int type) { List<ZoneOffset> validOffsets = rules.getValidOffsets(dateTime); assertEquals(validOffsets.size(), type); assertEquals(rules.getOffset(dateTime), offset); if (type == 1) { assertEquals(validOffsets.get(0), offset); return null; } else { ZoneOffsetTransition zot = rules.getTransition(dateTime); assertNotNull(zot); assertEquals(zot.isOverlap(), type == 2); assertEquals(zot.isGap(), type == 0); assertEquals(zot.isValidOffset(offset), type == 2); return zot; } } }
//
// MessagePack for Java
//
// Copyright (C) 2009 - 2013 FURUHASHI Sadayuki
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package org.msgpack.template.builder;

import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Modifier;

import org.msgpack.*;
import org.msgpack.packer.Packer;
import org.msgpack.template.*;
import org.msgpack.unpacker.Unpacker;

import javassist.CannotCompileException;
import javassist.CtClass;
import javassist.CtConstructor;
import javassist.CtNewConstructor;
import javassist.NotFoundException;

/**
 * Build context that generates a concrete {@code Template} subclass for a
 * target class with javassist. The write/read method bodies are emitted as
 * Java source strings (javassist compiles them); in those strings {@code $1},
 * {@code $2}, {@code $3} are the generated method's parameters and
 * {@code _$$_t} is the typed target object. Private fields are accessed
 * through the static reflection helpers in this class, since the generated
 * code cannot touch them directly.
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
public class DefaultBuildContext extends BuildContext<FieldEntry> {
    // Field entries of the target class, in serialized (array-element) order.
    protected FieldEntry[] entries;
    // Class the generated template serializes.
    protected Class<?> origClass;
    // origClass.getName(), used to name the generated template class.
    protected String origName;
    // One template per field entry, parallel to 'entries'.
    protected Template<?>[] templates;

    public DefaultBuildContext(JavassistTemplateBuilder director) {
        super(director);
    }

    /**
     * Captures the build inputs and generates + instantiates the template
     * class for {@code targetClass}.
     */
    public Template buildTemplate(Class targetClass, FieldEntry[] entries, Template[] templates) {
        this.entries = entries;
        this.templates = templates;
        this.origClass = targetClass;
        this.origName = origClass.getName();
        return build(origName);
    }

    // Generated class extends JavassistTemplateBuilder.JavassistTemplate.
    protected void setSuperClass() throws CannotCompileException, NotFoundException {
        tmplCtClass.setSuperclass(director.getCtClass(
                JavassistTemplateBuilder.JavassistTemplate.class.getName()));
    }

    protected void buildConstructor() throws CannotCompileException, NotFoundException {
        // Constructor(Class targetClass, Template[] templates)
        CtConstructor newCtCons = CtNewConstructor.make(
                new CtClass[] {
                        director.getCtClass(Class.class.getName()),
                        director.getCtClass(Template.class.getName() + "[]") },
                new CtClass[0], tmplCtClass);
        tmplCtClass.addConstructor(newCtCons);
    }

    // Instantiates the generated template via its (Class, Template[]) constructor.
    protected Template buildInstance(Class<?> c) throws NoSuchMethodException,
            InstantiationException, IllegalAccessException, InvocationTargetException {
        Constructor<?> cons = c.getConstructor(new Class[] { Class.class, Template[].class });
        Object tmpl = cons.newInstance(new Object[] { origClass, templates });
        return (Template) tmpl;
    }

    // No per-method initialization needed for this context.
    protected void buildMethodInit() {
    }

    /**
     * Emits the source of the generated write method: null handling, then one
     * array element per field entry ($1 = Packer, $2 = target, $3 = required).
     */
    protected String buildWriteMethodBody() {
        resetStringBuilder();
        buildString("\n{\n");
        buildString(" if ($2 == null) {\n");
        buildString(" if ($3) {\n");
        buildString(" throw new %s(\"Attempted to write null\");\n",
                MessageTypeException.class.getName());
        buildString(" }\n");
        buildString(" $1.writeNil();\n");
        buildString(" return;\n");
        buildString(" }\n");
        buildString(" %s _$$_t = (%s) $2;\n", origName, origName);
        buildString(" $1.writeArrayBegin(%d);\n", entries.length);
        for (int i = 0; i < entries.length; i++) {
            FieldEntry e = entries[i];
            if (!e.isAvailable()) {
                // Unavailable fields still occupy an array slot, written as nil.
                buildString(" $1.writeNil();\n");
                continue;
            }
            DefaultFieldEntry de = (DefaultFieldEntry) e;
            boolean isPrivate = Modifier.isPrivate(de.getField().getModifiers());
            Class<?> type = de.getType();
            if (type.isPrimitive()) { // primitive types
                if (!isPrivate) {
                    buildString(" $1.%s(_$$_t.%s);\n", primitiveWriteName(type), de.getName());
                } else {
                    // Private field: route through the static reflection helper.
                    buildString(
                            " %s.writePrivateField($1, _$$_t, %s.class, \"%s\", templates[%d]);\n",
                            DefaultBuildContext.class.getName(),
                            de.getField().getDeclaringClass().getName(), de.getName(), i);
                }
            } else { // reference types
                if (!isPrivate) {
                    buildString(" if (_$$_t.%s == null) {\n", de.getName());
                } else {
                    buildString(
                            " if (%s.readPrivateField(_$$_t, %s.class, \"%s\") == null) {\n",
                            DefaultBuildContext.class.getName(),
                            de.getField().getDeclaringClass().getName(), de.getName());
                }
                if (de.isNotNullable()) {
                    buildString(
                            " throw new %s(\"%s cannot be null by @NotNullable\");\n",
                            MessageTypeException.class.getName(), de.getName());
                } else {
                    buildString(" $1.writeNil();\n");
                }
                buildString(" } else {\n");
                if (!isPrivate) {
                    buildString(" templates[%d].write($1, _$$_t.%s);\n", i, de.getName());
                } else {
                    buildString(
                            " %s.writePrivateField($1, _$$_t, %s.class, \"%s\", templates[%d]);\n",
                            DefaultBuildContext.class.getName(),
                            de.getField().getDeclaringClass().getName(), de.getName(), i);
                }
                buildString(" }\n");
            }
        }
        buildString(" $1.writeArrayEnd();\n");
        buildString("}\n");
        return getBuiltString();
    }

    /**
     * Reflectively reads a (possibly private) field value. Called from the
     * generated code. Accessibility is toggled around the read.
     */
    public static Object readPrivateField(Object target, Class targetClass, String fieldName) {
        Field field = null;
        try {
            field = targetClass.getDeclaredField(fieldName);
            field.setAccessible(true);
            Object valueReference = field.get(target);
            return valueReference;
        } catch (Exception e) {
            // Broad catch by design: any reflection failure surfaces as a
            // MessageTypeException to the serialization caller.
            throw new MessageTypeException(e);
        } finally {
            if (field != null) {
                field.setAccessible(false);
            }
        }
    }

    /**
     * Reflectively reads a (possibly private) field and writes it with the
     * given template. Called from the generated write method.
     */
    public static void writePrivateField(Packer packer, Object target, Class targetClass,
            String fieldName, Template tmpl) {
        Field field = null;
        try {
            field = targetClass.getDeclaredField(fieldName);
            field.setAccessible(true);
            Object valueReference = field.get(target);
            tmpl.write(packer, valueReference);
        } catch (Exception e) {
            throw new MessageTypeException(e);
        } finally {
            if (field != null) {
                field.setAccessible(false);
            }
        }
    }

    /**
     * Emits the source of the generated read method: nil/reuse handling, then
     * one array element per field entry ($1 = Unpacker, $2 = object to reuse
     * or null, $3 = required).
     */
    protected String buildReadMethodBody() {
        resetStringBuilder();
        buildString("\n{\n");
        buildString(" if (!$3 && $1.trySkipNil()) {\n");
        buildString(" return null;\n");
        buildString(" }\n");
        buildString(" %s _$$_t;\n", origName);
        buildString(" if ($2 == null) {\n");
        buildString(" _$$_t = new %s();\n", origName);
        buildString(" } else {\n");
        buildString(" _$$_t = (%s) $2;\n", origName);
        buildString(" }\n");
        buildString(" $1.readArrayBegin();\n");
        int i;
        for (i = 0; i < entries.length; i++) {
            FieldEntry e = entries[i];
            if (!e.isAvailable()) {
                buildString(" $1.skip();\n");
                continue;
            }
            if (e.isOptional()) {
                buildString(" if ($1.trySkipNil()) {");
                // if Optional and nil, then keep default value
                buildString(" } else {\n");
            }
            DefaultFieldEntry de = (DefaultFieldEntry) e;
            boolean isPrivate = Modifier.isPrivate(de.getField().getModifiers());
            Class<?> type = de.getType();
            if (type.isPrimitive()) {
                if (!isPrivate) {
                    buildString(" _$$_t.%s = $1.%s();\n", de.getName(), primitiveReadName(type));
                } else {
                    buildString(
                            " %s.readPrivateField($1, _$$_t, %s.class, \"%s\", templates[%d]);\n",
                            DefaultBuildContext.class.getName(),
                            de.getField().getDeclaringClass().getName(), de.getName(), i);
                }
            } else {
                if (!isPrivate) {
                    buildString(
                            " _$$_t.%s = (%s) this.templates[%d].read($1, _$$_t.%s);\n",
                            de.getName(), de.getJavaTypeName(), i, de.getName());
                } else {
                    buildString(
                            " %s.readPrivateField($1, _$$_t, %s.class, \"%s\", templates[%d]);\n",
                            DefaultBuildContext.class.getName(),
                            de.getField().getDeclaringClass().getName(), de.getName(), i);
                }
            }
            if (de.isOptional()) {
                buildString(" }\n");
            }
        }
        buildString(" $1.readArrayEnd();\n");
        buildString(" return _$$_t;\n");
        buildString("}\n");
        return getBuiltString();
    }

    /**
     * Reflectively reads a value with the template (passing the current field
     * value for reuse) and stores it back into the (possibly private) field
     * only when the template returned a different object. Called from the
     * generated read method.
     */
    public static void readPrivateField(Unpacker unpacker, Object target, Class targetClass,
            String fieldName, Template tmpl) {
        Field field = null;
        try {
            field = targetClass.getDeclaredField(fieldName);
            field.setAccessible(true);
            Object fieldReference = field.get(target);
            Object valueReference = tmpl.read(unpacker, fieldReference);
            if (valueReference != fieldReference) {
                field.set(target, valueReference);
            }
        } catch (Exception e) {
            throw new MessageTypeException(e);
        } finally {
            if (field != null) {
                field.setAccessible(false);
            }
        }
    }

    // Same setup as buildTemplate(), but writes the generated class to disk.
    @Override
    public void writeTemplate(Class<?> targetClass, FieldEntry[] entries,
            Template[] templates, String directoryName) {
        this.entries = entries;
        this.templates = templates;
        this.origClass = targetClass;
        this.origName = origClass.getName();
        write(origName, directoryName);
    }

    // Same setup as buildTemplate(), but loads a previously generated class.
    @Override
    public Template loadTemplate(Class<?> targetClass, FieldEntry[] entries, Template[] templates) {
        this.entries = entries;
        this.templates = templates;
        this.origClass = targetClass;
        this.origName = origClass.getName();
        return load(origName);
    }
}
/* * The MIT License * * Copyright (c) 2015 The Broad Institute * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package picard.analysis; import htsjdk.samtools.metrics.MetricsFile; import htsjdk.samtools.util.Histogram; import org.apache.commons.math3.util.CombinatoricsUtils; import org.testng.Assert; import org.testng.annotations.DataProvider; import org.testng.annotations.Test; import java.io.File; import java.io.FileReader; import java.util.*; import java.util.stream.IntStream; /** * Created by davidben on 5/18/15. 
*/ public class TheoreticalSensitivityTest { private final static File TEST_DIR = new File("testdata/picard/analysis/TheoreticalSensitivity/"); private final static File DEPTH = new File(TEST_DIR, "Solexa332667_DepthDist.histo"); private final static File BASEQ = new File(TEST_DIR, "Solexa332667_BaseQ.histo"); @Test public void testRouletteWheel() throws Exception { //test that a deterministic roulette wheel only gives one value final double[] deterministicWeights = {0.0, 1.0, 0.0}; final TheoreticalSensitivity.RouletteWheel deterministicWheel = new TheoreticalSensitivity.RouletteWheel(deterministicWeights); for (int n = 0; n < 10; n++) Assert.assertEquals(deterministicWheel.draw(), 1); //test the sums of this deterministic wheel: a sum of n 1's equals n final List<ArrayList<Integer>> deterministicSums = deterministicWheel.sampleCumulativeSums(10, 1, false); for (int n = 0; n < 10; n++) Assert.assertEquals(deterministicSums.get(n).get(0), (Integer) n); } @Test public void testProportionsAboveThresholds() throws Exception { final List<ArrayList<Integer>> sums = new ArrayList<>(); sums.add(new ArrayList<>(Arrays.asList(0, 0, 0))); sums.add(new ArrayList<>(Arrays.asList(10, 10))); sums.add(new ArrayList<>(Arrays.asList(5, 11, -2, 4))); final List<Double> thresholds = Arrays.asList(-1.0, 1.0, 6.0); Assert.assertEquals(sums.size(), 3); Assert.assertEquals(thresholds.size(), 3); final List<ArrayList<Double>> proportions = TheoreticalSensitivity.proportionsAboveThresholds(sums, thresholds); Assert.assertEquals(proportions.size(), 3); Assert.assertEquals(proportions.get(0).get(0), (double) 3 / 3); Assert.assertEquals(proportions.get(0).get(1), (double) 0 / 3); Assert.assertEquals(proportions.get(0).get(2), (double) 0 / 3); Assert.assertEquals(proportions.get(1).get(0), (double) 2 / 2); Assert.assertEquals(proportions.get(1).get(1), (double) 2 / 2); Assert.assertEquals(proportions.get(1).get(2), (double) 2 / 2); Assert.assertEquals(proportions.get(2).get(0), (double) 3 / 
4); Assert.assertEquals(proportions.get(2).get(1), (double) 3 / 4); Assert.assertEquals(proportions.get(2).get(2), (double) 1 / 4); } @Test public void testHetAltDepthDistribution() throws Exception { final int N = 6; final double p = 0.5; final List<ArrayList<Double>> distribution = TheoreticalSensitivity.hetAltDepthDistribution(N); for (int n = 0; n < N - 1; n++) { for (int m = 0; m <= n; m++) { final long binomialCoefficient = CombinatoricsUtils.binomialCoefficient(n, m); Assert.assertEquals(distribution.get(n).get(m), binomialCoefficient * Math.pow(p, n)); } } } //test that large-sample sums from the RouletteWheel converge to a normal distribution //using the empirical CDF as measured by proportionsAboveThresholds @Test public void testCentralLimitTheorem() throws Exception { //use a RouletteWheel that gives 0, 1, 2 with equal probability final double[] weights = {1.0, 1.0, 1.0}; final TheoreticalSensitivity.RouletteWheel wheel = new TheoreticalSensitivity.RouletteWheel(weights); final int sampleSize = 1000; final int numSummands = 100; //the mean and standard deviation of a single roulette draw and of many draws final double muSingleDraw = 1.0; final double sigmaSingleDraw = Math.sqrt(2.0 / 3.0); final double mu = numSummands * muSingleDraw; final double sigma = Math.sqrt(numSummands) * sigmaSingleDraw; //test the sums of this deterministic wheel: a sum of n 1's equals n final List<ArrayList<Integer>> sums = wheel.sampleCumulativeSums(numSummands, sampleSize, false); //we only want the last set of sums, those with numSummands summands sums.subList(0, sums.size() - 1).clear(); Assert.assertEquals(sums.size(), 1); //test whether the number of elements within one standard deviation agrees with the normal distribution final List<Double> thresholds = Arrays.asList(mu - sigma, mu + sigma); //sums is 1 x sampleSize, thresholds is a 2-vector, so proportions is 1 x 2 final List<ArrayList<Double>> proportions = TheoreticalSensitivity.proportionsAboveThresholds(sums, 
thresholds); final double empiricalProportionWithinOneSigma = proportions.get(0).get(0) - proportions.get(0).get(1); //the proportion within one sigma for the normal distribution //hence whether any element falls within one sigma is a Bernoulli variable final double theoreticalProportionWithinOneSigma = 0.682689492; final double samplingStandardDeviationOfProportion = Math.sqrt(theoreticalProportionWithinOneSigma * (1 - theoreticalProportionWithinOneSigma) / sampleSize); Assert.assertEquals(empiricalProportionWithinOneSigma, theoreticalProportionWithinOneSigma, 5 * samplingStandardDeviationOfProportion); } //Put it all together for deterministic quality and depths @Test public void testDeterministicQualityAndDepth() throws Exception { final double logOddsThreshold = 0.0; final double tolerance = 0.001; final int sampleSize = 1; //quality is deterministic, hence no sampling error for (int q = 5; q < 10; q++) { for (int n = 5; n < 10; n++) { final double minAltCount = 10 * n * Math.log10(2) / q; //alts required to call when log odds ratio threshold = 1 double expectedResult = 0.0; final List<ArrayList<Double>> altCountProbabilities = TheoreticalSensitivity.hetAltDepthDistribution(n + 1); for (int altCount = n; altCount > minAltCount; altCount--) { expectedResult += altCountProbabilities.get(n).get(altCount); } //deterministic weights that always yield q are 0.0 for 0 through q - 1 and 1.0 for q final double[] qualityDistribution = new double[q + 1]; Arrays.fill(qualityDistribution, 0L); qualityDistribution[qualityDistribution.length - 1] = 1L; final double[] depthDistribution = new double[n + 1]; Arrays.fill(depthDistribution, 0L); depthDistribution[depthDistribution.length - 1] = 1L; final double result = TheoreticalSensitivity.hetSNPSensitivity(depthDistribution, qualityDistribution, sampleSize, logOddsThreshold); Assert.assertEquals(result, expectedResult, tolerance); } } } @Test public void testHetSensDistributions() throws Exception { //Expect theoretical sens 
to be close to .9617 for Solexa-332667 final double tolerance = 0.02; final double expectedResult = .9617; final int maxDepth = 500; final double[] depthDistribution = new double[maxDepth + 1]; final double[] qualityDistribution = new double[50]; final Scanner scanDepth = new Scanner(DEPTH); for (int i = 0; scanDepth.hasNextDouble(); i++) { depthDistribution[i] = scanDepth.nextDouble(); } final Scanner scanBaseQ = new Scanner(BASEQ); for (int j = 0; scanBaseQ.hasNextDouble(); j++) { qualityDistribution[j] = scanBaseQ.nextDouble(); } final int sampleSize = 1_000; final double logOddsThreshold = 3.0; final double result = TheoreticalSensitivity.hetSNPSensitivity(depthDistribution, qualityDistribution, sampleSize, logOddsThreshold); Assert.assertEquals(result, expectedResult, tolerance); } @DataProvider(name = "hetSensDataProvider") public Object[][] hetSensDataProvider() { final File wgsMetricsFile = new File(TEST_DIR, "test_Solexa-332667.wgs_metrics"); final File targetedMetricsFile = new File(TEST_DIR, "test_25103070136.targeted_pcr_metrics"); //These magic numbers come from a separate implementation of the code in R. 
return new Object[][]{ {0.897_342_54, wgsMetricsFile}, {0.956_186_66, targetedMetricsFile} }; } @Test(dataProvider = "hetSensDataProvider") public void testHetSensTargeted(final double expected, final File metricsFile) throws Exception { final double tolerance = 0.000_000_01; final MetricsFile<?, Integer> metrics = new MetricsFile<>(); try (final FileReader metricsFileReader = new FileReader(metricsFile)) { metrics.read(metricsFileReader); } final List<Histogram<Integer>> histograms = metrics.getAllHistograms(); final Histogram<Integer> depthHistogram = histograms.get(0); final Histogram<Integer> qualityHistogram = histograms.get(1); final double[] depthDistribution = TheoreticalSensitivity.normalizeHistogram(depthHistogram); final double[] qualityDistribution = TheoreticalSensitivity.normalizeHistogram(qualityHistogram); final int sampleSize = 1_000; final double logOddsThreshold = 3.0; final double result = TheoreticalSensitivity.hetSNPSensitivity(depthDistribution, qualityDistribution, sampleSize, logOddsThreshold); Assert.assertEquals(result, expected, tolerance); } @DataProvider(name = "TheoreticalSensitivityConstantDepthDataProvider") public Object[][] fractionalAlleleSensDataProvider() { final File wgsMetricsFile = new File(TEST_DIR, "test_Solexa-332667.wgs_metrics"); final File targetedMetricsFile = new File(TEST_DIR, "test_25103070136.targeted_pcr_metrics"); // These magic numbers come from a separate implementation of the code in R. return new Object[][]{ // Expected sensitivity, metrics file, allele fraction, constant depth, sample size. 
{1.00, wgsMetricsFile, .5, 30, 10000, 0.01}, {0.78, targetedMetricsFile, .1, 30, 10000, 0.02}, {0.26, targetedMetricsFile, 0.1, 10, 10000, 0.01} }; } @Test(dataProvider = "TheoreticalSensitivityConstantDepthDataProvider") public void testSensitivityAtConstantDepth(final double expected, final File metricsFile, final double alleleFraction, final int depth, final int sampleSize, final double tolerance) throws Exception { // This tests Theoretical Sensitivity assuming a uniform depth with a distribution of base quality scores. // Because this only tests sensitivity at a constant depth, we use this for testing the code at high depths. final MetricsFile<?, Integer> metrics = new MetricsFile<>(); try (final FileReader metricsFileReader = new FileReader(metricsFile)) { metrics.read(metricsFileReader); } final List<Histogram<Integer>> histograms = metrics.getAllHistograms(); final Histogram<Integer> qualityHistogram = histograms.get(1); // We ensure that even using different random seeds we converge to roughly the same value. for (int i = 0; i < 3; i++) { double result = TheoreticalSensitivity.sensitivityAtConstantDepth(depth, qualityHistogram, 3, sampleSize, alleleFraction, i); Assert.assertEquals(result, expected, tolerance); } } @DataProvider(name = "TheoreticalSensitivityDataProvider") public Object[][] arbFracSensDataProvider() { final File wgsMetricsFile = new File(TEST_DIR, "test_Solexa-332667.wgs_metrics"); // This test acts primarily as an integration test. The sample sizes // are not quite large enough to converge properly, but is used for the purpose of // keeping the compute time of the tests short. 
return new Object[][]{ {0.90, wgsMetricsFile, 0.5, 400}, {0.78, wgsMetricsFile, 0.3, 400}, {0.29, wgsMetricsFile, 0.1, 500}, {0.08, wgsMetricsFile, 0.05, 500}, }; } @Test(dataProvider = "TheoreticalSensitivityDataProvider") public void testSensitivity(final double expected, final File metricsFile, final double alleleFraction, final int sampleSize) throws Exception { // This tests Theoretical Sensitivity using distributions on both base quality scores // and the depth histogram. // We use a pretty forgiving tolerance here because for these tests // we are not using large enough sample sizes to converge. final double tolerance = 0.02; final MetricsFile<?, Integer> metrics = new MetricsFile<>(); try (final FileReader metricsFileReader = new FileReader(metricsFile)) { metrics.read(metricsFileReader); } final List<Histogram<Integer>> histograms = metrics.getAllHistograms(); final Histogram<Integer> depthHistogram = histograms.get(0); final Histogram<Integer> qualityHistogram = histograms.get(1); final double result = TheoreticalSensitivity.theoreticalSensitivity(depthHistogram, qualityHistogram, sampleSize, 3, alleleFraction); Assert.assertEquals(result, expected, tolerance); } @DataProvider(name = "equivalanceHetVsArbitrary") public Object[][] equivalenceHetVsFull() { final File wgsMetricsFile = new File(TEST_DIR, "test_Solexa-332667.wgs_metrics"); final File targetedMetricsFile = new File(TEST_DIR, "test_25103070136.targeted_pcr_metrics"); return new Object[][]{ // The sample sizes chosen here for these tests are smaller than what would normally be used // in order to keep the test time low. It should be noted that for larger sample sizes // the values converge. 
{wgsMetricsFile, 0.02, 500}, {targetedMetricsFile, 0.01, 500} }; } @Test(dataProvider = "equivalanceHetVsArbitrary") public void testHetVsArbitrary(final File metricsFile, final double tolerance, final int sampleSize) throws Exception { // This test compares Theoretical Sensitivity for arbitrary allele fractions with the theoretical het sensitivity // model. Since allele fraction of 0.5 is equivalent to a het, these should provide the same answer. final MetricsFile<?, Integer> metrics = new MetricsFile<>(); try (final FileReader metricsFileReader = new FileReader(metricsFile)) { metrics.read(metricsFileReader); } final List<Histogram<Integer>> histograms = metrics.getAllHistograms(); final Histogram<Integer> depthHistogram = histograms.get(0); final Histogram<Integer> qualityHistogram = histograms.get(1); final double[] qualityDistribution = TheoreticalSensitivity.normalizeHistogram(qualityHistogram); final double[] depthDistribution = TheoreticalSensitivity.normalizeHistogram(depthHistogram); final double resultFromTS = TheoreticalSensitivity.theoreticalSensitivity(depthHistogram, qualityHistogram, sampleSize, 3, 0.5); final double resultFromTHS = TheoreticalSensitivity.hetSNPSensitivity(depthDistribution, qualityDistribution, sampleSize, 3); Assert.assertEquals(resultFromTS, resultFromTHS, tolerance); } @DataProvider(name = "callingThresholdDataProvider") public Object[][] callingThreshold() { return new Object[][]{ // These values were tested with an independent implementation in R. 
// Test a transition due to a change in the logOddsThreshold {100, 10, 10 * 20, .1, 5.8, true}, {100, 10, 10 * 20, .1, 5.9, false}, // Test a transition due to change in average base quality from 20 to 21 {100, 10, 10 * 21, .1, 6.2, true}, {100, 10, 10 * 20, .1, 6.2, false}, // Test a transition due to change in total depth {115, 10, 10 * 21, .1, 6.2, false}, {114, 10, 10 * 21, .1, 6.2, true} }; } @Test(dataProvider = "callingThresholdDataProvider") public void testCallingThreshold(final int totalDepth, final int altDepth, final double sumOfAltQualities, final double alleleFraction, final double logOddsThreshold, final boolean expectedCall) { Assert.assertEquals(TheoreticalSensitivity.isCalled(totalDepth, altDepth, sumOfAltQualities, alleleFraction, logOddsThreshold), expectedCall); } @DataProvider(name = "sumOfGaussiansDataProvider") public Object[][] sumOfGaussians() { final File wgsMetricsFile = new File(TEST_DIR, "test_Solexa-332667.wgs_metrics"); final File targetedMetricsFile = new File(TEST_DIR, "test_25103070136.targeted_pcr_metrics"); // When we sum more base qualities from a particular distribution, it should look increasingly Gaussian. 
return new Object[][]{ {wgsMetricsFile, 500, 0.03}, {wgsMetricsFile, 20, 0.05}, {wgsMetricsFile, 10, 0.10}, {targetedMetricsFile, 500, 0.03}, {targetedMetricsFile, 20, 0.05}, {targetedMetricsFile, 10, 0.10} }; } @Test(dataProvider = "sumOfGaussiansDataProvider") public void testDrawSumOfQScores(final File metricsFile, final int altDepth, final double tolerance) throws Exception { final MetricsFile<TheoreticalSensitivityMetrics, Integer> metrics = new MetricsFile<>(); try (final FileReader metricsFileReader = new FileReader(metricsFile)) { metrics.read(metricsFileReader); } final List<Histogram<Integer>> histograms = metrics.getAllHistograms(); final Histogram<Integer> qualityHistogram = histograms.get(1); final TheoreticalSensitivity.RouletteWheel qualityRW = new TheoreticalSensitivity.RouletteWheel(TheoreticalSensitivity.trimDistribution(TheoreticalSensitivity.normalizeHistogram(qualityHistogram))); final Random randomNumberGenerator = new Random(51); // Calculate mean and deviation of quality score distribution to enable Gaussian sampling below final double averageQuality = qualityHistogram.getMean(); final double standardDeviationQuality = qualityHistogram.getStandardDeviation(); for (int k = 0; k < 1; k++) { int sumOfQualitiesFull = IntStream.range(0, altDepth).map(n -> qualityRW.draw()).sum(); int sumOfQualities = TheoreticalSensitivity.drawSumOfQScores(altDepth, averageQuality, standardDeviationQuality, randomNumberGenerator.nextGaussian()); Assert.assertEquals(sumOfQualitiesFull, sumOfQualities, sumOfQualitiesFull * tolerance); } } @DataProvider(name = "trimDistributionDataProvider") public Object[][] trimDistributions() { return new Object[][]{ {new double[]{}, new double[]{}}, {new double[]{0.0}, new double[]{}}, {new double[]{0.0, 0.0}, new double[]{}}, {new double[]{1.0}, new double[]{1.0}}, {new double[]{1.0, 0.0}, new double[]{1.0}}, {new double[]{0.0, 0.0, 1.0}, new double[]{0.0, 0.0, 1.0}}, {new double[]{0.0, 0.0, 1.0, 0.0, 0.0}, new double[]{0.0, 
0.0, 1.0}} }; } @Test(dataProvider = "trimDistributionDataProvider") public void testTrimDistributions(final double[] distributionToTrim, final double[] expected) { Assert.assertEquals(expected, TheoreticalSensitivity.trimDistribution(distributionToTrim)); } }
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.sql.planner; import com.facebook.presto.Session; import com.facebook.presto.metadata.Metadata; import com.facebook.presto.metadata.TableHandle; import com.facebook.presto.spi.ColumnHandle; import com.facebook.presto.spi.predicate.TupleDomain; import com.facebook.presto.spi.type.Type; import com.facebook.presto.sql.ExpressionUtils; import com.facebook.presto.sql.analyzer.Analysis; import com.facebook.presto.sql.analyzer.Field; import com.facebook.presto.sql.analyzer.RelationId; import com.facebook.presto.sql.analyzer.RelationType; import com.facebook.presto.sql.analyzer.Scope; import com.facebook.presto.sql.planner.plan.AggregationNode; import com.facebook.presto.sql.planner.plan.Assignments; import com.facebook.presto.sql.planner.plan.ExceptNode; import com.facebook.presto.sql.planner.plan.FilterNode; import com.facebook.presto.sql.planner.plan.IntersectNode; import com.facebook.presto.sql.planner.plan.JoinNode; import com.facebook.presto.sql.planner.plan.PlanNode; import com.facebook.presto.sql.planner.plan.ProjectNode; import com.facebook.presto.sql.planner.plan.SampleNode; import com.facebook.presto.sql.planner.plan.TableScanNode; import com.facebook.presto.sql.planner.plan.UnionNode; import com.facebook.presto.sql.planner.plan.UnnestNode; import com.facebook.presto.sql.planner.plan.ValuesNode; import com.facebook.presto.sql.tree.AliasedRelation; import com.facebook.presto.sql.tree.Cast; import 
com.facebook.presto.sql.tree.CoalesceExpression; import com.facebook.presto.sql.tree.ComparisonExpression; import com.facebook.presto.sql.tree.ComparisonExpressionType; import com.facebook.presto.sql.tree.DefaultTraversalVisitor; import com.facebook.presto.sql.tree.Except; import com.facebook.presto.sql.tree.Expression; import com.facebook.presto.sql.tree.ExpressionTreeRewriter; import com.facebook.presto.sql.tree.InPredicate; import com.facebook.presto.sql.tree.Intersect; import com.facebook.presto.sql.tree.Join; import com.facebook.presto.sql.tree.JoinUsing; import com.facebook.presto.sql.tree.LambdaArgumentDeclaration; import com.facebook.presto.sql.tree.LongLiteral; import com.facebook.presto.sql.tree.NodeRef; import com.facebook.presto.sql.tree.QualifiedName; import com.facebook.presto.sql.tree.Query; import com.facebook.presto.sql.tree.QuerySpecification; import com.facebook.presto.sql.tree.Relation; import com.facebook.presto.sql.tree.Row; import com.facebook.presto.sql.tree.SampledRelation; import com.facebook.presto.sql.tree.SetOperation; import com.facebook.presto.sql.tree.SymbolReference; import com.facebook.presto.sql.tree.Table; import com.facebook.presto.sql.tree.TableSubquery; import com.facebook.presto.sql.tree.Union; import com.facebook.presto.sql.tree.Unnest; import com.facebook.presto.sql.tree.Values; import com.facebook.presto.type.ArrayType; import com.facebook.presto.type.MapType; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableListMultimap; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.ListMultimap; import com.google.common.collect.UnmodifiableIterator; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import static com.facebook.presto.sql.analyzer.SemanticExceptions.notSupportedException; import static 
com.facebook.presto.sql.planner.ExpressionInterpreter.evaluateConstantExpression; import static com.facebook.presto.sql.tree.Join.Type.INNER; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkState; import static com.google.common.base.Verify.verify; import static com.google.common.collect.ImmutableList.toImmutableList; import static java.util.Objects.requireNonNull; class RelationPlanner extends DefaultTraversalVisitor<RelationPlan, Void> { private final Analysis analysis; private final SymbolAllocator symbolAllocator; private final PlanNodeIdAllocator idAllocator; private final Map<NodeRef<LambdaArgumentDeclaration>, Symbol> lambdaDeclarationToSymbolMap; private final Metadata metadata; private final Session session; private final SubqueryPlanner subqueryPlanner; RelationPlanner( Analysis analysis, SymbolAllocator symbolAllocator, PlanNodeIdAllocator idAllocator, Map<NodeRef<LambdaArgumentDeclaration>, Symbol> lambdaDeclarationToSymbolMap, Metadata metadata, Session session) { requireNonNull(analysis, "analysis is null"); requireNonNull(symbolAllocator, "symbolAllocator is null"); requireNonNull(idAllocator, "idAllocator is null"); requireNonNull(lambdaDeclarationToSymbolMap, "lambdaDeclarationToSymbolMap is null"); requireNonNull(metadata, "metadata is null"); requireNonNull(session, "session is null"); this.analysis = analysis; this.symbolAllocator = symbolAllocator; this.idAllocator = idAllocator; this.lambdaDeclarationToSymbolMap = lambdaDeclarationToSymbolMap; this.metadata = metadata; this.session = session; this.subqueryPlanner = new SubqueryPlanner(analysis, symbolAllocator, idAllocator, lambdaDeclarationToSymbolMap, metadata, session, analysis.getParameters()); } @Override protected RelationPlan visitTable(Table node, Void context) { Query namedQuery = analysis.getNamedQuery(node); Scope scope = analysis.getScope(node); if (namedQuery != null) { RelationPlan subPlan = 
process(namedQuery, null); // Add implicit coercions if view query produces types that don't match the declared output types // of the view (e.g., if the underlying tables referenced by the view changed) Type[] types = scope.getRelationType().getAllFields().stream().map(Field::getType).toArray(Type[]::new); RelationPlan withCoercions = addCoercions(subPlan, types); return new RelationPlan(withCoercions.getRoot(), scope, withCoercions.getFieldMappings()); } TableHandle handle = analysis.getTableHandle(node); ImmutableList.Builder<Symbol> outputSymbolsBuilder = ImmutableList.builder(); ImmutableMap.Builder<Symbol, ColumnHandle> columns = ImmutableMap.builder(); for (Field field : scope.getRelationType().getAllFields()) { Symbol symbol = symbolAllocator.newSymbol(field.getName().get(), field.getType()); outputSymbolsBuilder.add(symbol); columns.put(symbol, analysis.getColumn(field)); } List<Symbol> outputSymbols = outputSymbolsBuilder.build(); PlanNode root = new TableScanNode(idAllocator.getNextId(), handle, outputSymbols, columns.build(), Optional.empty(), TupleDomain.all(), null); return new RelationPlan(root, scope, outputSymbols); } @Override protected RelationPlan visitAliasedRelation(AliasedRelation node, Void context) { RelationPlan subPlan = process(node.getRelation(), context); return new RelationPlan(subPlan.getRoot(), analysis.getScope(node), subPlan.getFieldMappings()); } @Override protected RelationPlan visitSampledRelation(SampledRelation node, Void context) { RelationPlan subPlan = process(node.getRelation(), context); double ratio = analysis.getSampleRatio(node); PlanNode planNode = new SampleNode(idAllocator.getNextId(), subPlan.getRoot(), ratio, SampleNode.Type.fromType(node.getType())); return new RelationPlan(planNode, analysis.getScope(node), subPlan.getFieldMappings()); } @Override protected RelationPlan visitJoin(Join node, Void context) { // TODO: translate the RIGHT join into a mirrored LEFT join when we refactor (@martint) RelationPlan 
leftPlan = process(node.getLeft(), context); // Convert CROSS JOIN UNNEST to an UnnestNode if (node.getRight() instanceof Unnest || (node.getRight() instanceof AliasedRelation && ((AliasedRelation) node.getRight()).getRelation() instanceof Unnest)) { Unnest unnest; if (node.getRight() instanceof AliasedRelation) { unnest = (Unnest) ((AliasedRelation) node.getRight()).getRelation(); } else { unnest = (Unnest) node.getRight(); } if (node.getType() != Join.Type.CROSS && node.getType() != Join.Type.IMPLICIT) { throw notSupportedException(unnest, "UNNEST on other than the right side of CROSS JOIN"); } return planCrossJoinUnnest(leftPlan, node, unnest); } RelationPlan rightPlan = process(node.getRight(), context); PlanBuilder leftPlanBuilder = initializePlanBuilder(leftPlan); PlanBuilder rightPlanBuilder = initializePlanBuilder(rightPlan); // NOTE: symbols must be in the same order as the outputDescriptor List<Symbol> outputSymbols = ImmutableList.<Symbol>builder() .addAll(leftPlan.getFieldMappings()) .addAll(rightPlan.getFieldMappings()) .build(); ImmutableList.Builder<JoinNode.EquiJoinClause> equiClauses = ImmutableList.builder(); List<Expression> complexJoinExpressions = new ArrayList<>(); List<Expression> postInnerJoinConditions = new ArrayList<>(); if (node.getType() != Join.Type.CROSS && node.getType() != Join.Type.IMPLICIT) { Expression criteria = analysis.getJoinCriteria(node); RelationType left = analysis.getOutputDescriptor(node.getLeft()); RelationType right = analysis.getOutputDescriptor(node.getRight()); List<Expression> leftComparisonExpressions = new ArrayList<>(); List<Expression> rightComparisonExpressions = new ArrayList<>(); List<ComparisonExpressionType> joinConditionComparisonTypes = new ArrayList<>(); for (Expression conjunct : ExpressionUtils.extractConjuncts(criteria)) { conjunct = ExpressionUtils.normalize(conjunct); if (!isEqualComparisonExpression(conjunct) && node.getType() != INNER) { complexJoinExpressions.add(conjunct); continue; } 
Set<QualifiedName> dependencies = DependencyExtractor.extractNames(conjunct, analysis.getColumnReferences()); boolean isJoinUsing = node.getCriteria().filter(JoinUsing.class::isInstance).isPresent(); if (!isJoinUsing && (dependencies.stream().allMatch(left::canResolve) || dependencies.stream().allMatch(right::canResolve))) { // If the conjunct can be evaluated entirely with the inputs on either side of the join, add // it to the list complex expressions and let the optimizers figure out how to push it down later. // Due to legacy reasons, the expression for "join using" looks like "x = x", which (incorrectly) // appears to fit the condition we're after. So we skip them. complexJoinExpressions.add(conjunct); } else if (conjunct instanceof ComparisonExpression) { Expression firstExpression = ((ComparisonExpression) conjunct).getLeft(); Expression secondExpression = ((ComparisonExpression) conjunct).getRight(); ComparisonExpressionType comparisonType = ((ComparisonExpression) conjunct).getType(); Set<QualifiedName> firstDependencies = DependencyExtractor.extractNames(firstExpression, analysis.getColumnReferences()); Set<QualifiedName> secondDependencies = DependencyExtractor.extractNames(secondExpression, analysis.getColumnReferences()); if (firstDependencies.stream().allMatch(left::canResolve) && secondDependencies.stream().allMatch(right::canResolve)) { leftComparisonExpressions.add(firstExpression); rightComparisonExpressions.add(secondExpression); joinConditionComparisonTypes.add(comparisonType); } else if (firstDependencies.stream().allMatch(right::canResolve) && secondDependencies.stream().allMatch(left::canResolve)) { leftComparisonExpressions.add(secondExpression); rightComparisonExpressions.add(firstExpression); joinConditionComparisonTypes.add(comparisonType.flip()); } else { // the case when we mix symbols from both left and right join side on either side of condition. 
complexJoinExpressions.add(conjunct); } } else { complexJoinExpressions.add(conjunct); } } leftPlanBuilder = subqueryPlanner.handleSubqueries(leftPlanBuilder, leftComparisonExpressions, node); rightPlanBuilder = subqueryPlanner.handleSubqueries(rightPlanBuilder, rightComparisonExpressions, node); // Add projections for join criteria leftPlanBuilder = leftPlanBuilder.appendProjections(leftComparisonExpressions, symbolAllocator, idAllocator); rightPlanBuilder = rightPlanBuilder.appendProjections(rightComparisonExpressions, symbolAllocator, idAllocator); for (int i = 0; i < leftComparisonExpressions.size(); i++) { if (joinConditionComparisonTypes.get(i) == ComparisonExpressionType.EQUAL) { Symbol leftSymbol = leftPlanBuilder.translate(leftComparisonExpressions.get(i)); Symbol rightSymbol = rightPlanBuilder.translate(rightComparisonExpressions.get(i)); equiClauses.add(new JoinNode.EquiJoinClause(leftSymbol, rightSymbol)); } else { Expression leftExpression = leftPlanBuilder.rewrite(leftComparisonExpressions.get(i)); Expression rightExpression = rightPlanBuilder.rewrite(rightComparisonExpressions.get(i)); postInnerJoinConditions.add(new ComparisonExpression(joinConditionComparisonTypes.get(i), leftExpression, rightExpression)); } } } PlanNode root = new JoinNode(idAllocator.getNextId(), JoinNode.Type.typeConvert(node.getType()), leftPlanBuilder.getRoot(), rightPlanBuilder.getRoot(), equiClauses.build(), ImmutableList.<Symbol>builder() .addAll(leftPlanBuilder.getRoot().getOutputSymbols()) .addAll(rightPlanBuilder.getRoot().getOutputSymbols()) .build(), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty()); if (node.getType() != INNER) { for (Expression complexExpression : complexJoinExpressions) { Set<InPredicate> inPredicates = subqueryPlanner.collectInPredicateSubqueries(complexExpression, node); if (!inPredicates.isEmpty()) { InPredicate inPredicate = Iterables.getLast(inPredicates); throw notSupportedException(inPredicate, "IN with subquery 
predicate in join condition"); } } // subqueries can be applied only to one side of join - left side is selected in arbitrary way leftPlanBuilder = subqueryPlanner.handleUncorrelatedSubqueries(leftPlanBuilder, complexJoinExpressions, node); } RelationPlan intermediateRootRelationPlan = new RelationPlan(root, analysis.getScope(node), outputSymbols); TranslationMap translationMap = new TranslationMap(intermediateRootRelationPlan, analysis, lambdaDeclarationToSymbolMap); translationMap.setFieldMappings(outputSymbols); translationMap.putExpressionMappingsFrom(leftPlanBuilder.getTranslations()); translationMap.putExpressionMappingsFrom(rightPlanBuilder.getTranslations()); if (node.getType() != INNER && !complexJoinExpressions.isEmpty()) { Expression joinedFilterCondition = ExpressionUtils.and(complexJoinExpressions); joinedFilterCondition = ExpressionTreeRewriter.rewriteWith(new ParameterRewriter(analysis.getParameters(), analysis), joinedFilterCondition); Expression rewrittenFilterCondition = translationMap.rewrite(joinedFilterCondition); root = new JoinNode(idAllocator.getNextId(), JoinNode.Type.typeConvert(node.getType()), leftPlanBuilder.getRoot(), rightPlanBuilder.getRoot(), equiClauses.build(), ImmutableList.<Symbol>builder() .addAll(leftPlanBuilder.getRoot().getOutputSymbols()) .addAll(rightPlanBuilder.getRoot().getOutputSymbols()) .build(), Optional.of(rewrittenFilterCondition), Optional.empty(), Optional.empty(), Optional.empty()); } if (node.getType() == INNER) { // rewrite all the other conditions using output symbols from left + right plan node. 
PlanBuilder rootPlanBuilder = new PlanBuilder(translationMap, root, analysis.getParameters()); rootPlanBuilder = subqueryPlanner.handleSubqueries(rootPlanBuilder, complexJoinExpressions, node); for (Expression expression : complexJoinExpressions) { expression = ExpressionTreeRewriter.rewriteWith(new ParameterRewriter(analysis.getParameters(), analysis), expression); postInnerJoinConditions.add(rootPlanBuilder.rewrite(expression)); } root = rootPlanBuilder.getRoot(); Expression postInnerJoinCriteria; if (!postInnerJoinConditions.isEmpty()) { postInnerJoinCriteria = ExpressionUtils.and(postInnerJoinConditions); root = new FilterNode(idAllocator.getNextId(), root, postInnerJoinCriteria); } } return new RelationPlan(root, analysis.getScope(node), outputSymbols); } private static boolean isEqualComparisonExpression(Expression conjunct) { return conjunct instanceof ComparisonExpression && ((ComparisonExpression) conjunct).getType() == ComparisonExpressionType.EQUAL; } private RelationPlan planCrossJoinUnnest(RelationPlan leftPlan, Join joinNode, Unnest node) { RelationType unnestOutputDescriptor = analysis.getOutputDescriptor(node); // Create symbols for the result of unnesting ImmutableList.Builder<Symbol> unnestedSymbolsBuilder = ImmutableList.builder(); for (Field field : unnestOutputDescriptor.getVisibleFields()) { Symbol symbol = symbolAllocator.newSymbol(field); unnestedSymbolsBuilder.add(symbol); } ImmutableList<Symbol> unnestedSymbols = unnestedSymbolsBuilder.build(); // Add a projection for all the unnest arguments PlanBuilder planBuilder = initializePlanBuilder(leftPlan); planBuilder = planBuilder.appendProjections(node.getExpressions(), symbolAllocator, idAllocator); TranslationMap translations = planBuilder.getTranslations(); ProjectNode projectNode = (ProjectNode) planBuilder.getRoot(); ImmutableMap.Builder<Symbol, List<Symbol>> unnestSymbols = ImmutableMap.builder(); UnmodifiableIterator<Symbol> unnestedSymbolsIterator = unnestedSymbols.iterator(); for 
(Expression expression : node.getExpressions()) { Type type = analysis.getType(expression); Symbol inputSymbol = translations.get(expression); if (type instanceof ArrayType) { unnestSymbols.put(inputSymbol, ImmutableList.of(unnestedSymbolsIterator.next())); } else if (type instanceof MapType) { unnestSymbols.put(inputSymbol, ImmutableList.of(unnestedSymbolsIterator.next(), unnestedSymbolsIterator.next())); } else { throw new IllegalArgumentException("Unsupported type for UNNEST: " + type); } } Optional<Symbol> ordinalitySymbol = node.isWithOrdinality() ? Optional.of(unnestedSymbolsIterator.next()) : Optional.empty(); checkState(!unnestedSymbolsIterator.hasNext(), "Not all output symbols were matched with input symbols"); UnnestNode unnestNode = new UnnestNode(idAllocator.getNextId(), projectNode, leftPlan.getFieldMappings(), unnestSymbols.build(), ordinalitySymbol); return new RelationPlan(unnestNode, analysis.getScope(joinNode), unnestNode.getOutputSymbols()); } private static Expression oneIfNull(Optional<Symbol> symbol) { if (symbol.isPresent()) { return new CoalesceExpression(symbol.get().toSymbolReference(), new LongLiteral("1")); } else { return new LongLiteral("1"); } } @Override protected RelationPlan visitTableSubquery(TableSubquery node, Void context) { return process(node.getQuery(), context); } @Override protected RelationPlan visitQuery(Query node, Void context) { return new QueryPlanner(analysis, symbolAllocator, idAllocator, lambdaDeclarationToSymbolMap, metadata, session) .plan(node); } @Override protected RelationPlan visitQuerySpecification(QuerySpecification node, Void context) { return new QueryPlanner(analysis, symbolAllocator, idAllocator, lambdaDeclarationToSymbolMap, metadata, session) .plan(node); } @Override protected RelationPlan visitValues(Values node, Void context) { Scope scope = analysis.getScope(node); ImmutableList.Builder<Symbol> outputSymbolsBuilder = ImmutableList.builder(); for (Field field : 
scope.getRelationType().getVisibleFields()) { Symbol symbol = symbolAllocator.newSymbol(field); outputSymbolsBuilder.add(symbol); } ImmutableList.Builder<List<Expression>> rows = ImmutableList.builder(); for (Expression row : node.getRows()) { ImmutableList.Builder<Expression> values = ImmutableList.builder(); if (row instanceof Row) { List<Expression> items = ((Row) row).getItems(); for (int i = 0; i < items.size(); i++) { Expression expression = items.get(i); expression = ExpressionTreeRewriter.rewriteWith(new ParameterRewriter(analysis.getParameters(), analysis), expression); Object constantValue = evaluateConstantExpression(expression, analysis.getCoercions(), metadata, session, analysis.getColumnReferences(), analysis.getParameters()); values.add(LiteralInterpreter.toExpression(constantValue, scope.getRelationType().getFieldByIndex(i).getType())); } } else { row = ExpressionTreeRewriter.rewriteWith(new ParameterRewriter(analysis.getParameters(), analysis), row); Object constantValue = evaluateConstantExpression(row, analysis.getCoercions(), metadata, session, analysis.getColumnReferences(), analysis.getParameters()); values.add(LiteralInterpreter.toExpression(constantValue, scope.getRelationType().getFieldByIndex(0).getType())); } rows.add(values.build()); } ValuesNode valuesNode = new ValuesNode(idAllocator.getNextId(), outputSymbolsBuilder.build(), rows.build()); return new RelationPlan(valuesNode, scope, outputSymbolsBuilder.build()); } @Override protected RelationPlan visitUnnest(Unnest node, Void context) { Scope scope = analysis.getScope(node); ImmutableList.Builder<Symbol> outputSymbolsBuilder = ImmutableList.builder(); for (Field field : scope.getRelationType().getVisibleFields()) { Symbol symbol = symbolAllocator.newSymbol(field); outputSymbolsBuilder.add(symbol); } List<Symbol> unnestedSymbols = outputSymbolsBuilder.build(); // If we got here, then we must be unnesting a constant, and not be in a join (where there could be column references) 
ImmutableList.Builder<Symbol> argumentSymbols = ImmutableList.builder(); ImmutableList.Builder<Expression> values = ImmutableList.builder(); ImmutableMap.Builder<Symbol, List<Symbol>> unnestSymbols = ImmutableMap.builder(); Iterator<Symbol> unnestedSymbolsIterator = unnestedSymbols.iterator(); for (Expression expression : node.getExpressions()) { expression = ExpressionTreeRewriter.rewriteWith(new ParameterRewriter(analysis.getParameters(), analysis), expression); Object constantValue = evaluateConstantExpression(expression, analysis.getCoercions(), metadata, session, analysis.getColumnReferences(), analysis.getParameters()); Type type = analysis.getType(expression); values.add(LiteralInterpreter.toExpression(constantValue, type)); Symbol inputSymbol = symbolAllocator.newSymbol(expression, type); argumentSymbols.add(inputSymbol); if (type instanceof ArrayType) { unnestSymbols.put(inputSymbol, ImmutableList.of(unnestedSymbolsIterator.next())); } else if (type instanceof MapType) { unnestSymbols.put(inputSymbol, ImmutableList.of(unnestedSymbolsIterator.next(), unnestedSymbolsIterator.next())); } else { throw new IllegalArgumentException("Unsupported type for UNNEST: " + type); } } Optional<Symbol> ordinalitySymbol = node.isWithOrdinality() ? 
Optional.of(unnestedSymbolsIterator.next()) : Optional.empty(); checkState(!unnestedSymbolsIterator.hasNext(), "Not all output symbols were matched with input symbols"); ValuesNode valuesNode = new ValuesNode(idAllocator.getNextId(), argumentSymbols.build(), ImmutableList.of(values.build())); UnnestNode unnestNode = new UnnestNode(idAllocator.getNextId(), valuesNode, ImmutableList.of(), unnestSymbols.build(), ordinalitySymbol); return new RelationPlan(unnestNode, scope, unnestedSymbols); } private RelationPlan processAndCoerceIfNecessary(Relation node, Void context) { Type[] coerceToTypes = analysis.getRelationCoercion(node); RelationPlan plan = this.process(node, context); if (coerceToTypes == null) { return plan; } return addCoercions(plan, coerceToTypes); } private RelationPlan addCoercions(RelationPlan plan, Type[] targetColumnTypes) { List<Symbol> oldSymbols = plan.getFieldMappings(); RelationType oldDescriptor = plan.getDescriptor().withOnlyVisibleFields(); verify(targetColumnTypes.length == oldSymbols.size()); ImmutableList.Builder<Symbol> newSymbols = new ImmutableList.Builder<>(); Field[] newFields = new Field[targetColumnTypes.length]; Assignments.Builder assignments = Assignments.builder(); for (int i = 0; i < targetColumnTypes.length; i++) { Symbol inputSymbol = oldSymbols.get(i); Type inputType = symbolAllocator.getTypes().get(inputSymbol); Type outputType = targetColumnTypes[i]; if (outputType != inputType) { Expression cast = new Cast(inputSymbol.toSymbolReference(), outputType.getTypeSignature().toString()); Symbol outputSymbol = symbolAllocator.newSymbol(cast, outputType); assignments.put(outputSymbol, cast); newSymbols.add(outputSymbol); } else { SymbolReference symbolReference = inputSymbol.toSymbolReference(); Symbol outputSymbol = symbolAllocator.newSymbol(symbolReference, outputType); assignments.put(outputSymbol, symbolReference); newSymbols.add(outputSymbol); } Field oldField = oldDescriptor.getFieldByIndex(i); newFields[i] = new Field( 
oldField.getRelationAlias(), oldField.getName(), targetColumnTypes[i], oldField.isHidden(), oldField.getOriginTable(), oldField.isAliased()); } ProjectNode projectNode = new ProjectNode(idAllocator.getNextId(), plan.getRoot(), assignments.build()); return new RelationPlan(projectNode, Scope.builder().withRelationType(RelationId.anonymous(), new RelationType(newFields)).build(), newSymbols.build()); } @Override protected RelationPlan visitUnion(Union node, Void context) { checkArgument(!node.getRelations().isEmpty(), "No relations specified for UNION"); SetOperationPlan setOperationPlan = process(node); PlanNode planNode = new UnionNode(idAllocator.getNextId(), setOperationPlan.getSources(), setOperationPlan.getSymbolMapping(), ImmutableList.copyOf(setOperationPlan.getSymbolMapping().keySet())); if (node.isDistinct()) { planNode = distinct(planNode); } return new RelationPlan(planNode, analysis.getScope(node), planNode.getOutputSymbols()); } @Override protected RelationPlan visitIntersect(Intersect node, Void context) { checkArgument(!node.getRelations().isEmpty(), "No relations specified for INTERSECT"); SetOperationPlan setOperationPlan = process(node); PlanNode planNode = new IntersectNode(idAllocator.getNextId(), setOperationPlan.getSources(), setOperationPlan.getSymbolMapping(), ImmutableList.copyOf(setOperationPlan.getSymbolMapping().keySet())); return new RelationPlan(planNode, analysis.getScope(node), planNode.getOutputSymbols()); } @Override protected RelationPlan visitExcept(Except node, Void context) { checkArgument(!node.getRelations().isEmpty(), "No relations specified for EXCEPT"); SetOperationPlan setOperationPlan = process(node); PlanNode planNode = new ExceptNode(idAllocator.getNextId(), setOperationPlan.getSources(), setOperationPlan.getSymbolMapping(), ImmutableList.copyOf(setOperationPlan.getSymbolMapping().keySet())); return new RelationPlan(planNode, analysis.getScope(node), planNode.getOutputSymbols()); } private SetOperationPlan 
process(SetOperation node) { List<Symbol> outputs = null; ImmutableList.Builder<PlanNode> sources = ImmutableList.builder(); ImmutableListMultimap.Builder<Symbol, Symbol> symbolMapping = ImmutableListMultimap.builder(); List<RelationPlan> subPlans = node.getRelations().stream() .map(relation -> processAndCoerceIfNecessary(relation, null)) .collect(toImmutableList()); for (RelationPlan relationPlan : subPlans) { List<Symbol> childOutputSymbols = relationPlan.getFieldMappings(); if (outputs == null) { // Use the first Relation to derive output symbol names RelationType descriptor = relationPlan.getDescriptor(); ImmutableList.Builder<Symbol> outputSymbolBuilder = ImmutableList.builder(); for (Field field : descriptor.getVisibleFields()) { int fieldIndex = descriptor.indexOf(field); Symbol symbol = childOutputSymbols.get(fieldIndex); outputSymbolBuilder.add(symbolAllocator.newSymbol(symbol.getName(), symbolAllocator.getTypes().get(symbol))); } outputs = outputSymbolBuilder.build(); } RelationType descriptor = relationPlan.getDescriptor(); checkArgument(descriptor.getVisibleFieldCount() == outputs.size(), "Expected relation to have %s symbols but has %s symbols", descriptor.getVisibleFieldCount(), outputs.size()); int fieldId = 0; for (Field field : descriptor.getVisibleFields()) { int fieldIndex = descriptor.indexOf(field); symbolMapping.put(outputs.get(fieldId), childOutputSymbols.get(fieldIndex)); fieldId++; } sources.add(relationPlan.getRoot()); } return new SetOperationPlan(sources.build(), symbolMapping.build()); } private PlanBuilder initializePlanBuilder(RelationPlan relationPlan) { TranslationMap translations = new TranslationMap(relationPlan, analysis, lambdaDeclarationToSymbolMap); // Make field->symbol mapping from underlying relation plan available for translations // This makes it possible to rewrite FieldOrExpressions that reference fields from the underlying tuple directly translations.setFieldMappings(relationPlan.getFieldMappings()); return new 
PlanBuilder(translations, relationPlan.getRoot(), analysis.getParameters()); } private PlanNode distinct(PlanNode node) { return new AggregationNode(idAllocator.getNextId(), node, ImmutableMap.of(), ImmutableList.of(node.getOutputSymbols()), AggregationNode.Step.SINGLE, Optional.empty(), Optional.empty()); } private static class SetOperationPlan { private final List<PlanNode> sources; private final ListMultimap<Symbol, Symbol> symbolMapping; private SetOperationPlan(List<PlanNode> sources, ListMultimap<Symbol, Symbol> symbolMapping) { this.sources = sources; this.symbolMapping = symbolMapping; } public List<PlanNode> getSources() { return sources; } public ListMultimap<Symbol, Symbol> getSymbolMapping() { return symbolMapping; } } }
package com.kii.demo.wearable; import android.animation.Animator; import android.animation.AnimatorListenerAdapter; import android.annotation.TargetApi; import android.app.Activity; import android.content.Context; import android.content.Intent; import android.content.SharedPreferences; import android.content.pm.PackageManager; import android.os.AsyncTask; import android.os.Build; import android.os.Bundle; import android.text.TextUtils; import android.util.Log; import android.view.View; import android.view.View.OnClickListener; import android.widget.Button; import android.widget.CheckBox; import android.widget.EditText; import com.kii.cloud.storage.Kii; import com.kii.cloud.storage.KiiUser; /** * A login screen that offers login via 4 digit pin. */ public class AuthActivity extends Activity { private static final String TAG = AuthActivity.class.getName(); /** * Keep track of the login task to ensure we can cancel it if requested. */ private UserSignInTask mSignInTask = null; private UserRegistrationTask mRegisterTask = null; private UserTokenSignInTask mTokenSignInTask = null; // UI references. private EditText mPasswordView; private EditText mPasswordVerifyView; private CheckBox mRememberCheckbox; private boolean rememberMe = false; private View mProgressView; private View mAuthFormView; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_auth); Kii.initialize(AppConfig.KII_APP_ID, AppConfig.KII_APP_KEY, AppConfig.KII_SITE); // Set up the auth form. 
mPasswordView = (EditText) findViewById(R.id.passw); mPasswordVerifyView = (EditText) findViewById(R.id.passwVerify); mRememberCheckbox = (CheckBox) findViewById(R.id.rememberBox); Button mSignInButton = (Button) findViewById(R.id.sign_in_button); Button mRegisterButton = (Button) findViewById(R.id.register_button); mSignInButton.setOnClickListener(new OnClickListener() { @Override public void onClick(View view) { attemptAuth(false); } }); mRegisterButton.setOnClickListener(new OnClickListener() { @Override public void onClick(View view) { attemptAuth(true); } }); mRememberCheckbox.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { rememberMe = ((CheckBox) v).isChecked(); } }); mAuthFormView = findViewById(R.id.auth_form); mProgressView = findViewById(R.id.progressBar); // Now that the UI is set up we try to login via previous user access token tryLoginWithToken(); } private void tryLoginWithToken() { String token = Settings.loadAccessToken(this); if(token != null){ Log.d(TAG, "Token: " + token); showProgress(true); mTokenSignInTask = new UserTokenSignInTask(this, token); mTokenSignInTask.execute((Void) null); } } /** * Attempts to sign in or register the account specified by the login form. * If there are form errors (invalid email, missing fields, etc.), the * errors are presented and no actual login attempt is made. */ public void attemptAuth(boolean isRegistration) { if(!isRegistration) { if (mSignInTask != null) { return; } } else { if (mRegisterTask != null) { return; } } // Reset errors. mPasswordView.setError(null); mPasswordVerifyView.setError(null); // Store values at the time of the auth attempt. String password = mPasswordView.getText().toString(); String passwordVerify = mPasswordVerifyView.getText().toString(); boolean cancel = false; View focusView = null; // Check for a valid password, if the user entered one. 
if (TextUtils.isEmpty(password) || password.length() < 4) { mPasswordView.setError(getString(R.string.error_invalid_password)); focusView = mPasswordView; cancel = true; } else if (password.compareTo(passwordVerify) != 0) { mPasswordView.setError(getString(R.string.error_mismatch_password)); focusView = mPasswordView; cancel = true; } if (cancel) { // There was an error; don't attempt login and focus the first // form field with an error. focusView.requestFocus(); } else { // Show a progress spinner, and kick off a background task to // perform the user login attempt. showProgress(true); if(!isRegistration) { mSignInTask = new UserSignInTask(this, password, rememberMe); mSignInTask.execute((Void) null); } else { mRegisterTask = new UserRegistrationTask(this, password, rememberMe); mRegisterTask.execute((Void) null); } } } /** * Shows the progress UI and hides the login form. */ @TargetApi(Build.VERSION_CODES.HONEYCOMB_MR2) public void showProgress(final boolean show) { // On Honeycomb MR2 we have the ViewPropertyAnimator APIs, which allow // for very easy animations. If available, use these APIs to fade-in // the progress spinner. if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB_MR2) { int shortAnimTime = getResources().getInteger(android.R.integer.config_shortAnimTime); mAuthFormView.setVisibility(show ? View.GONE : View.VISIBLE); mAuthFormView.animate().setDuration(shortAnimTime).alpha( show ? 0 : 1).setListener(new AnimatorListenerAdapter() { @Override public void onAnimationEnd(Animator animation) { mAuthFormView.setVisibility(show ? View.GONE : View.VISIBLE); } }); mProgressView.setVisibility(show ? View.VISIBLE : View.GONE); mProgressView.animate().setDuration(shortAnimTime).alpha( show ? 1 : 0).setListener(new AnimatorListenerAdapter() { @Override public void onAnimationEnd(Animator animation) { mProgressView.setVisibility(show ? 
View.VISIBLE : View.GONE); } }); } else { // The ViewPropertyAnimator APIs are not available, so simply show // and hide the relevant UI components. mProgressView.setVisibility(show ? View.VISIBLE : View.GONE); mAuthFormView.setVisibility(show ? View.GONE : View.VISIBLE); } } /** * Represents an asynchronous login task used to authenticate * the user. */ public class UserSignInTask extends AsyncTask<Void, Void, Boolean> { private final Context mContext; private final String mPassword; private final boolean mRememberMe; UserSignInTask(Context context, String password, boolean rememberMe) { mContext = context; mPassword = password; mRememberMe = rememberMe; } @Override protected Boolean doInBackground(Void... params) { // attempt sign in against Kii Cloud try { String id = Settings.id(mContext); Log.d(TAG, "Attempting sign in with id: " + id); KiiUser.logIn(id, mPassword); if(mRememberMe) { Log.d(TAG, "Storing access token..."); String accessToken = KiiUser.getCurrentUser().getAccessToken(); // Now we store the token in a local file Settings.saveAccessToken(mContext, accessToken); } } catch (Exception e) { return false; } Log.d(TAG, "Sign in successful"); return true; } @Override protected void onPostExecute(final Boolean success) { mSignInTask = null; showProgress(false); if (success) { Intent intent = new Intent(mContext, MainActivity.class); startActivity(intent); finish(); } else { mPasswordView.setError(getString(R.string.error_incorrect_password)); mPasswordView.requestFocus(); } } @Override protected void onCancelled() { mSignInTask = null; showProgress(false); } } /** * Represents an asynchronous login task used to authenticate * the user via an access token. */ public class UserTokenSignInTask extends AsyncTask<Void, Void, Boolean> { private final Context mContext; private final String mToken; UserTokenSignInTask(Context context, String token) { mContext = context; mToken = token; } @Override protected Boolean doInBackground(Void... 
params) { // attempt sign in against Kii Cloud using an access token try { Log.d(TAG, "Attempting sign in with access token"); KiiUser.loginWithToken(mToken); } catch (Exception e) { Log.e(TAG, e.toString()); return false; } Log.d(TAG, "Sign in successful. User id: " + KiiUser.getCurrentUser().getUsername()); return true; } @Override protected void onPostExecute(final Boolean success) { mTokenSignInTask = null; showProgress(false); if (success) { Intent intent = new Intent(mContext, MainActivity.class); startActivity(intent); finish(); } else { Log.e(TAG, "Error signing in with token"); } } @Override protected void onCancelled() { mTokenSignInTask = null; showProgress(false); } } /** * Represents an asynchronous registration task used to authenticate * the user. */ public class UserRegistrationTask extends AsyncTask<Void, Void, Boolean> { private final Context mContext; private final String mPassword; private final boolean mRememberMe; UserRegistrationTask(Context context, String password, boolean rememberMe) { mContext = context; mPassword = password; mRememberMe = rememberMe; } @Override protected Boolean doInBackground(Void... 
params) { // attempt registration against Kii Cloud try { String id = Settings.id(mContext); Log.d(TAG, "Attempting registration with id: " + id); KiiUser.Builder builder = KiiUser.builderWithName(id); KiiUser user = builder.build(); user.register(mPassword); if(mRememberMe) { Log.d(TAG, "Storing access token..."); String accessToken = KiiUser.getCurrentUser().getAccessToken(); // Now we store the token in a local file Settings.saveAccessToken(mContext, accessToken); } } catch (Exception e) { return false; } Log.d(TAG, "Registration successful"); return true; } @Override protected void onPostExecute(final Boolean success) { mRegisterTask = null; showProgress(false); if (success) { Intent intent = new Intent(mContext, MainActivity.class); startActivity(intent); finish(); } else { mPasswordView.setError(getString(R.string.error_incorrect_password)); mPasswordView.requestFocus(); } } @Override protected void onCancelled() { mRegisterTask = null; showProgress(false); } } }
/** */
package org.vilang.vilang.impl;

import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.EDataType;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.EPackage;
import org.eclipse.emf.ecore.impl.EFactoryImpl;
import org.eclipse.emf.ecore.plugin.EcorePlugin;
import org.vilang.vilang.*;

/**
 * <!-- begin-user-doc -->
 * An implementation of the model <b>Factory</b>.
 * <!-- end-user-doc -->
 * @generated
 */
// NOTE(review): this class is EMF @generated code. Do not hand-edit logic;
// regenerate from the .genmodel instead, or changes will be overwritten.
public class VilangFactoryImpl extends EFactoryImpl implements VilangFactory {
  /**
   * Creates the default factory implementation.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public static VilangFactory init() {
    // Standard EMF pattern: prefer a factory already registered for this
    // package URI; fall back to a fresh instance if lookup fails or is empty.
    try {
      VilangFactory theVilangFactory = (VilangFactory)EPackage.Registry.INSTANCE.getEFactory(VilangPackage.eNS_URI);
      if (theVilangFactory != null) {
        return theVilangFactory;
      }
    }
    catch (Exception exception) {
      EcorePlugin.INSTANCE.log(exception);
    }
    return new VilangFactoryImpl();
  }

  /**
   * Creates an instance of the factory.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public VilangFactoryImpl() {
    super();
  }

  /**
   * Dispatches model-object creation on the metamodel classifier ID.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public EObject create(EClass eClass) {
    switch (eClass.getClassifierID()) {
      case VilangPackage.VILANG: return createVilang();
      case VilangPackage.MODEL: return createModel();
      case VilangPackage.MODEL_TYPE: return createModelType();
      case VilangPackage.MODEL_TYPE_BAYESIAN: return createModelTypeBayesian();
      case VilangPackage.MODEL_TYPE_EPL: return createModelTypeEpl();
      case VilangPackage.MODEL_TYPE_FEEDFORWARD: return createModelTypeFeedforward();
      case VilangPackage.MODEL_TYPE_NEAT: return createModelTypeNeat();
      case VilangPackage.MODEL_TYPE_PNN: return createModelTypePnn();
      case VilangPackage.MODEL_TYPE_RBFNETWORK: return createModelTypeRbfnetwork();
      case VilangPackage.MODEL_TYPE_SOM: return createModelTypeSom();
      case VilangPackage.MODEL_TYPE_SVM: return createModelTypeSvm();
      case VilangPackage.GENERATE_NETWORK: return createGenerateNetwork();
      case VilangPackage.FILE_INFO: return createFileInfo();
      case VilangPackage.DATA_ELEMENTS: return createDataElements();
      case VilangPackage.DATA_ELEMENT: return createDataElement();
      default:
        throw new IllegalArgumentException("The class '" + eClass.getName() + "' is not a valid classifier");
    }
  }

  /**
   * Converts a serialized literal into the matching enum/datatype value.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public Object createFromString(EDataType eDataType, String initialValue) {
    switch (eDataType.getClassifierID()) {
      case VilangPackage.FEEDBACK:
        return createFeedbackFromString(eDataType, initialValue);
      case VilangPackage.FILE_CHAR:
        return createFileCharFromString(eDataType, initialValue);
      case VilangPackage.NETWORK_REPRESENTATION_FORMAT:
        return createNetworkRepresentationFormatFromString(eDataType, initialValue);
      case VilangPackage.NETWORK_OUTPUT_FORMAT:
        return createNetworkOutputFormatFromString(eDataType, initialValue);
      case VilangPackage.DATA_ELEMENT_TYPE:
        return createDataElementTypeFromString(eDataType, initialValue);
      default:
        throw new IllegalArgumentException("The datatype '" + eDataType.getName() + "' is not a valid classifier");
    }
  }

  /**
   * Serializes an enum/datatype value back into its string literal.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public String convertToString(EDataType eDataType, Object instanceValue) {
    switch (eDataType.getClassifierID()) {
      case VilangPackage.FEEDBACK:
        return convertFeedbackToString(eDataType, instanceValue);
      case VilangPackage.FILE_CHAR:
        return convertFileCharToString(eDataType, instanceValue);
      case VilangPackage.NETWORK_REPRESENTATION_FORMAT:
        return convertNetworkRepresentationFormatToString(eDataType, instanceValue);
      case VilangPackage.NETWORK_OUTPUT_FORMAT:
        return convertNetworkOutputFormatToString(eDataType, instanceValue);
      case VilangPackage.DATA_ELEMENT_TYPE:
        return convertDataElementTypeToString(eDataType, instanceValue);
      default:
        throw new IllegalArgumentException("The datatype '" + eDataType.getName() + "' is not a valid classifier");
    }
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public Vilang createVilang() {
    VilangImpl vilang = new VilangImpl();
    return vilang;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public Model createModel() {
    ModelImpl model = new ModelImpl();
    return model;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public ModelType createModelType() {
    ModelTypeImpl modelType = new ModelTypeImpl();
    return modelType;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public ModelTypeBayesian createModelTypeBayesian() {
    ModelTypeBayesianImpl modelTypeBayesian = new ModelTypeBayesianImpl();
    return modelTypeBayesian;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public ModelTypeEpl createModelTypeEpl() {
    ModelTypeEplImpl modelTypeEpl = new ModelTypeEplImpl();
    return modelTypeEpl;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public ModelTypeFeedforward createModelTypeFeedforward() {
    ModelTypeFeedforwardImpl modelTypeFeedforward = new ModelTypeFeedforwardImpl();
    return modelTypeFeedforward;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public ModelTypeNeat createModelTypeNeat() {
    ModelTypeNeatImpl modelTypeNeat = new ModelTypeNeatImpl();
    return modelTypeNeat;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public ModelTypePnn createModelTypePnn() {
    ModelTypePnnImpl modelTypePnn = new ModelTypePnnImpl();
    return modelTypePnn;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public ModelTypeRbfnetwork createModelTypeRbfnetwork() {
    ModelTypeRbfnetworkImpl modelTypeRbfnetwork = new ModelTypeRbfnetworkImpl();
    return modelTypeRbfnetwork;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public ModelTypeSom createModelTypeSom() {
    ModelTypeSomImpl modelTypeSom = new ModelTypeSomImpl();
    return modelTypeSom;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public ModelTypeSvm createModelTypeSvm() {
    ModelTypeSvmImpl modelTypeSvm = new ModelTypeSvmImpl();
    return modelTypeSvm;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public GenerateNetwork createGenerateNetwork() {
    GenerateNetworkImpl generateNetwork = new GenerateNetworkImpl();
    return generateNetwork;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public FileInfo createFileInfo() {
    FileInfoImpl fileInfo = new FileInfoImpl();
    return fileInfo;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public DataElements createDataElements() {
    DataElementsImpl dataElements = new DataElementsImpl();
    return dataElements;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public DataElement createDataElement() {
    DataElementImpl dataElement = new DataElementImpl();
    return dataElement;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public Feedback createFeedbackFromString(EDataType eDataType, String initialValue) {
    Feedback result = Feedback.get(initialValue);
    if (result == null) throw new IllegalArgumentException("The value '" + initialValue + "' is not a valid enumerator of '" + eDataType.getName() + "'");
    return result;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public String convertFeedbackToString(EDataType eDataType, Object instanceValue) {
    return instanceValue == null ? null : instanceValue.toString();
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public FileChar createFileCharFromString(EDataType eDataType, String initialValue) {
    FileChar result = FileChar.get(initialValue);
    if (result == null) throw new IllegalArgumentException("The value '" + initialValue + "' is not a valid enumerator of '" + eDataType.getName() + "'");
    return result;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public String convertFileCharToString(EDataType eDataType, Object instanceValue) {
    return instanceValue == null ? null : instanceValue.toString();
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public NetworkRepresentationFormat createNetworkRepresentationFormatFromString(EDataType eDataType, String initialValue) {
    NetworkRepresentationFormat result = NetworkRepresentationFormat.get(initialValue);
    if (result == null) throw new IllegalArgumentException("The value '" + initialValue + "' is not a valid enumerator of '" + eDataType.getName() + "'");
    return result;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public String convertNetworkRepresentationFormatToString(EDataType eDataType, Object instanceValue) {
    return instanceValue == null ? null : instanceValue.toString();
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public NetworkOutputFormat createNetworkOutputFormatFromString(EDataType eDataType, String initialValue) {
    NetworkOutputFormat result = NetworkOutputFormat.get(initialValue);
    if (result == null) throw new IllegalArgumentException("The value '" + initialValue + "' is not a valid enumerator of '" + eDataType.getName() + "'");
    return result;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public String convertNetworkOutputFormatToString(EDataType eDataType, Object instanceValue) {
    return instanceValue == null ? null : instanceValue.toString();
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public DataElementType createDataElementTypeFromString(EDataType eDataType, String initialValue) {
    DataElementType result = DataElementType.get(initialValue);
    if (result == null) throw new IllegalArgumentException("The value '" + initialValue + "' is not a valid enumerator of '" + eDataType.getName() + "'");
    return result;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public String convertDataElementTypeToString(EDataType eDataType, Object instanceValue) {
    return instanceValue == null ? null : instanceValue.toString();
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public VilangPackage getVilangPackage() {
    return (VilangPackage)getEPackage();
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @deprecated
   * @generated
   */
  @Deprecated
  public static VilangPackage getPackage() {
    return VilangPackage.eINSTANCE;
  }

} //VilangFactoryImpl
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jclouds.io.internal; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.util.Arrays; import java.util.Iterator; import java.util.NoSuchElementException; import javax.inject.Singleton; import org.jclouds.io.ContentMetadata; import org.jclouds.io.Payload; import org.jclouds.io.Payloads; import org.jclouds.io.PayloadSlicer; import org.jclouds.io.payloads.BaseMutableContentMetadata; import org.jclouds.io.payloads.ByteSourcePayload; import com.google.common.base.Charsets; import com.google.common.base.Throwables; import com.google.common.hash.HashCode; import com.google.common.io.ByteSource; import com.google.common.io.ByteStreams; import com.google.common.io.Files; @Singleton public class BasePayloadSlicer implements PayloadSlicer { private static class InputStreamPayloadIterator implements Iterable<Payload>, Iterator<Payload> { private final InputStream input; private final ContentMetadata metaData; private Payload nextPayload; private final int readLen; InputStreamPayloadIterator(InputStream input, ContentMetadata metaData) 
{ this.input = checkNotNull(input, "input"); this.metaData = checkNotNull(metaData, "metaData"); this.readLen = checkNotNull(this.metaData.getContentLength(), "content-length").intValue(); this.nextPayload = getNextPayload(); } @Override public boolean hasNext() { return nextPayload != null; } @Override public Payload next() { Payload payload; if (!hasNext()) throw new NoSuchElementException(); payload = nextPayload; nextPayload = getNextPayload(); return payload; } @Override public void remove() { throw new UnsupportedOperationException("Payload iterator does not support removal"); } @Override public Iterator<Payload> iterator() { return this; } private Payload getNextPayload() { byte[] content = new byte[readLen]; int offset = 0; try { while (true) { int read = input.read(content, offset, readLen - offset); if (read <= 0) { if (offset == 0) { return null; } else { break; } } offset += read; } } catch (IOException e) { throw Throwables.propagate(e); } return createPayload((content.length == offset) ? 
content : Arrays.copyOf(content, offset)); } private Payload createPayload(byte[] content) { Payload payload = null; if (content.length > 0) { payload = Payloads.newByteArrayPayload(content); ContentMetadata cm = metaData.toBuilder().contentLength((long)content.length).contentMD5((HashCode) null).build(); payload.setContentMetadata(BaseMutableContentMetadata.fromContentMetadata(cm)); } return payload; } } private static class ByteSourcePayloadIterator implements Iterable<Payload>, Iterator<Payload> { private final ByteSource input; private final ContentMetadata metaData; private Payload nextPayload; private long offset = 0; private final long readLen; ByteSourcePayloadIterator(ByteSource input, ContentMetadata metaData) { this.input = checkNotNull(input, "input"); this.metaData = checkNotNull(metaData, "metaData"); this.readLen = checkNotNull(this.metaData.getContentLength(), "content-length").longValue(); this.nextPayload = getNextPayload(); } @Override public boolean hasNext() { return nextPayload != null; } @Override public Payload next() { if (!hasNext()) { throw new NoSuchElementException(); } Payload payload = nextPayload; nextPayload = getNextPayload(); return payload; } @Override public void remove() { throw new UnsupportedOperationException("Payload iterator does not support removal"); } @Override public Iterator<Payload> iterator() { return this; } private Payload getNextPayload() { ByteSource byteSource; long byteSourceSize; try { if (offset >= input.size()) { return null; } byteSource = input.slice(offset, readLen); byteSourceSize = byteSource.size(); } catch (IOException e) { throw Throwables.propagate(e); } Payload nextPayload = new ByteSourcePayload(byteSource); ContentMetadata cm = metaData.toBuilder() .contentLength(byteSourceSize) .contentMD5((HashCode) null) .build(); nextPayload.setContentMetadata(BaseMutableContentMetadata.fromContentMetadata(cm)); offset += byteSourceSize; return nextPayload; } } /** * {@inheritDoc} */ @Override public Payload 
slice(Payload input, long offset, long length) { checkNotNull(input); checkArgument(offset >= 0, "offset is negative"); checkArgument(length >= 0, "length is negative"); Payload returnVal; if (input.getRawContent() instanceof File) { returnVal = doSlice((File) input.getRawContent(), offset, length); } else if (input.getRawContent() instanceof String) { returnVal = doSlice((String) input.getRawContent(), offset, length); } else if (input.getRawContent() instanceof byte[]) { returnVal = doSlice((byte[]) input.getRawContent(), offset, length); } else if (input.getRawContent() instanceof InputStream) { returnVal = doSlice((InputStream) input.getRawContent(), offset, length); } else if (input.getRawContent() instanceof ByteSource) { returnVal = doSlice((ByteSource) input.getRawContent(), offset, length); } else { returnVal = doSlice(input, offset, length); } return copyMetadataAndSetLength(input, returnVal, length); } protected Payload doSlice(Payload content, long offset, long length) { return doSlice(content.getInput(), offset, length); } protected Payload doSlice(String content, long offset, long length) { return doSlice(content.getBytes(), offset, length); } protected Payload doSlice(File content, long offset, long length) { return doSlice(Files.asByteSource(content), offset, length); } protected Payload doSlice(InputStream content, long offset, long length) { try { ByteStreams.skipFully(content, offset); } catch (IOException ioe) { throw Throwables.propagate(ioe); } return Payloads.newInputStreamPayload(ByteStreams.limit(content, length)); } protected Payload doSlice(ByteSource content, long offset, long length) { return Payloads.newByteSourcePayload(content.slice(offset, length)); } protected Payload doSlice(byte[] content, long offset, long length) { checkArgument(offset <= Integer.MAX_VALUE, "offset is too big for an array"); checkArgument(length <= Integer.MAX_VALUE, "length is too big for an array"); // TODO(adriancole): Make ByteArrayPayload carry offset, 
length as opposed to wrapping here. return Payloads.newByteSourcePayload(ByteSource.wrap(content).slice(offset, length)); } protected Payload copyMetadataAndSetLength(Payload input, Payload returnVal, long length) { returnVal.setContentMetadata(BaseMutableContentMetadata.fromContentMetadata(input.getContentMetadata() .toBuilder().contentLength(length).contentMD5((HashCode) null).build())); return returnVal; } @Override public Iterable<Payload> slice(Payload input, long size) { checkNotNull(input, "input"); checkArgument(size >= 0, "size must be non-negative but was: %s", size); ContentMetadata meta = BaseMutableContentMetadata.fromContentMetadata(input.getContentMetadata()) .toBuilder() .contentLength(size) .contentMD5((HashCode) null) .build(); Object rawContent = input.getRawContent(); if (rawContent instanceof File) { return doSlice((File) rawContent, meta); } else if (rawContent instanceof String) { return doSlice((String) rawContent, meta); } else if (rawContent instanceof byte[]) { return doSlice((byte[]) rawContent, meta); } else if (rawContent instanceof InputStream) { return doSlice((InputStream) rawContent, meta); } else if (rawContent instanceof ByteSource) { return doSlice((ByteSource) rawContent, meta); } else { return doSlice(input, meta); } } protected Iterable<Payload> doSlice(Payload input, ContentMetadata meta) { return doSlice(input.getInput(), meta); } protected Iterable<Payload> doSlice(String rawContent, ContentMetadata meta) { return doSlice(ByteSource.wrap(rawContent.getBytes(Charsets.UTF_8)), meta); } protected Iterable<Payload> doSlice(byte[] rawContent, ContentMetadata meta) { return doSlice(ByteSource.wrap(rawContent), meta); } protected Iterable<Payload> doSlice(File rawContent, ContentMetadata meta) { return doSlice(Files.asByteSource(rawContent), meta); } protected Iterable<Payload> doSlice(InputStream rawContent, ContentMetadata meta) { return new InputStreamPayloadIterator(rawContent, meta); } protected Iterable<Payload> 
doSlice(ByteSource rawContent, ContentMetadata meta) { return new ByteSourcePayloadIterator(rawContent, meta); } }
/* * Copyright 2020 LINE Corporation * * LINE Corporation licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.linecorp.armeria.server.auth.oauth2; import static java.util.Objects.requireNonNull; import java.util.Map; import java.util.function.Supplier; import com.github.benmanes.caffeine.cache.Cache; import com.github.benmanes.caffeine.cache.Caffeine; import com.github.benmanes.caffeine.cache.CaffeineSpec; import com.google.common.collect.ImmutableSet; import com.linecorp.armeria.client.WebClient; import com.linecorp.armeria.common.annotation.Nullable; import com.linecorp.armeria.common.annotation.UnstableApi; import com.linecorp.armeria.common.auth.oauth2.ClientAuthorization; import com.linecorp.armeria.common.auth.oauth2.OAuth2TokenDescriptor; import com.linecorp.armeria.internal.server.auth.oauth2.TokenIntrospectionRequest; import com.linecorp.armeria.server.auth.Authorizer; /** * Builds a {@link OAuth2TokenIntrospectionAuthorizer} instance. 
*/ @UnstableApi public final class OAuth2TokenIntrospectionAuthorizerBuilder { public static final String DEFAULT_CACHE_SPEC = "maximumSize=1024,expireAfterWrite=1h"; private static final CaffeineSpec DEFAULT_CACHE_SPEC_OBJ = CaffeineSpec.parse(DEFAULT_CACHE_SPEC); private final WebClient introspectionEndpoint; private final String introspectionEndpointPath; @Nullable private ClientAuthorization clientAuthorization; @Nullable private String accessTokenType; @Nullable private String realm; private final ImmutableSet.Builder<String> permittedScope = ImmutableSet.builder(); @Nullable private CaffeineSpec cacheSpec; /** * Constructs new builder for OAuth 2.0 Token Introspection {@link Authorizer}, * as per<a href="https://datatracker.ietf.org/doc/html/rfc7662#section-2">[RFC7662], Section 2</a>. * * @param introspectionEndpoint A {@link WebClient} to facilitate the Token Introspection request. Must * correspond to the Token Introspection endpoint of the OAuth 2.0 system. * @param introspectionEndpointPath A URI path that corresponds to the Token Introspection endpoint of the * OAuth 2.0 system. */ OAuth2TokenIntrospectionAuthorizerBuilder(WebClient introspectionEndpoint, String introspectionEndpointPath) { this.introspectionEndpoint = requireNonNull(introspectionEndpoint, "introspectionEndpoint"); this.introspectionEndpointPath = requireNonNull(introspectionEndpointPath, "introspectionEndpointPath"); } /** * Provides client authorization for the OAuth 2.0 Introspection requests based on encoded authorization * token and authorization type, * as per <a href="https://datatracker.ietf.org/doc/html/rfc6749#section-2.3">[RFC6749], Section 2.3</a>. * * @param authorizationSupplier A supplier of encoded client authorization token. * @param authorizationType One of the registered HTTP authentication schemes as per * <a href="https://www.iana.org/assignments/http-authschemes/http-authschemes.xhtml"> * HTTP Authentication Scheme Registry</a>. 
*/ public OAuth2TokenIntrospectionAuthorizerBuilder clientAuthorization( Supplier<String> authorizationSupplier, String authorizationType) { clientAuthorization = ClientAuthorization.ofAuthorization(authorizationSupplier, authorizationType); return this; } /** * Provides client authorization for the OAuth 2.0 Introspection requests based on encoded authorization * token and {@code Basic} authorization type, * as per <a href="https://datatracker.ietf.org/doc/html/rfc6749#section-2.3">[RFC6749], Section 2.3</a>. * * @param authorizationSupplier A supplier of encoded client authorization token. */ public OAuth2TokenIntrospectionAuthorizerBuilder clientBasicAuthorization( Supplier<String> authorizationSupplier) { clientAuthorization = ClientAuthorization.ofBasicAuthorization(authorizationSupplier); return this; } /** * Provides client authorization for the OAuth 2.0 Introspection requests based on client credentials and * authorization type, * as per <a href="https://datatracker.ietf.org/doc/html/rfc6749#section-2.3">[RFC6749], Section 2.3</a>. * * @param credentialsSupplier A supplier of client credentials. * @param authorizationType One of the registered HTTP authentication schemes as per * <a href="https://www.iana.org/assignments/http-authschemes/http-authschemes.xhtml"> * HTTP Authentication Scheme Registry</a>. */ public OAuth2TokenIntrospectionAuthorizerBuilder clientCredentials( Supplier<? extends Map.Entry<String, String>> credentialsSupplier, String authorizationType) { clientAuthorization = ClientAuthorization.ofCredentials(credentialsSupplier, authorizationType); return this; } /** * Provides client authorization for the OAuth 2.0 Introspection requests based on client credentials and * {@code Basic} authorization type, * as per <a href="https://datatracker.ietf.org/doc/html/rfc6749#section-2.3">[RFC6749], Section 2.3</a>. * * @param credentialsSupplier A supplier of client credentials. 
*/ public OAuth2TokenIntrospectionAuthorizerBuilder clientCredentials( Supplier<? extends Map.Entry<String, String>> credentialsSupplier) { clientAuthorization = ClientAuthorization.ofCredentials(credentialsSupplier); return this; } /** * Access Token type permitted by this authorizer, * as per <a href="https://datatracker.ietf.org/doc/html/rfc6749#section-7.1">[RFC6749], Section 7.1</a>. * The authorizer will accept any type if empty. OPTIONAL. */ public OAuth2TokenIntrospectionAuthorizerBuilder accessTokenType(String accessTokenType) { this.accessTokenType = requireNonNull(accessTokenType, "accessTokenType"); return this; } /** * An HTTP Realm - a name designating of the protected area. OPTIONAL. */ public OAuth2TokenIntrospectionAuthorizerBuilder realm(String realm) { this.realm = requireNonNull(realm, "realm"); return this; } /** * An {@link Iterable} of case-sensitive scope strings permitted by this authorizer. * The authorizer will accept any scope if empty. */ public OAuth2TokenIntrospectionAuthorizerBuilder permittedScope(Iterable<String> scope) { permittedScope.addAll(requireNonNull(scope, "scope")); return this; } /** * An array of case-sensitive scope strings permitted by this authorizer. * The authorizer will accept any scope if empty. */ public OAuth2TokenIntrospectionAuthorizerBuilder permittedScope(String... scope) { permittedScope.add(requireNonNull(scope, "scope")); return this; } /** * Provides caching facility for OAuth 2.0 {@link OAuth2TokenDescriptor} in order to avoid continuous Token * Introspection as per <a href="https://datatracker.ietf.org/doc/html/rfc7662#section-2.2">[RFC7662], Section 2.2</a>. * Sets the {@linkplain CaffeineSpec Caffeine specification string} of the cache that stores the tokens. * If not set, {@value DEFAULT_CACHE_SPEC} is used by default. 
*/ public OAuth2TokenIntrospectionAuthorizerBuilder cacheSpec(String cacheSpec) { this.cacheSpec = CaffeineSpec.parse(cacheSpec); // parse right away return this; } /** * Builds a new instance of {@link OAuth2TokenIntrospectionAuthorizer} using configured parameters. */ public OAuth2TokenIntrospectionAuthorizer build() { // init introspection request final TokenIntrospectionRequest introspectionRequest = new TokenIntrospectionRequest(introspectionEndpoint, requireNonNull(introspectionEndpointPath, "introspectionEndpointPath"), clientAuthorization); final Cache<String, OAuth2TokenDescriptor> tokenCache = Caffeine.from(cacheSpec == null ? DEFAULT_CACHE_SPEC_OBJ : cacheSpec).build(); return new OAuth2TokenIntrospectionAuthorizer(tokenCache, accessTokenType, realm, permittedScope.build(), introspectionRequest); } }
package com.kurtulusdev.androidswipecardlayout.library; import android.content.Context; import android.view.LayoutInflater; import android.view.MotionEvent; import android.view.View; import android.view.ViewConfiguration; import android.view.animation.OvershootInterpolator; import android.widget.FrameLayout; import android.widget.ImageView; import android.widget.RelativeLayout; import android.widget.TextView; import android.widget.Toast; import com.bumptech.glide.Glide; import java.util.ArrayList; /** * Created by kurtulusahmet on 26.11.2015. */ public class SwipeCardLayout extends FrameLayout { Context context; RelativeLayout scContainer; View currentTopView; private ViewConfiguration viewConfig; private float mScreenWidth; private float mScreenHeight; private final int INVALID_POINTER_ID = -1337; private int mActivePointerId = INVALID_POINTER_ID; private Integer MAX_RENDERED_COUNT = 3; // CUSTOM GESTURE TRACKING FIELDS - - - - - - - - - - - - - - - - - - - - - - - - - - - - private float MAX_ROTATION_AMOUNT = 20f; private final float TOUCH_SLOP_SCALE_FACTOR_X = 5.0f; ArrayList<CardObject> cardList; public SwipeCardLayout(Context context, RelativeLayout scContainer) { super(context); this.context = context; this.scContainer = scContainer; viewConfig = ViewConfiguration.get(context); mScreenWidth = getResources().getDisplayMetrics().widthPixels; mScreenHeight = getResources().getDisplayMetrics().heightPixels; } //String image url public CardObject createCardObject(String title, String imageurl){ CardObject newCard = new CardObject(); newCard.setTitle(title); newCard.setImage(imageurl); return newCard; } //Resource id public CardObject createCardObject(String title, int resource_id){ CardObject newCard = new CardObject(); newCard.setTitle(title); newCard.setImageResourceId(resource_id); return newCard; } public void addCards(ArrayList<CardObject> cardlist){ int index = 0; this.cardList = cardlist; for(int i = 0; i < cardlist.size(); i++){ RelativeLayout 
newCardViewToAdd = (RelativeLayout) LayoutInflater.from(context).inflate(R.layout.card_item, scContainer, false); scContainer.addView(newCardViewToAdd, index + i); newCardViewToAdd.setTranslationY(mScreenHeight + 200); newCardViewToAdd.setScaleX(0.5f); newCardViewToAdd.setScaleY(0.5f); newCardViewToAdd.setAlpha(1f); CardViewHolder holder = new CardViewHolder(newCardViewToAdd); newCardViewToAdd.setTag(holder); } } static class CardViewHolder { public CardObject data; public ImageView imageView; public TextView textView; public CardViewHolder(View cardView){ imageView = (ImageView) cardView.findViewById(R.id.card_image); textView = (TextView) cardView.findViewById(R.id.card_text); } } public void renderCards(){ int renderedCount = -1; int i = 0; for(int x = i; x <= scContainer.getChildCount(); x++){ renderedCount++; if(renderedCount >= MAX_RENDERED_COUNT){ return; } if (renderedCount == cardList.size()){ return; } final View view; view = scContainer.getChildAt((scContainer.getChildCount() - x - 1)); CardViewHolder holder = (CardViewHolder) view.getTag(); if (renderedCount == 0){ currentTopView = view; view.setOnTouchListener(new SwipeViewOnTouchListener()); } if (renderedCount == 0) { view.setTranslationY(150f); view.setAlpha(1f); view.animate().translationY(0f).scaleX(1.0f).scaleY(1.0f).setInterpolator(new OvershootInterpolator()); } else if (renderedCount == 1) { view.setTranslationY(1000f); view.setAlpha(1f); view.animate().translationY(150f).scaleX(0.9f).scaleY(0.9f).setInterpolator(new OvershootInterpolator()); } if(holder.data == null){ holder.data = cardList.get(x); holder.textView.setText(holder.data.getTitle()); if (holder.data.getImage() == null){ holder.imageView.setImageResource(holder.data.getImageResourceId()); }else{ Glide.with(context).load(holder.data.getImage()).into(holder.imageView); } } } } private void removeTopCard(){ currentTopView.postDelayed(new Runnable() { @Override public void run() { scContainer.removeView(currentTopView); 
cardList.remove(0); /*if(cardList.size() <= MAX_RENDERED_COUNT){ loadMoreCards(); }*/ renderCards(); } }, 100); } public void topCardClicked() { //Toast.makeText(context, "Click", Toast.LENGTH_SHORT).show(); } private void topCardLeft() { currentTopView.animate().translationX(-mScreenWidth).setInterpolator(new OvershootInterpolator()); removeTopCard(); //Toast.makeText(getApplicationContext(), "Left", Toast.LENGTH_SHORT).show(); } private void topCardRight() { currentTopView.animate().translationX(mScreenWidth).setInterpolator(new OvershootInterpolator()); removeTopCard(); //Toast.makeText(getApplicationContext(), "Right", Toast.LENGTH_SHORT).show(); } private void snapback(View v){ if(v != null){ v.animate().translationX(0f).translationY(0f).rotation(0).setInterpolator(new OvershootInterpolator()); } } class SwipeViewOnTouchListener implements View.OnTouchListener { private float mLastTouchX = 0; private float mLastTouchY = 0; private float dx = 0; private float dy = 0; private boolean isClick = true; @Override public boolean onTouch(View v, MotionEvent event) { int action = event.getActionMasked(); switch (action) { case MotionEvent.ACTION_DOWN: { final float x = event.getRawX(); final float y = event.getRawY(); mLastTouchX = x; mLastTouchY = y; final int pointerIndex = event.getActionIndex(); mActivePointerId = event.getPointerId(pointerIndex); isClick = true; break; } case MotionEvent.ACTION_MOVE: { final float x = event.getRawX(); final float y = event.getRawY(); dx += (x - mLastTouchX); dy += (y - mLastTouchY); if (Math.abs(dx) > viewConfig.getScaledTouchSlop() || Math.abs(dy) > viewConfig.getScaledTouchSlop()) { isClick = false; } v.setTranslationX(dx); v.setTranslationY(dy); float percentScreenWidthX = (Math.abs(dx) / mScreenWidth); float rotationDegrees = percentScreenWidthX * MAX_ROTATION_AMOUNT; if (dx < 0) { rotationDegrees *= -1; } v.setRotation(rotationDegrees); mLastTouchX = x; mLastTouchY = y; break; } case MotionEvent.ACTION_UP: case 
MotionEvent.ACTION_CANCEL: { if (Math.abs(dx) <= viewConfig.getScaledTouchSlop() && Math.abs(dy) <= viewConfig.getScaledTouchSlop() && isClick) { topCardClicked(); } else if (Math.abs(dx) <= viewConfig.getScaledTouchSlop() * TOUCH_SLOP_SCALE_FACTOR_X) { snapback(v); } else { if (dx < 0) topCardLeft(); else topCardRight(); } dx = 0; dy = 0; mLastTouchX = 0; mLastTouchY = 0; mActivePointerId = INVALID_POINTER_ID; break; } } return true; } } }
// Copyright 2014 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.skylark; import static com.google.common.truth.Truth.assertThat; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; import com.google.common.base.Joiner; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.packages.AdvertisedProviderSet; import com.google.devtools.build.lib.packages.AspectParameters; import com.google.devtools.build.lib.packages.Attribute; import com.google.devtools.build.lib.packages.Attribute.ConfigurationTransition; import com.google.devtools.build.lib.packages.BuildType; import com.google.devtools.build.lib.packages.ImplicitOutputsFunction; import com.google.devtools.build.lib.packages.PredicateWithMessage; import com.google.devtools.build.lib.packages.RequiredProviders; import com.google.devtools.build.lib.packages.RuleClass; import com.google.devtools.build.lib.packages.RuleClass.Builder.RuleClassType; import com.google.devtools.build.lib.packages.SkylarkAspect; import com.google.devtools.build.lib.packages.SkylarkClassObject; import 
com.google.devtools.build.lib.packages.SkylarkClassObjectConstructor; import com.google.devtools.build.lib.packages.SkylarkProviderIdentifier; import com.google.devtools.build.lib.rules.SkylarkAttr; import com.google.devtools.build.lib.rules.SkylarkFileType; import com.google.devtools.build.lib.rules.SkylarkRuleClassFunctions; import com.google.devtools.build.lib.rules.SkylarkRuleClassFunctions.RuleFunction; import com.google.devtools.build.lib.skylark.util.SkylarkTestCase; import com.google.devtools.build.lib.syntax.ClassObject; import com.google.devtools.build.lib.syntax.Environment; import com.google.devtools.build.lib.syntax.EvalException; import com.google.devtools.build.lib.syntax.EvalUtils; import com.google.devtools.build.lib.syntax.SkylarkDict; import com.google.devtools.build.lib.syntax.SkylarkList.MutableList; import com.google.devtools.build.lib.syntax.SkylarkList.Tuple; import com.google.devtools.build.lib.syntax.SkylarkNestedSet; import com.google.devtools.build.lib.syntax.Type; import com.google.devtools.build.lib.util.FileTypeSet; import java.util.Collection; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** * Tests for SkylarkRuleClassFunctions. 
*/ @RunWith(JUnit4.class) public class SkylarkRuleClassFunctionsTest extends SkylarkTestCase { @Rule public ExpectedException thrown = ExpectedException.none(); @Before public final void createBuildFile() throws Exception { scratch.file( "foo/BUILD", "genrule(name = 'foo',", " cmd = 'dummy_cmd',", " srcs = ['a.txt', 'b.img'],", " tools = ['t.exe'],", " outs = ['c.txt'])", "genrule(name = 'bar',", " cmd = 'dummy_cmd',", " srcs = [':jl', ':gl'],", " outs = ['d.txt'])", "java_library(name = 'jl',", " srcs = ['a.java'])", "genrule(name = 'gl',", " cmd = 'touch $(OUTS)',", " srcs = ['a.go'],", " outs = [ 'gl.a', 'gl.gcgox', ],", " output_to_bindir = 1,", ")"); } @Test public void testCannotOverrideBuiltInAttribute() throws Exception { ev.setFailFast(true); try { evalAndExport( "def impl(ctx): return", "r = rule(impl, attrs = {'tags': attr.string_list()})"); Assert.fail("Expected error '" + "There is already a built-in attribute 'tags' which cannot be overridden" + "' but got no error"); } catch (EvalException e) { assertThat(e).hasMessage( "There is already a built-in attribute 'tags' which cannot be overridden"); } } @Test public void testCannotOverrideBuiltInAttributeName() throws Exception { ev.setFailFast(true); try { evalAndExport( "def impl(ctx): return", "r = rule(impl, attrs = {'name': attr.string()})"); Assert.fail("Expected error '" + "There is already a built-in attribute 'name' which cannot be overridden" + "' but got no error"); } catch (EvalException e) { assertThat(e).hasMessage( "There is already a built-in attribute 'name' which cannot be overridden"); } } @Test public void testImplicitArgsAttribute() throws Exception { evalAndExport( "def _impl(ctx):", " pass", "exec_rule = rule(implementation = _impl, executable = True)", "non_exec_rule = rule(implementation = _impl)"); assertTrue(getRuleClass("exec_rule").hasAttr("args", Type.STRING_LIST)); assertFalse(getRuleClass("non_exec_rule").hasAttr("args", Type.STRING_LIST)); } private RuleClass 
getRuleClass(String name) throws Exception { return ((RuleFunction) lookup(name)).getRuleClass(); } private void registerDummyUserDefinedFunction() throws Exception { eval("def impl():\n" + " return 0\n"); } @Test public void testAttrWithOnlyType() throws Exception { Attribute attr = buildAttribute("a1", "attr.string_list()", ""); assertEquals(Type.STRING_LIST, attr.getType()); } private Attribute buildAttribute(String name, String... lines) throws Exception { return ((SkylarkAttr.Descriptor) evalRuleClassCode(lines)).build(name); } @Test public void testOutputListAttr() throws Exception { Attribute attr = buildAttribute("a1", "attr.output_list()"); assertEquals(BuildType.OUTPUT_LIST, attr.getType()); } @Test public void testIntListAttr() throws Exception { Attribute attr = buildAttribute("a1", "attr.int_list()"); assertEquals(Type.INTEGER_LIST, attr.getType()); } @Test public void testOutputAttr() throws Exception { Attribute attr = buildAttribute("a1", "attr.output()"); assertEquals(BuildType.OUTPUT, attr.getType()); } @Test public void testStringDictAttr() throws Exception { Attribute attr = buildAttribute("a1", "attr.string_dict(default = {'a': 'b'})"); assertEquals(Type.STRING_DICT, attr.getType()); } @Test public void testStringListDictAttr() throws Exception { Attribute attr = buildAttribute("a1", "attr.string_list_dict(default = {'a': ['b', 'c']})"); assertEquals(Type.STRING_LIST_DICT, attr.getType()); } @Test public void testAttrAllowedFileTypesAnyFile() throws Exception { Attribute attr = buildAttribute("a1", "attr.label_list(allow_files = True)"); assertEquals(FileTypeSet.ANY_FILE, attr.getAllowedFileTypesPredicate()); } @Test public void testAttrAllowedFileTypesWrongType() throws Exception { checkErrorContains( "allow_files should be a boolean or a string list", "attr.label_list(allow_files = 18)"); } @Test public void testAttrAllowedSingleFileTypesWrongType() throws Exception { checkErrorContains( "allow_single_file should be a boolean or a string 
list", "attr.label(allow_single_file = 18)"); } @Test public void testAttrWithList() throws Exception { Attribute attr = buildAttribute("a1", "attr.label_list(allow_files = ['.xml'])"); assertTrue(attr.getAllowedFileTypesPredicate().apply("a.xml")); assertFalse(attr.getAllowedFileTypesPredicate().apply("a.txt")); assertFalse(attr.isSingleArtifact()); } @Test public void testAttrSingleFileWithList() throws Exception { Attribute attr = buildAttribute("a1", "attr.label(allow_single_file = ['.xml'])"); assertTrue(attr.getAllowedFileTypesPredicate().apply("a.xml")); assertFalse(attr.getAllowedFileTypesPredicate().apply("a.txt")); assertTrue(attr.isSingleArtifact()); } @Test public void testAttrWithSkylarkFileType() throws Exception { Attribute attr = buildAttribute("a1", "attr.label_list(allow_files = FileType(['.xml']))"); assertTrue(attr.getAllowedFileTypesPredicate().apply("a.xml")); assertFalse(attr.getAllowedFileTypesPredicate().apply("a.txt")); } @Test public void testAttrWithProviders() throws Exception { Attribute attr = buildAttribute("a1", "attr.label_list(allow_files = True, providers = ['a', 'b'])"); assertThat(attr.getMandatoryProvidersList()) .containsExactly(ImmutableSet.of(legacy("a"), legacy("b"))); } private static SkylarkProviderIdentifier legacy(String legacyId) { return SkylarkProviderIdentifier.forLegacy(legacyId); } @Test public void testAttrWithProvidersList() throws Exception { Attribute attr = buildAttribute("a1", "attr.label_list(allow_files = True," + " providers = [['a', 'b'], ['c']])"); assertThat(attr.getMandatoryProvidersList()).containsExactly( ImmutableSet.of(legacy("a"), legacy("b")), ImmutableSet.of(legacy("c"))); } @Test public void testAttrWithWrongProvidersList() throws Exception { checkErrorContains( "element in 'providers' is of unexpected type." 
+ " Should be list of string, but got list with an element of type int.", "attr.label_list(allow_files = True, providers = [['a', 1], ['c']])"); checkErrorContains( "element in 'providers' is of unexpected type." + " Should be list of string, but got string.", "attr.label_list(allow_files = True, providers = [['a', 'b'], 'c'])"); } @Test public void testLabelListWithAspects() throws Exception { SkylarkAttr.Descriptor attr = (SkylarkAttr.Descriptor) evalRuleClassCode( "def _impl(target, ctx):", " pass", "my_aspect = aspect(implementation = _impl)", "attr.label_list(aspects = [my_aspect])"); Object aspect = ev.lookup("my_aspect"); assertThat(aspect).isNotNull(); assertThat(attr.getAspects()).containsExactly(aspect); } @Test public void testLabelListWithAspectsError() throws Exception { checkErrorContains( "expected type 'Aspect' for 'aspects' element but got type 'int' instead", "def _impl(target, ctx):", " pass", "my_aspect = aspect(implementation = _impl)", "attr.label_list(aspects = [my_aspect, 123])"); } @Test public void testAspectExtraDeps() throws Exception { evalAndExport( "def _impl(target, ctx):", " pass", "my_aspect = aspect(_impl,", " attrs = { '_extra_deps' : attr.label(default = Label('//foo/bar:baz')) }", ")"); SkylarkAspect aspect = (SkylarkAspect) ev.lookup("my_aspect"); Attribute attribute = Iterables.getOnlyElement(aspect.getAttributes()); assertThat(attribute.getName()).isEqualTo("$extra_deps"); assertThat(attribute.getDefaultValue(null)) .isEqualTo(Label.parseAbsolute("//foo/bar:baz", false)); } @Test public void testAspectParameter() throws Exception { evalAndExport( "def _impl(target, ctx):", " pass", "my_aspect = aspect(_impl,", " attrs = { 'param' : attr.string(values=['a', 'b']) }", ")"); SkylarkAspect aspect = (SkylarkAspect) ev.lookup("my_aspect"); Attribute attribute = Iterables.getOnlyElement(aspect.getAttributes()); assertThat(attribute.getName()).isEqualTo("param"); } @Test public void testAspectParameterRequiresValues() throws 
Exception { checkErrorContains( "Aspect parameter attribute 'param' must have type 'string' and use the 'values' " + "restriction.", "def _impl(target, ctx):", " pass", "my_aspect = aspect(_impl,", " attrs = { 'param' : attr.string(default = 'c') }", ")"); } @Test public void testAspectParameterBadType() throws Exception { checkErrorContains( "Aspect parameter attribute 'param' must have type 'string' and use the 'values' " + "restriction.", "def _impl(target, ctx):", " pass", "my_aspect = aspect(_impl,", " attrs = { 'param' : attr.label(default = Label('//foo/bar:baz')) }", ")"); } @Test public void testAspectParameterAndExtraDeps() throws Exception { evalAndExport( "def _impl(target, ctx):", " pass", "my_aspect = aspect(_impl,", " attrs = { 'param' : attr.string(values=['a', 'b']),", " '_extra' : attr.label(default = Label('//foo/bar:baz')) }", ")"); SkylarkAspect aspect = (SkylarkAspect) ev.lookup("my_aspect"); assertThat(aspect.getAttributes()).hasSize(2); assertThat(aspect.getParamAttributes()).containsExactly("param"); } @Test public void testAspectNoDefaultValueAttribute() throws Exception { checkErrorContains( "Aspect attribute '_extra_deps' has no default value", "def _impl(target, ctx):", " pass", "my_aspect = aspect(_impl,", " attrs = { '_extra_deps' : attr.label() }", ")"); } @Test public void testNonLabelAttrWithProviders() throws Exception { checkErrorContains( "unexpected keyword 'providers' in call to string", "attr.string(providers = ['a'])"); } private static final RuleClass.ConfiguredTargetFactory<Object, Object> DUMMY_CONFIGURED_TARGET_FACTORY = new RuleClass.ConfiguredTargetFactory<Object, Object>() { @Override public Object create(Object ruleContext) throws InterruptedException { throw new IllegalStateException(); } }; private RuleClass ruleClass(String name) { return new RuleClass.Builder(name, RuleClassType.NORMAL, false) .factory(DUMMY_CONFIGURED_TARGET_FACTORY) .add(Attribute.attr("tags", Type.STRING_LIST)) .build(); } @Test public void 
testAttrAllowedRuleClassesSpecificRuleClasses() throws Exception { Attribute attr = buildAttribute("a", "attr.label_list(allow_rules = ['java_binary'], allow_files = True)"); assertTrue(attr.getAllowedRuleClassesPredicate().apply(ruleClass("java_binary"))); assertFalse(attr.getAllowedRuleClassesPredicate().apply(ruleClass("genrule"))); } @Test public void testAttrDefaultValue() throws Exception { Attribute attr = buildAttribute("a1", "attr.string(default = 'some value')"); assertEquals("some value", attr.getDefaultValueForTesting()); } @Test public void testAttrDefaultValueBadType() throws Exception { checkErrorContains( "method attr.string(*, default: string, mandatory: bool, values: sequence of strings) " + "is not applicable for arguments (int, bool, list): 'default' is 'int', " + "but should be 'string'", "attr.string(default = 1)"); } @Test public void testAttrMandatory() throws Exception { Attribute attr = buildAttribute("a1", "attr.string(mandatory=True)"); assertTrue(attr.isMandatory()); assertFalse(attr.isNonEmpty()); } @Test public void testAttrNonEmpty() throws Exception { Attribute attr = buildAttribute("a1", "attr.string_list(non_empty=True)"); assertTrue(attr.isNonEmpty()); assertFalse(attr.isMandatory()); } @Test public void testAttrAllowEmpty() throws Exception { Attribute attr = buildAttribute("a1", "attr.string_list(allow_empty=False)"); assertTrue(attr.isNonEmpty()); assertFalse(attr.isMandatory()); } @Test public void testAttrBadKeywordArguments() throws Exception { checkErrorContains( "unexpected keyword 'bad_keyword' in call to string", "attr.string(bad_keyword = '')"); } @Test public void testAttrCfg() throws Exception { Attribute attr = buildAttribute("a1", "attr.label(cfg = 'host', allow_files = True)"); assertEquals(ConfigurationTransition.HOST, attr.getConfigurationTransition()); } @Test public void testAttrCfgData() throws Exception { Attribute attr = buildAttribute("a1", "attr.label(cfg = 'data', allow_files = True)"); 
assertEquals(ConfigurationTransition.DATA, attr.getConfigurationTransition()); } @Test public void testAttrCfgTarget() throws Exception { Attribute attr = buildAttribute("a1", "attr.label(cfg = 'target', allow_files = True)"); assertEquals(ConfigurationTransition.NONE, attr.getConfigurationTransition()); } @Test public void testAttrValues() throws Exception { Attribute attr = buildAttribute("a1", "attr.string(values = ['ab', 'cd'])"); PredicateWithMessage<Object> predicate = attr.getAllowedValues(); assertThat(predicate.apply("ab")).isTrue(); assertThat(predicate.apply("xy")).isFalse(); } @Test public void testAttrIntValues() throws Exception { Attribute attr = buildAttribute("a1", "attr.int(values = [1, 2])"); PredicateWithMessage<Object> predicate = attr.getAllowedValues(); assertThat(predicate.apply(2)).isTrue(); assertThat(predicate.apply(3)).isFalse(); } @Test public void testRuleImplementation() throws Exception { evalAndExport("def impl(ctx): return None", "rule1 = rule(impl)"); RuleClass c = ((RuleFunction) lookup("rule1")).getRuleClass(); assertEquals("impl", c.getConfiguredTargetFunction().getName()); } @Test public void testLateBoundAttrWorksWithOnlyLabel() throws Exception { checkEvalError( "method attr.string(*, default: string, mandatory: bool, values: sequence of strings) " + "is not applicable for arguments (function, bool, list): 'default' is 'function', " + "but should be 'string'", "def attr_value(cfg): return 'a'", "attr.string(default=attr_value)"); } private static final Label FAKE_LABEL = Label.parseAbsoluteUnchecked("//fake/label.bzl"); @Test public void testRuleAddAttribute() throws Exception { evalAndExport("def impl(ctx): return None", "r1 = rule(impl, attrs={'a1': attr.string()})"); RuleClass c = ((RuleFunction) lookup("r1")).getRuleClass(); assertTrue(c.hasAttr("a1", Type.STRING)); } protected void evalAndExport(String... 
lines) throws Exception { eval(lines); SkylarkRuleClassFunctions.exportRuleFunctionsAndAspects(ev.getEnvironment(), FAKE_LABEL); } @Test public void testExportAliasedName() throws Exception { // When there are multiple names aliasing the same SkylarkExportable, the first one to be // declared should be used. Make sure we're not using lexicographical order, hash order, // non-deterministic order, or anything else. evalAndExport( "def _impl(ctx): pass", "d = rule(implementation = _impl)", "a = d", // Having more names improves the chance that non-determinism will be caught. "b = d", "c = d", "e = d", "f = d", "foo = d", "bar = d", "baz = d", "x = d", "y = d", "z = d"); String dName = ((RuleFunction) lookup("d")).getRuleClass().getName(); String fooName = ((RuleFunction) lookup("foo")).getRuleClass().getName(); assertThat(dName).isEqualTo("d"); assertThat(fooName).isEqualTo("d"); } @Test public void testOutputToGenfiles() throws Exception { evalAndExport("def impl(ctx): pass", "r1 = rule(impl, output_to_genfiles=True)"); RuleClass c = ((RuleFunction) lookup("r1")).getRuleClass(); assertFalse(c.hasBinaryOutput()); } @Test public void testRuleAddMultipleAttributes() throws Exception { evalAndExport( "def impl(ctx): return None", "r1 = rule(impl,", " attrs = {", " 'a1': attr.label_list(allow_files=True),", " 'a2': attr.int()", "})"); RuleClass c = ((RuleFunction) lookup("r1")).getRuleClass(); assertTrue(c.hasAttr("a1", BuildType.LABEL_LIST)); assertTrue(c.hasAttr("a2", Type.INTEGER)); } @Test public void testRuleAttributeFlag() throws Exception { evalAndExport( "def impl(ctx): return None", "r1 = rule(impl, attrs = {'a1': attr.string(mandatory=True)})"); RuleClass c = ((RuleFunction) lookup("r1")).getRuleClass(); assertTrue(c.getAttributeByName("a1").isMandatory()); } @Test public void testRuleOutputs() throws Exception { evalAndExport( "def impl(ctx): return None", "r1 = rule(impl, outputs = {'a': 'a.txt'})"); RuleClass c = ((RuleFunction) lookup("r1")).getRuleClass(); 
ImplicitOutputsFunction function = c.getDefaultImplicitOutputsFunction(); assertEquals("a.txt", Iterables.getOnlyElement(function.getImplicitOutputs(null))); } @Test public void testRuleUnknownKeyword() throws Exception { registerDummyUserDefinedFunction(); checkErrorContains( "unexpected keyword 'bad_keyword' in call to " + "rule(implementation: function, ", "rule(impl, bad_keyword = 'some text')"); } @Test public void testRuleImplementationMissing() throws Exception { checkErrorContains( "missing mandatory positional argument 'implementation' while calling " + "rule(implementation", "rule(attrs = {})"); } @Test public void testRuleBadTypeForAdd() throws Exception { registerDummyUserDefinedFunction(); checkErrorContains( "expected dict or NoneType for 'attrs' while calling rule but got string instead: " + "some text", "rule(impl, attrs = 'some text')"); } @Test public void testRuleBadTypeInAdd() throws Exception { registerDummyUserDefinedFunction(); checkErrorContains( "expected <String, Descriptor> type for 'attrs' but got <string, string> instead", "rule(impl, attrs = {'a1': 'some text'})"); } @Test public void testLabel() throws Exception { Object result = evalRuleClassCode("Label('//foo/foo:foo')"); assertThat(result).isInstanceOf(Label.class); assertEquals("//foo/foo:foo", result.toString()); } @Test public void testLabelSameInstance() throws Exception { Object l1 = evalRuleClassCode("Label('//foo/foo:foo')"); // Implicitly creates a new pkgContext and environment, yet labels should be the same. Object l2 = evalRuleClassCode("Label('//foo/foo:foo')"); assertSame(l2, l1); } @Test public void testLabelNameAndPackage() throws Exception { Object result = evalRuleClassCode("Label('//foo/bar:baz').name"); assertEquals("baz", result); // NB: implicitly creates a new pkgContext and environments, yet labels should be the same. 
result = evalRuleClassCode("Label('//foo/bar:baz').package"); assertEquals("foo/bar", result); } @Test public void testRuleLabelDefaultValue() throws Exception { evalAndExport( "def impl(ctx): return None\n" + "r1 = rule(impl, attrs = {'a1': " + "attr.label(default = Label('//foo:foo'), allow_files=True)})"); RuleClass c = ((RuleFunction) lookup("r1")).getRuleClass(); Attribute a = c.getAttributeByName("a1"); assertThat(a.getDefaultValueForTesting()).isInstanceOf(Label.class); assertEquals("//foo:foo", a.getDefaultValueForTesting().toString()); } @Test public void testIntDefaultValue() throws Exception { evalAndExport( "def impl(ctx): return None", "r1 = rule(impl, attrs = {'a1': attr.int(default = 40+2)})"); RuleClass c = ((RuleFunction) lookup("r1")).getRuleClass(); Attribute a = c.getAttributeByName("a1"); assertEquals(42, a.getDefaultValueForTesting()); } @Test public void testFileType() throws Exception { Object result = evalRuleClassCode("FileType(['.css'])"); SkylarkFileType fts = (SkylarkFileType) result; assertEquals(ImmutableList.of(".css"), fts.getExtensions()); } @Test public void testRuleInheritsBaseRuleAttributes() throws Exception { evalAndExport("def impl(ctx): return None", "r1 = rule(impl)"); RuleClass c = ((RuleFunction) lookup("r1")).getRuleClass(); assertTrue(c.hasAttr("tags", Type.STRING_LIST)); assertTrue(c.hasAttr("visibility", BuildType.NODEP_LABEL_LIST)); assertTrue(c.hasAttr("deprecation", Type.STRING)); assertTrue(c.hasAttr(":action_listener", BuildType.LABEL_LIST)); // required for extra actions } private void checkTextMessage(String from, String... 
lines) throws Exception { Object result = evalRuleClassCode(from); assertEquals(Joiner.on("\n").join(lines) + "\n", result); } @Test public void testSimpleTextMessagesBooleanFields() throws Exception { checkTextMessage("struct(name=True).to_proto()", "name: true"); checkTextMessage("struct(name=False).to_proto()", "name: false"); } @Test public void testSimpleTextMessages() throws Exception { checkTextMessage("struct(name='value').to_proto()", "name: \"value\""); checkTextMessage("struct(name=['a', 'b']).to_proto()", "name: \"a\"", "name: \"b\""); checkTextMessage("struct(name=123).to_proto()", "name: 123"); checkTextMessage("struct(name=[1, 2, 3]).to_proto()", "name: 1", "name: 2", "name: 3"); checkTextMessage("struct(a=struct(b='b')).to_proto()", "a {", " b: \"b\"", "}"); checkTextMessage( "struct(a=[struct(b='x'), struct(b='y')]).to_proto()", "a {", " b: \"x\"", "}", "a {", " b: \"y\"", "}"); checkTextMessage( "struct(a=struct(b=struct(c='c'))).to_proto()", "a {", " b {", " c: \"c\"", " }", "}"); } @Test public void testProtoFieldsOrder() throws Exception { checkTextMessage("struct(d=4, b=2, c=3, a=1).to_proto()", "a: 1", "b: 2", "c: 3", "d: 4"); } @Test public void testTextMessageEscapes() throws Exception { checkTextMessage("struct(name='a\"b').to_proto()", "name: \"a\\\"b\""); checkTextMessage("struct(name='a\\'b').to_proto()", "name: \"a'b\""); checkTextMessage("struct(name='a\\nb').to_proto()", "name: \"a\\nb\""); // struct(name="a\\\"b") -> name: "a\\\"b" checkTextMessage("struct(name='a\\\\\\\"b').to_proto()", "name: \"a\\\\\\\"b\""); } @Test public void testTextMessageInvalidElementInListStructure() throws Exception { checkErrorContains( "Invalid text format, expected a struct, a string, a bool, or " + "an int but got a list for list element in struct field 'a'", "struct(a=[['b']]).to_proto()"); } @Test public void testTextMessageInvalidStructure() throws Exception { checkErrorContains( "Invalid text format, expected a struct, a string, a bool, or an int 
" + "but got a function for struct field 'a'", "struct(a=rule).to_proto()"); } private void checkJson(String from, String expected) throws Exception { Object result = evalRuleClassCode(from); assertEquals(expected, result); } @Test public void testJsonBooleanFields() throws Exception { checkJson("struct(name=True).to_json()", "{\"name\":true}"); checkJson("struct(name=False).to_json()", "{\"name\":false}"); } @Test public void testJsonEncoding() throws Exception { checkJson("struct(name='value').to_json()", "{\"name\":\"value\"}"); checkJson("struct(name=['a', 'b']).to_json()", "{\"name\":[\"a\",\"b\"]}"); checkJson("struct(name=123).to_json()", "{\"name\":123}"); checkJson("struct(name=[1, 2, 3]).to_json()", "{\"name\":[1,2,3]}"); checkJson("struct(a=struct(b='b')).to_json()", "{\"a\":{\"b\":\"b\"}}"); checkJson("struct(a=[struct(b='x'), struct(b='y')]).to_json()", "{\"a\":[{\"b\":\"x\"},{\"b\":\"y\"}]}"); checkJson("struct(a=struct(b=struct(c='c'))).to_json()", "{\"a\":{\"b\":{\"c\":\"c\"}}}"); } @Test public void testJsonEscapes() throws Exception { checkJson("struct(name='a\"b').to_json()", "{\"name\":\"a\\\"b\"}"); checkJson("struct(name='a\\'b').to_json()", "{\"name\":\"a'b\"}"); checkJson("struct(name='a\\\\b').to_json()", "{\"name\":\"a\\\\b\"}"); checkJson("struct(name='a\\nb').to_json()", "{\"name\":\"a\\nb\"}"); checkJson("struct(name='a\\rb').to_json()", "{\"name\":\"a\\rb\"}"); checkJson("struct(name='a\\tb').to_json()", "{\"name\":\"a\\tb\"}"); } @Test public void testJsonNestedListStructure() throws Exception { checkJson("struct(a=[['b']]).to_json()", "{\"a\":[[\"b\"]]}"); } @Test public void testJsonInvalidStructure() throws Exception { checkErrorContains( "Invalid text format, expected a struct, a string, a bool, or an int but got a " + "function for struct field 'a'", "struct(a=rule).to_json()"); } @Test public void testLabelAttrWrongDefault() throws Exception { checkErrorContains( "expected Label or Label-returning function or NoneType for 
'default' " + "while calling label but got string instead: //foo:bar", "attr.label(default = '//foo:bar')"); } @Test public void testLabelGetRelative() throws Exception { assertEquals("//foo:baz", eval("Label('//foo:bar').relative('baz')").toString()); assertEquals("//baz:qux", eval("Label('//foo:bar').relative('//baz:qux')").toString()); } @Test public void testLabelGetRelativeSyntaxError() throws Exception { checkErrorContains( "invalid target name 'bad syntax': target names may not contain ' '", "Label('//foo:bar').relative('bad syntax')"); } @Test public void testLicenseAttributesNonconfigurable() throws Exception { scratch.file("test/BUILD"); scratch.file("test/rule.bzl", "def _impl(ctx):", " return", "some_rule = rule(", " implementation = _impl,", " attrs = {", " 'licenses': attr.license()", " }", ")"); scratch.file("third_party/foo/BUILD", "load('/test/rule', 'some_rule')", "some_rule(", " name='r',", " licenses = ['unencumbered']", ")"); invalidatePackages(); // Should succeed without a "licenses attribute is potentially configurable" loading error: createRuleContext("//third_party/foo:r"); } @Test public void testStructCreation() throws Exception { // TODO(fwe): cannot be handled by current testing suite eval("x = struct(a = 1, b = 2)"); assertThat(lookup("x")).isInstanceOf(ClassObject.class); } @Test public void testStructFields() throws Exception { // TODO(fwe): cannot be handled by current testing suite eval("x = struct(a = 1, b = 2)"); ClassObject x = (ClassObject) lookup("x"); assertEquals(1, x.getValue("a")); assertEquals(2, x.getValue("b")); } @Test public void testStructEquality() throws Exception { assertTrue((Boolean) eval("struct(a = 1, b = 2) == struct(b = 2, a = 1)")); assertFalse((Boolean) eval("struct(a = 1) == struct(a = 1, b = 2)")); assertFalse((Boolean) eval("struct(a = 1, b = 2) == struct(a = 1)")); // Compare a recursive object to itself to make sure reference equality is checked assertTrue((Boolean) eval("s = (struct(a = 1, b = [])); 
s.b.append(s); s == s")); assertFalse((Boolean) eval("struct(a = 1, b = 2) == struct(a = 1, b = 3)")); assertFalse((Boolean) eval("struct(a = 1) == [1]")); assertFalse((Boolean) eval("[1] == struct(a = 1)")); assertTrue((Boolean) eval("struct() == struct()")); assertFalse((Boolean) eval("struct() == struct(a = 1)")); eval("foo = provider(); bar = provider()"); assertFalse((Boolean) eval("struct(a = 1) == foo(a = 1)")); assertFalse((Boolean) eval("foo(a = 1) == struct(a = 1)")); assertFalse((Boolean) eval("foo(a = 1) == bar(a = 1)")); assertTrue((Boolean) eval("foo(a = 1) == foo(a = 1)")); } @Test public void testStructIncomparability() throws Exception { checkErrorContains("Cannot compare structs", "struct(a = 1) < struct(a = 2)"); checkErrorContains("Cannot compare structs", "struct(a = 1) > struct(a = 2)"); checkErrorContains("Cannot compare structs", "struct(a = 1) <= struct(a = 2)"); checkErrorContains("Cannot compare structs", "struct(a = 1) >= struct(a = 2)"); } @Test public void testStructAccessingFieldsFromSkylark() throws Exception { eval("x = struct(a = 1, b = 2)", "x1 = x.a", "x2 = x.b"); assertThat(lookup("x1")).isEqualTo(1); assertThat(lookup("x2")).isEqualTo(2); } @Test public void testStructAccessingUnknownField() throws Exception { checkErrorContains( "'struct' object has no attribute 'c'\n" + "Available attributes: a, b", "x = struct(a = 1, b = 2)", "y = x.c"); } @Test public void testStructAccessingUnknownFieldWithArgs() throws Exception { checkErrorContains( "struct has no method 'c'", "x = struct(a = 1, b = 2)", "y = x.c()"); } @Test public void testStructAccessingNonFunctionFieldWithArgs() throws Exception { checkErrorContains( "struct field 'a' is not a function", "x = struct(a = 1, b = 2)", "x1 = x.a(1)"); } @Test public void testStructAccessingFunctionFieldWithArgs() throws Exception { eval("def f(x): return x+5", "x = struct(a = f, b = 2)", "x1 = x.a(1)"); assertThat(lookup("x1")).isEqualTo(6); } @Test public void testStructPosArgs() throws 
Exception { checkErrorContains( "struct(**kwargs) does not accept positional arguments, but got 1", "x = struct(1, b = 2)"); } @Test public void testStructConcatenationFieldNames() throws Exception { // TODO(fwe): cannot be handled by current testing suite eval("x = struct(a = 1, b = 2)", "y = struct(c = 1, d = 2)", "z = x + y\n"); SkylarkClassObject z = (SkylarkClassObject) lookup("z"); assertEquals(ImmutableSet.of("a", "b", "c", "d"), z.getKeys()); } @Test public void testStructConcatenationFieldValues() throws Exception { // TODO(fwe): cannot be handled by current testing suite eval("x = struct(a = 1, b = 2)", "y = struct(c = 1, d = 2)", "z = x + y\n"); SkylarkClassObject z = (SkylarkClassObject) lookup("z"); assertEquals(1, z.getValue("a")); assertEquals(2, z.getValue("b")); assertEquals(1, z.getValue("c")); assertEquals(2, z.getValue("d")); } @Test public void testStructConcatenationCommonFields() throws Exception { checkErrorContains("Cannot concat structs with common field(s): a", "x = struct(a = 1, b = 2)", "y = struct(c = 1, a = 2)", "z = x + y\n"); } @Test public void testConditionalStructConcatenation() throws Exception { // TODO(fwe): cannot be handled by current testing suite eval("def func():", " x = struct(a = 1, b = 2)", " if True:", " x += struct(c = 1, d = 2)", " return x", "x = func()"); SkylarkClassObject x = (SkylarkClassObject) lookup("x"); assertEquals(1, x.getValue("a")); assertEquals(2, x.getValue("b")); assertEquals(1, x.getValue("c")); assertEquals(2, x.getValue("d")); } @Test public void testGetattrNoAttr() throws Exception { checkErrorContains( "object of type 'struct' has no attribute \"b\"", "s = struct(a='val')", "getattr(s, 'b')"); } @Test public void testGetattr() throws Exception { eval( "s = struct(a='val')", "x = getattr(s, 'a')", "y = getattr(s, 'b', 'def')", "z = getattr(s, 'b', default = 'def')", "w = getattr(s, 'a', default='ignored')"); assertThat(lookup("x")).isEqualTo("val"); assertThat(lookup("y")).isEqualTo("def"); 
assertThat(lookup("z")).isEqualTo("def"); assertThat(lookup("w")).isEqualTo("val"); } @Test public void testHasattr() throws Exception { eval("s = struct(a=1)", "x = hasattr(s, 'a')", "y = hasattr(s, 'b')\n"); assertThat(lookup("x")).isEqualTo(true); assertThat(lookup("y")).isEqualTo(false); } @Test public void testStructStr() throws Exception { assertThat(eval("str(struct(x = 2, y = 3, z = 4))")) .isEqualTo("struct(x = 2, y = 3, z = 4)"); } @Test public void testStructsInSets() throws Exception { eval("depset([struct(a='a')])"); } @Test public void testStructsInDicts() throws Exception { eval("d = {struct(a = 1): 'aa', struct(b = 2): 'bb'}"); assertThat(eval("d[struct(a = 1)]")).isEqualTo("aa"); assertThat(eval("d[struct(b = 2)]")).isEqualTo("bb"); assertThat(eval("str([d[k] for k in d])")).isEqualTo("[\"aa\", \"bb\"]"); checkErrorContains( "unhashable type: 'struct'", "{struct(a = []): 'foo'}"); } @Test public void testStructMembersAreImmutable() throws Exception { checkErrorContains( "cannot assign to 's.x'", "s = struct(x = 'a')", "s.x = 'b'\n"); } @Test public void testStructDictMembersAreMutable() throws Exception { eval( "s = struct(x = {'a' : 1})", "s.x['b'] = 2\n"); assertThat(((SkylarkClassObject) lookup("s")).getValue("x")) .isEqualTo(ImmutableMap.of("a", 1, "b", 2)); } @Test public void testNsetGoodCompositeItem() throws Exception { eval("def func():", " return depset([struct(a='a')])", "s = func()"); Collection<Object> result = ((SkylarkNestedSet) lookup("s")).toCollection(); assertThat(result).hasSize(1); assertThat(result.iterator().next()).isInstanceOf(SkylarkClassObject.class); } @Test public void testNsetBadMutableItem() throws Exception { checkEvalError("depsets cannot contain mutable items", "depset([([],)])"); checkEvalError("depsets cannot contain mutable items", "depset([struct(a=[])])"); } private static SkylarkClassObject makeStruct(String field, Object value) { return SkylarkClassObjectConstructor.STRUCT.create( ImmutableMap.of(field, 
value), "no field '%'"); } private static SkylarkClassObject makeBigStruct(Environment env) { // struct(a=[struct(x={1:1}), ()], b=(), c={2:2}) return SkylarkClassObjectConstructor.STRUCT.create( ImmutableMap.<String, Object>of( "a", MutableList.<Object>of(env, SkylarkClassObjectConstructor.STRUCT.create(ImmutableMap.<String, Object>of( "x", SkylarkDict.<Object, Object>of(env, 1, 1)), "no field '%s'"), Tuple.of()), "b", Tuple.of(), "c", SkylarkDict.<Object, Object>of(env, 2, 2)), "no field '%s'"); } @Test public void testStructMutabilityShallow() throws Exception { assertTrue(EvalUtils.isImmutable(makeStruct("a", 1))); } private static MutableList<Object> makeList(Environment env) { return MutableList.<Object>of(env, 1, 2, 3); } @Test public void testStructMutabilityDeep() throws Exception { assertTrue(EvalUtils.isImmutable(Tuple.<Object>of(makeList(null)))); assertTrue(EvalUtils.isImmutable(makeStruct("a", makeList(null)))); assertTrue(EvalUtils.isImmutable(makeBigStruct(null))); assertFalse(EvalUtils.isImmutable(Tuple.<Object>of(makeList(ev.getEnvironment())))); assertFalse(EvalUtils.isImmutable(makeStruct("a", makeList(ev.getEnvironment())))); assertFalse(EvalUtils.isImmutable(makeBigStruct(ev.getEnvironment()))); } @Test public void declaredProviders() throws Exception { evalAndExport( "data = provider()", "d = data(x = 1, y ='abc')", "d_x = d.x", "d_y = d.y" ); assertThat(lookup("d_x")).isEqualTo(1); assertThat(lookup("d_y")).isEqualTo("abc"); SkylarkClassObjectConstructor dataConstructor = (SkylarkClassObjectConstructor) lookup("data"); SkylarkClassObject data = (SkylarkClassObject) lookup("d"); assertThat(data.getConstructor()).isEqualTo(dataConstructor); assertThat(dataConstructor.isExported()).isTrue(); assertThat(dataConstructor.getPrintableName()).isEqualTo("data"); assertThat(dataConstructor.getKey()).isEqualTo( new SkylarkClassObjectConstructor.SkylarkKey(FAKE_LABEL, "data") ); } @Test public void declaredProvidersConcatSuccess() throws Exception { 
evalAndExport( "data = provider()", "dx = data(x = 1)", "dy = data(y = 'abc')", "dxy = dx + dy", "x = dxy.x", "y = dxy.y" ); assertThat(lookup("x")).isEqualTo(1); assertThat(lookup("y")).isEqualTo("abc"); SkylarkClassObjectConstructor dataConstructor = (SkylarkClassObjectConstructor) lookup("data"); SkylarkClassObject dx = (SkylarkClassObject) lookup("dx"); assertThat(dx.getConstructor()).isEqualTo(dataConstructor); SkylarkClassObject dy = (SkylarkClassObject) lookup("dy"); assertThat(dy.getConstructor()).isEqualTo(dataConstructor); } @Test public void declaredProvidersConcatError() throws Exception { evalAndExport( "data1 = provider()", "data2 = provider()" ); checkEvalError("Cannot concat data1 with data2", "d1 = data1(x = 1)", "d2 = data2(y = 2)", "d = d1 + d2" ); } @Test public void structsAsDeclaredProvidersTest() throws Exception { evalAndExport( "data = struct(x = 1)" ); SkylarkClassObject data = (SkylarkClassObject) lookup("data"); assertThat(SkylarkClassObjectConstructor.STRUCT.isExported()).isTrue(); assertThat(data.getConstructor()).isEqualTo(SkylarkClassObjectConstructor.STRUCT); assertThat(data.getConstructor().getKey()) .isEqualTo(SkylarkClassObjectConstructor.STRUCT.getKey()); } @Test public void aspectAllAttrs() throws Exception { evalAndExport( "def _impl(target, ctx):", " pass", "my_aspect = aspect(_impl, attr_aspects=['*'])"); SkylarkAspect myAspect = (SkylarkAspect) lookup("my_aspect"); assertThat(myAspect.getDefinition(AspectParameters.EMPTY).propagateAlong( Attribute.attr("foo", BuildType.LABEL).allowedFileTypes().build() )).isTrue(); } @Test public void aspectRequiredAspectProvidersSingle() throws Exception { evalAndExport( "def _impl(target, ctx):", " pass", "my_aspect = aspect(_impl, required_aspect_providers=['java', 'cc'])" ); SkylarkAspect myAspect = (SkylarkAspect) lookup("my_aspect"); RequiredProviders requiredProviders = myAspect.getDefinition(AspectParameters.EMPTY) .getRequiredProvidersForAspects(); 
assertThat(requiredProviders.isSatisfiedBy(AdvertisedProviderSet.ANY)).isTrue(); assertThat(requiredProviders.isSatisfiedBy(AdvertisedProviderSet.EMPTY)).isFalse(); assertThat(requiredProviders.isSatisfiedBy( AdvertisedProviderSet.builder() .addSkylark("cc") .addSkylark("java") .build())) .isTrue(); assertThat(requiredProviders.isSatisfiedBy( AdvertisedProviderSet.builder() .addSkylark("cc") .build())) .isFalse(); } @Test public void aspectRequiredAspectProvidersAlternatives() throws Exception { evalAndExport( "def _impl(target, ctx):", " pass", "my_aspect = aspect(_impl, required_aspect_providers=[['java'], ['cc']])" ); SkylarkAspect myAspect = (SkylarkAspect) lookup("my_aspect"); RequiredProviders requiredProviders = myAspect.getDefinition(AspectParameters.EMPTY) .getRequiredProvidersForAspects(); assertThat(requiredProviders.isSatisfiedBy(AdvertisedProviderSet.ANY)).isTrue(); assertThat(requiredProviders.isSatisfiedBy(AdvertisedProviderSet.EMPTY)).isFalse(); assertThat(requiredProviders.isSatisfiedBy( AdvertisedProviderSet.builder() .addSkylark("java") .build())) .isTrue(); assertThat(requiredProviders.isSatisfiedBy( AdvertisedProviderSet.builder() .addSkylark("cc") .build())) .isTrue(); assertThat(requiredProviders.isSatisfiedBy( AdvertisedProviderSet.builder() .addSkylark("prolog") .build())) .isFalse(); } @Test public void aspectRequiredAspectProvidersEmpty() throws Exception { evalAndExport( "def _impl(target, ctx):", " pass", "my_aspect = aspect(_impl, required_aspect_providers=[])" ); SkylarkAspect myAspect = (SkylarkAspect) lookup("my_aspect"); RequiredProviders requiredProviders = myAspect.getDefinition(AspectParameters.EMPTY) .getRequiredProvidersForAspects(); assertThat(requiredProviders.isSatisfiedBy(AdvertisedProviderSet.ANY)).isFalse(); assertThat(requiredProviders.isSatisfiedBy(AdvertisedProviderSet.EMPTY)).isFalse(); } @Test public void aspectRequiredAspectProvidersDefault() throws Exception { evalAndExport( "def _impl(target, ctx):", " pass", 
"my_aspect = aspect(_impl)" ); SkylarkAspect myAspect = (SkylarkAspect) lookup("my_aspect"); RequiredProviders requiredProviders = myAspect.getDefinition(AspectParameters.EMPTY) .getRequiredProvidersForAspects(); assertThat(requiredProviders.isSatisfiedBy(AdvertisedProviderSet.ANY)).isFalse(); assertThat(requiredProviders.isSatisfiedBy(AdvertisedProviderSet.EMPTY)).isFalse(); } @Test public void starTheOnlyAspectArg() throws Exception { checkEvalError("'*' must be the only string in 'attr_aspects' list", "def _impl(target, ctx):", " pass", "aspect(_impl, attr_aspects=['*', 'foo'])"); } @Test public void testMandatoryConfigParameterForExecutableLabels() throws Exception { scratch.file("third_party/foo/extension.bzl", "def _main_rule_impl(ctx):", " pass", "my_rule = rule(_main_rule_impl,", " attrs = { ", " 'exe' : attr.label(executable = True, allow_files = True),", " },", ")" ); scratch.file("third_party/foo/BUILD", "load('extension', 'my_rule')", "my_rule(name = 'main', exe = ':tool.sh')" ); try { createRuleContext("//third_party/foo:main"); Assert.fail(); } catch (AssertionError e) { assertThat(e.getMessage()).contains("cfg parameter is mandatory when executable=True is " + "provided."); } } }
package com.atlassian.plugin.osgi.factory.transform.stage; import static com.atlassian.plugin.osgi.factory.transform.JarUtils.withJar; import static com.google.common.collect.Iterables.transform; import com.atlassian.plugin.osgi.factory.transform.JarUtils; import com.atlassian.plugin.osgi.factory.transform.PluginTransformationException; import org.apache.commons.io.IOUtils; import com.google.common.base.Function; import com.google.common.collect.ImmutableSet; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.util.Collections; import java.util.HashSet; import java.util.Set; import java.util.jar.JarEntry; import java.util.jar.JarFile; import java.util.jar.JarInputStream; /** * Contains utility functions for use in TransformStage implementations. * * @since 2.6.0 */ final class TransformStageUtils { /** * Not for instantiation. */ private TransformStageUtils() {} /** * Scan entries in jar for expectedItems. * It exits early once all the expected items are satisfied. * * @param inputStream the input jar entry stream, cannot be null. Caller is responsible for closing it. * @param expectedItems items expected from the stream, cannot be null. * @param mapper function which maps JarEntry to item which then can be matched against expectedItems, cannot be null. * * @return the set of matched items. 
* @throws IOException if the inputStream can't find the next entry or it is somehow corrupt */ static Set<String> scanJarForItems(final JarInputStream inputStream, final Set<String> expectedItems, final Function<JarEntry, String> mapper) throws IOException { final Set<String> matches = new HashSet<String>(); JarEntry entry; while ((entry = inputStream.getNextJarEntry()) != null) { final String item = mapper.apply(entry); if ((item != null) && expectedItems.contains(item)) { matches.add(item); // early exit opportunity if (matches.size() == expectedItems.size()) { break; } } } return Collections.unmodifiableSet(matches); } /** * Scan inner jars for expected classes. * Early exit once all the required classes are satisfied. * * @param pluginFile the plugin jar file, cannot be null. * @param innerJars the inner jars to look at, never null. This is because there can be inner jars that we're not interested in the plugin. * @param expectedClasses the classes that we expect to find, never null. * * @return the set of classes matched, never null. */ static Set<String> scanInnerJars(final File pluginFile, final Set<String> innerJars, final Set<String> expectedClasses) { return withJar(pluginFile, new JarUtils.Extractor<Set<String>>() { public Set<String> get(final JarFile pluginJarFile) { // this keeps track of all the matches. final Set<String> matches = new HashSet<String>(); // scan each inner jar. for (final String innerJar : innerJars) { JarInputStream innerJarStream = null; try { // read inner jar into JarInputStream. innerJarStream = new JarInputStream(pluginJarFile.getInputStream(pluginJarFile.getEntry(innerJar))); final Set<String> innerMatches = scanJarForItems(innerJarStream, expectedClasses, JarEntryToClassName.INSTANCE); // recalculate the matches. 
matches.addAll(innerMatches); } catch (final IOException ioe) { throw new PluginTransformationException("Error reading inner jar:" + innerJar + " in file: " + pluginFile, ioe); } finally { closeNestedStreamQuietly(innerJarStream); } // early exit. if (matches.size() == expectedClasses.size()) { break; } } return Collections.unmodifiableSet(matches); } }); } /** * Try to close the given streams in order. * Exit once one is closed. * * @param streams streams to be closed. The higher ones must come first. */ static void closeNestedStreamQuietly(final InputStream... streams) { for (final InputStream stream : streams) { if (stream != null) { IOUtils.closeQuietly(stream); break; } } } /** * Extracts package name from the given class name. * * @param fullClassName a valid class name. * @return package name. */ static String getPackageName(final String fullClassName) { return PackageName.INSTANCE.apply(fullClassName); } /** * Extract package names from the given set of classes. * * @param classes set of classes, cannot be null. * @return a set of package names, can be empty but never null. */ static Set<String> getPackageNames(final Iterable<String> classes) { return ImmutableSet.copyOf(transform(classes, PackageName.INSTANCE)); } /** * Convert a jar path to class name. * such as "com/atlassian/Test.class" -> "com.atlassian.Test". * * @param jarPath the entry name inside jar. * @return class name, or null if the path is not a class file. */ static String jarPathToClassName(final String jarPath) { if ((jarPath == null) || !jarPath.contains(".class")) { return null; } return jarPath.replaceAll("/", ".").substring(0, jarPath.length() - ".class".length()); } /** * Class name -> package name transformer. */ enum PackageName implements Function<String, String> { INSTANCE; public String apply(final String fullClassName) { // A valid java class name must have a dot in it. return fullClassName.substring(0, fullClassName.lastIndexOf(".")); } } /** * Maps jarEntry -> class name. 
*/ enum JarEntryToClassName implements Function<JarEntry, String> { INSTANCE; public String apply(final JarEntry entry) { final String jarPath = entry.getName(); if ((jarPath == null) || !jarPath.contains(".class")) { return null; } return jarPath.replaceAll("/", ".").substring(0, jarPath.length() - ".class".length()); } } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.click.element; import java.io.Serializable; import java.util.HashMap; import java.util.Map; import org.apache.click.Context; import org.apache.click.util.HtmlStringBuffer; /** * Provides a base class for rendering HTML elements, for example * JavaScript (&lt;script&gt;) and Cascading Stylesheets * (&lt;link&gt; / &lt;style&gt;). * <p/> * Subclasses should override {@link #getTag()} to return a specific HTML tag. */ public class Element implements Serializable { private static final long serialVersionUID = 1L; // Variables -------------------------------------------------------------- /** The Element attributes Map. */ private Map<String, String> attributes; // Public Properties ------------------------------------------------------ /** * Returns the Element HTML tag, the default value is <tt>null</tt>. * <p/> * Subclasses should override this method and return the correct tag. * * @return this Element HTML tag */ public String getTag() { return null; } /** * Return the HTML attribute with the given name, or null if the * attribute does not exist. 
* * @param name the name of link HTML attribute * @return the link HTML attribute */ public String getAttribute(String name) { if (hasAttributes()) { return getAttributes().get(name); } return null; } /** * Set the Element attribute with the given attribute name and value. * * @param name the attribute name * @param value the attribute value * @throws IllegalArgumentException if name parameter is null */ public void setAttribute(String name, String value) { if (name == null) { throw new IllegalArgumentException("Null name parameter"); } if (value != null) { getAttributes().put(name, value); } else { getAttributes().remove(name); } } /** * Return the Element attributes Map. * * @return the Element attributes Map. */ public Map<String, String> getAttributes() { if (attributes == null) { attributes = new HashMap<String, String>(); } return attributes; } /** * Return true if the Element has attributes or false otherwise. * * @return true if the Element has attributes on false otherwise */ public boolean hasAttributes() { return attributes != null && !attributes.isEmpty(); } /** * Returns true if specified attribute is defined, false otherwise. * * @param name the specified attribute to check * @return true if name is a defined attribute */ public boolean hasAttribute(String name) { return hasAttributes() && getAttributes().containsKey(name); } /** * Return the "id" attribute value or null if no id is defined. * * @return HTML element identifier attribute "id" value or null if no id * is defined */ public String getId() { return getAttribute("id"); } /** * Set the HTML id attribute for the with the given value. * * @param id the element HTML id attribute value to set */ public void setId(String id) { if (id != null) { setAttribute("id", id); } else { getAttributes().remove("id"); } } // Public Methods --------------------------------------------------------- /** * Return the thread local Context. 
* * @return the thread local Context */ public Context getContext() { return Context.getThreadLocalContext(); } /** * Render the HTML representation of the Element to the specified buffer. * <p/> * If {@link #getTag()} returns null, this method will return an empty * string. * * @param buffer the specified buffer to render the Element output to */ public void render(HtmlStringBuffer buffer) { if (getTag() == null) { return; } renderTagBegin(getTag(), buffer); renderTagEnd(getTag(), buffer); } /** * Return the HTML string representation of the Element. * * @return the HTML string representation of the Element */ @Override public String toString() { if (getTag() == null) { return ""; } HtmlStringBuffer buffer = new HtmlStringBuffer(getElementSizeEst()); render(buffer); return buffer.toString(); } // Protected Methods ------------------------------------------------------ /** * Append all the Element attributes to the specified buffer. * * @param buffer the specified buffer to append all the attributes */ protected void appendAttributes(HtmlStringBuffer buffer) { if (hasAttributes()) { buffer.appendAttributes(attributes); } } // Package Private Methods ------------------------------------------------ /** * Render the specified {@link #getTag() tag} and {@link #getAttributes()}. * <p/> * <b>Please note:</b> the tag will not be closed by this method. This * enables callers of this method to append extra attributes as needed. * <p/> * For example the result of calling: * <pre class="prettyprint"> * Field field = new TextField("mytext"); * HtmlStringBuffer buffer = new HtmlStringBuffer(); * field.renderTagBegin("div", buffer); * </pre> * will be: * <pre class="prettyprint"> * &lt;div name="mytext" id="mytext" * </pre> * Note that the tag is not closed. 
* * @param tagName the name of the tag to render * @param buffer the buffer to append the output to */ void renderTagBegin(String tagName, HtmlStringBuffer buffer) { if (tagName == null) { throw new IllegalStateException("Tag cannot be null"); } buffer.elementStart(tagName); buffer.appendAttribute("id", getId()); appendAttributes(buffer); } /** * Closes the specified {@link #getTag() tag}. * * @param tagName the name of the tag to close * @param buffer the buffer to append the output to */ void renderTagEnd(String tagName, HtmlStringBuffer buffer) { buffer.elementEnd(); } /** * Return the estimated rendered element size in characters. * * @return the estimated rendered element size in characters */ int getElementSizeEst() { int size = 0; if (getTag() != null && hasAttributes()) { //length of the markup -> </> == 3 //1 * tag.length() size += 3 + getTag().length(); //using 20 as an estimate size += 20 * getAttributes().size(); } return size; } }
// Copyright 2013 Cloudera Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.cloudera.impala.catalog; import static org.junit.Assert.fail; import java.util.Map; import org.junit.AfterClass; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; import com.cloudera.impala.testutil.CatalogServiceTestCatalog; import com.cloudera.impala.analysis.LiteralExpr; import com.cloudera.impala.common.AnalysisException; import com.cloudera.impala.common.ImpalaException; import com.cloudera.impala.thrift.ImpalaInternalServiceConstants; import com.cloudera.impala.thrift.TAccessLevel; import com.cloudera.impala.thrift.THBaseTable; import com.cloudera.impala.thrift.THdfsPartition; import com.cloudera.impala.thrift.THdfsTable; import com.cloudera.impala.thrift.TTable; import com.cloudera.impala.thrift.TTableType; import com.google.common.collect.Lists; /** * Test suite to verify proper conversion of Catalog objects to/from Thrift structs. 
*/ public class CatalogObjectToFromThriftTest { private static CatalogServiceCatalog catalog_; @BeforeClass public static void setUp() throws Exception { catalog_ = CatalogServiceTestCatalog.create(); } @AfterClass public static void cleanUp() { catalog_.close(); } @Test public void TestPartitionedTable() throws CatalogException { String[] dbNames = {"functional", "functional_avro", "functional_parquet", "functional_seq"}; for (String dbName: dbNames) { Table table = catalog_.getOrLoadTable(dbName, "alltypes"); TTable thriftTable = table.toThrift(); Assert.assertEquals(thriftTable.tbl_name, "alltypes"); Assert.assertEquals(thriftTable.db_name, dbName); Assert.assertTrue(thriftTable.isSetTable_type()); Assert.assertEquals(thriftTable.getClustering_columns().size(), 2); Assert.assertEquals(thriftTable.getTable_type(), TTableType.HDFS_TABLE); THdfsTable hdfsTable = thriftTable.getHdfs_table(); Assert.assertTrue(hdfsTable.hdfsBaseDir != null); // The table has 24 partitions + the default partition Assert.assertEquals(hdfsTable.getPartitions().size(), 25); Assert.assertTrue(hdfsTable.getPartitions().containsKey( new Long(ImpalaInternalServiceConstants.DEFAULT_PARTITION_ID))); for (Map.Entry<Long, THdfsPartition> kv: hdfsTable.getPartitions().entrySet()) { if (kv.getKey() == ImpalaInternalServiceConstants.DEFAULT_PARTITION_ID) { Assert.assertEquals(kv.getValue().getPartitionKeyExprs().size(), 0); } else { Assert.assertEquals(kv.getValue().getPartitionKeyExprs().size(), 2); } } // Now try to load the thrift struct. 
Table newTable = Table.fromThrift(catalog_.getDb(dbName), thriftTable); Assert.assertTrue(newTable instanceof HdfsTable); Assert.assertEquals(newTable.name_, thriftTable.tbl_name); Assert.assertEquals(newTable.numClusteringCols_, 2); // Currently only have table stats on "functional.alltypes" if (dbName.equals("functional")) Assert.assertEquals(7300, newTable.numRows_); HdfsTable newHdfsTable = (HdfsTable) newTable; Assert.assertEquals(newHdfsTable.getPartitions().size(), 25); boolean foundDefaultPartition = false; for (HdfsPartition hdfsPart: newHdfsTable.getPartitions()) { if (hdfsPart.getId() == ImpalaInternalServiceConstants.DEFAULT_PARTITION_ID) { Assert.assertEquals(foundDefaultPartition, false); foundDefaultPartition = true; } else { Assert.assertEquals(hdfsPart.getFileDescriptors().size(), 1); Assert.assertTrue( hdfsPart.getFileDescriptors().get(0).getFileBlocks().size() > 0); // Verify the partition access level is getting set properly. The alltypes_seq // table has two partitions that are read_only. if (dbName.equals("functional_seq") && ( hdfsPart.getPartitionName().equals("year=2009/month=1") || hdfsPart.getPartitionName().equals("year=2009/month=3"))) { Assert.assertEquals(TAccessLevel.READ_ONLY, hdfsPart.getAccessLevel()); } else { Assert.assertEquals(TAccessLevel.READ_WRITE, hdfsPart.getAccessLevel()); } } } Assert.assertEquals(foundDefaultPartition, true); } } /** * Validates proper to/fromThrift behavior for a table whose column definition does not * match its Avro schema definition. The expected behavior is that the Avro schema * definition will override any columns defined in the table. 
*/ @Test public void TestMismatchedAvroAndTableSchemas() throws CatalogException { Table table = catalog_.getOrLoadTable("functional_avro_snap", "schema_resolution_test"); TTable thriftTable = table.toThrift(); Assert.assertEquals(thriftTable.tbl_name, "schema_resolution_test"); Assert.assertTrue(thriftTable.isSetTable_type()); Assert.assertEquals(thriftTable.getColumns().size(), 8); Assert.assertEquals(thriftTable.getClustering_columns().size(), 0); Assert.assertEquals(thriftTable.getTable_type(), TTableType.HDFS_TABLE); // Now try to load the thrift struct. Table newTable = Table.fromThrift(catalog_.getDb("functional_avro_snap"), thriftTable); Assert.assertEquals(newTable.getColumns().size(), 8); // The table schema does not match the Avro schema - it has only 2 columns. Assert.assertEquals(newTable.getMetaStoreTable().getSd().getCols().size(), 2); } @Test public void TestHBaseTables() throws CatalogException { String dbName = "functional_hbase"; Table table = catalog_.getOrLoadTable(dbName, "alltypes"); TTable thriftTable = table.toThrift(); Assert.assertEquals(thriftTable.tbl_name, "alltypes"); Assert.assertEquals(thriftTable.db_name, dbName); Assert.assertTrue(thriftTable.isSetTable_type()); Assert.assertEquals(thriftTable.getClustering_columns().size(), 1); Assert.assertEquals(thriftTable.getTable_type(), TTableType.HBASE_TABLE); THBaseTable hbaseTable = thriftTable.getHbase_table(); Assert.assertEquals(hbaseTable.getFamilies().size(), 13); Assert.assertEquals(hbaseTable.getQualifiers().size(), 13); Assert.assertEquals(hbaseTable.getBinary_encoded().size(), 13); for (boolean isBinaryEncoded: hbaseTable.getBinary_encoded()) { // None of the columns should be binary encoded. 
Assert.assertTrue(!isBinaryEncoded); } Table newTable = Table.fromThrift(catalog_.getDb(dbName), thriftTable); Assert.assertTrue(newTable instanceof HBaseTable); HBaseTable newHBaseTable = (HBaseTable) newTable; Assert.assertEquals(newHBaseTable.getColumns().size(), 13); Assert.assertEquals(newHBaseTable.getColumn("double_col").getType(), Type.DOUBLE); Assert.assertEquals(newHBaseTable.getNumClusteringCols(), 1); } @Test public void TestHBaseTableWithBinaryEncodedCols() throws CatalogException { String dbName = "functional_hbase"; Table table = catalog_.getOrLoadTable(dbName, "alltypessmallbinary"); TTable thriftTable = table.toThrift(); Assert.assertEquals(thriftTable.tbl_name, "alltypessmallbinary"); Assert.assertEquals(thriftTable.db_name, dbName); Assert.assertTrue(thriftTable.isSetTable_type()); Assert.assertEquals(thriftTable.getClustering_columns().size(), 1); Assert.assertEquals(thriftTable.getTable_type(), TTableType.HBASE_TABLE); THBaseTable hbaseTable = thriftTable.getHbase_table(); Assert.assertEquals(hbaseTable.getFamilies().size(), 13); Assert.assertEquals(hbaseTable.getQualifiers().size(), 13); Assert.assertEquals(hbaseTable.getBinary_encoded().size(), 13); // Count the number of columns that are binary encoded. int numBinaryEncodedCols = 0; for (boolean isBinaryEncoded: hbaseTable.getBinary_encoded()) { if (isBinaryEncoded) ++numBinaryEncodedCols; } Assert.assertEquals(numBinaryEncodedCols, 10); // Verify that creating a table from this thrift struct results in a valid // Table. 
Table newTable = Table.fromThrift(catalog_.getDb(dbName), thriftTable); Assert.assertTrue(newTable instanceof HBaseTable); HBaseTable newHBaseTable = (HBaseTable) newTable; Assert.assertEquals(newHBaseTable.getColumns().size(), 13); Assert.assertEquals(newHBaseTable.getColumn("double_col").getType(), Type.DOUBLE); Assert.assertEquals(newHBaseTable.getNumClusteringCols(), 1); } @Test public void TestTableLoadingErrors() throws ImpalaException { Table table = catalog_.getOrLoadTable("functional", "hive_index_tbl"); TTable thriftTable = table.toThrift(); Assert.assertEquals(thriftTable.tbl_name, "hive_index_tbl"); Assert.assertEquals(thriftTable.db_name, "functional"); table = catalog_.getOrLoadTable("functional", "alltypes"); HdfsTable hdfsTable = (HdfsTable) table; // Get a partition from the table. HdfsPartition part = hdfsTable.getPartitions().get(hdfsTable.getPartitions().size() - 1); // Create a dummy partition with an invalid decimal type. try { HdfsPartition dummyPart = new HdfsPartition(hdfsTable, part.toHmsPartition(), Lists.newArrayList(LiteralExpr.create("1.1", ScalarType.createDecimalType(1, 0)), LiteralExpr.create("1.1", ScalarType.createDecimalType(1, 0))), null, Lists.<HdfsPartition.FileDescriptor>newArrayList(), TAccessLevel.READ_WRITE); fail("Expected metadata to be malformed."); } catch (AnalysisException e) { Assert.assertTrue(e.getMessage().contains("invalid DECIMAL(1,0) value: 1.1")); } } @Test public void TestView() throws CatalogException { Table table = catalog_.getOrLoadTable("functional", "view_view"); TTable thriftTable = table.toThrift(); Assert.assertEquals(thriftTable.tbl_name, "view_view"); Assert.assertEquals(thriftTable.db_name, "functional"); Assert.assertFalse(thriftTable.isSetHdfs_table()); Assert.assertFalse(thriftTable.isSetHbase_table()); Assert.assertTrue(thriftTable.isSetMetastore_table()); } }
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package org.jetbrains.plugins.gradle.service.settings; import com.intellij.icons.AllIcons; import com.intellij.ide.util.projectWizard.WizardContext; import com.intellij.openapi.Disposable; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.ApplicationNamesInfo; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.externalSystem.model.settings.LocationSettingType; import com.intellij.openapi.externalSystem.service.execution.ExternalSystemJdkUtil; import com.intellij.openapi.externalSystem.settings.ExternalProjectSettings; import com.intellij.openapi.externalSystem.util.ExternalSystemUiUtil; import com.intellij.openapi.externalSystem.util.PaintAwarePanel; import com.intellij.openapi.options.ConfigurationException; import com.intellij.openapi.project.Project; import com.intellij.openapi.project.ProjectManager; import com.intellij.openapi.projectRoots.Sdk; import com.intellij.openapi.projectRoots.SdkModificator; import com.intellij.openapi.roots.ui.configuration.SdkComboBox; import com.intellij.openapi.roots.ui.configuration.SdkLookupProvider; import com.intellij.openapi.roots.ui.configuration.projectRoot.ProjectSdksModel; import com.intellij.openapi.roots.ui.util.CompositeAppearance; import com.intellij.openapi.ui.ComboBox; import com.intellij.openapi.ui.MessageType; import com.intellij.openapi.util.*; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.util.registry.Registry; import com.intellij.openapi.util.text.StringUtil; import com.intellij.ui.*; import com.intellij.ui.components.JBCheckBox; import com.intellij.ui.components.JBLabel; import com.intellij.util.Alarm; import com.intellij.util.Consumer; import com.intellij.util.ObjectUtils; import com.intellij.util.ThrowableRunnable; import com.intellij.util.ui.GridBag; import 
com.intellij.util.ui.JBUI; import com.intellij.util.ui.UIUtil; import com.intellij.xml.util.XmlStringUtil; import one.util.streamex.StreamEx; import org.gradle.util.GradleVersion; import org.jetbrains.annotations.Nls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.plugins.gradle.execution.target.GradleRuntimeTargetUI; import org.jetbrains.plugins.gradle.execution.target.TargetPathFieldWithBrowseButton; import org.jetbrains.plugins.gradle.service.GradleInstallationManager; import org.jetbrains.plugins.gradle.settings.DistributionType; import org.jetbrains.plugins.gradle.settings.GradleProjectSettings; import org.jetbrains.plugins.gradle.settings.TestRunner; import org.jetbrains.plugins.gradle.util.GradleBundle; import org.jetbrains.plugins.gradle.util.GradleConstants; import org.jetbrains.plugins.gradle.util.GradleUtil; import javax.swing.*; import javax.swing.event.DocumentEvent; import javax.swing.event.DocumentListener; import java.awt.*; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.io.File; import java.util.*; import java.util.concurrent.TimeUnit; import static com.intellij.openapi.externalSystem.util.ExternalSystemUiUtil.INSETS; import static com.intellij.openapi.projectRoots.impl.SdkConfigurationUtil.createUniqueSdkName; import static com.intellij.openapi.roots.ui.configuration.SdkComboBoxModel.createJdkComboBoxModel; import static com.intellij.openapi.roots.ui.configuration.SdkLookupProvider.SdkInfo; import static org.jetbrains.plugins.gradle.util.GradleJvmComboBoxUtil.*; import static org.jetbrains.plugins.gradle.util.GradleJvmResolutionUtil.getGradleJvmLookupProvider; import static org.jetbrains.plugins.gradle.util.GradleJvmUtil.nonblockingResolveGradleJvmInfo; /** * @author Vladislav.Soroka */ @SuppressWarnings("FieldCanBeLocal") // Used implicitly by reflection at 
disposeUIResources() and showUi() public class IdeaGradleProjectSettingsControlBuilder implements GradleProjectSettingsControlBuilder { private static final Logger LOG = Logger.getInstance(IdeaGradleProjectSettingsControlBuilder.class); private static final long BALLOON_DELAY_MILLIS = TimeUnit.SECONDS.toMillis(1); private static final String HIDDEN_KEY = "hidden"; @NotNull private final GradleProjectSettings myInitialSettings; @NotNull private final Alarm myAlarm = new Alarm(); /** * The target {@link Project} reference of the UI control. * It can be the current project of the settings UI configurable (see {@org.jetbrains.plugins.gradle.service.settings.GradleConfigurable}), * or the target project from the wizard context. */ @NotNull private final Ref<Project> myProjectRef = Ref.create(); @NotNull private final Disposable myProjectRefDisposable = () -> myProjectRef.set(null); @Nullable JComboBox<DistributionTypeItem> myGradleDistributionComboBox; @Nullable JBLabel myGradleDistributionHint; @NotNull private LocationSettingType myGradleHomeSettingType = LocationSettingType.UNKNOWN; private boolean myShowBalloonIfNecessary; @Nullable private TargetPathFieldWithBrowseButton myGradleHomePathField; @SuppressWarnings({"unused", "RedundantSuppression"}) // used by ExternalSystemUiUtil.showUi to show/hide the component via reflection private JPanel myGradlePanel; @Nullable private JLabel myGradleJdkLabel; @Nullable private SdkComboBox myGradleJdkComboBox; private JPanel myGradleJdkComboBoxWrapper; private boolean dropGradleJdkComponents; private boolean dropUseWrapperButton; private boolean dropCustomizableWrapperButton; private boolean dropUseLocalDistributionButton; private boolean dropUseBundledDistributionButton; @SuppressWarnings({"unused", "RedundantSuppression"}) // used by ExternalSystemUiUtil.showUi to show/hide the component via reflection private JPanel myImportPanel; private JPanel myModulePerSourceSetPanel; @Nullable private JBCheckBox 
myResolveModulePerSourceSetCheckBox; private boolean dropResolveModulePerSourceSetCheckBox; @Nullable private JBCheckBox myResolveExternalAnnotationsCheckBox; private boolean dropResolveExternalAnnotationsCheckBox = false; @Nullable private JLabel myDelegateBuildLabel; @Nullable private ComboBox<BuildRunItem> myDelegateBuildCombobox; private boolean dropDelegateBuildCombobox; @Nullable private JLabel myTestRunnerLabel; @Nullable private ComboBox<TestRunnerItem> myTestRunnerCombobox; private boolean dropTestRunnerCombobox; private JPanel myDelegatePanel; public IdeaGradleProjectSettingsControlBuilder(@NotNull GradleProjectSettings initialSettings) { myInitialSettings = initialSettings; } public IdeaGradleProjectSettingsControlBuilder dropGradleJdkComponents() { dropGradleJdkComponents = true; return this; } public IdeaGradleProjectSettingsControlBuilder dropUseWrapperButton() { dropUseWrapperButton = true; return this; } public IdeaGradleProjectSettingsControlBuilder dropCustomizableWrapperButton() { dropCustomizableWrapperButton = true; return this; } public IdeaGradleProjectSettingsControlBuilder dropUseLocalDistributionButton() { dropUseLocalDistributionButton = true; return this; } public IdeaGradleProjectSettingsControlBuilder dropUseBundledDistributionButton() { dropUseBundledDistributionButton = true; return this; } /** * @deprecated see {@link ExternalProjectSettings#setUseAutoImport} for details */ @Deprecated(forRemoval = true) public IdeaGradleProjectSettingsControlBuilder dropUseAutoImportBox() { return this; } public IdeaGradleProjectSettingsControlBuilder dropResolveModulePerSourceSetCheckBox() { dropResolveModulePerSourceSetCheckBox = true; return this; } public IdeaGradleProjectSettingsControlBuilder dropResolveExternalAnnotationsCheckBox() { dropResolveExternalAnnotationsCheckBox = true; return this; } public IdeaGradleProjectSettingsControlBuilder dropDelegateBuildCombobox() { dropDelegateBuildCombobox = true; return this; } public 
IdeaGradleProjectSettingsControlBuilder dropTestRunnerCombobox() { dropTestRunnerCombobox = true; return this; } @Override public void showUi(boolean show) { ExternalSystemUiUtil.showUi(this, show); if (show) { // some controls need to remain hidden depending on the selection // also error notifications should be shown updateDistributionComponents(); updateDeprecatedControls(); } } @Override @NotNull public GradleProjectSettings getInitialSettings() { return myInitialSettings; } @Override public void createAndFillControls(PaintAwarePanel content, int indentLevel) { content.setPaintCallback(graphics -> showBalloonIfNecessary()); content.addPropertyChangeListener(new PropertyChangeListener() { @Override public void propertyChange(PropertyChangeEvent evt) { if (!"ancestor".equals(evt.getPropertyName())) { return; } // Configure the balloon to show on initial configurable drawing. myShowBalloonIfNecessary = evt.getNewValue() != null && evt.getOldValue() == null; if (evt.getNewValue() == null && evt.getOldValue() != null) { // Cancel delayed balloons when the configurable is hidden. 
myAlarm.cancelAllRequests(); } } }); addImportComponents(content, indentLevel); addDelegationComponents(content, indentLevel); addGradleComponents(content, indentLevel); } private void addImportComponents(PaintAwarePanel content, int indentLevel) { myImportPanel = addComponentsGroup(null, content, indentLevel, panel -> { if (!dropResolveModulePerSourceSetCheckBox) { myModulePerSourceSetPanel = new JPanel(new GridBagLayout()); panel.add(myModulePerSourceSetPanel, ExternalSystemUiUtil.getFillLineConstraints(0).insets(0, 0, 0, 0)); myModulePerSourceSetPanel.add( myResolveModulePerSourceSetCheckBox = new JBCheckBox(GradleBundle.message("gradle.settings.text.module.per.source.set", getIDEName())), ExternalSystemUiUtil.getFillLineConstraints(indentLevel)); JBLabel myResolveModulePerSourceSetHintLabel = new JBLabel( XmlStringUtil.wrapInHtml(GradleBundle.message("gradle.settings.text.module.per.source.set.hint")), UIUtil.ComponentStyle.SMALL); myResolveModulePerSourceSetHintLabel.setIcon(AllIcons.General.BalloonWarning12); myResolveModulePerSourceSetHintLabel.setVerticalTextPosition(SwingConstants.TOP); myResolveModulePerSourceSetHintLabel.setForeground(UIUtil.getLabelFontColor(UIUtil.FontColor.BRIGHTER)); GridBag constraints = ExternalSystemUiUtil.getFillLineConstraints(indentLevel); constraints.insets.top = 0; constraints.insets.left += UIUtil.getCheckBoxTextHorizontalOffset(myResolveModulePerSourceSetCheckBox); myModulePerSourceSetPanel.add(myResolveModulePerSourceSetHintLabel, constraints); } if (!dropResolveExternalAnnotationsCheckBox) { panel.add( myResolveExternalAnnotationsCheckBox = new JBCheckBox(GradleBundle.message("gradle.settings.text.download.annotations")), ExternalSystemUiUtil.getFillLineConstraints(indentLevel)); } }); } @Override public void disposeUIResources() { ExternalSystemUiUtil.disposeUi(this); Disposer.dispose(myAlarm); } /** * Updates GUI of the gradle configurable in order to show deduced path to gradle (if possible). 
*/
private void deduceGradleHomeIfPossible() {
  // Tries to auto-detect a local Gradle installation; shows a warning balloon when none is found,
  // otherwise fills the path field and marks the location as DEDUCED.
  if (myGradleHomePathField == null) return;

  File gradleHome = GradleInstallationManager.getInstance().getAutodetectedGradleHome(myProjectRef.get());
  if (gradleHome == null) {
    new DelayedBalloonInfo(MessageType.WARNING, LocationSettingType.UNKNOWN, BALLOON_DELAY_MILLIS).run();
    return;
  }
  myGradleHomeSettingType = LocationSettingType.DEDUCED;
  new DelayedBalloonInfo(MessageType.INFO, LocationSettingType.DEDUCED, BALLOON_DELAY_MILLIS).run();
  myGradleHomePathField.setText(gradleHome.getPath());
  myGradleHomePathField.getTextField().setForeground(LocationSettingType.DEDUCED.getColor());
}

private void addGradleComponents(PaintAwarePanel content, int indentLevel) {
  // Builds the "Gradle" group: distribution chooser plus Gradle JVM combobox.
  myGradlePanel = addComponentsGroup(GradleConstants.GRADLE_NAME, content, indentLevel, panel -> { //NON-NLS GRADLE_NAME
    addGradleChooserComponents(panel, indentLevel + 1);
    addGradleJdkComponents(panel, indentLevel + 1);
  });
}

@Override
public IdeaGradleProjectSettingsControlBuilder addGradleJdkComponents(JPanel content, int indentLevel) {
  if (!dropGradleJdkComponents) {
    Project project = ProjectManager.getInstance().getDefaultProject();
    myGradleJdkLabel = new JBLabel(GradleBundle.message("gradle.settings.text.jvm.path"));
    // The combobox lives in a wrapper so it can be recreated later without re-adding layout constraints.
    myGradleJdkComboBoxWrapper = new JPanel(new BorderLayout());
    recreateGradleJdkComboBox(project, new ProjectSdksModel());

    myGradleJdkLabel.setLabelFor(myGradleJdkComboBoxWrapper);

    content.add(myGradleJdkLabel, ExternalSystemUiUtil.getLabelConstraints(indentLevel));
    content.add(myGradleJdkComboBoxWrapper, ExternalSystemUiUtil.getFillLineConstraints(0));
  }
  return this;
}

@Override
public IdeaGradleProjectSettingsControlBuilder addGradleChooserComponents(JPanel content, int indentLevel) {
  // Distribution combobox + wrapper hint label + local-installation path field.
  // Each distribution option can be suppressed via the corresponding drop* builder flag.
  ArrayList<DistributionTypeItem> availableDistributions = new ArrayList<>();
  if (!dropUseWrapperButton) availableDistributions.add(new DistributionTypeItem(DistributionType.DEFAULT_WRAPPED));
  if (!dropCustomizableWrapperButton) availableDistributions.add(new DistributionTypeItem(DistributionType.WRAPPED));
  if (!dropUseLocalDistributionButton) availableDistributions.add(new DistributionTypeItem(DistributionType.LOCAL));
  if (!dropUseBundledDistributionButton) availableDistributions.add(new DistributionTypeItem(DistributionType.BUNDLED));

  myGradleDistributionComboBox = new ComboBox<>();
  myGradleDistributionComboBox.setRenderer(new MyItemCellRenderer<>());

  myGradleDistributionHint = new JBLabel();
  myGradleHomePathField = new TargetPathFieldWithBrowseButton();
  myGradleDistributionHint.setLabelFor(myGradleHomePathField);

  // Any edit of the path resets the "incorrect location" highlighting back to the normal color.
  myGradleHomePathField.getTextField().getDocument().addDocumentListener(new DocumentListener() {
    @Override
    public void insertUpdate(DocumentEvent e) {
      myGradleHomePathField.getTextField().setForeground(LocationSettingType.EXPLICIT_CORRECT.getColor());
    }

    @Override
    public void removeUpdate(DocumentEvent e) {
      myGradleHomePathField.getTextField().setForeground(LocationSettingType.EXPLICIT_CORRECT.getColor());
    }

    @Override
    public void changedUpdate(DocumentEvent e) {
    }
  });

  myGradleDistributionComboBox.addActionListener(new ActionListener() {
    @Override
    public void actionPerformed(ActionEvent e) {
      updateDistributionComponents();
    }
  });
  myGradleDistributionComboBox.setModel(new CollectionComboBoxModel<>(availableDistributions));

  if (!availableDistributions.isEmpty()) {
    content.add(new JBLabel(GradleBundle.message("gradle.settings.text.distribution")), ExternalSystemUiUtil.getLabelConstraints(indentLevel));
    content.add(myGradleDistributionComboBox, ExternalSystemUiUtil.getLabelConstraints(0));

    JPanel additionalControlsPanel = new JPanel(new GridBagLayout());
    additionalControlsPanel.add(myGradleDistributionHint);
    additionalControlsPanel.add(myGradleHomePathField, ExternalSystemUiUtil.getFillLineConstraints(0));
    content.add(additionalControlsPanel, ExternalSystemUiUtil.getFillLineConstraints(0).insets(0, 0, 0, 0));

    // adjust the combobox height to match the height of the editor and Gradle GDK combobox.
// - without setting the prefered size, it's resized when path component is shown/hidden
    // - without adjusting the height the combobox is a little smaller then the next combobox (Gradle JVM)
    boolean macTheme = UIUtil.isUnderDefaultMacTheme();
    myGradleDistributionComboBox.setPreferredSize(
      new Dimension(myGradleDistributionComboBox.getPreferredSize().width,
                    myGradleHomePathField.getPreferredSize().height + (macTheme ? 3 : 0)));
  }
  return this;
}

private void updateDistributionComponents() {
  // Shows/hides the local-path field and the wrapper hint to match the selected distribution type.
  if (myGradleDistributionComboBox == null) return;
  if (myGradleHomePathField == null) return;

  boolean localEnabled = getSelectedGradleDistribution() == DistributionType.LOCAL;
  boolean wrapperSelected = getSelectedGradleDistribution() == DistributionType.DEFAULT_WRAPPED;

  myGradleHomePathField.setEnabled(localEnabled);
  myGradleHomePathField.setVisible(localEnabled);

  if (myGradleDistributionHint != null) {
    myGradleDistributionHint.setEnabled(wrapperSelected);
    myGradleDistributionHint.setVisible(wrapperSelected);
  }

  if (localEnabled) {
    if (myGradleHomePathField.getText().isEmpty()) {
      deduceGradleHomeIfPossible();
    }
    else {
      // Re-validate the explicit path and remember whether to warn the user about it.
      Project project = myProjectRef.get();
      if (GradleInstallationManager.getInstance().isGradleSdkHome(project, myGradleHomePathField.getText())) {
        myGradleHomeSettingType = LocationSettingType.EXPLICIT_CORRECT;
      }
      else {
        myGradleHomeSettingType = LocationSettingType.EXPLICIT_INCORRECT;
        myShowBalloonIfNecessary = true;
      }
    }
    showBalloonIfNecessary();
  }
  else {
    myAlarm.cancelAllRequests();
  }
}

@Nullable
private DistributionType getSelectedGradleDistribution() {
  // null when the combobox is dropped or nothing is selected.
  if (myGradleDistributionComboBox == null) return null;
  Object selection = myGradleDistributionComboBox.getSelectedItem();
  return selection == null ? null : ((DistributionTypeItem)selection).value;
}

@Override
public boolean validate(GradleProjectSettings settings) throws ConfigurationException {
  // Rejects an undefined/invalid Gradle JVM and an empty/incorrect local Gradle home.
  if (myGradleJdkComboBox != null && !ApplicationManager.getApplication().isUnitTestMode()) {
    SdkInfo sdkInfo = getSelectedGradleJvmInfo(myGradleJdkComboBox);
    if (sdkInfo instanceof SdkInfo.Undefined) {
      throw new ConfigurationException(GradleBundle.message("gradle.jvm.undefined"));
    }
    if (sdkInfo instanceof SdkInfo.Resolved) {
      String homePath = ((SdkInfo.Resolved)sdkInfo).getHomePath();
      if (!ExternalSystemJdkUtil.isValidJdk(homePath)) {
        throw new ConfigurationException(GradleBundle.message("gradle.jvm.incorrect", homePath));
      }
    }
  }
  if (myGradleHomePathField != null && getSelectedGradleDistribution() == DistributionType.LOCAL) {
    String gradleHomePath = FileUtil.toCanonicalPath(myGradleHomePathField.getText());
    if (StringUtil.isEmpty(gradleHomePath)) {
      myGradleHomeSettingType = LocationSettingType.UNKNOWN;
      throw new ConfigurationException(GradleBundle.message("gradle.home.setting.type.explicit.empty", gradleHomePath));
    }
    else if (!GradleInstallationManager.getInstance().isGradleSdkHome(myProjectRef.get(), new File(gradleHomePath))) {
      myGradleHomeSettingType = LocationSettingType.EXPLICIT_INCORRECT;
      new DelayedBalloonInfo(MessageType.ERROR, myGradleHomeSettingType, 0).run();
      throw new ConfigurationException(GradleBundle.message("gradle.home.setting.type.explicit.incorrect", gradleHomePath));
    }
  }
  return true;
}

private @NotNull SdkLookupProvider getSdkLookupProvider(@NotNull Project project) {
  return getGradleJvmLookupProvider(project, myInitialSettings);
}

private @NotNull SdkInfo getSelectedGradleJvmInfo(@NotNull SdkComboBox comboBox) {
  // Resolves the combobox selection into concrete JVM info without blocking.
  Project project = comboBox.getModel().getProject();
  SdkLookupProvider sdkLookupProvider = getSdkLookupProvider(project);
  String externalProjectPath = myInitialSettings.getExternalProjectPath();
  Sdk projectSdk = comboBox.getModel().getSdksModel().getProjectSdk();
  String gradleJvm =
getSelectedGradleJvmReference(comboBox, sdkLookupProvider);
  return nonblockingResolveGradleJvmInfo(sdkLookupProvider, project, projectSdk, externalProjectPath, gradleJvm);
}

@Override
public void apply(GradleProjectSettings settings) {
  // Writes the current UI state back into the given settings object.
  settings.setCompositeBuild(myInitialSettings.getCompositeBuild());
  if (myGradleHomePathField != null) {
    String gradleHomePath = FileUtil.toCanonicalPath(myGradleHomePathField.getText());
    File gradleHomeFile = new File(gradleHomePath);
    String finalGradleHomePath;
    if (GradleInstallationManager.getInstance().isGradleSdkHome(myProjectRef.get(), gradleHomeFile)) {
      finalGradleHomePath = gradleHomePath;
    }
    else {
      // Not a valid Gradle home: try a suggested correction and reflect it in the UI asynchronously.
      finalGradleHomePath = GradleInstallationManager.getInstance().suggestBetterGradleHomePath(myProjectRef.get(), gradleHomePath);
      if (finalGradleHomePath != null) {
        SwingUtilities.invokeLater(() -> {
          myGradleHomePathField.setText(finalGradleHomePath);
        });
      }
    }
    if (StringUtil.isEmpty(finalGradleHomePath)) {
      settings.setGradleHome(null);
    }
    else {
      settings.setGradleHome(finalGradleHomePath);
      GradleUtil.storeLastUsedGradleHome(finalGradleHomePath);
    }
  }

  if (myGradleJdkComboBox != null) {
    // Commit any SDK-table edits first, then store the selected JVM reference (null when empty).
    wrapExceptions(() -> myGradleJdkComboBox.getModel().getSdksModel().apply());
    SdkLookupProvider sdkLookupProvider = getSdkLookupProvider(myGradleJdkComboBox.getModel().getProject());
    String gradleJvm = getSelectedGradleJvmReference(myGradleJdkComboBox, sdkLookupProvider);
    settings.setGradleJvm(StringUtil.isEmpty(gradleJvm) ? null : gradleJvm);
  }
  if (myResolveModulePerSourceSetCheckBox != null) {
    settings.setResolveModulePerSourceSet(myResolveModulePerSourceSetCheckBox.isSelected());
  }
  if (myResolveExternalAnnotationsCheckBox != null) {
    settings.setResolveExternalAnnotations(myResolveExternalAnnotationsCheckBox.isSelected());
  }
  if (myGradleDistributionComboBox != null) {
    Object selected = myGradleDistributionComboBox.getSelectedItem();
    if (selected instanceof DistributionTypeItem) {
      settings.setDistributionType(((DistributionTypeItem)selected).value);
    }
  }
  if (myDelegateBuildCombobox != null) {
    Object delegateBuildSelectedItem = myDelegateBuildCombobox.getSelectedItem();
    if (delegateBuildSelectedItem instanceof BuildRunItem) {
      settings.setDelegatedBuild(ObjectUtils.notNull(((BuildRunItem)delegateBuildSelectedItem).value, GradleProjectSettings.DEFAULT_DELEGATE));
    }
  }
  if (myTestRunnerCombobox != null) {
    Object testRunnerSelectedItem = myTestRunnerCombobox.getSelectedItem();
    if (testRunnerSelectedItem instanceof TestRunnerItem) {
      settings.setTestRunner(ObjectUtils.notNull(((TestRunnerItem)testRunnerSelectedItem).value, GradleProjectSettings.DEFAULT_TEST_RUNNER));
    }
  }
}

@Override
public boolean isModified() {
  // Compares each present control against the initial settings snapshot; any difference means "modified".
  if (myGradleDistributionComboBox != null &&
      myGradleDistributionComboBox.getSelectedItem() instanceof DistributionTypeItem &&
      ((DistributionTypeItem)myGradleDistributionComboBox.getSelectedItem()).value != myInitialSettings.getDistributionType()) {
    return true;
  }

  if (myResolveModulePerSourceSetCheckBox != null &&
      (myResolveModulePerSourceSetCheckBox.isSelected() != myInitialSettings.isResolveModulePerSourceSet())) {
    return true;
  }

  if (myResolveExternalAnnotationsCheckBox != null &&
      (myResolveExternalAnnotationsCheckBox.isSelected() != myInitialSettings.isResolveExternalAnnotations())) {
    return true;
  }

  if (myDelegateBuildCombobox != null && myDelegateBuildCombobox.getSelectedItem() instanceof MyItem &&
      !Objects.equals(((MyItem<?>)myDelegateBuildCombobox.getSelectedItem()).value,
myInitialSettings.getDelegatedBuild())) {
    return true;
  }

  if (myTestRunnerCombobox != null && myTestRunnerCombobox.getSelectedItem() instanceof MyItem &&
      !Objects.equals(((MyItem<?>)myTestRunnerCombobox.getSelectedItem()).value, myInitialSettings.getTestRunner())) {
    return true;
  }

  if (myGradleJdkComboBox != null) {
    SdkLookupProvider sdkLookupProvider = getSdkLookupProvider(myGradleJdkComboBox.getModel().getProject());
    String gradleJvm = getSelectedGradleJvmReference(myGradleJdkComboBox, sdkLookupProvider);
    if (!StringUtil.equals(gradleJvm, myInitialSettings.getGradleJvm())) {
      return true;
    }
    if (myGradleJdkComboBox.getModel().getSdksModel().isModified()) {
      return true;
    }
  }

  if (myGradleHomePathField == null) return false;
  String gradleHome = FileUtil.toCanonicalPath(myGradleHomePathField.getText());
  if (StringUtil.isEmpty(gradleHome)) {
    return !StringUtil.isEmpty(myInitialSettings.getGradleHome());
  }
  else {
    return !gradleHome.equals(myInitialSettings.getGradleHome());
  }
}

@Override
public void reset(@Nullable Project project, GradleProjectSettings settings, boolean isDefaultModuleCreation) {
  reset(project, settings, isDefaultModuleCreation, null);
}

@Override
public void reset(@Nullable Project project,
                  GradleProjectSettings settings,
                  boolean isDefaultModuleCreation,
                  @Nullable WizardContext wizardContext) {
  // Re-populates every control from the given settings and re-validates the Gradle home path.
  updateProjectRef(project, wizardContext);

  String gradleHome = settings.getGradleHome();
  if (myGradleHomePathField != null) {
    GradleRuntimeTargetUI.installActionListener(myGradleHomePathField, myProjectRef.get(),
                                                GradleBundle.message("gradle.settings.text.home.path"));
    myGradleHomePathField.setText(gradleHome == null ? "" : gradleHome);
    myGradleHomePathField.getTextField().setForeground(LocationSettingType.EXPLICIT_CORRECT.getColor());
  }
  resetImportControls(settings);
  resetGradleJdkComboBox(project, settings, wizardContext);
  resetWrapperControls(settings.getExternalProjectPath(), settings, isDefaultModuleCreation);
  resetGradleDelegationControls(wizardContext);

  if (StringUtil.isEmpty(gradleHome)) {
    myGradleHomeSettingType = LocationSettingType.UNKNOWN;
    deduceGradleHomeIfPossible();
  }
  else {
    File gradleHomeFile = new File(gradleHome);
    if (GradleInstallationManager.getInstance().isGradleSdkHome(project, gradleHomeFile)) {
      myGradleHomeSettingType = LocationSettingType.EXPLICIT_CORRECT;
    }
    else {
      // A fixable path counts as correct; only a hopeless one is flagged as incorrect.
      myGradleHomeSettingType = GradleInstallationManager.getInstance().suggestBetterGradleHomePath(project, gradleHome) != null
                                ? LocationSettingType.EXPLICIT_CORRECT
                                : LocationSettingType.EXPLICIT_INCORRECT;
    }
    myAlarm.cancelAllRequests();
    if (myGradleHomeSettingType == LocationSettingType.EXPLICIT_INCORRECT &&
        settings.getDistributionType() == DistributionType.LOCAL) {
      new DelayedBalloonInfo(MessageType.ERROR, myGradleHomeSettingType, 0).run();
    }
  }
  updateDeprecatedControls();
}

@Override
public void update(String linkedProjectPath, GradleProjectSettings settings, boolean isDefaultModuleCreation) {
  resetWrapperControls(linkedProjectPath, settings, isDefaultModuleCreation);
  resetImportControls(settings);
  updateDeprecatedControls();
}

private void resetImportControls(GradleProjectSettings settings) {
  if (myResolveModulePerSourceSetCheckBox != null) {
    myResolveModulePerSourceSetCheckBox.setSelected(settings.isResolveModulePerSourceSet());
    // The deprecated panel stays visible while the setting is off, or when re-enabled via the registry key.
    boolean showSetting = !settings.isResolveModulePerSourceSet()
                          || Registry.is("gradle.settings.showDeprecatedSettings", false);
    myModulePerSourceSetPanel.putClientProperty(HIDDEN_KEY, showSetting);
  }
  if (myResolveExternalAnnotationsCheckBox != null) {
    myResolveExternalAnnotationsCheckBox.setSelected(settings.isResolveExternalAnnotations());
  }
}

private void
updateDeprecatedControls() {
  if (myModulePerSourceSetPanel != null) {
    myModulePerSourceSetPanel.setVisible(myModulePerSourceSetPanel.getClientProperty(HIDDEN_KEY) == Boolean.TRUE);
  }
}

protected void resetGradleJdkComboBox(@Nullable final Project project,
                                      GradleProjectSettings settings,
                                      @Nullable WizardContext wizardContext) {
  ProjectSdksModel sdksModel = new ProjectSdksModel();
  resetGradleJdkComboBox(project, settings, wizardContext, sdksModel);
}

protected final void resetGradleJdkComboBox(
  @Nullable Project project,
  @NotNull GradleProjectSettings settings,
  @Nullable WizardContext wizardContext,
  @NotNull ProjectSdksModel sdksModel
) {
  if (myGradleJdkComboBox == null) return;
  // Fall back to the default project when none is available or it is already disposed.
  project = project == null || project.isDisposed() ? ProjectManager.getInstance().getDefaultProject() : project;
  Sdk projectSdk = wizardContext != null ? wizardContext.getProjectJdk() : null;
  setupProjectSdksModel(sdksModel, project, projectSdk);
  recreateGradleJdkComboBox(project, sdksModel);

  SdkLookupProvider sdkLookupProvider = getSdkLookupProvider(project);
  String externalProjectPath = myInitialSettings.getExternalProjectPath();
  addUsefulGradleJvmReferences(myGradleJdkComboBox, externalProjectPath);
  setSelectedGradleJvmReference(myGradleJdkComboBox, sdkLookupProvider, externalProjectPath, settings.getGradleJvm());
}

private void recreateGradleJdkComboBox(@NotNull Project project, @NotNull ProjectSdksModel sdksModel) {
  // The combobox is recreated (not refilled) because its model is bound to a particular sdksModel instance.
  if (myGradleJdkComboBox != null) {
    myGradleJdkComboBoxWrapper.remove(myGradleJdkComboBox);
  }
  myGradleJdkComboBox = new SdkComboBox(createJdkComboBoxModel(project, sdksModel));
  myGradleJdkComboBoxWrapper.add(myGradleJdkComboBox, BorderLayout.CENTER);
}

private void resetWrapperControls(String linkedProjectPath, @NotNull GradleProjectSettings settings, boolean isDefaultModuleCreation) {
  if (myGradleDistributionComboBox == null) return;

  if (isDefaultModuleCreation) {
    // The customizable-wrapper option is not offered while the module does not exist yet.
    DistributionTypeItem toRemove = new DistributionTypeItem(DistributionType.WRAPPED);
    ((CollectionComboBoxModel<DistributionTypeItem>)myGradleDistributionComboBox.getModel()).remove(toRemove);
  }

  if (StringUtil.isEmpty(linkedProjectPath) && !isDefaultModuleCreation) {
    myGradleDistributionComboBox.setSelectedItem(new DistributionTypeItem(DistributionType.LOCAL));
    return;
  }

  if (myGradleDistributionHint != null && !dropUseWrapperButton) {
    final boolean isGradleDefaultWrapperFilesExist = GradleUtil.isGradleDefaultWrapperFilesExist(linkedProjectPath);
    boolean showError = !isGradleDefaultWrapperFilesExist && !isDefaultModuleCreation;
    myGradleDistributionHint.setText(showError ? GradleBundle.message("gradle.settings.wrapper.not.found") : null);
    myGradleDistributionHint.setIcon(showError ? AllIcons.General.Error : null);
  }

  if (settings.getDistributionType() == null) {
    if (myGradleDistributionComboBox.getItemCount() > 0) {
      myGradleDistributionComboBox.setSelectedIndex(0);
    }
  }
  else {
    myGradleDistributionComboBox.setSelectedItem(new DistributionTypeItem(settings.getDistributionType()));
  }
}

private void addDelegationComponents(PaintAwarePanel content, int indentLevel) {
  // Builds the "Build and run" group: build-delegation combobox and test-runner combobox.
  myDelegatePanel = addComponentsGroup(GradleBundle.message("gradle.settings.text.build.run.title"), content, indentLevel, panel -> {
    if (dropDelegateBuildCombobox && dropTestRunnerCombobox) return;

    JBLabel label = new JBLabel(
      XmlStringUtil.wrapInHtml(GradleBundle.message("gradle.settings.text.build.run.hint", getIDEName())),
      UIUtil.ComponentStyle.SMALL);
    label.setForeground(UIUtil.getLabelFontColor(UIUtil.FontColor.BRIGHTER));
    GridBag constraints = ExternalSystemUiUtil.getFillLineConstraints(indentLevel + 1);
    constraints.insets.bottom = UIUtil.LARGE_VGAP;
    panel.add(label, constraints);

    if (!dropDelegateBuildCombobox) {
      BuildRunItem[] states = new BuildRunItem[]{new BuildRunItem(Boolean.TRUE), new BuildRunItem(Boolean.FALSE)};
      myDelegateBuildCombobox = new ComboBox<>(states);
      myDelegateBuildCombobox.setRenderer(new MyItemCellRenderer<>());
      myDelegateBuildCombobox.setSelectedItem(new
BuildRunItem(myInitialSettings.getDelegatedBuild()));
      myDelegateBuildLabel = new JBLabel(GradleBundle.message("gradle.settings.text.build.run"));
      panel.add(myDelegateBuildLabel, getLabelConstraints(indentLevel + 1));
      panel.add(myDelegateBuildCombobox, getLabelConstraints(0));
      panel.add(Box.createGlue(), ExternalSystemUiUtil.getFillLineConstraints(indentLevel + 1));

      myDelegateBuildLabel.setLabelFor(myDelegateBuildCombobox);
    }
    if (!dropTestRunnerCombobox) {
      TestRunnerItem[] testRunners = StreamEx.of(TestRunner.values()).map(TestRunnerItem::new).toArray(TestRunnerItem[]::new);
      myTestRunnerCombobox = new ComboBox<>(testRunners);
      myTestRunnerCombobox.setRenderer(new MyItemCellRenderer<>());
      myTestRunnerCombobox.setSelectedItem(new TestRunnerItem(myInitialSettings.getTestRunner()));
      // make sure that the two adjacent comboboxes have same size
      myTestRunnerCombobox.setPrototypeDisplayValue(new TestRunnerItem(TestRunner.CHOOSE_PER_TEST));
      if (myDelegateBuildCombobox != null) {
        myDelegateBuildCombobox.setPreferredSize(myTestRunnerCombobox.getPreferredSize());
      }

      myTestRunnerLabel = new JBLabel(GradleBundle.message("gradle.settings.text.run.tests"));
      panel.add(myTestRunnerLabel, getLabelConstraints(indentLevel + 1));
      panel.add(myTestRunnerCombobox, getLabelConstraints(0));
      panel.add(Box.createGlue(), ExternalSystemUiUtil.getFillLineConstraints(indentLevel + 1));

      myTestRunnerLabel.setLabelFor(myTestRunnerCombobox);
    }
  });
}

private void resetGradleDelegationControls(@Nullable WizardContext wizardContext) {
  // In the new-project wizard the delegation controls are not applicable: drop and remove them entirely.
  if (wizardContext != null) {
    dropTestRunnerCombobox();
    dropDelegateBuildCombobox();
    if (myDelegatePanel != null) {
      Container parent = myDelegatePanel.getParent();
      if (parent != null) {
        parent.remove(myDelegatePanel);
      }
      myDelegatePanel = null;
      myDelegateBuildCombobox = null;
      myTestRunnerCombobox = null;
    }
    return;
  }
  if (myDelegateBuildCombobox != null) {
    myDelegateBuildCombobox.setSelectedItem(new BuildRunItem(myInitialSettings.getDelegatedBuild()));
  }
  if (myTestRunnerCombobox != null) {
    myTestRunnerCombobox.setSelectedItem(new TestRunnerItem(myInitialSettings.getTestRunner()));
  }
}

void showBalloonIfNecessary() {
  // Shows a (possibly delayed) balloon describing the current gradle-home state; fires at most once per request.
  if (!myShowBalloonIfNecessary || (myGradleHomePathField != null && !myGradleHomePathField.isEnabled())) {
    return;
  }
  myShowBalloonIfNecessary = false;
  MessageType messageType = null;
  switch (myGradleHomeSettingType) {
    case DEDUCED:
      messageType = MessageType.INFO;
      break;
    case EXPLICIT_INCORRECT:
    case UNKNOWN:
      messageType = MessageType.ERROR;
      break;
    default:
  }
  if (messageType != null) {
    new DelayedBalloonInfo(messageType, myGradleHomeSettingType, BALLOON_DELAY_MILLIS).run();
  }
}

private void updateProjectRef(@Nullable Project project, @Nullable WizardContext wizardContext) {
  // Prefer the wizard's project when available; register the disposable once per project.
  if (wizardContext != null && wizardContext.getProject() != null) {
    project = wizardContext.getProject();
  }
  if (project != null && project != myProjectRef.get() &&
      Disposer.findRegisteredObject(project, myProjectRefDisposable) == null) {
    Disposer.register(project, myProjectRefDisposable);
  }
  myProjectRef.set(project);
}

private static JPanel addComponentsGroup(@Nullable @NlsContexts.Separator String title,
                                         PaintAwarePanel content,
                                         int indentLevel,
                                         @NotNull Consumer<JPanel> configuration) {
  // Creates a titled group panel; it is added to the parent only when the configurator produced components.
  JPanel result = new JPanel(new GridBagLayout());
  if (title != null) {
    GridBag constraints = ExternalSystemUiUtil.getFillLineConstraints(indentLevel);
    constraints.insets.top = UIUtil.LARGE_VGAP;
    result.add(new TitledSeparator(title), constraints);
  }
  int count = result.getComponentCount();
  configuration.consume(result);
  if (result.getComponentCount() > count) {
    content.add(result, ExternalSystemUiUtil.getFillLineConstraints(0).insets(0, 0, 0, 0));
  }
  return result;
}

private static void setupProjectSdksModel(@NotNull ProjectSdksModel sdksModel, @NotNull Project project, @Nullable Sdk projectSdk) {
  sdksModel.reset(project);
  deduplicateSdkNames(sdksModel);

  if (projectSdk == null) {
    projectSdk = sdksModel.getProjectSdk();
    // Find real sdk
    // see ProjectSdksModel#getProjectSdk for
// details
    projectSdk = sdksModel.findSdk(projectSdk);
  }
  if (projectSdk != null) {
    // resolves executable JDK
    // e.g: for Android projects
    projectSdk = ExternalSystemJdkUtil.resolveDependentJdk(projectSdk);
    // Find editable sdk
    // see ProjectSdksModel#getProjectSdk for details
    projectSdk = sdksModel.findSdk(projectSdk.getName());
  }
  sdksModel.setProjectSdk(projectSdk);
}

@NotNull
private static GridBag getLabelConstraints(int indentLevel) {
  Insets insets = JBUI.insets(0, INSETS + INSETS * indentLevel, 0, INSETS);
  return new GridBag().anchor(GridBagConstraints.WEST).weightx(0).insets(insets);
}

private static void wrapExceptions(ThrowableRunnable<Throwable> runnable) {
  // Rethrows any checked exception as an unchecked IllegalStateException.
  try {
    runnable.run();
  }
  catch (Throwable ex) {
    throw new IllegalStateException(ex);
  }
}

/**
 * Deduplicates sdks name in corrupted sdks model
 */
private static void deduplicateSdkNames(@NotNull ProjectSdksModel projectSdksModel) {
  Set<String> processedNames = new HashSet<>();
  Collection<Sdk> editableSdks = projectSdksModel.getProjectSdks().values();
  for (Sdk sdk : editableSdks) {
    if (processedNames.contains(sdk.getName())) {
      // Duplicate name: rename this SDK to a unique one and commit.
      SdkModificator sdkModificator = sdk.getSdkModificator();
      String name = createUniqueSdkName(sdk.getName(), editableSdks);
      sdkModificator.setName(name);
      sdkModificator.commitChanges();
    }
    processedNames.add(sdk.getName());
  }
}

@NlsSafe
static String getIDEName() {
  return ApplicationNamesInfo.getInstance().getFullProductName();
}

/** Renders a MyItem as its text plus an optional grey comment. */
private static class MyItemCellRenderer<T> extends ColoredListCellRenderer<MyItem<T>> {

  @Override
  protected void customizeCellRenderer(@NotNull JList<? extends MyItem<T>> list,
                                       MyItem<T> value,
                                       int index,
                                       boolean selected,
                                       boolean hasFocus) {
    if (value == null) return;
    CompositeAppearance.DequeEnd ending = new CompositeAppearance().getEnding();
    ending.addText(value.getText(), getTextAttributes(selected));
    if (value.getComment() != null) {
      SimpleTextAttributes commentAttributes = getCommentAttributes(selected);
      ending.addComment(value.getComment(), commentAttributes);
    }
    ending.getAppearance().customize(this);
  }

  @NotNull
  private static SimpleTextAttributes getTextAttributes(boolean selected) {
    return selected && !(SystemInfoRt.isWindows && UIManager.getLookAndFeel().getName().contains("Windows"))
           ? SimpleTextAttributes.SELECTED_SIMPLE_CELL_ATTRIBUTES
           : SimpleTextAttributes.SIMPLE_CELL_ATTRIBUTES;
  }

  @NotNull
  private static SimpleTextAttributes getCommentAttributes(boolean selected) {
    return SystemInfo.isMac && selected
           ? new SimpleTextAttributes(SimpleTextAttributes.STYLE_PLAIN, JBColor.WHITE)
           : SimpleTextAttributes.GRAY_ATTRIBUTES;
  }
}

/** Base class for combobox items: a nullable value plus display text and optional comment; equality is by value. */
private static abstract class MyItem<T> {
  @Nullable
  protected final T value;

  private MyItem(@Nullable T value) {
    this.value = value;
  }

  @NlsContexts.ListItem
  protected abstract String getText();

  @NlsContexts.ListItem
  protected abstract String getComment();

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (!(o instanceof MyItem)) return false;
    MyItem item = (MyItem)o;
    return Objects.equals(value, item.value);
  }

  @Override
  public int hashCode() {
    return Objects.hash(value);
  }
}

/** Runnable that shows a balloon for the Gradle-home field, rescheduling itself until its trigger time. */
private class DelayedBalloonInfo implements Runnable {
  private final MessageType myMessageType;
  private final @Nls String myText;
  private final long myTriggerTime;

  DelayedBalloonInfo(@NotNull MessageType messageType, @NotNull LocationSettingType settingType, long delayMillis) {
    myMessageType = messageType;
    myText = settingType.getDescription(GradleConstants.SYSTEM_ID);
    myTriggerTime = System.currentTimeMillis() + delayMillis;
  }

  @Override
  public void run() {
    long diff = myTriggerTime - System.currentTimeMillis();
    if (diff > 0) {
      // Too early: re-arm the alarm and try again when the trigger time is reached.
      myAlarm.cancelAllRequests();
      myAlarm.addRequest(this, diff);
      return;
    }
    if (myGradleHomePathField == null || !myGradleHomePathField.isShowing()) {
      // Don't schedule the balloon if the configurable is hidden.
      return;
    }
    ExternalSystemUiUtil.showBalloon(myGradleHomePathField, myMessageType, myText);
  }
}

/** Combobox item for the build/run delegation choice (TRUE = Gradle, FALSE = the IDE). */
private final class BuildRunItem extends MyItem<Boolean> {

  private BuildRunItem(@Nullable Boolean value) {
    super(value);
  }

  @Override
  protected String getText() {
    return getText(value);
  }

  @Override
  protected String getComment() {
    return Comparing.equal(value, GradleProjectSettings.DEFAULT_DELEGATE) ? GradleBundle.message("gradle.settings.text.default") : null;
  }

  @NotNull
  @NlsContexts.ListItem
  private String getText(@Nullable Boolean state) {
    if (state == Boolean.TRUE) {
      return GradleConstants.GRADLE_NAME; //NON-NLS GRADLE_NAME
    }
    if (state == Boolean.FALSE) {
      return getIDEName();
    }
    LOG.error("Unexpected: " + state);
    return GradleBundle.message("gradle.settings.text.unexpected", state);
  }
}

/** Combobox item for the test-runner choice. */
private final class TestRunnerItem extends MyItem<TestRunner> {

  private TestRunnerItem(@Nullable TestRunner value) {
    super(value);
  }

  @Override
  protected String getText() {
    return getText(value);
  }

  @Override
  protected String getComment() {
    return Comparing.equal(value, GradleProjectSettings.DEFAULT_TEST_RUNNER) ? GradleBundle.message("gradle.settings.text.default") : null;
  }

  @NotNull
  @NlsContexts.ListItem
  private String getText(@Nullable TestRunner runner) {
    if (runner == TestRunner.GRADLE) {
      return GradleConstants.GRADLE_NAME; //NON-NLS GRADLE_NAME
    }
    if (runner == TestRunner.PLATFORM) {
      return getIDEName();
    }
    if (runner == TestRunner.CHOOSE_PER_TEST) {
      return GradleBundle.message("gradle.settings.text.build.run.per.test");
    }
    LOG.error("Unexpected: " + runner);
    return GradleBundle.message("gradle.settings.text.unexpected", runner);
  }
}

/** Combobox item for the Gradle distribution choice. */
private final class DistributionTypeItem extends MyItem<DistributionType> {

  private DistributionTypeItem(@Nullable DistributionType value) {
    super(value);
  }

  @Override
  protected String getText() {
    return getText(value);
  }

  @Override
  protected String getComment() {
    return null;
  }

  @NotNull
  @NlsContexts.ListItem
  private String getText(@Nullable DistributionType value) {
    if (value != null) {
      switch (value) {
        case BUNDLED:
          return GradleBundle.message("gradle.settings.text.distribution.bundled", GradleVersion.current().getVersion());
        case DEFAULT_WRAPPED:
          return GradleBundle.message("gradle.settings.text.distribution.wrapper");
        case WRAPPED:
          return GradleBundle.message("gradle.settings.text.distribution.wrapper.task");
        case LOCAL:
          return GradleBundle.message("gradle.settings.text.distribution.location");
      }
    }
    LOG.error("Unexpected: " + value);
    return GradleBundle.message("gradle.settings.text.unexpected", value);
  }
}
}
package eggdropsoap.ropesandbridges; import java.lang.Math; public class FreeHangingRopeCurve { private class _Coord { public int x = 0; public int y = 0; public int z = 0; public _Coord(int x, int y, int z) { this.x = x; this.y = y; this.z = z; } public _Coord(){} }; private _Coord lowside; private _Coord highside; private int ropeLength; private double horizontalDistance; private double droop; private class _y_func { double mu; double k; double x1; double L1; public _y_func() { L1 = _Find_L1(droop, ropeLength, highside.y - lowside.y); mu = (2*droop) / (Math.pow(L1, 2) - Math.pow(droop, 2) ); k = lowside.y - droop - Math.pow(mu, -1); x1 = _asinh(mu * L1) / mu; System.out.printf("y = %3.3fcosh(%3.3f x - %3.3f) + %3.3f\n", Math.pow(mu, -1), mu, mu*x1, k); } public double get(double x) { return Math.pow(mu, -1) * Math.cosh(mu * x - mu * x1) + k; } } private _y_func yfunc; private double _getDistance(_Coord a, _Coord b) { return Utils.getDistance(a.x, a.y, a.z, b.x, b.y, b.z); } private double _getHorizontalDistance(_Coord a, _Coord b) { return Utils.getHorizontalDistance(a.x, a.y, a.z, b.x, b.y, b.z); } protected FreeHangingRopeCurve(int ropeLength, int side1x, int side1y, int side1z, int side2x, int side2y, int side2z) { this.ropeLength = ropeLength; if ( side1y <= side2y ) { this.lowside = new _Coord(side1x, side1y, side1z); this.highside = new _Coord(side2x, side2y, side2z); } else { this.highside = new _Coord(side1x, side1y, side1z); this.lowside = new _Coord(side2x, side2y, side2z); } this.horizontalDistance = _getHorizontalDistance(this.lowside, this.highside); System.out.printf("[Ropes and Bridges] Horizontal Distance is %3.3f.\n", this.horizontalDistance); this.droop = _getDroop(); System.out.printf("[Ropes and Bridges] Droop is %3.3f (%3.3fm above void) on a %dm rope from %d, %d, %d to %d, %d, %d.\n", droop, lowside.y - droop, ropeLength, side1x, side1y, side1z, side2x, side2y, side2z); double L1 = _Find_L1(droop, ropeLength, highside.y - lowside.y); 
System.out.printf("[Ropes and Bridges] L1 is %3.3f.\n", L1); this.yfunc = new _y_func(); for (int i = 0; i <= horizontalDistance; i++) { System.out.printf("[Ropes and Bridges] %d,%3.2f\n", i, yfunc.get(i)); } } private double _getDroop() { return _Solve_h(highside.y - lowside.y, ropeLength, horizontalDistance); } public double getDroop() { return droop; } public int getBlockHeight(int x, int z) { return 0; //FIXME } public double getHeight(double x, double z) { return 0.0; //FIXME } // implements equation (10) to find horizontal length L1 from bottom of the curve to the lowest side private double _Find_L1(double h, double L, double a) { return -( ( h * L - Math.sqrt( h * (a + h) * (Math.pow(L, 2) - Math.pow(a, 2)) ) ) / a ); } // asinh isn't defined in java.lang.Math private double _asinh(double x) { return Math.log(x + Math.sqrt(x*x + 1.0)); } private static double MAXERR = 1e-10; // Absolute precision of calculation private static double MAXIT = 100; // Maximum iterations (will never reach 100 unless an error has occurred) // a very direct adaptation of Ruud v Gessel's C++ atanh() to Java private double _atanh(double x) { return 0.5 * Math.log((1+x)/(1-x)); } // a very direct adaptation of Ruud v Gessel's C++ Calc_D() to Java private double _Calc_D(double a, double L, double h, double sgn) // Calculates d from equation 11 { double q=2*sgn*Math.sqrt(h*(a+h)*(L*L-a*a)); // + or - 2* the root used in (11) return ((L*L-a*a)*(a+2*h)-L*q)/(a*a)*_atanh(a*a/(L*(a+2*h)-q)); // return calculated d from eq (11) } // a very direct adaptation of Ruud v Gessel's C++ Solve_h() to Java private double _Solve_h(double a, double L, double d) // Routine to solve h from a, L and d { int n=0; // Iteration counter (quit if >MAXIT) double s=((L*L-a*a)/(2*a)*Math.log((L+a)/(L-a))<d) ?-1:1; // Left or right of Y axis ? 
double lower=0, upper=(L-a)/2; // h must be within this range while((upper-lower) > MAXERR && (++n)<MAXIT) // Repeat until range narrow enough or MAXIT if(_Calc_D(a,L,(upper+lower)/2,s)*s<d*s) upper=(upper+lower)/2; else lower=(upper+lower)/2; // Narrows the range of possible h System.out.printf("Found h=%3.10f after %d iterations.\n",(upper+lower)/2,n); // If you see 100 iterations assume an error return s*((upper+lower)/2); // Returns h (- signals right of Y axis) } // a very direct adaptation of Ruud v Gessel's C++ Solve_L() to Java private double _Solve_L(double a, double h, double d) // Routine to solve L from a, h and d { int n=0; // Iteration counter (quit if >MAXIT) double lower=Math.sqrt((d*d+a*a)), upper=2*h+d+a; // L must be within this range while((upper-lower) > MAXERR && (++n)<MAXIT) // Repeat until range narrow enough or MAXIT if(_Calc_D(a,(upper+lower)/2,h,1)>d) upper=(upper+lower)/2; else lower=(upper+lower)/2; // Narrows the range of possible L System.out.printf("Found L=%3.10f after %d iterations.\n",(upper+lower)/2,n); // If you see 100 iterations assume an error return (upper+lower)/2; // Returns L } } /* Reference C code from * http://members.chello.nl/j.beentjes3/Ruud/catfiles/catenary.pdf */ /********************************************************************** * * Writen by Ruud v Gessel october 2007 for fun * * An example of how to handle numerical solutions for the examples * used in catenary.pdf which is located on my site. * * The methode used to solve the atanh equations is fairly easy, * it looks a bit like a binary search methode. Just have a look * at the functions and you will understand. * * The program was created from scratch in a pretty short time and * may (or will) therefore contain errors. * * HAVE FUN * Ruud. 
* *********************************************************************/ /* #include "stdafx.h" #include "math.h" #define MAXERR 1e-10 // Absolute precision of calculation #define MAXIT 100 // Maximum iterations (will never reach 100 unless an error has occurred) #define TV ((upper+lower)/2) // Test value for our iteration routines, gives the middle of the range of the solution double atanh(double x) // Not defined in math library { return 0.5*log((1+x)/(1-x)); // Return atanh(x) } double Calc_D(double a, double L, double h, double sgn) // Calculates d from equation 11 { double q=2*sgn*sqrt(h*(a+h)*(L*L-a*a)); // + or - 2* the root used in (11) return ((L*L-a*a)*(a+2*h)-L*q)/(a*a)*atanh(a*a/(L*(a+2*h)-q)); // return calculated d from eq (11) } double Solve_h(double a, double L, double d) // Routine to solve h from a, L and d { int n=0; // Iteration counter (quit if >MAXIT) double s=((L*L-a*a)/(2*a)*log((L+a)/(L-a))<d) ?-1:1; // Left or right of Y axis ? double lower=0, upper=(L-a)/2; // h must be within this range while((upper-lower) > MAXERR && (++n)<MAXIT) // Repeat until range narrow enough or MAXIT if(Calc_D(a,L,TV,s)*s<d*s) upper=TV; else lower=TV; // Narrows the range of possible h printf("Found h=%3.10f after %d iterations.\n\r",TV,n); // If you see 100 iterations assume an error return s*TV; // Returns h (- signals right of Y axis) } double Solve_L(double a, double h, double d) // Routine to solve L from a, h and d { int n=0; // Iteration counter (quit if >MAXIT) double lower=sqrt((d*d+a*a)), upper=2*h+d+a; // L must be within this range while((upper-lower) > MAXERR && (++n)<MAXIT) // Repeat until range narrow enough or MAXIT if(Calc_D(a,TV,h,1)>d) upper=TV; else lower=TV; // Narrows the range of possible L printf("Found L=%3.10f after %d iterations.\n\r",TV,n); // If you see 100 iterations assume an error return TV; // Returns L } double asinh(double x) // Not defined in math library { return log(x+sqrt(x*x+1)); // Return asinh(x) } void ShowAll(double h, 
double L, double h1, double h2, double d) { double sgn=(h<0) ? -1: 1; // sgn has the sign of h (- ==> right of Y axis) h=h*sgn; // h always positive double a=h2-h1; // a= height difference double L1=-(h*L-sgn*sqrt(h*(a+h)*(L*L-a*a)))/a; // L1 from equation (10) double u=2*h/(L1*L1-h*h); // u fro L1 and h double x1=asinh(u*L1)/u; // x1 from u and L1 double k=h1-h-1/u; // k from h1, h and u // Show all values, note that h2 is calculated using all calculated values in this sub // it is shown only to verify the result, h2 shown her should be very close to actual h2 printf("u=%1.9f, phi=%3.9f, k=%3.9f, h2=%3.9f, DP=%3.9f \n\r\n\r",u,-u*x1,k,cosh(u*d-u*x1)/u+k,h1-h); } int main(int argc, char* argv[]) { double h,L,d; printf("y=cosh(u.x+phi)/u+k\n\n\r"); d=Calc_D(5,30,2,1); // From example 1 ShowAll(2,30,10,15,d); h=Solve_h(5,28,20); // From example 2 ShowAll(h,28,10,15,20); h=Solve_h(10,15,11); // From example 3 ShowAll(h,15,10,20,11); L=Solve_L(6,4,30); // From example 4 ShowAll(4,L,12,18,30); L=Solve_L(1000,999,10); // For fun ShowAll(999,L,1000,2000,10); L=Solve_L(0.0001,0.01,10); // For fun ShowAll(0.01,L,10,10.0001,10); return 0; } */
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.mapper; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.StoredField; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardException; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.Map; import 
java.util.function.Function;

/**
 * Metadata mapper for the {@code _source} field. When enabled (the default),
 * the original document bytes are stored verbatim so they can be returned at
 * fetch time. The stored bytes can be reduced via {@code includes}/{@code excludes}
 * filters; when the bytes are omitted or modified this way, the unmodified
 * original is additionally stored under {@link #RECOVERY_SOURCE_NAME} so that
 * operations-based recovery still has a complete source (see
 * {@link #parseCreateField}). The field is stored-only and not searchable.
 */
public class SourceFieldMapper extends MetadataFieldMapper {

    public static final String NAME = "_source";
    public static final String RECOVERY_SOURCE_NAME = "_recovery_source";

    public static final String CONTENT_TYPE = "_source";
    // Applies the includes/excludes patterns to a source map; null when no
    // filtering is configured (see the private constructor).
    private final Function<Map<String, ?>, Map<String, Object>> filter;

    /** Default settings and the frozen stored-only field type for {@code _source}. */
    public static class Defaults {
        public static final String NAME = SourceFieldMapper.NAME;
        public static final boolean ENABLED = true;

        public static final MappedFieldType FIELD_TYPE = new SourceFieldType();

        static {
            FIELD_TYPE.setIndexOptions(IndexOptions.NONE); // not indexed
            FIELD_TYPE.setStored(true);
            FIELD_TYPE.setOmitNorms(true);
            FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
            FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
            FIELD_TYPE.setName(NAME);
            FIELD_TYPE.freeze();
        }
    }

    /** Builder collecting {@code enabled}/{@code includes}/{@code excludes} before constructing the mapper. */
    public static class Builder extends MetadataFieldMapper.Builder<Builder, SourceFieldMapper> {

        private boolean enabled = Defaults.ENABLED;

        // null means "not configured", which is distinct from an empty array
        // (see isComplete() and toXContent()).
        private String[] includes = null;
        private String[] excludes = null;

        public Builder() {
            super(Defaults.NAME, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE);
        }

        public Builder enabled(boolean enabled) {
            this.enabled = enabled;
            return this;
        }

        public Builder includes(String[] includes) {
            this.includes = includes;
            return this;
        }

        public Builder excludes(String[] excludes) {
            this.excludes = excludes;
            return this;
        }

        @Override
        public SourceFieldMapper build(BuilderContext context) {
            return new SourceFieldMapper(enabled, includes, excludes, context.indexSettings());
        }
    }

    /** Parses the {@code _source} section of a mapping definition into a {@link Builder}. */
    public static class TypeParser implements MetadataFieldMapper.TypeParser {
        @Override
        public MetadataFieldMapper.Builder<?,?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
            Builder builder = new Builder();
            // Recognized entries are removed from the node as they are
            // consumed — NOTE(review): presumably so the caller can flag any
            // leftover (unknown) keys; confirm against the calling parser.
            for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
                Map.Entry<String, Object> entry = iterator.next();
                String fieldName = entry.getKey();
                Object fieldNode = entry.getValue();
                if (fieldName.equals("enabled")) {
                    builder.enabled(XContentMapValues.nodeBooleanValue(fieldNode, name + ".enabled"));
                    iterator.remove();
                } else if (fieldName.equals("includes")) {
                    // Each list element is converted via toString(), whatever
                    // its parsed type.
                    List<Object> values = (List<Object>) fieldNode;
                    String[] includes = new String[values.size()];
                    for (int i = 0; i < includes.length; i++) {
                        includes[i] = values.get(i).toString();
                    }
                    builder.includes(includes);
                    iterator.remove();
                } else if (fieldName.equals("excludes")) {
                    List<Object> values = (List<Object>) fieldNode;
                    String[] excludes = new String[values.size()];
                    for (int i = 0; i < excludes.length; i++) {
                        excludes[i] = values.get(i).toString();
                    }
                    builder.excludes(excludes);
                    iterator.remove();
                }
            }
            return builder;
        }

        @Override
        public MetadataFieldMapper getDefault(ParserContext context) {
            final Settings indexSettings = context.mapperService().getIndexSettings().getSettings();
            // Default mapper: enabled, no includes/excludes filtering.
            return new SourceFieldMapper(indexSettings);
        }
    }

    /** Field type for {@code _source}: stored-only, rejects all queries. */
    static final class SourceFieldType extends MappedFieldType {

        SourceFieldType() {}

        protected SourceFieldType(SourceFieldType ref) {
            super(ref);
        }

        @Override
        public MappedFieldType clone() {
            return new SourceFieldType(this);
        }

        @Override
        public String typeName() {
            return CONTENT_TYPE;
        }

        @Override
        public Query existsQuery(QueryShardContext context) {
            throw new QueryShardException(context, "The _source field is not searchable");
        }

        @Override
        public Query termQuery(Object value, QueryShardContext context) {
            throw new QueryShardException(context, "The _source field is not searchable");
        }
    }

    private final boolean enabled;

    /** indicates whether the source will always exist and be complete, for use by features like the update API */
    private final boolean complete;

    private final String[] includes;
    private final String[] excludes;

    private SourceFieldMapper(Settings indexSettings) {
        this(Defaults.ENABLED, null, null, indexSettings);
    }

    private SourceFieldMapper(boolean enabled, String[] includes, String[] excludes, Settings indexSettings) {
        super(NAME, Defaults.FIELD_TYPE.clone(), Defaults.FIELD_TYPE, indexSettings); // Only stored.
        this.enabled = enabled;
        this.includes = includes;
        this.excludes = excludes;
        // A filter function is only built when there is actually something to
        // filter and the field is stored; otherwise it stays null and
        // applyFilters() passes the source through untouched.
        final boolean filtered = (includes != null && includes.length > 0) || (excludes != null && excludes.length > 0);
        this.filter = enabled && filtered && fieldType().stored() ? XContentMapValues.filter(includes, excludes) : null;
        // "complete" only when enabled with no filtering configured at all
        // (note: configured-but-empty arrays count as incomplete).
        this.complete = enabled && includes == null && excludes == null;
    }

    public boolean enabled() {
        return enabled;
    }

    /** Configured excludes patterns, or an empty array when unconfigured. */
    public String[] excludes() {
        return this.excludes != null ? this.excludes : Strings.EMPTY_ARRAY;
    }

    /** Configured includes patterns, or an empty array when unconfigured. */
    public String[] includes() {
        return this.includes != null ? this.includes : Strings.EMPTY_ARRAY;
    }

    public boolean isComplete() {
        return complete;
    }

    @Override
    public void preParse(ParseContext context) throws IOException {
        // _source is indexed during the pre-parse phase, before the document
        // fields are parsed; parse() below is therefore a no-op.
        super.parse(context);
    }

    @Override
    public void parse(ParseContext context) throws IOException {
        // nothing to do here, we will call it in pre parse
    }

    @Override
    protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
        BytesReference originalSource = context.sourceToParse().source();
        XContentType contentType = context.sourceToParse().getXContentType();
        final BytesReference adaptedSource = applyFilters(originalSource, contentType);
        if (adaptedSource != null) {
            final BytesRef ref = adaptedSource.toBytesRef();
            fields.add(new StoredField(fieldType().name(), ref.bytes, ref.offset, ref.length));
        }
        // Identity comparison on purpose: applyFilters() returns the very same
        // reference when nothing was filtered out.
        if (originalSource != null && adaptedSource != originalSource) {
            // if we omitted source or modified it we add the _recovery_source to ensure we have it for ops based recovery
            BytesRef ref = originalSource.toBytesRef();
            fields.add(new StoredField(RECOVERY_SOURCE_NAME, ref.bytes, ref.offset, ref.length));
            // NOTE(review): the doc-values entry presumably marks documents
            // carrying a recovery source so they can be found/pruned later —
            // confirm against the recovery-source consumers.
            fields.add(new NumericDocValuesField(RECOVERY_SOURCE_NAME, 1));
        }
    }

    /**
     * Returns the source bytes to store: {@code null} when disabled/absent,
     * the same reference when no filtering applies, or a newly serialized
     * filtered copy (in the original content type) otherwise.
     */
    @Nullable
    public BytesReference applyFilters(@Nullable BytesReference originalSource, @Nullable XContentType contentType) throws IOException {
        if (enabled && fieldType().stored() && originalSource != null) {
            // Percolate and tv APIs may not set the source and that is ok, because these APIs will not index any data
            if (filter != null) {
                // we don't update the context source if we filter, we want to keep it as is...
                Tuple<XContentType, Map<String, Object>> mapTuple =
                    XContentHelper.convertToMap(originalSource, true, contentType);
                Map<String, Object> filteredSource = filter.apply(mapTuple.v2());
                BytesStreamOutput bStream = new BytesStreamOutput();
                XContentType actualContentType = mapTuple.v1();
                XContentBuilder builder = XContentFactory.contentBuilder(actualContentType, bStream).map(filteredSource);
                builder.close();
                return bStream.bytes();
            } else {
                return originalSource;
            }
        } else {
            return null;
        }
    }

    @Override
    protected String contentType() {
        return CONTENT_TYPE;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        boolean includeDefaults = params.paramAsBoolean("include_defaults", false);

        // all are defaults, no need to write it at all
        if (!includeDefaults && enabled == Defaults.ENABLED && includes == null && excludes == null) {
            return builder;
        }
        builder.startObject(contentType());
        if (includeDefaults || enabled != Defaults.ENABLED) {
            builder.field("enabled", enabled);
        }

        if (includes != null) {
            builder.array("includes", includes);
        } else if (includeDefaults) {
            builder.array("includes", Strings.EMPTY_ARRAY);
        }

        if (excludes != null) {
            builder.array("excludes", excludes);
        } else if (includeDefaults) {
            builder.array("excludes", Strings.EMPTY_ARRAY);
        }

        builder.endObject();
        return builder;
    }

    @Override
    protected void doMerge(Mapper mergeWith) {
        // None of enabled/includes/excludes may change on a mapping update;
        // collect every violation before failing so the error lists them all.
        SourceFieldMapper sourceMergeWith = (SourceFieldMapper) mergeWith;
        List<String> conflicts = new ArrayList<>();
        if (this.enabled != sourceMergeWith.enabled) {
            conflicts.add("Cannot update enabled setting for [_source]");
        }
        if (Arrays.equals(includes(), sourceMergeWith.includes()) == false) {
            conflicts.add("Cannot update includes setting for [_source]");
        }
        if (Arrays.equals(excludes(), sourceMergeWith.excludes()) == false) {
            conflicts.add("Cannot update excludes setting for [_source]");
        }
        if (conflicts.isEmpty() == false) {
            throw new IllegalArgumentException("Can't merge because of conflicts: " + conflicts);
        }
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package com.dkajiwara.savestate; import android.os.Bundle; import android.os.Parcelable; import android.support.annotation.NonNull; import android.util.Log; import android.util.SparseArray; import java.lang.reflect.Field; import java.lang.reflect.Modifier; import java.util.ArrayList; import java.util.List; /** * @author dkajiwara */ public class InjectSave { /** TAG.*/ private static final String TAG = InjectSave.class.getSimpleName(); /** Suffix for Bundle key.*/ private static final String SUFFIX = "$$SaveState"; private InjectSave() { } /** * Save the instance variable field SaveState Annotation is set to this bundle. 
* <p/> * Examples: * <pre>{@code * '@SaveState * String title = null; //AutoSave * * '@Override * public void onSaveInstanceState(Bundle outState) { * super.onSaveInstanceState(outState); * InjectSave.saveInstanceState(this, outState); * view.setText(title); * } * }</pre> * @param target context * @param outState bundle */ public static void saveInstanceState(@NonNull Object target, @NonNull Bundle outState){ Class clazz = target.getClass(); List<Field> fields = ReflectUtils.getDeclaredAnnotationFields(clazz, SaveState.class); for (Field field : fields) { try { String key = field.getName(); Object value = field.get(target); put(outState, key + SUFFIX, value); } catch (IllegalAccessException e) { e.printStackTrace(); } } } private static void put(Bundle bundle, String key, Object value) { Log.d(TAG, "key " + key + ", value " + value); if (value instanceof Integer) { bundle.putInt(key, (Integer) value); } else if (value instanceof int[]) { bundle.putIntArray(key, (int[]) value); } else if (value instanceof String) { bundle.putString(key, (String) value); } else if (value instanceof String[]) { bundle.putStringArray(key, (String[]) value); } else if (value instanceof Boolean) { bundle.putBoolean(key, (Boolean) value); } else if (value instanceof Character) { bundle.putChar(key, (Character) value); } else if (value instanceof char[]) { bundle.putCharArray(key, (char[]) value); } else if (value instanceof Float) { bundle.putFloat(key, (Float) value); } else if (value instanceof float[]) { bundle.putFloatArray(key, (float[]) value); } else if (value instanceof Short) { bundle.putShort(key, (Short) value); } else if (value instanceof short[]) { bundle.putShortArray(key, (short[]) value); } else if (value instanceof Long) { bundle.putLong(key, (Long) value); } else if (value instanceof long[]) { bundle.putLongArray(key, (long[]) value); } else if (value instanceof Byte) { bundle.putByte(key, (Byte) value); } else if (value instanceof byte[]) { bundle.putByteArray(key, 
(byte[]) value); } else if (value instanceof CharSequence) { bundle.putCharSequence(key, (CharSequence) value); } else if (value instanceof CharSequence[]) { bundle.putCharSequenceArray(key, (CharSequence[]) value); } else if (value instanceof Bundle) { bundle.putBundle(key, (Bundle) value); } else if (value instanceof Parcelable) { bundle.putParcelable(key, (Parcelable) value); } else if (value instanceof Parcelable[]) { bundle.putParcelableArray(key, (Parcelable[]) value); } else if (isStringArrayList(value)) { bundle.putStringArrayList(key, (ArrayList<String>) value); } else if (isIntegerArrayList(value)) { bundle.putIntegerArrayList(key, (ArrayList<Integer>) value); } else if (isCharSequenceArrayList(value)) { bundle.putCharSequenceArrayList(key, (ArrayList<CharSequence>) value); } else if (isSparseArray(value)) { bundle.putSparseParcelableArray(key, (SparseArray<? extends Parcelable>) value); } else if (isParcelableArray(value)) { bundle.putParcelableArrayList(key, (ArrayList<? extends Parcelable>) value); } else { throw new IllegalArgumentException(); } } private static boolean isParcelableArray(Object value) { if (!(value instanceof ArrayList<?>)) { return false; } List<?> list = (ArrayList<?>) value; for (Object object : list) { if (!(object instanceof Parcelable)) { return false; } } return true; } private static boolean isSparseArray(Object value) { if (!(value instanceof SparseArray<?>)) { return false; } SparseArray<?> list = (SparseArray<?>) value; for (int i = 0, max = list.size(); i < max; i++) { if (!(list.valueAt(i) instanceof Parcelable)) { return false; } } return true; } private static boolean isCharSequenceArrayList(Object value) { if (!(value instanceof ArrayList<?>)) { return false; } List<?> list = (ArrayList<?>) value; for (Object object : list) { if (!(object instanceof CharSequence)) { return false; } } return true; } private static boolean isIntegerArrayList(Object value) { if (!(value instanceof ArrayList<?>)) { return false; } List<?> 
list = (ArrayList<?>) value; for (Object object : list) { if (!(object instanceof Integer)) { return false; } } return true; } private static boolean isStringArrayList(Object value) { if (!(value instanceof ArrayList<?>)) { return false; } List<?> list = (ArrayList<?>) value; for (Object object : list) { if (!(object instanceof String)) { return false; } } return true; } /** * Restore the instance variable field SaveState annotation has been set from this Bundle. * * Examples: * <pre>{@code * '@SaveState * String title = null; //AutoSave * * '@Override * protected void onCreate(Bundle savedInstanceState) { * super.onCreate(savedInstanceState); * InjectSave.restoreInstanceState(this, savedInstanceState); * } * }</pre> * * @param object context * @param savedInstanceState bundle */ public static void restoreInstanceState(@NonNull Object object, @NonNull Bundle savedInstanceState) { Class clazz = object.getClass(); List<Field> fields = ReflectUtils.getDeclaredAnnotationFields(clazz, SaveState.class); for (Field field : fields) { String key = field.getName(); Object value = savedInstanceState.get(key + SUFFIX); try { field.set(object, value); } catch (IllegalAccessException e) { e.printStackTrace(); } } } private static final class ReflectUtils { /** * Returns an array of Field objects that specified annotation. * * @param clazz class * @param targetAnnotation annotation * @return */ private static List<Field> getDeclaredAnnotationFields(@NonNull Class clazz, @NonNull Class targetAnnotation) { List<Field> fields = new ArrayList<>(); for (Field filed : clazz.getDeclaredFields()) { if (filed.getAnnotation(targetAnnotation) == null) { continue; } int modifiers = filed.getModifiers(); if (Modifier.isFinal(modifiers) || Modifier.isStatic(modifiers)) { continue; } filed.setAccessible(true); fields.add(filed); } return fields; } private ReflectUtils() {} } }
/* * Copyright 2016 Niklas Schelten * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.raspi.chatapp.util.internet; import android.content.Context; import android.content.Intent; import android.support.annotation.Nullable; import android.support.v4.content.LocalBroadcastManager; import android.util.Log; import com.raspi.chatapp.util.Constants; import com.raspi.chatapp.util.storage.MessageHistory; import org.jivesoftware.smack.ConnectionConfiguration; import org.jivesoftware.smack.ConnectionListener; import org.jivesoftware.smack.ReconnectionManager; import org.jivesoftware.smack.SmackConfiguration; import org.jivesoftware.smack.SmackException; import org.jivesoftware.smack.StanzaListener; import org.jivesoftware.smack.XMPPConnection; import org.jivesoftware.smack.chat.Chat; import org.jivesoftware.smack.chat.ChatManager; import org.jivesoftware.smack.filter.StanzaFilter; import org.jivesoftware.smack.packet.Presence; import org.jivesoftware.smack.packet.Stanza; import org.jivesoftware.smack.roster.Roster; import org.jivesoftware.smack.roster.RosterEntry; import org.jivesoftware.smack.tcp.XMPPTCPConnection; import org.jivesoftware.smack.tcp.XMPPTCPConnectionConfiguration; import org.jivesoftware.smackx.ping.PingFailedListener; import org.jivesoftware.smackx.ping.PingManager; import org.w3c.dom.Document; import org.w3c.dom.Element; import java.io.StringWriter; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.transform.Transformer; import 
javax.xml.transform.TransformerFactory; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.stream.StreamResult; /** * XmppManager is the wrapper Singleton for the xmppConnection which provides * all important functions. It is a Singleton to prohibit that one * message is received twice and to make sure it always exists :) */ public class XmppManager{ /** * the HttpUrl to the server */ public static final String SERVER = "raspi-server.ddns.net"; private static final String SERVICE = "chatapp.com"; private static final int PORT = 5222; private static class Holder{ static final XmppManager INSTANCE = new XmppManager(SERVER, SERVICE, PORT); } private static final int packetReplyTime = 5000; private String server; private String service; private int port; private XMPPTCPConnection connection; private static LocalBroadcastManager LBMgr; /** * returns an instance of the xmppManager * * @param context - the context with which to initialize a * LocalBroadCastManager, if this is not the first call of * this function it might also be null * @return */ @Nullable public static XmppManager getInstance(Context context){ //yes this is the lazy implementation for the LBMgr but I think for the // LBMgr it is not that important that there might be a second // initialization if (LBMgr == null && context != null) LBMgr = LocalBroadcastManager.getInstance(context); return Holder.INSTANCE; } public static XmppManager getInstance(){ return Holder.INSTANCE; } /** * creates a IM Manager with the given server ID * * @param server host address * @param service service name * @param port port */ protected XmppManager(String server, String service, int port){ this.server = server; this.service = service; this.port = port; Log.d("DEBUG", "Success: created xmppManager"); } /** * initializes the connection with the server * * @return true if a connection could be established */ public boolean init(){ // configure the smack 
SmackConfiguration.setDefaultPacketReplyTimeout(packetReplyTime); // set smack to try reconnect every5 secs when loosing connection ReconnectionManager.setEnabledPerDefault(true); ReconnectionManager.setDefaultReconnectionPolicy(ReconnectionManager.ReconnectionPolicy.FIXED_DELAY); ReconnectionManager.setDefaultFixedDelay(5); // build the connectionConfig XMPPTCPConnectionConfiguration config = XMPPTCPConnectionConfiguration.builder() .setServiceName(service) .setHost(server) .setPort(port) .setSendPresence(false) .setSecurityMode(ConnectionConfiguration.SecurityMode.ifpossible).build(); // create the connection and enable necessary features connection = new XMPPTCPConnection(config); // stream management prevents message loss connection.setUseStreamManagement(true); connection.addConnectionListener(connectionListener); connection.addAsyncStanzaListener(stanzaListener, new StanzaFilter(){ @Override public boolean accept(Stanza stanza){ return (stanza.getError() != null); } }); // enable the pingManager to ping every 60 seconds and try to reconnect in order to maintain // the connection PingManager pingManager = PingManager.getInstanceFor(connection); pingManager.setPingInterval(60); pingManager.registerPingFailedListener(new PingFailedListener(){ @Override public void pingFailed(){ try{ connection.connect(); }catch (Exception e){ Log.e("ERROR", "Couldn't connect."); Log.e("ERROR", e.toString()); } } }); // finally try to connect to the server try{ connection.connect(); }catch (Exception e){ Log.e("ERROR", "Couldn't connect."); Log.e("ERROR", e.toString()); return false; } Log.d("DEBUG", "Success: Initialized XmppManager."); return true; } private StanzaListener stanzaListener = new StanzaListener(){ @Override public void processPacket(Stanza packet) throws SmackException.NotConnectedException{ Log.d("STANZA RECEIVED", packet.toString()); } }; /** * logs in to the server * * @param username the username to use * @param password the corresponding password * @return 
true if the login was successful */
  public boolean performLogin(String username, String password){
    if (connection != null && connection.isConnected()){
      try{
        connection.login(username, password);
        Log.d("DEBUG", "Success: Logged in.");
        return true;
      }catch (Exception e){
        Log.e("ERROR", "Couldn't log in.");
        Log.e("ERROR", e.toString());
        return false;
      }
    }
    Log.d("DEBUG", "Couldn't log in: No connection.");
    return false;
  }

  /**
   * returns the roster for the current connection
   *
   * @return the roster and null if the roster cannot be accessed
   */
  public Roster getRoster(){
    if (connection != null && connection.isConnected()){
      Log.d("DEBUG", "Success: returning roster.");
      return Roster.getInstanceFor(connection);
    }else{
      Log.d("DEBUG", "Couldn't get the roster: No connection.");
      return null;
    }
  }

  /**
   * send the raw string as a message without wrapping it with xml attributes
   *
   * @param message  the message to be sent
   * @param buddyJID the buddy to send the message to
   */
  public void sendRaw(String message, String buddyJID){
    // Check the connection first: calling ChatManager.getInstanceFor(null)
    // before the null-check (as the old code did) risks an NPE instead of a
    // soft failure.
    if (connection == null || !connection.isConnected())
      return;
    ChatManager chatManager = ChatManager.getInstanceFor(connection);
    if (chatManager == null)
      return;
    try{
      Chat chat = chatManager.createChat(buddyJID);
      chat.sendMessage(message);
    }catch (Exception e){
      e.printStackTrace();
    }
  }

  /**
   * sends a text message
   *
   * @param message  the message text to send
   * @param buddyJID the Buddy to receive the message
   * @param id       the local id the message is stored under
   * @return true if sending was successful
   */
  public boolean sendTextMessage(String message, String buddyJID, long id){
    if (connection == null || !connection.isConnected()){
      Log.e("ERROR", "Sending failed: No connection.");
      return false;
    }
    ChatManager chatManager = ChatManager.getInstanceFor(connection);
    if (chatManager == null){
      Log.e("ERROR", "Sending failed: No connection.");
      return false;
    }
    // bare user names are completed with the configured service domain
    if (buddyJID.indexOf('@') == -1)
      buddyJID += "@" + service;
    Chat chat = chatManager.createChat(buddyJID);
    try{
      // wrap the message with all necessary xml attributes
      Document doc = newDocument();
      Element msg = doc.createElement("message");
      doc.appendChild(msg);
      msg.setAttribute("type", MessageHistory.TYPE_TEXT);
      msg.setAttribute("id", String.valueOf(id));
      Element content = doc.createElement("content");
      msg.appendChild(content);
      content.setTextContent(message);
      // serialize and send
      chat.sendMessage(documentToString(doc));
      Log.d("DEBUG", "Success: Sent message");
      return true;
    }catch (Exception e){
      Log.e("ERROR", "Couldn't send message.");
      Log.e("ERROR", e.toString());
      return false;
    }
  }

  /**
   * sends an image message
   *
   * @param serverFile  the file on the server
   * @param description the description of the sent image
   * @param buddyJID    the Buddy to receive the message
   * @param id          the local id the message is stored under
   * @return true if sending was successful
   */
  public boolean sendImageMessage(String serverFile, String description,
                                  String buddyJID, long id){
    if (connection == null || !connection.isConnected()){
      Log.e("ERROR", "Sending failed: No connection.");
      return false;
    }
    ChatManager chatManager = ChatManager.getInstanceFor(connection);
    if (chatManager == null){
      Log.e("ERROR", "Sending failed: No connection.");
      return false;
    }
    if (buddyJID.indexOf('@') == -1)
      buddyJID += "@" + service;
    Chat chat = chatManager.createChat(buddyJID);
    try{
      // generate the message in order to set the type to image
      Document doc = newDocument();
      Element msg = doc.createElement("message");
      doc.appendChild(msg);
      msg.setAttribute("type", MessageHistory.TYPE_IMAGE);
      msg.setAttribute("id", String.valueOf(id));
      Element file = doc.createElement("file");
      msg.appendChild(file);
      file.setTextContent(serverFile);
      Element desc = doc.createElement("description");
      msg.appendChild(desc);
      desc.setTextContent(description);
      // serialize and send
      chat.sendMessage(documentToString(doc));
      Log.d("DEBUG", "Success: Sent message");
      return true;
    }catch (Exception e){
      Log.e("ERROR", "Couldn't send message.");
      Log.e("ERROR", e.toString());
      return false;
    }
  }

  /**
   * send an acknowledgement
   *
   * @param buddyId  the buddyId to receive the acknowledgement
   * @param othersId the id the buddy has sent the message with
   * @param type     the type of acknowledgement to send
   * @return true if sending was successful
   */
  public boolean sendAcknowledgement(String buddyId, long othersId, String type){
    if (connection == null || !connection.isConnected()){
      Log.e("ERROR", "Sending failed: No connection.");
      return false;
    }
    ChatManager chatManager = ChatManager.getInstanceFor(connection);
    if (chatManager == null){
      Log.e("ERROR", "Sending failed: No connection.");
      return false;
    }
    if (buddyId.indexOf('@') == -1)
      buddyId += "@" + service;
    Chat chat = chatManager.createChat(buddyId);
    try{
      // create the message structure
      Document doc = newDocument();
      Element ack = doc.createElement("acknowledgement");
      doc.appendChild(ack);
      ack.setAttribute("id", String.valueOf(othersId));
      ack.setAttribute("type", type);
      // serialize and send
      chat.sendMessage(documentToString(doc));
      Log.d("DEBUG", "Success: Sent message");
      return true;
    }catch (Exception e){
      Log.e("ERROR", "Couldn't send message.");
      Log.e("ERROR", e.toString());
      return false;
    }
  }

  /** Creates an empty DOM document used to build outgoing message payloads. */
  private Document newDocument() throws Exception{
    return DocumentBuilderFactory.newInstance().newDocumentBuilder()
        .newDocument();
  }

  /** Serializes the given DOM document into its xml string representation. */
  private String documentToString(Document doc) throws Exception{
    Transformer t = TransformerFactory.newInstance().newTransformer();
    StringWriter writer = new StringWriter();
    t.transform(new DOMSource(doc), new StreamResult(writer));
    return writer.toString();
  }

  /**
   * creates a list of all RosterEntries
   *
   * @return the rosterEntryArray (empty on any error)
   */
  public RosterEntry[] listRoster(){
    try{
      // get the roster and if it is not loaded reload it
      Roster roster = Roster.getInstanceFor(connection);
      if (!roster.isLoaded())
        roster.reloadAndWait();
      // toArray replaces the previous manual index-copy loop
      return roster.getEntries().toArray(new RosterEntry[0]);
    }catch (Exception e){
      e.printStackTrace();
    }
    return new RosterEntry[0];
  }

  /**
   * sets the status
   *
   * @param available if true the status type will be set to available
   *                  otherwise to unavailable
   * @param status    the status message
   * @return true if setting the status was successful
   */
  public boolean setStatus(boolean available, String status){
    if (connection != null && connection.isConnected()){
      // set the presence type
      Presence presence = new Presence(available
          ? Presence.Type.available
          : Presence.Type.unavailable);
      presence.setStatus(status);
      try{
        connection.sendStanza(presence);
        Log.d("DEBUG", "Success: Set status.");
        return true;
      }catch (Exception e){
        System.err.println(e.toString());
        Log.e("ERROR", "Error while setting status.");
        return false;
      }
    }
    Log.e("ERROR", "Setting status failed: No connection.");
    return false;
  }

  /**
   * returns whether the app has an active connection to the server
   *
   * @return true if the connection is connected to the xmpp server
   */
  public boolean isConnected(){
    return connection != null && connection.isConnected();
  }

  /**
   * returns the underlying connection to the server
   *
   * @return the connection
   */
  public XMPPTCPConnection getConnection(){
    return connection;
  }

  // just for debug purposes
  private ConnectionListener connectionListener = new ConnectionListener(){
    @Override
    public void connected(XMPPConnection connection){
      Log.d("XMPP_MANAGER", "connected successfully");
    }

    @Override
    public void authenticated(XMPPConnection connection, boolean resumed){
      if (resumed)
        Log.d("XMPP_MANAGER", "authenticated successfully a resumed " +
            "connection");
      else
        Log.d("XMPP_MANAGER", "authenticated successfully a not resumed " +
            "connection");
    }

    @Override
    public void connectionClosed(){
      Log.d("XMPP_MANAGER", "closed the connection successfully");
    }

    @Override
    public void connectionClosedOnError(Exception e){
      Log.d("XMPP_MANAGER", "Connection closed on error");
    }

    @Override
    public void reconnectionSuccessful(){
      Log.d("XMPP_MANAGER", "reconnected successfully");
      LBMgr.sendBroadcast(new Intent(Constants.RECONNECTED));
    }

    @Override
    public void reconnectingIn(int seconds){
    }

    @Override
    public void reconnectionFailed(Exception e){
      Log.d("XMPP_MANAGER", "reconnecting failed");
    }
  };
}
/* Copyright (c) 2001-2011, The HSQL Development Group * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * Neither the name of the HSQL Development Group nor the names of its * contributors may be used to endorse or promote products derived from this * software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL HSQL DEVELOPMENT GROUP, HSQLDB.ORG, * OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package org.hsqldb.jdbc; import java.io.ByteArrayInputStream; import java.io.InputStream; import java.io.OutputStream; import java.sql.Blob; import java.sql.SQLException; import org.hsqldb.error.ErrorCode; import org.hsqldb.lib.KMPSearchAlgorithm; import org.hsqldb.lib.java.JavaSystem; // boucherb@users 2004-04-xx - patch 1.7.2 - position and truncate methods // implemented; minor changes for moderate thread // safety and optimal performance // boucherb@users 2004-04-xx - doc 1.7.2 - javadocs updated; methods put in // correct (historical, interface declared) order // boucherb@users 2005-12-07 - patch 1.8.0.x - initial JDBC 4.0 support work // boucherb@users 2006-05-22 - doc 1.9.0 - full synch up to Mustang Build 84 // - patch 1.9.0 - setBinaryStream improvement // patch 1.9.0 // - fixed invalid reference to new BinaryStream(...) in getBinaryStream // // patch 1.9.0 - full synch up to Mustang b90 // - better bounds checking // - added support for clients to decide whether getBinaryStream // uses copy of internal byte buffer /** * The representation (mapping) in * the Java<sup><font size=-2>TM</font></sup> programming * language of an SQL * <code>BLOB</code> value. An SQL <code>BLOB</code> is a built-in type * that stores a Binary Large Object as a column value in a row of * a database table. By default drivers implement <code>Blob</code> using * an SQL <code>locator(BLOB)</code>, which means that a * <code>Blob</code> object contains a logical pointer to the * SQL <code>BLOB</code> data rather than the data itself. * A <code>Blob</code> object is valid for the duration of the * transaction in which is was created. * * <P>Methods in the interfaces {@link java.sql.ResultSet}, * {@link java.sql.CallableStatement}, and {@link java.sql.PreparedStatement}, such as * <code>getBlob</code> and <code>setBlob</code> allow a programmer to * access an SQL <code>BLOB</code> value. 
* The <code>Blob</code> interface provides methods for getting the * length of an SQL <code>BLOB</code> (Binary Large Object) value, * for materializing a <code>BLOB</code> value on the client, and for * determining the position of a pattern of bytes within a * <code>BLOB</code> value. In addition, this interface has methods for updating * a <code>BLOB</code> value. * <p> * All methods on the <code>Blob</code> interface must be fully implemented if the * JDBC driver supports the data type. * * <!-- start Release-specific documentation --> * <div class="ReleaseSpecificDocumentation"> * <h3>HSQLDB-Specific Information:</h3> <p> * * Previous to 2.0, the HSQLDB driver did not implement Blob using an SQL * locator(BLOB). That is, an HSQLDB Blob object did not contain a logical * pointer to SQL BLOB data; rather it directly contained a representation of * the data (a byte array). As a result, an HSQLDB Blob object was itself * valid beyond the duration of the transaction in which is was created, * although it did not necessarily represent a corresponding value * on the database. Also, the interface methods for updating a BLOB value * were unsupported, with the exception of the truncate method, * in that it could be used to truncate the local value. <p> * * Starting with 2.0, the HSQLDB driver fully supports both local and remote * SQL BLOB data implementations, meaning that an HSQLDB Blob object <em>may</em> * contain a logical pointer to remote SQL BLOB data (see {@link JDBCBlobClient * JDBCBlobClient}) or it may directly contain a local representation of the * data (as implemented in this class). 
In particular, when the product is built
 * under JDK 1.6+ and the Blob instance is constructed as a result of calling
 * JDBCConnection.createBlob(), then the resulting Blob instance is initially
 * disconnected (is not bound to the transaction scope of the vending Connection
 * object), the data is contained directly and all interface methods for
 * updating the BLOB value are supported for local use until the first
 * invocation of free(); otherwise, an HSQLDB Blob's implementation is
 * determined at runtime by the driver, it is typically not valid beyond the
 * duration of the transaction in which is was created, and there no
 * standard way to query whether it represents a local or remote
 * value.<p>
 *
 * </div>
 * <!-- end Release-specific documentation -->
 *
 * @author james house jhouse@part.net
 * @author boucherb@users
 * @version 2.0
 * @since JDK 1.2, HSQLDB 1.7.2
 * @revised JDK 1.6, HSQLDB 2.0
 */
public class JDBCBlob implements Blob {

    /**
     * Returns the number of bytes in the <code>BLOB</code> value
     * designated by this <code>Blob</code> object.
     *
     * @return length of the <code>BLOB</code> in bytes
     * @exception SQLException if there is an error accessing the
     *            length of the <code>BLOB</code>
     * @since JDK 1.2, HSQLDB 1.7.2
     */
    public long length() throws SQLException {
        // getData() also performs the closed check
        return getData().length;
    }

    /**
     * Retrieves all or part of the <code>BLOB</code> value that this
     * <code>Blob</code> object represents, as an array of bytes, containing
     * up to <code>length</code> consecutive bytes starting at position
     * <code>pos</code>.
     *
     * <!-- start release-specific documentation -->
     * <div class="ReleaseSpecificDocumentation">
     * <h3>HSQLDB-Specific Information:</h3> <p>
     *
     * The official specification is ambiguous about the policy when
     * pos &gt; this.length() - length: one could retrieve only the octets up
     * to this.length(), or throw. HSQLDB throws.
     * </div>
     * <!-- end release-specific documentation -->
     *
     * @param pos the ordinal position of the first byte in the
     *        <code>BLOB</code> value to be extracted; the first byte is at
     *        position 1
     * @param length the number of consecutive bytes to be copied; JDBC 4.1[the
     *        value for length must be 0 or greater]
     * @return a byte array containing up to <code>length</code> consecutive
     *         bytes from the <code>BLOB</code> value, starting at position
     *         <code>pos</code>
     * @exception SQLException if there is an error accessing the
     *            <code>BLOB</code> value; if pos is less than 1 or length is
     *            less than 0
     * @see #setBytes
     * @since JDK 1.2, HSQLDB 1.7.2
     */
    public byte[] getBytes(long pos, final int length) throws SQLException {

        final byte[] data = getData();
        final int    dlen = data.length;

        // pos may legally be dlen + 1 (MIN_POS + dlen), yielding an empty
        // result when length == 0
        if (pos < MIN_POS || pos > MIN_POS + dlen) {
            throw Util.outOfRangeArgument("pos: " + pos);
        }
        pos--;    // convert 1-based JDBC position to 0-based array index

        if (length < 0 || length > dlen - pos) {
            throw Util.outOfRangeArgument("length: " + length);
        }

        final byte[] result = new byte[length];

        System.arraycopy(data, (int) pos, result, 0, length);

        return result;
    }

    /**
     * Retrieves the <code>BLOB</code> value designated by this
     * <code>Blob</code> instance as a stream.
     *
     * @return a stream containing the <code>BLOB</code> data
     * @exception SQLException if there is an error accessing the
     *            <code>BLOB</code> value
     * @see #setBinaryStream
     * @since JDK 1.2, HSQLDB 1.7.2
     */
    public InputStream getBinaryStream() throws SQLException {
        // the stream reads the snapshot taken here; later setData calls do
        // not affect it
        return new ByteArrayInputStream(getData());
    }

    /**
     * Retrieves the byte position at which the specified byte array
     * <code>pattern</code> begins within the <code>BLOB</code> value that
     * this <code>Blob</code> object represents. The search for
     * <code>pattern</code> begins at position <code>start</code>.
     *
     * @param pattern the byte array for which to search
     * @param start the position at which to begin searching; the
     *        first position is 1
     * @return the position at which the pattern appears, else -1
     * @exception SQLException if there is an error accessing the
     *            <code>BLOB</code> or if start is less than 1
     * @since JDK 1.2, HSQLDB 1.7.2
     */
    public long position(final byte[] pattern,
                         final long start) throws SQLException {

        final byte[] data = getData();
        final int    dlen = data.length;

        if (start < MIN_POS) {
            throw Util.outOfRangeArgument("start: " + start);
        } else if (start > dlen || pattern == null) {
            // a null pattern is treated as "not found" rather than an error
            return -1L;
        }

        // by now, we know start <= Integer.MAX_VALUE;
        final int startIndex = (int) start - 1;
        final int plen       = pattern.length;

        if (plen == 0 || startIndex > dlen - plen) {
            return -1L;
        }

        final int result = KMPSearchAlgorithm.search(data, pattern,
            KMPSearchAlgorithm.computeTable(pattern), startIndex);

        // convert 0-based search index back to 1-based JDBC position
        return (result == -1) ? -1
                              : result + 1;
    }

    /**
     * Retrieves the byte position in the <code>BLOB</code> value designated
     * by this <code>Blob</code> object at which <code>pattern</code> begins.
     * The search begins at position <code>start</code>.
     *
     * @param pattern the <code>Blob</code> object designating
     *        the <code>BLOB</code> value for which to search
     * @param start the position in the <code>BLOB</code> value
     *        at which to begin searching; the first position is 1
     * @return the position at which the pattern begins, else -1
     * @exception SQLException if there is an error accessing the
     *            <code>BLOB</code> value or if start is less than 1
     * @since JDK 1.2, HSQLDB 1.7.2
     */
    public long position(final Blob pattern, long start) throws SQLException {

        final byte[] data = getData();
        final int    dlen = data.length;

        if (start < MIN_POS) {
            throw Util.outOfRangeArgument("start: " + start);
        } else if (start > dlen || pattern == null) {
            return -1L;
        }

        // by now, we know start <= Integer.MAX_VALUE;
        final int  startIndex = (int) start - 1;
        final long plen       = pattern.length();

        if (plen == 0 || startIndex > ((long) dlen) - plen) {
            return -1L;
        }

        // by now, we know plen <= Integer.MAX_VALUE
        final int iplen = (int) plen;
        byte[]    bytePattern;

        // avoid a defensive copy when the pattern is one of ours
        if (pattern instanceof JDBCBlob) {
            bytePattern = ((JDBCBlob) pattern).data();
        } else {
            bytePattern = pattern.getBytes(1L, iplen);
        }

        final int result = KMPSearchAlgorithm.search(data, bytePattern,
            KMPSearchAlgorithm.computeTable(bytePattern), startIndex);

        return (result == -1) ? -1
                              : result + 1;
    }

    // -------------------------- JDBC 3.0 -----------------------------------

    /**
     * Writes the given array of bytes to the <code>BLOB</code> value that
     * this <code>Blob</code> object represents, starting at position
     * <code>pos</code>, and returns the number of bytes written, extending
     * the <code>Blob</code> if the end of its value is reached.
     *
     * <!-- start release-specific documentation -->
     * <div class="ReleaseSpecificDocumentation">
     * <h3>HSQLDB-Specific Information:</h3> <p>
     *
     * Starting with HSQLDB 2.0 this feature is supported. For a Blob created
     * via JDBCConnection.createBlob() this affects only the client-side
     * value; propagate it to the database via a setXXX/updateXXX method.
     * JDBCBlob uses local variable snapshot isolation rather than strict
     * serialized read/write access; applications needing total-order Blob
     * modification semantics should synchronize externally. <p>
     * </div>
     * <!-- end release-specific documentation -->
     *
     * @param pos the position in the <code>BLOB</code> object at which
     *        to start writing; the first position is 1
     * @param bytes the array of bytes to be written to the <code>BLOB</code>
     *        value that this <code>Blob</code> object represents
     * @return the number of bytes written
     * @exception SQLException if there is an error accessing the
     *            <code>BLOB</code> value or if pos is less than 1
     * @see #getBytes
     * @since JDK 1.4, HSQLDB 1.7.2
     * @revised JDK 1.6, HSQLDB 2.0
     */
    public int setBytes(long pos, byte[] bytes) throws SQLException {

        if (bytes == null) {
            throw Util.nullArgument("bytes");
        }

        // delegate to the 4-arg variant for all remaining validation
        return (setBytes(pos, bytes, 0, bytes.length));
    }

    /**
     * Writes all or part of the given <code>byte</code> array to the
     * <code>BLOB</code> value that this <code>Blob</code> object represents
     * and returns the number of bytes written. Writing starts at position
     * <code>pos</code>; <code>len</code> bytes from the given byte array are
     * written, extending the <code>Blob</code> if the end of its value is
     * reached.
     *
     * <!-- start release-specific documentation -->
     * <div class="ReleaseSpecificDocumentation">
     * <h3>HSQLDB-Specific Information:</h3> <p>
     *
     * Starting with HSQLDB 2.0 this feature is supported. For a Blob created
     * via JDBCConnection.createBlob() this affects only the client-side
     * value. If <code>pos</code> is greater than the current length, the
     * value is extended and the undefined region up to <code>pos</code> is
     * filled with (byte)0. JDBCBlob uses local variable snapshot isolation;
     * applications needing total-order Blob modification semantics should
     * synchronize externally. <p>
     * </div>
     * <!-- end release-specific documentation -->
     *
     * @param pos the position in the <code>BLOB</code> object at which
     *        to start writing; the first position is 1
     * @param bytes the array of bytes to be written to this <code>BLOB</code>
     *        object
     * @param offset the offset into the array <code>bytes</code> at which
     *        to start reading the bytes to be set
     * @param len the number of bytes to be written to the <code>BLOB</code>
     *        value from the array of bytes <code>bytes</code>
     * @return the number of bytes written
     * @exception SQLException if there is an error accessing the
     *            <code>BLOB</code> value or if pos is less than 1
     * @see #getBytes
     * @since JDK 1.4, HSQLDB 1.7.2
     * @revised JDK 1.6, HSQLDB 2.0
     */
    public int setBytes(long pos, byte[] bytes, int offset,
                        int len) throws SQLException {

        // updates are only supported on disconnected Blobs created by
        // JDBCConnection.createBlob()
        if (!m_createdByConnection) {

            /** @todo - better error message */
            throw Util.notSupported();
        }

        if (bytes == null) {
            throw Util.nullArgument("bytes");
        }

        if (offset < 0 || offset > bytes.length) {
            throw Util.outOfRangeArgument("offset: " + offset);
        }

        if (len > bytes.length - offset) {
            throw Util.outOfRangeArgument("len: " + len);
        }

        // upper bound keeps pos - 1 + len within Integer.MAX_VALUE
        if (pos < MIN_POS || pos > 1L + (Integer.MAX_VALUE - len)) {
            throw Util.outOfRangeArgument("pos: " + pos);
        }
        pos--;    // convert 1-based JDBC position to 0-based array index

        byte[]    data = getData();
        final int dlen = data.length;

        if ((pos + len) > dlen) {

            // grow the backing array; the gap (if any) stays zero-filled
            byte[] temp = new byte[(int) pos + len];

            System.arraycopy(data, 0, temp, 0, dlen);

            data = temp;
            temp = null;
        }
        System.arraycopy(bytes, offset, data, (int) pos, len);

        // paranoia, in case somone free'd us during the array copies.
        checkClosed();
        setData(data);

        return len;
    }

    /**
     * Retrieves a stream that can be used to write to the <code>BLOB</code>
     * value that this <code>Blob</code> object represents, beginning at
     * position <code>pos</code>, extending the <code>Blob</code> if the end
     * of its value is reached.
     *
     * <!-- start release-specific documentation -->
     * <div class="ReleaseSpecificDocumentation">
     * <h3>HSQLDB-Specific Information:</h3> <p>
     *
     * Starting with HSQLDB 2.0 this feature is supported. For a Blob created
     * via JDBCConnection.createBlob() this affects only the client-side
     * value. The data written to the stream does not appear in this Blob
     * until the stream is closed; on close, if <code>pos</code> is greater
     * than the current length, the value is extended and the undefined
     * region is filled with (byte)0. JDBCBlob uses local variable snapshot
     * isolation; applications needing total-order Blob modification
     * semantics should synchronize externally. <p>
     * </div>
     * <!-- end release-specific documentation -->
     *
     * @param pos the position in the <code>BLOB</code> value at which
     *        to start writing; the first position is 1
     * @return a <code>java.io.OutputStream</code> object to which data can
     *         be written
     * @exception SQLException if there is an error accessing the
     *            <code>BLOB</code> value or if pos is less than 1
     * @see #getBinaryStream
     * @since JDK 1.4, HSQLDB 1.7.2
     * @revised JDK 1.6, HSQLDB 2.0
     */
    public OutputStream setBinaryStream(final long pos) throws SQLException {

        if (!m_createdByConnection) {

            /** @todo - Better error message */
            throw Util.notSupported();
        }

        if (pos < MIN_POS || pos > MAX_POS) {
            throw Util.outOfRangeArgument("pos: " + pos);
        }
        checkClosed();

        // buffer all writes locally; the Blob is only updated when the
        // returned stream is closed
        return new java.io.ByteArrayOutputStream() {

            public synchronized void close() throws java.io.IOException {

                try {
                    JDBCBlob.this.setBytes(pos, toByteArray());
                } catch (SQLException se) {
                    throw JavaSystem.toIOException(se);
                } finally {
                    super.close();
                }
            }
        };
    }

    /**
     * Truncates the <code>BLOB</code> value that this <code>Blob</code>
     * object represents to be <code>len</code> bytes in length.
     *
     * <!-- start release-specific documentation -->
     * <div class="ReleaseSpecificDocumentation">
     * <h3>HSQLDB-Specific Information:</h3> <p>
     *
     * Starting with HSQLDB 2.0 this feature is fully supported. For a Blob
     * created via JDBCConnection.createBlob() this affects only the
     * client-side value; propagate the truncated value to the database via a
     * setXXX/updateXXX method. <p>
     * </div>
     * <!-- end release-specific documentation -->
     *
     * @param len the length, in bytes, to which the <code>BLOB</code> value
     *        that this <code>Blob</code> object represents should be truncated
     * @exception SQLException if there is an error accessing the
     *            <code>BLOB</code> value or if len is less than 0
     * @since JDK 1.4, HSQLDB 1.7.2
     * @revised JDK 1.6, HSQLDB 2.0
     */
    public void truncate(final long len) throws SQLException {

        final byte[] data = getData();

        if (len < 0 || len > data.length) {
            throw Util.outOfRangeArgument("len: " + len);
        }

        if (len == data.length) {
            return;
        }

        byte[] newData = new byte[(int) len];

        System.arraycopy(data, 0, newData, 0, (int) len);
        checkClosed();    // limit possible race-condition with free()
        setData(newData);
    }

    //------------------------- JDBC 4.0 -----------------------------------

    /**
     * This method frees the <code>Blob</code> object and releases the
     * resources that it holds. The object is invalid once <code>free</code>
     * is called; any attempt to invoke a method other than <code>free</code>
     * will result in a <code>SQLException</code> being thrown. Calling
     * <code>free</code> multiple times is a no-op.
     *
     * @throws SQLException if an error occurs releasing
     *         the Blob's resources
     * @since JDK 1.6, HSQLDB 2.0
     */
    public synchronized void free() throws SQLException {
        m_closed = true;
        m_data   = null;    // release the buffer for garbage collection
    }

    /**
     * Returns an <code>InputStream</code> object that contains a partial
     * <code>Blob</code> value, starting with the byte specified by pos,
     * which is length bytes in length.
     *
     * @param pos the offset to the first byte of the partial value to be
     *        retrieved. The first byte in the <code>Blob</code> is at
     *        position 1
     * @param length the length in bytes of the partial value to be retrieved
     * @return <code>InputStream</code> through which the partial
     *         <code>Blob</code> value can be read
     * @throws SQLException if pos is less than 1 or if pos is greater than
     *         the number of bytes in the <code>Blob</code> or if pos + length
     *         is greater than the number of bytes in the <code>Blob</code>
     * @since JDK 1.6, HSQLDB 2.0
     */
    public InputStream getBinaryStream(long pos,
                                       long length) throws SQLException {

        final byte[] data = getData();
        final int    dlen = data.length;

        if (pos < MIN_POS || pos > dlen) {
            throw Util.outOfRangeArgument("pos: " + pos);
        }
        pos--;    // convert 1-based JDBC position to 0-based array index

        if (length < 0 || length > dlen - pos) {
            throw Util.outOfRangeArgument("length: " + length);
        }

        // whole-value request: stream the snapshot directly, no copy needed
        if (pos == 0 && length == dlen) {
            return new ByteArrayInputStream(data);
        }

        final byte[] result = new byte[(int) length];

        System.arraycopy(data, (int) pos, result, 0, (int) length);

        return new ByteArrayInputStream(result);
    }

    // ---------------------- internal implementation --------------------------

    // smallest legal 1-based JDBC position
    public static final long MIN_POS = 1L;

    // largest legal 1-based JDBC position (array-backed, so int-limited)
    public static final long MAX_POS = 1L + (long) Integer.MAX_VALUE;

    // true once free() has been called; guarded by 'this'
    private boolean m_closed;

    // the Blob's octet sequence; null after free(); guarded by 'this'
    private byte[] m_data;

    // true only for instances created by JDBCConnection.createBlob(), which
    // are the only ones that accept updates
    private final boolean m_createdByConnection;

    /**
     * Constructs a new JDBCBlob instance wrapping the given octet sequence. <p>
     *
     * This constructor is used internally to retrieve result set values as
     * Blob objects, yet it must be public to allow access from other packages.
     * As such (in the interest of efficiency) this object maintains a
     * reference to the given octet sequence rather than making a copy;
     * special care should be taken by external clients never to use this
     * constructor with a byte array object that may later be modified
     * externally.
     *
     * @param data the octet sequence representing the Blob value
     * @throws SQLException if the argument is null
     */
    public JDBCBlob(final byte[] data) throws SQLException {

        if (data == null) {
            throw Util.nullArgument();
        }

        m_data                = data;
        m_createdByConnection = false;
    }

    /**
     * Constructs an initially empty, updatable Blob; used by
     * JDBCConnection.createBlob().
     */
    protected JDBCBlob() {
        m_data                = new byte[0];
        m_createdByConnection = true;
    }

    /**
     * Throws if free() has already been called.
     *
     * @throws SQLException with error code X_07501 if this Blob is closed
     */
    protected synchronized void checkClosed() throws SQLException {

        if (m_closed) {
            throw Util.sqlException(ErrorCode.X_07501);
        }
    }

    /**
     * Direct (uncopied) access to the internal buffer for trusted callers.
     *
     * @return the internal octet sequence
     * @throws SQLException if this Blob is closed
     */
    protected byte[] data() throws SQLException {
        return getData();
    }

    // snapshot accessor: callers work on the reference captured here, giving
    // local-variable snapshot isolation rather than serialized access
    private synchronized byte[] getData() throws SQLException {

        checkClosed();

        return m_data;
    }

    // replaces the internal buffer; paired with getData() snapshots
    private synchronized void setData(byte[] data) throws SQLException {

        checkClosed();

        m_data = data;
    }
}
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/ads/googleads/v8/services/keyword_plan_service.proto

package com.google.ads.googleads.v8.services;

// NOTE(review): generated code — do not hand-edit; regenerate from the .proto
// definition if the message shape needs to change.
/**
 * <pre>
 * Request message for [KeywordPlanService.GenerateForecastTimeSeries][google.ads.googleads.v8.services.KeywordPlanService.GenerateForecastTimeSeries].
 * </pre>
 *
 * Protobuf type {@code google.ads.googleads.v8.services.GenerateForecastTimeSeriesRequest}
 */
public final class GenerateForecastTimeSeriesRequest extends
    com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:google.ads.googleads.v8.services.GenerateForecastTimeSeriesRequest)
    GenerateForecastTimeSeriesRequestOrBuilder {
private static final long serialVersionUID = 0L;
  // Use GenerateForecastTimeSeriesRequest.newBuilder() to construct.
  private GenerateForecastTimeSeriesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private GenerateForecastTimeSeriesRequest() {
    keywordPlan_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new GenerateForecastTimeSeriesRequest();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
  getUnknownFields() {
    return this.unknownFields;
  }
  // Eager wire-format parsing constructor; unrecognized fields are preserved
  // in unknownFields rather than dropped.
  private GenerateForecastTimeSeriesRequest(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10: {
            java.lang.String s = input.readStringRequireUtf8();

            keywordPlan_ = s;
            break;
          }
          default: {
            if (!parseUnknownField(
                input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v8.services.KeywordPlanServiceProto.internal_static_google_ads_googleads_v8_services_GenerateForecastTimeSeriesRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v8.services.KeywordPlanServiceProto.internal_static_google_ads_googleads_v8_services_GenerateForecastTimeSeriesRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v8.services.GenerateForecastTimeSeriesRequest.class, com.google.ads.googleads.v8.services.GenerateForecastTimeSeriesRequest.Builder.class);
  }

  public static final int KEYWORD_PLAN_FIELD_NUMBER = 1;
  // Holds either a String or a ByteString; lazily converted and cached below.
  private volatile java.lang.Object keywordPlan_;
  /**
   * <pre>
   * Required. The resource name of the keyword plan to be forecasted.
   * </pre>
   *
   * <code>string keyword_plan = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
   * @return The keywordPlan.
   */
  @java.lang.Override
  public java.lang.String getKeywordPlan() {
    java.lang.Object ref = keywordPlan_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      keywordPlan_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * Required. The resource name of the keyword plan to be forecasted.
   * </pre>
   *
   * <code>string keyword_plan = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
   * @return The bytes for keywordPlan.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString
      getKeywordPlanBytes() {
    java.lang.Object ref = keywordPlan_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      keywordPlan_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(keywordPlan_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, keywordPlan_);
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(keywordPlan_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, keywordPlan_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v8.services.GenerateForecastTimeSeriesRequest)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v8.services.GenerateForecastTimeSeriesRequest other = (com.google.ads.googleads.v8.services.GenerateForecastTimeSeriesRequest) obj;

    if (!getKeywordPlan()
        .equals(other.getKeywordPlan())) return false;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + KEYWORD_PLAN_FIELD_NUMBER;
    hash = (53 * hash) + getKeywordPlan().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.ads.googleads.v8.services.GenerateForecastTimeSeriesRequest parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v8.services.GenerateForecastTimeSeriesRequest parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v8.services.GenerateForecastTimeSeriesRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v8.services.GenerateForecastTimeSeriesRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v8.services.GenerateForecastTimeSeriesRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v8.services.GenerateForecastTimeSeriesRequest parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v8.services.GenerateForecastTimeSeriesRequest parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v8.services.GenerateForecastTimeSeriesRequest parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v8.services.GenerateForecastTimeSeriesRequest parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v8.services.GenerateForecastTimeSeriesRequest parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v8.services.GenerateForecastTimeSeriesRequest parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v8.services.GenerateForecastTimeSeriesRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v8.services.GenerateForecastTimeSeriesRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Request message for [KeywordPlanService.GenerateForecastTimeSeries][google.ads.googleads.v8.services.KeywordPlanService.GenerateForecastTimeSeries].
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v8.services.GenerateForecastTimeSeriesRequest}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v8.services.GenerateForecastTimeSeriesRequest)
      com.google.ads.googleads.v8.services.GenerateForecastTimeSeriesRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v8.services.KeywordPlanServiceProto.internal_static_google_ads_googleads_v8_services_GenerateForecastTimeSeriesRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v8.services.KeywordPlanServiceProto.internal_static_google_ads_googleads_v8_services_GenerateForecastTimeSeriesRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v8.services.GenerateForecastTimeSeriesRequest.class, com.google.ads.googleads.v8.services.GenerateForecastTimeSeriesRequest.Builder.class);
    }

    // Construct using com.google.ads.googleads.v8.services.GenerateForecastTimeSeriesRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      keywordPlan_ = "";

      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v8.services.KeywordPlanServiceProto.internal_static_google_ads_googleads_v8_services_GenerateForecastTimeSeriesRequest_descriptor;
    }

    @java.lang.Override
    public com.google.ads.googleads.v8.services.GenerateForecastTimeSeriesRequest getDefaultInstanceForType() {
      return com.google.ads.googleads.v8.services.GenerateForecastTimeSeriesRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.ads.googleads.v8.services.GenerateForecastTimeSeriesRequest build() {
      com.google.ads.googleads.v8.services.GenerateForecastTimeSeriesRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.ads.googleads.v8.services.GenerateForecastTimeSeriesRequest buildPartial() {
      com.google.ads.googleads.v8.services.GenerateForecastTimeSeriesRequest result = new com.google.ads.googleads.v8.services.GenerateForecastTimeSeriesRequest(this);
      result.keywordPlan_ = keywordPlan_;
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v8.services.GenerateForecastTimeSeriesRequest) {
        return mergeFrom((com.google.ads.googleads.v8.services.GenerateForecastTimeSeriesRequest)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.ads.googleads.v8.services.GenerateForecastTimeSeriesRequest other) {
      if (other == com.google.ads.googleads.v8.services.GenerateForecastTimeSeriesRequest.getDefaultInstance()) return this;
      if (!other.getKeywordPlan().isEmpty()) {
        keywordPlan_ = other.keywordPlan_;
        onChanged();
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.ads.googleads.v8.services.GenerateForecastTimeSeriesRequest parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (com.google.ads.googleads.v8.services.GenerateForecastTimeSeriesRequest) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private java.lang.Object keywordPlan_ = "";
    /**
     * <pre>
     * Required. The resource name of the keyword plan to be forecasted.
     * </pre>
     *
     * <code>string keyword_plan = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
     * @return The keywordPlan.
     */
    public java.lang.String getKeywordPlan() {
      java.lang.Object ref = keywordPlan_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        keywordPlan_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * Required. The resource name of the keyword plan to be forecasted.
     * </pre>
     *
     * <code>string keyword_plan = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
     * @return The bytes for keywordPlan.
     */
    public com.google.protobuf.ByteString
        getKeywordPlanBytes() {
      java.lang.Object ref = keywordPlan_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        keywordPlan_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * Required. The resource name of the keyword plan to be forecasted.
     * </pre>
     *
     * <code>string keyword_plan = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
     * @param value The keywordPlan to set.
     * @return This builder for chaining.
     */
    public Builder setKeywordPlan(
        java.lang.String value) {
      if (value == null) {
    throw new NullPointerException();
  }
  
      keywordPlan_ = value;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Required. The resource name of the keyword plan to be forecasted.
     * </pre>
     *
     * <code>string keyword_plan = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
     * @return This builder for chaining.
     */
    public Builder clearKeywordPlan() {
      
      keywordPlan_ = getDefaultInstance().getKeywordPlan();
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Required. The resource name of the keyword plan to be forecasted.
     * </pre>
     *
     * <code>string keyword_plan = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
     * @param value The bytes for keywordPlan to set.
     * @return This builder for chaining.
     */
    public Builder setKeywordPlanBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
      
      keywordPlan_ = value;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }


    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v8.services.GenerateForecastTimeSeriesRequest)
  }

  // @@protoc_insertion_point(class_scope:google.ads.googleads.v8.services.GenerateForecastTimeSeriesRequest)
  private static final com.google.ads.googleads.v8.services.GenerateForecastTimeSeriesRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v8.services.GenerateForecastTimeSeriesRequest();
  }

  public static com.google.ads.googleads.v8.services.GenerateForecastTimeSeriesRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<GenerateForecastTimeSeriesRequest>
      PARSER = new com.google.protobuf.AbstractParser<GenerateForecastTimeSeriesRequest>() {
    @java.lang.Override
    public GenerateForecastTimeSeriesRequest parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new GenerateForecastTimeSeriesRequest(input, extensionRegistry);
    }
  };

  public static com.google.protobuf.Parser<GenerateForecastTimeSeriesRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<GenerateForecastTimeSeriesRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.ads.googleads.v8.services.GenerateForecastTimeSeriesRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}
package com.scispike.ws;

import java.net.MalformedURLException;
import java.net.URL;
import java.util.Hashtable;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;
import java.util.Timer;
import java.util.TimerTask;

import org.java_websocket.WebSocket.READYSTATE;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import com.scispike.callback.Callback;
import com.scispike.callback.Event;
import com.scispike.callback.EventEmitter;
import com.scispike.conversation.AuthFunction;

/**
 * SockJS-based socket wrapper that authenticates via an {@link AuthFunction},
 * reconnects automatically with doubling back-off (capped at {@link #BO_MAX}),
 * and monitors liveness with a heartbeat timer: if no heartbeat arrives within
 * {@link #HB_INTERVAL} ms the connection is torn down and re-established.
 * Connection-state changes are fanned out to registered {@link EventEmitter}s.
 *
 * NOTE(review): state (socket, isConnecting, the maps) is touched both from
 * callers and from Timer/websocket callback threads; Hashtable gives per-call
 * synchronization only — confirm whether stronger coordination is needed.
 */
public class ReconnectingSocket {

  // Initial reconnect back-off in milliseconds.
  private static final int bo_min = 1000;
  // Heartbeat check period (ms); mutable on purpose so apps can tune it.
  public static int HB_INTERVAL = 30000;
  // Upper bound for the reconnect back-off (ms).
  public static int BO_MAX = 10 * 1000;

  // Heartbeat-seen flag per urlPrefix; set true by onHeartbeat, cleared each period.
  final Map<String, Boolean> hb = new Hashtable<String, Boolean>();
  // Current back-off delay per urlPrefix; doubled on every failed attempt.
  final Map<String, Integer> backOff = new Hashtable<String, Integer>();
  // The live socket once the server confirms "<sessionId>:connected"; null otherwise.
  SockJsClient socket;
  boolean isConnecting = false;
  // Pending heartbeat-check task per urlPrefix, so it can be cancelled.
  final Map<String, TimerTask> globalTasks = new Hashtable<String, TimerTask>();
  // Daemon timer shared by heartbeat checks and reconnect scheduling.
  final Timer timer = new Timer(true);
  final String urlPrefix;
  final AuthFunction authFunction;
  // Insertion-ordered so emit() notifies listeners in registration order.
  final Set<EventEmitter<String>> eventEmitters = new LinkedHashSet<EventEmitter<String>>();

  public ReconnectingSocket(final String urlPrefix,
      final AuthFunction authFunction) {
    this.urlPrefix = urlPrefix;
    this.authFunction = authFunction;
  }

  // Tears down any current socket, then starts a fresh connection attempt.
  void doReconnect() {
    doDisconnect();
    doConnect();
  }

  // Wraps MalformedURLException in an unchecked exception: the URL is
  // assembled internally, so a bad one is a programming error.
  URL createUrl(String url) {
    try {
      return new URL(url);
    } catch (MalformedURLException e) {
      throw new RuntimeException(e);
    }
  }

  void resetBackoff() {
    backOff.put(urlPrefix, bo_min);
  }

  // Cancels the pending heartbeat-check task, if any, and drops cancelled
  // tasks from the timer queue.
  void cancelHeartbeat() {
    TimerTask t = globalTasks.get(urlPrefix);
    if (t != null) {
      t.cancel();
      timer.purge();
    }
  }

  // Schedules the next doConnect() after the current back-off, doubling the
  // stored delay (capped at BO_MAX) for the attempt after that.
  // NOTE(review): assumes resetBackoff() ran at least once for this urlPrefix;
  // backOff.get() would NPE otherwise — connect() does guarantee this.
  void retryConnect() {
    int backoff = backOff.get(urlPrefix);
    backOff.put(urlPrefix, Math.min(BO_MAX, backoff * 2));
    cancelHeartbeat();
    TimerTask t = new TimerTask() {
      @Override
      public void run() {
        doConnect();
      }
    };
    timer.schedule(t, backoff);
  }

  // Broadcasts an event to every registered emitter.
  void emit(String message, String... data) {
    for (EventEmitter<String> e : eventEmitters) {
      e.emit(message, data);
    }
  }

  // Arms a one-shot check that fires after HB_INTERVAL: if no heartbeat was
  // seen in the window it forces a reconnect, otherwise it re-arms itself.
  void resetHeartbeat() {
    hb.put(urlPrefix, false);
    cancelHeartbeat();
    TimerTask t = new TimerTask() {
      @Override
      public void run() {
        if (!hb.get(urlPrefix)) {
          doReconnect();
        } else {
          resetHeartbeat();
        }
      }
    };
    globalTasks.put(urlPrefix, t);
    timer.schedule(t, HB_INTERVAL);
  }

  // Authenticates, then opens a SockJS socket to
  // "<urlPrefix>/ws/<sessionId>/auth/<token>". On auth failure the attempt is
  // rescheduled via retryConnect(). The socket only becomes "the" socket once
  // the server emits "<sessionId>:connected".
  // NOTE(review): assumes auth callback supplies args[0]=token, args[1]=sessionId
  // — confirm against AuthFunction's contract.
  void doConnect() {
    isConnecting = true;
    authFunction.auth(new Callback<String, String>() {

      @Override
      public void call(String error, String... args) {
        if (error != null) {
          System.err.println(error);
          retryConnect();
          return;
        }
        final String token = args[0];
        final String sessionId = args[1];
        final String url = urlPrefix + "/ws/" + sessionId + "/auth/" + token;
        final SockJsClient mySocket = new SockJsClient(createUrl(url)) {

          @Override
          public void onError(Exception arg0) {
            cancelHeartbeat();
            doDisconnect(this);
            emit("error", arg0.getMessage());
          }

          @Override
          public void onClose(int arg0, String arg1, boolean arg2) {
            // Any close (clean or not) is treated as a failure: schedule a retry.
            cancelHeartbeat();
            isConnecting = false;
            retryConnect();
            emit("error", "disconnected");
            emit("socket::disconnected");
          }

          @Override
          void onJOpen() {
            resetBackoff();
            resetHeartbeat();
          }

          @Override
          void onJClose() {
            resetBackoff();
            cancelHeartbeat();
          }

          @Override
          void onHeartbeat() {
            hb.put(urlPrefix, true);
          }

          @Override
          void onData(Object data) {
            JSONObject msg = (JSONObject) data;
            try {
              // Messages are {"event": ..., "data": ...}; "data" is optional.
              Object d = msg.has("data") ? msg.get("data") : null;
              String event = msg.getString("event");
              if (d != null) {
                emit(event, d.toString());
              } else {
                emit(event);
              }
              // Server-side handshake completion: adopt this socket as current.
              if (event.equals(sessionId + ":connected")) {
                socket = this;
                emit("socket::connected");
              }
            } catch (JSONException e) {
              e.printStackTrace();
            }
          }
        };
        mySocket.connect();
      }
    });
  }

  public boolean isConnected() {
    return socket != null && socket.getReadyState() == READYSTATE.OPEN;
  }

  // Sends msg wrapped in a one-element JSON array (SockJS framing).
  // cb is invoked with null on success or "Not connected" when no socket.
  public void send(String msg, Callback<String, String> cb) {
    if (socket != null) {
      JSONArray j = new JSONArray();
      j.put(msg);
      socket.send(j.toString());
      if (cb != null) {
        cb.call(null);
      }
    } else if (cb != null) {
      cb.call("Not connected");
    }
  }

  // Registers a listener and kicks off a connection attempt if none is in
  // flight; if already connected, notifies the new listener immediately.
  public void connect(EventEmitter<String> eventEmitter) {
    eventEmitters.add(eventEmitter);
    if (!isConnecting) {
      resetBackoff();
      doConnect();
    } else if (isConnected()) {
      eventEmitter.emit("socket::connected");
    }
  }

  private void doDisconnect() {
    doDisconnect(null);
  }

  // Closes and clears the current socket (preferring the field over the
  // argument so the shared reference is nulled before the blocking close).
  private void doDisconnect(SockJsClient sockJsClient) {
    resetBackoff();
    if (socket != null) {
      sockJsClient = socket;
      socket = null;
    }
    if (sockJsClient != null) {
      try {
        sockJsClient.closeBlocking();
      } catch (InterruptedException e) {
        e.printStackTrace();
      }
    }
  }

  // Detaches one listener. The last listener triggers a real socket close and
  // is removed only once "socket::disconnected" fires; otherwise removal is
  // immediate and the event is emitted to that listener alone.
  protected void disconnect(final EventEmitter<String> eventEmitter,
      final Event<String> done) {
    if (eventEmitters.size() == 1 && socket != null) {
      eventEmitter.once("socket::disconnected", new Event<String>() {

        @Override
        public void onEmit(String... data) {
          if (done != null) {
            done.onEmit(data);
          }
          eventEmitters.remove(eventEmitter);
          eventEmitter.removeAllListeners();
        }
      });
      doDisconnect();
    } else {
      eventEmitters.remove(eventEmitter);
      eventEmitter.emit("socket::disconnected");
      eventEmitter.removeAllListeners();
    }
  }

  public void disconnect(EventEmitter<String> eventEmitter) {
    disconnect(eventEmitter, null);
  }

  // Disconnects every registered listener, or calls done immediately when
  // there are none.
  // NOTE(review): disconnect() may remove elements from eventEmitters while
  // this loop iterates it — potential ConcurrentModificationException; verify.
  public void disconnectAll(Event<String> done) {
    if (eventEmitters.size() > 0) {
      for (EventEmitter<String> eventEmitter : eventEmitters) {
        disconnect(eventEmitter, done);
      }
    } else {
      done.onEmit();
    }
  }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.yarn.api.records.impl.pb; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState; import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto; import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto; import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProtoOrBuilder; import org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto; import org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto; import com.google.protobuf.TextFormat; public class ApplicationAttemptReportPBImpl extends ApplicationAttemptReport { ApplicationAttemptReportProto proto = ApplicationAttemptReportProto .getDefaultInstance(); ApplicationAttemptReportProto.Builder builder = null; boolean viaProto = false; private ApplicationAttemptId ApplicationAttemptId; private ContainerId amContainerId; public ApplicationAttemptReportPBImpl() { builder = ApplicationAttemptReportProto.newBuilder(); } public 
ApplicationAttemptReportPBImpl(ApplicationAttemptReportProto proto) { this.proto = proto; viaProto = true; } @Override public ApplicationAttemptId getApplicationAttemptId() { if (this.ApplicationAttemptId != null) { return this.ApplicationAttemptId; } ApplicationAttemptReportProtoOrBuilder p = viaProto ? proto : builder; if (!p.hasApplicationAttemptId()) { return null; } this.ApplicationAttemptId = convertFromProtoFormat(p.getApplicationAttemptId()); return this.ApplicationAttemptId; } @Override public String getHost() { ApplicationAttemptReportProtoOrBuilder p = viaProto ? proto : builder; if (!p.hasHost()) { return null; } return p.getHost(); } @Override public int getRpcPort() { ApplicationAttemptReportProtoOrBuilder p = viaProto ? proto : builder; return p.getRpcPort(); } @Override public String getTrackingUrl() { ApplicationAttemptReportProtoOrBuilder p = viaProto ? proto : builder; if (!p.hasTrackingUrl()) { return null; } return p.getTrackingUrl(); } @Override public String getOriginalTrackingUrl() { ApplicationAttemptReportProtoOrBuilder p = viaProto ? proto : builder; if (!p.hasOriginalTrackingUrl()) { return null; } return p.getOriginalTrackingUrl(); } @Override public String getDiagnostics() { ApplicationAttemptReportProtoOrBuilder p = viaProto ? proto : builder; if (!p.hasDiagnostics()) { return null; } return p.getDiagnostics(); } @Override public YarnApplicationAttemptState getYarnApplicationAttemptState() { ApplicationAttemptReportProtoOrBuilder p = viaProto ? 
proto : builder; if (!p.hasYarnApplicationAttemptState()) { return null; } return convertFromProtoFormat(p.getYarnApplicationAttemptState()); } @Override public void setYarnApplicationAttemptState(YarnApplicationAttemptState state) { maybeInitBuilder(); if (state == null) { builder.clearYarnApplicationAttemptState(); return; } builder.setYarnApplicationAttemptState(convertToProtoFormat(state)); } private YarnApplicationAttemptStateProto convertToProtoFormat( YarnApplicationAttemptState state) { return ProtoUtils.convertToProtoFormat(state); } private YarnApplicationAttemptState convertFromProtoFormat( YarnApplicationAttemptStateProto yarnApplicationAttemptState) { return ProtoUtils.convertFromProtoFormat(yarnApplicationAttemptState); } @Override public void setApplicationAttemptId(ApplicationAttemptId applicationAttemptId) { maybeInitBuilder(); if (applicationAttemptId == null) builder.clearApplicationAttemptId(); this.ApplicationAttemptId = applicationAttemptId; } @Override public void setHost(String host) { maybeInitBuilder(); if (host == null) { builder.clearHost(); return; } builder.setHost(host); } @Override public void setRpcPort(int rpcPort) { maybeInitBuilder(); builder.setRpcPort(rpcPort); } @Override public void setTrackingUrl(String url) { maybeInitBuilder(); if (url == null) { builder.clearTrackingUrl(); return; } builder.setTrackingUrl(url); } @Override public void setOriginalTrackingUrl(String oUrl) { maybeInitBuilder(); if (oUrl == null) { builder.clearOriginalTrackingUrl(); return; } builder.setOriginalTrackingUrl(oUrl); } @Override public void setDiagnostics(String diagnostics) { maybeInitBuilder(); if (diagnostics == null) { builder.clearDiagnostics(); return; } builder.setDiagnostics(diagnostics); } public ApplicationAttemptReportProto getProto() { mergeLocalToProto(); proto = viaProto ? 
proto : builder.build(); viaProto = true; return proto; } @Override public int hashCode() { return getProto().hashCode(); } @Override public boolean equals(Object other) { if (other == null) return false; if (other.getClass().isAssignableFrom(this.getClass())) { return this.getProto().equals(this.getClass().cast(other).getProto()); } return false; } @Override public String toString() { return TextFormat.shortDebugString(getProto()); } private void maybeInitBuilder() { if (viaProto || builder == null) { builder = ApplicationAttemptReportProto.newBuilder(proto); } viaProto = false; } private void mergeLocalToProto() { if (viaProto) maybeInitBuilder(); mergeLocalToBuilder(); proto = builder.build(); viaProto = true; } private void mergeLocalToBuilder() { if (this.ApplicationAttemptId != null && !((ApplicationAttemptIdPBImpl) this.ApplicationAttemptId).getProto() .equals(builder.getApplicationAttemptId())) { builder .setApplicationAttemptId(convertToProtoFormat(this.ApplicationAttemptId)); } if (this.amContainerId != null && !((ContainerIdPBImpl) this.amContainerId).getProto().equals( builder.getAmContainerId())) { builder.setAmContainerId(convertToProtoFormat(this.amContainerId)); } } private ContainerIdProto convertToProtoFormat(ContainerId amContainerId) { return ((ContainerIdPBImpl) amContainerId).getProto(); } private ContainerIdPBImpl convertFromProtoFormat( ContainerIdProto amContainerId) { return new ContainerIdPBImpl(amContainerId); } private ApplicationAttemptIdProto convertToProtoFormat(ApplicationAttemptId t) { return ((ApplicationAttemptIdPBImpl) t).getProto(); } private ApplicationAttemptIdPBImpl convertFromProtoFormat( ApplicationAttemptIdProto applicationAttemptId) { return new ApplicationAttemptIdPBImpl(applicationAttemptId); } @Override public ContainerId getAMContainerId() { if (this.amContainerId != null) { return this.amContainerId; } ApplicationAttemptReportProtoOrBuilder p = viaProto ? 
proto : builder; if (!p.hasAmContainerId()) { return null; } this.amContainerId = convertFromProtoFormat(p.getAmContainerId()); return this.amContainerId; } @Override public void setAMContainerId(ContainerId amContainerId) { maybeInitBuilder(); if (amContainerId == null) builder.clearAmContainerId(); this.amContainerId = amContainerId; } }
package com.example.android.sunshine.app;

import android.content.ContentUris;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.net.Uri;
import android.os.AsyncTask;
import android.text.format.Time;
import android.util.Log;

import com.example.android.sunshine.app.data.WeatherContract;
import com.example.android.sunshine.app.data.WeatherContract.WeatherEntry;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.Vector;

/**
 * Background task that fetches the daily forecast from the OpenWeatherMap
 * "forecast/daily" endpoint, parses the JSON response, and stores the results
 * in the weather ContentProvider ({@link WeatherContract}).
 *
 * Input params: params[0] is the location query string (e.g. a postal code).
 * The task returns no result; its output is the rows it inserts.
 */
public class FetchWeatherTask extends AsyncTask<String, Void, Void> {

    private final String LOG_TAG = FetchWeatherTask.class.getSimpleName();

    // Context used to reach the ContentResolver; supplied by the caller.
    private final Context mContext;

    public FetchWeatherTask(Context context) {
        mContext = context;
    }

    // NOTE(review): this flag is never read anywhere in this class — candidate for removal.
    private boolean DEBUG = true;

    /**
     * Helper method to handle insertion of a new location in the weather database.
     * Looks the location up by its setting string first so repeated fetches reuse
     * the existing row instead of inserting duplicates.
     *
     * @param locationSetting The location string used to request updates from the server.
     * @param cityName A human-readable city name, e.g "Mountain View"
     * @param lat the latitude of the city
     * @param lon the longitude of the city
     * @return the row ID of the added (or pre-existing) location.
     */
    long addLocation(String locationSetting, String cityName, double lat, double lon) {
        long locationId;

        // First, check if the location with this city name exists in the db
        Cursor locationCursor = mContext.getContentResolver().query(
                WeatherContract.LocationEntry.CONTENT_URI,
                new String[]{WeatherContract.LocationEntry._ID},
                WeatherContract.LocationEntry.COLUMN_LOCATION_SETTING + " = ?",
                new String[]{locationSetting},
                null);

        if (locationCursor.moveToFirst()) {
            // Row already exists — just read its _ID.
            int locationIdIndex = locationCursor.getColumnIndex(WeatherContract.LocationEntry._ID);
            locationId = locationCursor.getLong(locationIdIndex);
        } else {
            // Now that the content provider is set up, inserting rows of data is pretty simple.
            // First create a ContentValues object to hold the data you want to insert.
            ContentValues locationValues = new ContentValues();

            // Then add the data, along with the corresponding name of the data type,
            // so the content provider knows what kind of value is being inserted.
            locationValues.put(WeatherContract.LocationEntry.COLUMN_CITY_NAME, cityName);
            locationValues.put(WeatherContract.LocationEntry.COLUMN_LOCATION_SETTING, locationSetting);
            locationValues.put(WeatherContract.LocationEntry.COLUMN_COORD_LAT, lat);
            locationValues.put(WeatherContract.LocationEntry.COLUMN_COORD_LONG, lon);

            // Finally, insert location data into the database.
            Uri insertedUri = mContext.getContentResolver().insert(
                    WeatherContract.LocationEntry.CONTENT_URI,
                    locationValues
            );

            // The resulting URI contains the ID for the row. Extract the locationId from the Uri.
            locationId = ContentUris.parseId(insertedUri);
        }

        // NOTE(review): the cursor is only closed on the normal path — if query() returns
        // null or insert() throws, it leaks. A try/finally would be safer; confirm before changing.
        locationCursor.close();
        return locationId;
    }

    /**
     * Take the String representing the complete forecast in JSON Format and
     * pull out the data we need to construct the Strings needed for the wireframes.
     *
     * Fortunately parsing is easy: constructor takes the JSON string and converts it
     * into an Object hierarchy for us.
     */
    private void getWeatherDataFromJson(String forecastJsonStr,
                                        String locationSetting)
            throws JSONException {

        // These are the names of the JSON objects that need to be extracted.

        // Location information
        final String OWM_CITY = "city";
        final String OWM_CITY_NAME = "name";
        final String OWM_COORD = "coord";

        // Location coordinate
        final String OWM_LATITUDE = "lat";
        final String OWM_LONGITUDE = "lon";

        // Weather information. Each day's forecast info is an element of the "list" array.
        final String OWM_LIST = "list";

        final String OWM_PRESSURE = "pressure";
        final String OWM_HUMIDITY = "humidity";
        final String OWM_WINDSPEED = "speed";
        final String OWM_WIND_DIRECTION = "deg";

        // All temperatures are children of the "temp" object.
        final String OWM_TEMPERATURE = "temp";
        final String OWM_MAX = "max";
        final String OWM_MIN = "min";

        final String OWM_WEATHER = "weather";
        final String OWM_DESCRIPTION = "main";
        final String OWM_WEATHER_ID = "id";

        try {
            JSONObject forecastJson = new JSONObject(forecastJsonStr);
            JSONArray weatherArray = forecastJson.getJSONArray(OWM_LIST);

            // City metadata is used to upsert the location row before the weather rows.
            JSONObject cityJson = forecastJson.getJSONObject(OWM_CITY);
            String cityName = cityJson.getString(OWM_CITY_NAME);

            JSONObject cityCoord = cityJson.getJSONObject(OWM_COORD);
            double cityLatitude = cityCoord.getDouble(OWM_LATITUDE);
            double cityLongitude = cityCoord.getDouble(OWM_LONGITUDE);

            long locationId = addLocation(locationSetting, cityName, cityLatitude, cityLongitude);

            // Insert the new weather information into the database
            Vector<ContentValues> cVVector = new Vector<ContentValues>(weatherArray.length());

            // OWM returns daily forecasts based upon the local time of the city that is being
            // asked for, which means that we need to know the GMT offset to translate this data
            // properly.

            // Since this data is also sent in-order and the first day is always the
            // current day, we're going to take advantage of that to get a nice
            // normalized UTC date for all of our weather.
            Time dayTime = new Time();
            dayTime.setToNow();

            // we start at the day returned by local time. Otherwise this is a mess.
            int julianStartDay = Time.getJulianDay(System.currentTimeMillis(), dayTime.gmtoff);

            // now we work exclusively in UTC
            dayTime = new Time();

            for(int i = 0; i < weatherArray.length(); i++) {
                // These are the values that will be collected.
                long dateTime;
                double pressure;
                int humidity;
                double windSpeed;
                double windDirection;

                double high;
                double low;

                String description;
                int weatherId;

                // Get the JSON object representing the day
                JSONObject dayForecast = weatherArray.getJSONObject(i);

                // Cheating to convert this to UTC time, which is what we want anyhow
                dateTime = dayTime.setJulianDay(julianStartDay+i);

                pressure = dayForecast.getDouble(OWM_PRESSURE);
                humidity = dayForecast.getInt(OWM_HUMIDITY);
                windSpeed = dayForecast.getDouble(OWM_WINDSPEED);
                windDirection = dayForecast.getDouble(OWM_WIND_DIRECTION);

                // Description is in a child array called "weather", which is 1 element long.
                // That element also contains a weather code.
                JSONObject weatherObject =
                        dayForecast.getJSONArray(OWM_WEATHER).getJSONObject(0);
                description = weatherObject.getString(OWM_DESCRIPTION);
                weatherId = weatherObject.getInt(OWM_WEATHER_ID);

                // Temperatures are in a child object called "temp". Try not to name variables
                // "temp" when working with temperature. It confuses everybody.
                JSONObject temperatureObject = dayForecast.getJSONObject(OWM_TEMPERATURE);
                high = temperatureObject.getDouble(OWM_MAX);
                low = temperatureObject.getDouble(OWM_MIN);

                ContentValues weatherValues = new ContentValues();

                weatherValues.put(WeatherEntry.COLUMN_LOC_KEY, locationId);
                weatherValues.put(WeatherEntry.COLUMN_DATE, dateTime);
                weatherValues.put(WeatherEntry.COLUMN_HUMIDITY, humidity);
                weatherValues.put(WeatherEntry.COLUMN_PRESSURE, pressure);
                weatherValues.put(WeatherEntry.COLUMN_WIND_SPEED, windSpeed);
                weatherValues.put(WeatherEntry.COLUMN_DEGREES, windDirection);
                weatherValues.put(WeatherEntry.COLUMN_MAX_TEMP, high);
                weatherValues.put(WeatherEntry.COLUMN_MIN_TEMP, low);
                weatherValues.put(WeatherEntry.COLUMN_SHORT_DESC, description);
                weatherValues.put(WeatherEntry.COLUMN_WEATHER_ID, weatherId);

                cVVector.add(weatherValues);
            }

            int inserted = 0;
            // add to database in one bulk operation rather than row-by-row
            if ( cVVector.size() > 0 ) {
                ContentValues[] cvArray = new ContentValues[cVVector.size()];
                cVVector.toArray(cvArray);
                inserted = mContext.getContentResolver().bulkInsert(WeatherEntry.CONTENT_URI, cvArray);
            }

            Log.d(LOG_TAG, "FetchWeatherTask Complete. " + inserted + " Inserted");

        } catch (JSONException e) {
            Log.e(LOG_TAG, e.getMessage(), e);
            e.printStackTrace();
        }
    }

    /**
     * Builds the forecast request URL, performs the HTTP GET, and hands the raw
     * JSON body to {@link #getWeatherDataFromJson}. Always returns null; all
     * useful output happens via the ContentProvider.
     */
    @Override
    protected Void doInBackground(String... params) {

        // If there's no zip code, there's nothing to look up. Verify size of params.
        if (params.length == 0) {
            return null;
        }
        String locationQuery = params[0];

        // These two need to be declared outside the try/catch
        // so that they can be closed in the finally block.
        HttpURLConnection urlConnection = null;
        BufferedReader reader = null;

        // Will contain the raw JSON response as a string.
        String forecastJsonStr = null;

        String format = "json";
        String units = "metric";
        int numDays = 14;

        try {
            // Construct the URL for the OpenWeatherMap query
            // Possible parameters are avaiable at OWM's forecast API page, at
            // http://openweathermap.org/API#forecast
            final String FORECAST_BASE_URL =
                    "http://api.openweathermap.org/data/2.5/forecast/daily?";
            final String QUERY_PARAM = "q";
            final String FORMAT_PARAM = "mode";
            final String UNITS_PARAM = "units";
            final String DAYS_PARAM = "cnt";
            final String APPID_PARAM = "APPID";

            // NOTE(review): the APPID parameter is commented out below; OWM normally
            // requires an API key, so this request may fail without it — confirm.
            Uri builtUri = Uri.parse(FORECAST_BASE_URL).buildUpon()
                    .appendQueryParameter(QUERY_PARAM, params[0])
                    .appendQueryParameter(FORMAT_PARAM, format)
                    .appendQueryParameter(UNITS_PARAM, units)
                    .appendQueryParameter(DAYS_PARAM, Integer.toString(numDays))
                    //.appendQueryParameter(APPID_PARAM, BuildConfig.OPEN_WEATHER_MAP_API_KEY)
                    .build();

            URL url = new URL(builtUri.toString());

            // Create the request to OpenWeatherMap, and open the connection
            urlConnection = (HttpURLConnection) url.openConnection();
            urlConnection.setRequestMethod("GET");
            urlConnection.connect();

            // Read the input stream into a String
            InputStream inputStream = urlConnection.getInputStream();
            StringBuffer buffer = new StringBuffer();
            if (inputStream == null) {
                // Nothing to do.
                return null;
            }
            reader = new BufferedReader(new InputStreamReader(inputStream));

            String line;
            while ((line = reader.readLine()) != null) {
                // Since it's JSON, adding a newline isn't necessary (it won't affect parsing)
                // But it does make debugging a *lot* easier if you print out the completed
                // buffer for debugging.
                buffer.append(line + "\n");
            }

            if (buffer.length() == 0) {
                // Stream was empty.  No point in parsing.
                return null;
            }
            forecastJsonStr = buffer.toString();
            getWeatherDataFromJson(forecastJsonStr, locationQuery);
        } catch (IOException e) {
            Log.e(LOG_TAG, "Error ", e);
            // If the code didn't successfully get the weather data, there's no point in attempting
            // to parse it.
        } catch (JSONException e) {
            Log.e(LOG_TAG, e.getMessage(), e);
            e.printStackTrace();
        } finally {
            // Release network resources regardless of success or failure.
            if (urlConnection != null) {
                urlConnection.disconnect();
            }
            if (reader != null) {
                try {
                    reader.close();
                } catch (final IOException e) {
                    Log.e(LOG_TAG, "Error closing stream", e);
                }
            }
        }
        // This will only happen if there was an error getting or parsing the forecast.
        return null;
    }
}
/* * Copyright (c) Facebook, Inc. and its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.buck.android.relinker; import com.facebook.buck.android.toolchain.ndk.TargetCpuType; import com.facebook.buck.core.build.buildable.context.BuildableContext; import com.facebook.buck.core.build.context.BuildContext; import com.facebook.buck.core.build.execution.context.ExecutionContext; import com.facebook.buck.core.cell.CellPathResolver; import com.facebook.buck.core.model.BuildTarget; import com.facebook.buck.core.model.InternalFlavor; import com.facebook.buck.core.model.impl.BuildTargetPaths; import com.facebook.buck.core.rulekey.AddToRuleKey; import com.facebook.buck.core.rules.BuildRule; import com.facebook.buck.core.rules.BuildRuleResolver; import com.facebook.buck.core.rules.SourcePathRuleFinder; import com.facebook.buck.core.rules.common.BuildableSupport; import com.facebook.buck.core.rules.common.BuildableSupport.DepsSupplier; import com.facebook.buck.core.rules.impl.AbstractBuildRule; import com.facebook.buck.core.rules.schedule.OverrideScheduleRule; import com.facebook.buck.core.rules.schedule.RuleScheduleInfo; import com.facebook.buck.core.sourcepath.ExplicitBuildTargetSourcePath; import com.facebook.buck.core.sourcepath.SourcePath; import com.facebook.buck.core.sourcepath.resolver.SourcePathResolverAdapter; import com.facebook.buck.core.toolchain.tool.Tool; import com.facebook.buck.cxx.CxxLink; import com.facebook.buck.cxx.config.CxxBuckConfig; 
import com.facebook.buck.cxx.toolchain.LinkerMapMode; import com.facebook.buck.cxx.toolchain.linker.Linker; import com.facebook.buck.io.BuildCellRelativePath; import com.facebook.buck.io.file.MorePaths; import com.facebook.buck.io.filesystem.ProjectFilesystem; import com.facebook.buck.rules.args.Arg; import com.facebook.buck.rules.args.StringArg; import com.facebook.buck.step.AbstractExecutionStep; import com.facebook.buck.step.Step; import com.facebook.buck.step.StepExecutionResult; import com.facebook.buck.step.StepExecutionResults; import com.facebook.buck.step.fs.MakeCleanDirectoryStep; import com.facebook.buck.util.ProcessExecutor; import com.google.common.base.Charsets; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSortedSet; import com.google.common.collect.Sets; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardOpenOption; import java.util.List; import java.util.Optional; import java.util.Set; import java.util.SortedSet; import java.util.regex.Pattern; import javax.annotation.Nullable; /** Relinks a native library. See {@link NativeRelinker}. 
 */
class RelinkerRule extends AbstractBuildRule implements OverrideScheduleRule {

  // Outputs of other relinker rules listing the symbols this library must keep providing.
  @AddToRuleKey private final ImmutableSortedSet<SourcePath> symbolsNeededPaths;
  @AddToRuleKey private final TargetCpuType cpuType;
  @AddToRuleKey private final SourcePath baseLibSourcePath;
  @AddToRuleKey private final Tool objdump;
  @AddToRuleKey private final ImmutableList<Arg> linkerArgs;
  // Null linker means "don't relink": the base library is copied through unchanged.
  @AddToRuleKey @Nullable private final Linker linker;

  @AddToRuleKey(stringify = true)
  private final ImmutableList<Pattern> symbolWhitelist;

  private final CxxBuckConfig cxxBuckConfig;
  private final SourcePathResolverAdapter pathResolver;
  private final CellPathResolver cellPathResolver;
  private SourcePathRuleFinder ruleFinder;
  private final DepsSupplier depsSupplier;

  public RelinkerRule(
      BuildTarget buildTarget,
      ProjectFilesystem projectFilesystem,
      SourcePathResolverAdapter resolver,
      CellPathResolver cellPathResolver,
      SourcePathRuleFinder ruleFinder,
      ImmutableSortedSet<SourcePath> symbolsNeededPaths,
      TargetCpuType cpuType,
      Tool objdump,
      CxxBuckConfig cxxBuckConfig,
      SourcePath baseLibSourcePath,
      @Nullable Linker linker,
      ImmutableList<Arg> linkerArgs,
      ImmutableList<Pattern> symbolWhitelist) {
    super(buildTarget, projectFilesystem);
    this.pathResolver = resolver;
    this.cellPathResolver = cellPathResolver;
    this.cpuType = cpuType;
    this.objdump = objdump;
    this.cxxBuckConfig = cxxBuckConfig;
    this.linkerArgs = linkerArgs;
    this.ruleFinder = ruleFinder;
    this.symbolsNeededPaths = symbolsNeededPaths;
    this.baseLibSourcePath = baseLibSourcePath;
    this.linker = linker;
    this.symbolWhitelist = symbolWhitelist;
    this.depsSupplier = BuildableSupport.buildDepsSupplier(this, ruleFinder);
  }

  @Override
  public SortedSet<BuildRule> getBuildDeps() {
    return depsSupplier.get();
  }

  /**
   * Produces a linker version-script body that keeps symbols which are (a) both needed and
   * provided, (b) contain "JNI_OnLoad" or "Java_" (JNI entry points), or (c) match a whitelist
   * pattern; everything else becomes local.
   */
  private static String getVersionScript(
      Set<String> needed, Set<String> provided, List<Pattern> whitelist) {
    Set<String> keep =
        new ImmutableSet.Builder<String>()
            .addAll(Sets.intersection(needed, provided))
            .addAll(
                Sets.filter(
                    provided,
                    s -> {
                      if (s.contains("JNI_OnLoad")) {
                        return true;
                      }
                      if (s.contains("Java_")) {
                        return true;
                      }
                      for (Pattern pattern : whitelist) {
                        if (pattern.matcher(s).matches()) {
                          return true;
                        }
                      }
                      return false;
                    }))
            .build();
    StringBuilder res = new StringBuilder("{\n");
    if (!keep.isEmpty()) {
      res.append("global:\n");
    }
    for (String s : keep) {
      res.append("  ").append(s).append(";\n");
    }
    res.append("local: *;\n};\n");
    return res.toString();
  }

  /** Source path of the relinked (or copied) library this rule produces. */
  public SourcePath getLibFileSourcePath() {
    return ExplicitBuildTargetSourcePath.of(getBuildTarget(), getLibFilePath());
  }

  /** Source path of the ".symbols" file listing symbols still undefined after relinking. */
  public SourcePath getSymbolsNeededPath() {
    return ExplicitBuildTargetSourcePath.of(getBuildTarget(), getSymbolsNeededOutPath());
  }

  @Override
  public ImmutableList<Step> getBuildSteps(
      BuildContext context, BuildableContext buildableContext) {

    ImmutableList.Builder<Step> relinkerSteps = ImmutableList.builder();
    if (linker != null) {
      // Append our generated version script to the original link arguments.
      ImmutableList<Arg> args =
          ImmutableList.<Arg>builder()
              .addAll(linkerArgs)
              .add(StringArg.of("-Wl,--version-script=" + getRelativeVersionFilePath()))
              .build();
      relinkerSteps.addAll(
          new CxxLink(
                  getBuildTarget()
                      .withAppendedFlavors(InternalFlavor.of("cxx-link"))
                      .withoutFlavors(LinkerMapMode.NO_LINKER_MAP.getFlavor()),
                  getProjectFilesystem(),
                  ruleFinder,
                  cellPathResolver,
                  linker,
                  getLibFilePath(),
                  ImmutableMap.of(),
                  args,
                  Optional.empty(),
                  cxxBuckConfig.getLinkScheduleInfo(),
                  cxxBuckConfig.shouldCacheLinks(),
                  /* thinLto */ false,
                  /* fatLto */ false)
              .getBuildSteps(context, buildableContext));
      buildableContext.recordArtifact(getRelativeVersionFilePath());
    }

    buildableContext.recordArtifact(getSymbolsNeededOutPath());

    return new ImmutableList.Builder<Step>()
        .addAll(
            MakeCleanDirectoryStep.of(
                BuildCellRelativePath.fromCellRelativePath(
                    context.getBuildCellRootPath(), getProjectFilesystem(), getScratchDirPath())))
        .add(
            new AbstractExecutionStep("xdso-dce relinker") {
              // NOTE(review): this parameter shadows the outer BuildContext `context`.
              @Override
              public StepExecutionResult execute(ExecutionContext context)
                  throws IOException, InterruptedException {
                ImmutableSet<String> symbolsNeeded = readSymbolsNeeded();
                if (linker == null) {
                  // No linker configured: pass the library through untouched.
                  getProjectFilesystem().copyFile(getBaseLibPath(), getLibFilePath());
                  buildableContext.recordArtifact(getLibFilePath());
                } else {
                  // Write the version script, then run the deferred link steps built above.
                  writeVersionScript(context.getProcessExecutor(), symbolsNeeded);
                  for (Step s : relinkerSteps.build()) {
                    StepExecutionResult executionResult = s.execute(context);
                    if (!executionResult.isSuccess()) {
                      return StepExecutionResults.ERROR;
                    }
                  }
                }
                // Record which symbols remain undefined so downstream relinker rules can use them.
                writeSymbols(
                    getSymbolsNeededOutPath(),
                    Sets.union(
                        symbolsNeeded,
                        getSymbols(context.getProcessExecutor(), getLibFilePath()).undefined));
                return StepExecutionResults.SUCCESS;
              }
            })
        .build();
  }

  @Override
  public SourcePath getSourcePathToOutput() {
    return ExplicitBuildTargetSourcePath.of(getBuildTarget(), getLibFilePath());
  }

  @Override
  public RuleScheduleInfo getRuleScheduleInfo() {
    return cxxBuckConfig.getLinkScheduleInfo().orElse(RuleScheduleInfo.DEFAULT);
  }

  private Path getScratchPath() {
    // ld doesn't seem to like commas in the version script path so we construct one without commas.
    Path path = BuildTargetPaths.getScratchPath(getProjectFilesystem(), getBuildTarget(), "%s");
    String dirname = path.getFileName().toString().replace(',', '.');
    return path.getParent().resolve(dirname);
  }

  private Path getBaseLibPath() {
    return pathResolver.getAbsolutePath(baseLibSourcePath);
  }

  private Path getScratchDirPath() {
    return getScratchPath().resolve(cpuType.toString());
  }

  private Path getScratchFilePath(String suffix) {
    return getScratchDirPath()
        .resolve(MorePaths.getNameWithoutExtension(getBaseLibPath()) + suffix);
  }

  private Path getLibFilePath() {
    return getScratchDirPath().resolve(getBaseLibPath().getFileName());
  }

  // Runs objdump on the given library to extract its dynamic symbol table.
  private Symbols getSymbols(ProcessExecutor executor, Path path)
      throws IOException, InterruptedException {
    return Symbols.getDynamicSymbols(executor, objdump, pathResolver, absolutify(path));
  }

  private Path getRelativeVersionFilePath() {
    return getScratchFilePath("__version.exp");
  }

  // Computes defined symbols of the base lib and writes the version script for the relink.
  private void writeVersionScript(ProcessExecutor executor, ImmutableSet<String> symbolsNeeded)
      throws IOException, InterruptedException {
    Symbols sym = getSymbols(executor, getBaseLibPath());
    Set<String> defined = Sets.difference(sym.all, sym.undefined);
    String versionScript = getVersionScript(symbolsNeeded, defined, symbolWhitelist);

    Files.write(
        absolutify(getRelativeVersionFilePath()),
        versionScript.getBytes(Charsets.UTF_8),
        StandardOpenOption.CREATE);
  }

  private Path absolutify(Path p) {
    return getProjectFilesystem().resolve(p);
  }

  private Path getSymbolsNeededOutPath() {
    return getScratchFilePath(".symbols");
  }

  private void writeSymbols(Path dest, Set<String> symbols) throws IOException {
    Files.write(
        absolutify(dest),
        ImmutableSortedSet.copyOf(symbols),
        Charsets.UTF_8,
        StandardOpenOption.CREATE);
  }

  // Union of all symbol lists produced by the upstream relinker rules we depend on.
  private ImmutableSet<String> readSymbolsNeeded() throws IOException {
    ImmutableSet.Builder<String> symbolsNeeded = ImmutableSet.builder();
    for (SourcePath source : symbolsNeededPaths) {
      symbolsNeeded.addAll(
          Files.readAllLines(pathResolver.getAbsolutePath(source), Charsets.UTF_8));
    }
    return symbolsNeeded.build();
  }

  @Override
  public void updateBuildRuleResolver(BuildRuleResolver ruleResolver) {
    // Swap in the new resolver and refresh the cached deps supplier to match.
    this.ruleFinder = ruleResolver;
    this.depsSupplier.updateRuleFinder(ruleFinder);
  }
}
/**
 * Copyright 2010 - 2015 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package jetbrains.exodus.entitystore.iterate;

import jetbrains.exodus.entitystore.*;
import jetbrains.exodus.entitystore.util.EntityIdSet;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

/**
 * Tests that EntityIdArrayIterableWrapper behaves like the iterable it wraps:
 * same count, same forward/reverse iteration order, same indexOf/contains and
 * getLast results. Ids are supplied as flat (typeId, localId) pairs; a negative
 * typeId encodes a null entity id.
 */
public class EntityIdArrayIterableWrapperTests extends EntityStoreTestBase {

    public void testEmpty() {
        examine();
    }

    public void testNull() {
        examine(-1, 0);
    }

    public void testNullSorted() {
        examine(-1, 0, -1, 1);
    }

    public void testNullCompact() {
        examine(-1, 1, 2, 3);
    }

    public void testNullDiverse() {
        examineUnsorted(-1, 1, 2, 3, 2, 2);
    }

    public void testNullCompact2() {
        examineUnsorted(2, 3, -1, 1);
    }

    public void testNullDiverse2() {
        examineUnsorted(2, 3, 2, 2, -1, 1);
    }

    public void testSingle() {
        examine(0, 1, 0, 2);
    }

    public void testSkipCompact() {
        TestEntityIterableImpl t = t(0, 6, 0, 7, 0, 8, 0, 9, 1, 6, 1, 7, 1, 8, 1, 9);
        EntityIdArrayIterableWrapper w = w(t);
        assertEquals(true, w.isSortedById());
        examineSkip(t, w, 1, 3);
        examineSkip(t, w, 2, 4);
        examineSkip(t, w, 3, 6);
        examineSkip(t, w, 5, 8);
    }

    public void testSkipCompact2() {
        TestEntityIterableImpl t = t(0, 6, 0, 7, 0, 8, 0, 9, 1, 6, 1, 7, 1, 8, 1, 9, 2, 1);
        EntityIdArrayIterableWrapper w = w(t);
        assertEquals(true, w.isSortedById());
        examineSkip(t, w, 1, 3);
        examineSkip(t, w, 2, 4);
        examineSkip(t, w, 3, 6);
        examineSkip(t, w, 5, 8);
    }

    public void testSingleUnsorted() {
        examineUnsorted(0, 6, 0, 7, 0, 8, 0, 5, 0, 9);
    }

    public void testDiverseUnsorted() {
        examineUnsorted(0, 5, 0, 6, 1, 6, 1, 5, 1, 7, 1, 8);
    }

    public void testDiverseSorted() {
        examine(0, 5, 0, 6, 1, 5, 1, 6, 1, 7, 1, 8);
    }

    // Wraps a sorted source and verifies the wrapper matches it; also re-checks with
    // the source merely claiming to be unsorted (the wrapper should still detect order).
    void examine(final long... ids) {
        TestEntityIterableImpl t = t(ids);
        EntityIdArrayIterableWrapper w = w(t);
        assertTrue(w.isSortedById());
        assertIterablesMatch(t, w);
        t.isSortedById = false;
        w = w(t);
        assertTrue(w.isSortedById());
        assertIterablesMatch(t, w);
    }

    // Wraps a genuinely unsorted source; the wrapper must report unsorted and still match.
    void examineUnsorted(final long... ids) {
        TestEntityIterableImpl t = t(false, ids);
        EntityIdArrayIterableWrapper w = w(t);
        assertEquals(false, w.isSortedById());
        assertIterablesMatch(t, w);
    }

    // Full equivalence check: count, both iteration directions, indexOf, toSet, getLast.
    void assertIterablesMatch(EntityIterableBase expected, EntityIdArrayIterableWrapper actual) {
        assertEquals(expected.count(), actual.count());
        assertIteratorsMatch(expected.iterator(), actual.iterator());
        final PersistentStoreTransaction txn = getStoreTransaction();
        assertIteratorsMatch(expected.getReverseIteratorImpl(txn), actual.getReverseIteratorImpl(txn));
        int index = 0;
        for (Entity e : expected) {
            if (e != null) { //TODO: allow indexOf(null)
                assertEquals(index, actual.indexOf(e));
            }
            ++index;
        }
        // An id that never appears in the fixtures must not be found.
        assertEquals(-1, actual.indexOfImpl(new PersistentEntityId(239, 1)));
        EntityIdSet idSet = actual.toSet(txn);
        for (Entity e : expected) {
            assertTrue(idSet.contains(e == null ? null : e.getId()));
        }
        if (expected.isEmpty()) {
            assertEquals(null, actual.getIteratorImpl(txn).getLast());
            assertEquals(null, actual.getReverseIteratorImpl(txn).getLast());
        } else {
            // getLast() of the forward iterator equals the first element of the reverse one,
            // and vice versa.
            final Entity last = expected.getReverseIteratorImpl(txn).next();
            assertEquals(last == null ? null : last.getId(), actual.getIteratorImpl(txn).getLast());
            final Entity first = expected.getIteratorImpl(txn).next();
            assertEquals(first == null ? null : first.getId(), actual.getReverseIteratorImpl(txn).getLast());
        }
    }

    // Verifies skip() parity in both directions over the [from, to) window.
    void examineSkip(TestEntityIterableImpl t, EntityIdArrayIterableWrapper w, int from, int to) {
        assertIteratorsMatch(t.iterator(), w.iterator(), from, to);
        final PersistentStoreTransaction txn = getStoreTransaction();
        assertIteratorsMatch(t.getReverseIteratorImpl(txn), w.getReverseIteratorImpl(txn), from, to);
    }

    static void assertIteratorsMatch(EntityIterator expected, EntityIterator actual) {
        while (expected.hasNext()) {
            assertTrue(actual.hasNext());
            assertEquals(expected.next(), actual.next());
        }
        assertFalse(actual.hasNext());
    }

    // Like the overload above but calls skip(to - from) on both iterators once `from`
    // elements have been consumed, asserting both skips report the same result.
    static void assertIteratorsMatch(EntityIterator expected, EntityIterator actual, int from, int to) {
        int i = 0;
        while (expected.hasNext()) {
            assertTrue(actual.hasNext());
            assertEquals(expected.next(), actual.next());
            if (++i == from) {
                final int length = to - from;
                assertEquals(expected.skip(length), actual.skip(length));
            }
        }
        assertFalse(actual.hasNext());
    }

    EntityIdArrayIterableWrapper w(TestEntityIterableImpl t) {
        return new EntityIdArrayIterableWrapper(getStoreTransaction(), getEntityStore(), t);
    }

    TestEntityIterableImpl t(final boolean isSortedById, final long... ids) {
        return new TestEntityIterableImpl(isSortedById, pack(ids));
    }

    TestEntityIterableImpl t(final long... ids) {
        return new TestEntityIterableImpl(true, pack(ids));
    }

    // Converts flat (typeId, localId) pairs into EntityIds; a negative typeId yields null.
    static EntityId[] pack(final long... ids) {
        final int length = ids.length;
        final EntityId[] result = new EntityId[length / 2];
        int i = 0;
        while (i < length) {
            final int next = i + 1;
            final int typeId = (int) ids[i];
            result[i / 2] = typeId < 0 ? null : new PersistentEntityId(typeId, ids[next]);
            i = next + 1;
        }
        return result;
    }

    /**
     * Minimal in-memory EntityIterableBase backed by an EntityId array; serves as the
     * reference implementation the wrapper is compared against.
     */
    @SuppressWarnings({"AssignmentToCollectionOrArrayFieldFromParameter"})
    class TestEntityIterableImpl extends EntityIterableBase {

        // Deliberately mutable so tests can lie about sortedness (see examine()).
        boolean isSortedById;
        private final EntityId[] data;

        TestEntityIterableImpl(final boolean isSortedById, final EntityId[] data) {
            super(null);
            this.isSortedById = isSortedById;
            this.data = data;
        }

        @NotNull
        @Override
        public PersistentEntityStoreImpl getStore() {
            return getEntityStore();
        }

        @Override
        public boolean isSortedById() {
            return isSortedById;
        }

        @Override
        protected long countImpl(@NotNull final PersistentStoreTransaction txn) {
            return data.length;
        }

        @Override
        protected int indexOfImpl(@NotNull EntityId entityId) {
            for (int i = 0; i < data.length; ++i) {
                if (entityId.equals(data[i])) {
                    return i;
                }
            }
            return -1;
        }

        @Override
        public EntityIterator iterator() {
            return getIteratorImpl(getTransaction());
        }

        @NotNull
        @Override
        public EntityIterator getIteratorImpl(@NotNull final PersistentStoreTransaction txn) {
            return new NonDisposableEntityIterator(this) {

                int i = 0;

                @Override
                public boolean skip(int number) {
                    i += number;
                    return hasNext();
                }

                @Override
                protected boolean hasNextImpl() {
                    return i < data.length;
                }

                @Nullable
                @Override
                protected EntityId nextIdImpl() {
                    // Return a fresh id instance so equality (not identity) is exercised.
                    final EntityId sourceId = data[i++];
                    return sourceId == null ? null : new PersistentEntityId(sourceId);
                }
            };
        }

        @NotNull
        @Override
        public EntityIterator getReverseIteratorImpl(@NotNull final PersistentStoreTransaction txn) {
            return new NonDisposableEntityIterator(this) {

                int i = data.length;

                @Override
                public boolean skip(int number) {
                    i -= number;
                    return hasNext();
                }

                @Override
                protected boolean hasNextImpl() {
                    return i > 0;
                }

                @Nullable
                @Override
                protected EntityId nextIdImpl() {
                    final EntityId sourceId = data[--i];
                    return sourceId == null ? null : new PersistentEntityId(sourceId);
                }
            };
        }

        @Override
        public boolean isEmpty() {
            return countImpl(getTransaction()) == 0;
        }

        @Override
        public long size() {
            return countImpl(getTransaction());
        }

        @Override
        public long count() {
            return countImpl(getTransaction());
        }

        @Override
        public long getRoughCount() {
            return countImpl(getTransaction());
        }

        @Override
        public int indexOf(@NotNull Entity entity) {
            return indexOfImpl(entity.getId());
        }

        @Override
        public boolean contains(@NotNull Entity entity) {
            return indexOf(entity) >= 0;
        }

        @Override
        @NotNull
        @SuppressWarnings("EmptyClass")
        public EntityIterableHandle getHandleImpl() {
            // Handle is irrelevant for these tests; a constant no-op hash suffices.
            return new ConstantEntityIterableHandle(getEntityStore(), EntityIterableType.DISTINCT) {
                @Override
                public void hashCode(@NotNull final EntityIterableHandleHash hash) {
                }
            };
        }

        @Override
        public boolean isSortResult() {
            return false;
        }

        @Override
        public boolean canBeCached() {
            return false;
        }

        @Override
        protected CachedWrapperIterable createCachedWrapper(@NotNull final PersistentStoreTransaction txn) {
            return new EntityIdArrayIterableWrapper(txn, getStore(), this);
        }

        // all following unsupported

        @Override
        @NotNull
        public EntityIterable intersect(@NotNull EntityIterable right) {
            throw new UnsupportedOperationException();
        }

        @Override
        @NotNull
        public EntityIterable intersectSavingOrder(@NotNull EntityIterable right) {
            throw new UnsupportedOperationException();
        }

        @Override
        @NotNull
        public EntityIterable union(@NotNull EntityIterable right) {
            throw new UnsupportedOperationException();
        }

        @Override
        @NotNull
        public EntityIterable minus(@NotNull EntityIterable right) {
            throw new UnsupportedOperationException();
        }

        @Override
        @NotNull
        public EntityIterable concat(@NotNull EntityIterable right) {
            throw new UnsupportedOperationException();
        }

        @NotNull
        @Override
        public EntityIterable take(int number) {
            throw new UnsupportedOperationException();
        }

        @NotNull
        @Override
        public EntityIterable asSortResult() {
            throw new UnsupportedOperationException();
        }
    }
}
/*
 * ARX: Powerful Data Anonymization
 * Copyright 2012 - 2016 Fabian Prasser, Florian Kohlmayer and contributors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.deidentifier.arx.framework.data;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;

import org.deidentifier.arx.DataDefinition;
import org.deidentifier.arx.DataGeneralizationScheme;
import org.deidentifier.arx.RowSet;
import org.deidentifier.arx.criteria.EDDifferentialPrivacy;
import org.deidentifier.arx.criteria.HierarchicalDistanceTCloseness;
import org.deidentifier.arx.criteria.PrivacyCriterion;
import org.deidentifier.arx.framework.check.distribution.DistributionAggregateFunction;
import org.deidentifier.arx.framework.check.distribution.DistributionAggregateFunction.DistributionAggregateFunctionGeneralization;

import com.carrotsearch.hppc.IntObjectOpenHashMap;
import com.carrotsearch.hppc.IntOpenHashSet;

/**
 * Holds all data needed for the anonymization process.
 *
 * The input dataset is partitioned into three column groups, each with its own
 * {@link Data} object and {@link Dictionary}:
 * <ul>
 * <li>generalized quasi-identifiers ({@link #dataGeneralized}, suffix "GH")</li>
 * <li>analyzed attributes, i.e. sensitive attributes followed by microaggregated
 *     quasi-identifiers ({@link #dataAnalyzed}, suffix "DI")</li>
 * <li>insensitive/static attributes ({@link #dataStatic}, suffix "IS")</li>
 * </ul>
 * Identifying attributes are dropped entirely.
 *
 * @author Fabian Prasser
 * @author Florian Kohlmayer
 */
public class DataManager {

    /**
     * Internal representation of attribute types. Quasi-identifiers are split
     * into the ones to which generalization is applied and the ones to which
     * microaggregation is applied
     *
     * @author Florian Kohlmayer
     * @author Fabian Prasser
     */
    public static class AttributeTypeInternal {
        public static final int IDENTIFYING                       = 3;
        public static final int INSENSITIVE                       = 2;
        public static final int QUASI_IDENTIFYING_GENERALIZED     = 0;
        public static final int QUASI_IDENTIFYING_MICROAGGREGATED = 4;
        public static final int SENSITIVE                         = 1;
    }

    /** The data. Columns: sensitive attributes first, then microaggregated QIs. */
    private final Data dataAnalyzed;

    /** The data which is generalized */
    private final Data dataGeneralized;

    /** The data which is insensitive */
    private final Data dataStatic;

    /** The original input header. */
    private final String[] header;

    /** The generalization hierarchiesQI. Indexed by column position in dataGeneralized. */
    private final GeneralizationHierarchy[] hierarchiesGeneralized;

    /** The hierarchy heights for each QI. */
    private final int[] hierarchiesHeights;

    /** The sensitive attributes. Maps attribute name to its hierarchy (t-closeness only). */
    private final Map<String, GeneralizationHierarchy> hierarchiesSensitive;

    /** The indexes of sensitive attributes. */
    private final Map<String, Integer> indexesSensitive;

    /** The maximum level for each QI. */
    private final int[] maxLevels;

    /** The microaggregation functions. */
    private final DistributionAggregateFunction[] microaggregationFunctions;

    /** Header for microaggregated attributes */
    private final String[] microaggregationHeader;

    /** Map for microaggregated attributes */
    private final int[] microaggregationMap;

    /** The number of microaggregation attributes in the dataDI */
    private final int microaggregationNumAttributes;

    /** The start index of the microaggregation attributes in the dataDI */
    private final int microaggregationStartIndex;

    /** The minimum level for each QI. */
    private final int[] minLevels;

    /** The research subset, if any. */
    private RowSet subset     = null;

    /** The size of the research subset. */
    private int    subsetSize = 0;

    /**
     * Creates a new data manager from pre-encoded data.
     *
     * @param header the original column names, in input order
     * @param data the pre-encoded dataset; data[row][column]
     * @param dictionary value dictionary for the input columns
     * @param definition attribute types, hierarchies and generalization limits
     * @param criteria the privacy criteria; used for subsets, (e,d)-DP levels
     *            and t-closeness hierarchies
     * @param functions microaggregation functions, keyed by attribute name
     */
    public DataManager(final String[] header,
                       final int[][] data,
                       final Dictionary dictionary,
                       final DataDefinition definition,
                       final Set<PrivacyCriterion> criteria,
                       final Map<String, DistributionAggregateFunction> functions) {

        // Store columns for reordering the output
        this.header = header;

        // NOTE(review): "Gemeralized" is a long-standing typo for "Generalized" (local variable only)
        Set<String> attributesGemeralized = definition.getQuasiIdentifiersWithGeneralization();
        Set<String> attributesSensitive = definition.getSensitiveAttributes();
        Set<String> attributesMicroaggregated = definition.getQuasiIdentifiersWithMicroaggregation();
        Set<String> attributesInsensitive = definition.getInsensitiveAttributes();

        // Init dictionary
        // dataAnalyzed holds sensitive and microaggregated attributes side by side
        final Dictionary dictionaryGeneralized = new Dictionary(attributesGemeralized.size());
        final Dictionary dictionaryAnalyzed = new Dictionary(attributesSensitive.size() + attributesMicroaggregated.size());
        final Dictionary dictionaryStatic = new Dictionary(attributesInsensitive.size());

        // Init maps for reordering the output
        final int[] mapGeneralized = new int[dictionaryGeneralized.getNumDimensions()];
        final int[] mapAnalyzed = new int[dictionaryAnalyzed.getNumDimensions()];
        final int[] mapStatic = new int[dictionaryStatic.getNumDimensions()];
        this.microaggregationMap = new int[attributesMicroaggregated.size()];

        // Indexes
        // Microaggregated attributes are placed behind the sensitive ones in dataAnalyzed,
        // hence indexMicroaggregated starts at microaggregationStartIndex
        this.microaggregationStartIndex = attributesSensitive.size();
        this.microaggregationNumAttributes = attributesMicroaggregated.size();
        int indexStatic = 0;
        int indexGeneralized = 0;
        int indexAnalyzed = 0;
        int indexSensitive = 0;
        int indexMicroaggregated = this.microaggregationStartIndex;
        int counter = 0;

        // A map for column indices. map[i*2]=attribute type, map[i*2+1]=index position.
        final int[] map = new int[header.length * 2];
        final String[] headerGH = new String[dictionaryGeneralized.getNumDimensions()];
        final String[] headerDI = new String[dictionaryAnalyzed.getNumDimensions()];
        final String[] headerIS = new String[dictionaryStatic.getNumDimensions()];
        microaggregationHeader = new String[attributesMicroaggregated.size()];

        // Classify each input column and assign its position within its target Data object
        for (final String column : header) {
            final int idx = counter * 2;
            if (attributesGemeralized.contains(column)) {
                map[idx] = AttributeTypeInternal.QUASI_IDENTIFYING_GENERALIZED;
                map[idx + 1] = indexGeneralized;
                mapGeneralized[indexGeneralized] = counter;
                dictionaryGeneralized.registerAll(indexGeneralized, dictionary, counter);
                headerGH[indexGeneralized] = header[counter];
                indexGeneralized++;
            } else if (attributesMicroaggregated.contains(column)) {
                map[idx] = AttributeTypeInternal.QUASI_IDENTIFYING_MICROAGGREGATED;
                map[idx + 1] = indexMicroaggregated;
                mapAnalyzed[indexMicroaggregated] = counter;
                dictionaryAnalyzed.registerAll(indexMicroaggregated, dictionary, counter);
                headerDI[indexMicroaggregated] = header[counter];
                indexMicroaggregated++;
                // indexAnalyzed counts microaggregated attributes only (0-based),
                // used for the compact microaggregation arrays
                microaggregationMap[indexAnalyzed] = counter;
                microaggregationHeader[indexAnalyzed] = header[counter];
                indexAnalyzed++;
            } else if (attributesInsensitive.contains(column)) {
                map[idx] = AttributeTypeInternal.INSENSITIVE;
                map[idx + 1] = indexStatic;
                mapStatic[indexStatic] = counter;
                dictionaryStatic.registerAll(indexStatic, dictionary, counter);
                headerIS[indexStatic] = header[counter];
                indexStatic++;
            } else if (attributesSensitive.contains(column)) {
                map[idx] = AttributeTypeInternal.SENSITIVE;
                map[idx + 1] = indexSensitive;
                mapAnalyzed[indexSensitive] = counter;
                dictionaryAnalyzed.registerAll(indexSensitive, dictionary, counter);
                headerDI[indexSensitive] = header[counter];
                indexSensitive++;
            } else {
                // TODO: CHECK: Changed default? - now all undefined attributes
                // are identifying! Previously they were considered sensitive?
                map[idx] = AttributeTypeInternal.IDENTIFYING;
                map[idx + 1] = -1;
            }
            counter++;
        }

        // encode Data
        final Data[] ddata = encode(data,
                                    map,
                                    mapGeneralized,
                                    mapAnalyzed,
                                    mapStatic,
                                    dictionaryGeneralized,
                                    dictionaryAnalyzed,
                                    dictionaryStatic,
                                    headerGH,
                                    headerDI,
                                    headerIS);
        dataGeneralized = ddata[0];
        dataAnalyzed = ddata[1];
        dataStatic = ddata[2];

        // Initialize minlevels
        minLevels = new int[attributesGemeralized.size()];
        hierarchiesHeights = new int[attributesGemeralized.size()];
        maxLevels = new int[attributesGemeralized.size()];

        // Build hierarchiesQI
        hierarchiesGeneralized = new GeneralizationHierarchy[attributesGemeralized.size()];
        for (int i = 0; i < header.length; i++) {
            final int idx = i * 2;
            if (attributesGemeralized.contains(header[i]) &&
                map[idx] == AttributeTypeInternal.QUASI_IDENTIFYING_GENERALIZED) {
                final int dictionaryIndex = map[idx + 1];
                final String name = header[i];
                if (definition.getHierarchy(name) != null) {
                    hierarchiesGeneralized[dictionaryIndex] = new GeneralizationHierarchy(name,
                                                                                          definition.getHierarchy(name),
                                                                                          dictionaryIndex,
                                                                                          dictionaryGeneralized);
                } else {
                    throw new IllegalStateException("No hierarchy available for attribute (" + header[i] + ")");
                }
                // Initialize hierarchy height and minimum / maximum
                // generalization
                hierarchiesHeights[dictionaryIndex] = hierarchiesGeneralized[dictionaryIndex].getArray()[0].length;
                final Integer minGenLevel = definition.getMinimumGeneralization(name);
                minLevels[dictionaryIndex] = minGenLevel == null ? 0 : minGenLevel;
                final Integer maxGenLevel = definition.getMaximumGeneralization(name);
                maxLevels[dictionaryIndex] = maxGenLevel == null ? hierarchiesHeights[dictionaryIndex] - 1 : maxGenLevel;
            }
        }

        // Change min & max, when using (e,d)-DP
        // The scheme fixes each QI to a single generalization level (min == max);
        // only the first EDDifferentialPrivacy criterion is considered
        for (PrivacyCriterion c : criteria) {
            if (c instanceof EDDifferentialPrivacy) {
                DataGeneralizationScheme scheme = ((EDDifferentialPrivacy)c).getGeneralizationScheme();
                for (int i = 0; i < header.length; i++) {
                    final int idx = i * 2;
                    if (attributesGemeralized.contains(header[i]) &&
                        map[idx] == AttributeTypeInternal.QUASI_IDENTIFYING_GENERALIZED) {
                        minLevels[map[idx + 1]] = scheme.getGeneralizationLevel(header[i], definition);
                        maxLevels[map[idx + 1]] = scheme.getGeneralizationLevel(header[i], definition);
                    }
                }
                break;
            }
        }

        // Build map with hierarchies for sensitive attributes
        Map<String, String[][]> sensitiveHierarchies = new HashMap<String, String[][]>();
        for (PrivacyCriterion c : criteria) {
            if (c instanceof HierarchicalDistanceTCloseness) {
                HierarchicalDistanceTCloseness t = (HierarchicalDistanceTCloseness) c;
                sensitiveHierarchies.put(t.getAttribute(), t.getHierarchy().getHierarchy());
            }
        }

        // Build generalization hierarchies for sensitive attributes
        hierarchiesSensitive = new HashMap<String, GeneralizationHierarchy>();
        indexesSensitive = new HashMap<String, Integer>();
        int index = 0;
        for (int i = 0; i < header.length; i++) {
            final String name = header[i];
            final int idx = i * 2;
            if (sensitiveHierarchies.containsKey(name) &&
                map[idx] == AttributeTypeInternal.SENSITIVE) {
                final int dictionaryIndex = map[idx + 1];
                final String[][] hiers = sensitiveHierarchies.get(name);
                if (hiers != null) {
                    hierarchiesSensitive.put(name, new GeneralizationHierarchy(name,
                                                                               hiers,
                                                                               dictionaryIndex,
                                                                               dictionaryAnalyzed));
                }
            }

            // Store index for sensitive attributes
            if (attributesSensitive.contains(header[i])) {
                indexesSensitive.put(name, index);
                index++;
            }
        }

        // Build map with hierarchies for microaggregated attributes
        // Only functions that generalize (DistributionAggregateFunctionGeneralization) need one
        Map<String, String[][]> maHierarchies = new HashMap<String, String[][]>();
        for (String attribute : functions.keySet()) {
            if (functions.get(attribute) instanceof DistributionAggregateFunctionGeneralization) {
                maHierarchies.put(attribute, definition.getHierarchy(attribute));
            }
        }

        // Build generalization hierarchies for microaggregated attributes
        Map<String, int[][]> hierarchiesMA = new HashMap<String, int[][]>();
        index = 0;
        for (int i = 0; i < header.length; i++) {
            final String name = header[i];
            final int idx = i * 2;
            if (maHierarchies.containsKey(name) &&
                map[idx] == AttributeTypeInternal.QUASI_IDENTIFYING_MICROAGGREGATED) {
                final int dictionaryIndex = map[idx + 1];
                final String[][] hiers = maHierarchies.get(name);
                if (hiers != null) {
                    hierarchiesMA.put(name, new GeneralizationHierarchy(name,
                                                                        hiers,
                                                                        dictionaryIndex,
                                                                        dictionaryAnalyzed).map);
                }
            }
        }

        // finalize dictionary
        dictionaryGeneralized.finalizeAll();
        dictionaryAnalyzed.finalizeAll();
        dictionaryStatic.finalizeAll();

        // Init microaggregation functions
        microaggregationFunctions = new DistributionAggregateFunction[attributesMicroaggregated.size()];
        for (int i = 0; i < header.length; i++) {
            final int idx = i * 2;
            if (attributesMicroaggregated.contains(header[i]) &&
                map[idx] == AttributeTypeInternal.QUASI_IDENTIFYING_MICROAGGREGATED) {
                // dictionaryIndex is 0-based within the microaggregated block;
                // dictionaryIndex + microaggregationStartIndex == map[idx + 1], the
                // column position in dataAnalyzed/dictionaryAnalyzed
                final int dictionaryIndex = map[idx + 1] - microaggregationStartIndex;
                final String name = header[i];
                if (definition.getMicroAggregationFunction(name) != null) {
                    microaggregationFunctions[dictionaryIndex] = functions.get(name);
                    microaggregationFunctions[dictionaryIndex].initialize(dictionaryAnalyzed.getMapping()[dictionaryIndex + microaggregationStartIndex],
                                                                          definition.getDataType(name),
                                                                          hierarchiesMA.get(name));
                } else {
                    throw new IllegalStateException("No microaggregation function defined for attribute (" + header[i] + ")");
                }
            }
        }

        // Store research subset
        // NOTE(review): if several criteria carry a subset, the last one iterated wins
        for (PrivacyCriterion c : criteria) {
            if (c instanceof EDDifferentialPrivacy) {
                ((EDDifferentialPrivacy) c).initialize(this);
            }
            if (c.getSubset() != null) {
                subset = c.getSubset().getSet();
                subsetSize = c.getSubset().getArray().length;
            }
        }
    }

    /**
     * For creating a projected instance
     * @param dataAnalyzed
     * @param dataGeneralized
     * @param dataStatic
     * @param header
     * @param hierarchiesGeneralized
     * @param hierarchiesHeights
     * @param hierarchiesSensitive
     * @param indexesSensitive
     * @param maxLevels
     * @param microaggregationFunctions
     * @param microaggregationHeader
     * @param microaggregationMap
     * @param microaggregationNumAttributes
     * @param microaggregationStartIndex
     * @param minLevels
     */
    protected DataManager(Data dataAnalyzed,
                          Data dataGeneralized,
                          Data dataStatic,
                          String[] header,
                          GeneralizationHierarchy[] hierarchiesGeneralized,
                          int[] hierarchiesHeights,
                          Map<String, GeneralizationHierarchy> hierarchiesSensitive,
                          Map<String, Integer> indexesSensitive,
                          int[] maxLevels,
                          DistributionAggregateFunction[] microaggregationFunctions,
                          String[] microaggregationHeader,
                          int[] microaggregationMap,
                          int microaggregationNumAttributes,
                          int microaggregationStartIndex,
                          int[] minLevels) {
        this.dataAnalyzed = dataAnalyzed;
        this.dataGeneralized = dataGeneralized;
        this.dataStatic = dataStatic;
        this.header = header;
        this.hierarchiesGeneralized = hierarchiesGeneralized;
        this.hierarchiesHeights = hierarchiesHeights;
        this.hierarchiesSensitive = hierarchiesSensitive;
        this.indexesSensitive = indexesSensitive;
        this.maxLevels = maxLevels;
        this.microaggregationFunctions = microaggregationFunctions;
        this.microaggregationHeader = microaggregationHeader;
        this.microaggregationMap = microaggregationMap;
        this.microaggregationNumAttributes = microaggregationNumAttributes;
        this.microaggregationStartIndex = microaggregationStartIndex;
        this.minLevels = minLevels;

        // Both variables are only used for getDistribution() and getTree()
        // The projected instance delegates these methods to the original data manager
        this.subset = null;
        this.subsetSize = 0;
    }

    /**
     * Returns the input data that will be analyzed.
     *
     * @return the data
     */
    public Data getDataAnalyzed() {
        return dataAnalyzed;
    }

    /**
     * Returns the input data that will be generalized.
     *
     * @return the data
     */
    public Data getDataGeneralized() {
        return dataGeneralized;
    }

    /**
     * Returns the static input data.
     *
     * @return the data
     */
    public Data getDataStatic() {
        return dataStatic;
    }

    /**
     * Returns the distribution of the attribute in the data array at the given index.
     * If a research subset is defined, only rows in the subset are counted.
     *
     * @param data the encoded data array
     * @param index the column index within data
     * @param distinctValues the number of distinct values of the attribute
     * @return relative frequencies, indexed by encoded value
     */
    public double[] getDistribution(int[][] data, int index, int distinctValues) {

        // Initialize counts: iterate over all rows or the subset
        final int[] cardinalities = new int[distinctValues];
        for (int i = 0; i < data.length; i++) {
            if (subset == null || subset.contains(i)) {
                cardinalities[data[i][index]]++;
            }
        }

        // compute distribution
        final double total = subset == null ? data.length : subsetSize;
        final double[] distribution = new double[cardinalities.length];
        for (int i = 0; i < distribution.length; i++) {
            distribution[i] = (double) cardinalities[i] / total;
        }
        return distribution;
    }

    /**
     * Returns the distribution of the given sensitive attribute in the original dataset. Required for t-closeness.
     *
     * @param attribute
     * @return distribution
     * @throws IllegalArgumentException if the attribute is not sensitive
     */
    public double[] getDistribution(String attribute) {
        if (!indexesSensitive.containsKey(attribute)) {
            throw new IllegalArgumentException("Attribute " + attribute + " is not sensitive");
        }
        final int index = indexesSensitive.get(attribute);
        final int distinctValues = dataAnalyzed.getDictionary().getMapping()[index].length;
        final int[][] data = dataAnalyzed.getArray();
        return getDistribution(data, index, distinctValues);
    }

    /**
     * The original data header.
     *
     * @return
     */
    public String[] getHeader() {
        return header;
    }

    /**
     * Returns the heights of the hierarchiesQI.
     * NOTE(review): method name misspells "Hierarchies"; kept for API compatibility.
     *
     * @return
     */
    public int[] getHierachiesHeights() {
        return hierarchiesHeights;
    }

    /**
     * Returns the generalization hierarchiesQI.
     *
     * @return the hierarchiesQI
     */
    public GeneralizationHierarchy[] getHierarchies() {
        return hierarchiesGeneralized;
    }

    /**
     * Returns the maximum levels for the generalization.
     *
     * @return the maximum level for each QI
     */
    public int[] getHierarchiesMaxLevels() {
        return maxLevels;
    }

    /**
     * Returns the minimum levels for the generalizations.
     *
     * @return
     */
    public int[] getHierarchiesMinLevels() {
        return minLevels;
    }

    /**
     * Returns the microaggregation functions.
     *
     * @return
     */
    public DistributionAggregateFunction[] getMicroaggregationFunctions() {
        return microaggregationFunctions;
    }

    /**
     * Returns the header for the according buffer
     * @return
     */
    public String[] getMicroaggregationHeader() {
        return microaggregationHeader;
    }

    /**
     * Returns the map for the according buffer
     * @return
     */
    public int[] getMicroaggregationMap() {
        return microaggregationMap;
    }

    /**
     * Gets the number of attributes to which microaggregation will be applied
     * in dataAnalyzed.
     *
     * @return
     */
    public int getMicroaggregationNumAttributes() {
        return microaggregationNumAttributes;
    }

    /**
     * Gets the start index of the attributes to which microaggregation will be
     * applied in dataAnalyzed.
     *
     * @return
     */
    public int getMicroaggregationStartIndex() {
        return microaggregationStartIndex;
    }

    /**
     * Returns an instance of this data manager, that is projected onto the given rowset.
     * Microaggregation functions are cloned; all other state is shared with this instance.
     *
     * @param rowset
     * @return
     */
    public DataManager getSubsetInstance(RowSet rowset) {
        // Clone functions so the subset instance can maintain its own aggregation state
        DistributionAggregateFunction[] microaggregationFunctions = new DistributionAggregateFunction[this.microaggregationFunctions.length];
        for (int i = 0; i < this.microaggregationFunctions.length; i++) {
            microaggregationFunctions[i] = this.microaggregationFunctions[i].clone();
        }
        return new DataManagerSubset(this,
                                     this.dataAnalyzed.getSubsetInstance(rowset),
                                     this.dataGeneralized.getSubsetInstance(rowset),
                                     this.dataStatic.getSubsetInstance(rowset),
                                     this.header,
                                     this.hierarchiesGeneralized,
                                     this.hierarchiesHeights,
                                     this.hierarchiesSensitive,
                                     this.indexesSensitive,
                                     this.maxLevels,
                                     microaggregationFunctions,
                                     this.microaggregationHeader,
                                     this.microaggregationMap,
                                     this.microaggregationNumAttributes,
                                     this.microaggregationStartIndex,
                                     this.minLevels);
    }

    /**
     * Returns a tree for the given attribute at the index within the given data array, using the given hierarchy.
     * The resulting tree can be used to calculate the earth mover's distance with hierarchical ground-distance.
     *
     * Packed int-array layout produced here:
     * [0]=total element count, [1]=number of leaves, [2]=hierarchy height,
     * [3 .. 3+numLeafs)=leaf frequencies, [3+numLeafs .. 3+2*numLeafs)=extra slots (init -1),
     * then for each inner node: childCount, level, child references, pos_e (0), neg_e (0).
     * If a research subset is defined, only rows in the subset are counted.
     *
     * @param data
     * @param index
     * @param hierarchy
     * @return tree
     */
    public int[] getTree(int[][] data,
                         int index,
                         int[][] hierarchy) {

        final int totalElementsP = subset == null ? data.length : subsetSize;
        final int height = hierarchy[0].length - 1;
        final int numLeafs = hierarchy.length;

        // TODO: Size could be calculated?!
        final ArrayList<Integer> treeList = new ArrayList<Integer>();
        treeList.add(totalElementsP);
        treeList.add(numLeafs);
        treeList.add(height);

        // Init all freq to 0
        for (int i = 0; i < numLeafs; i++) {
            treeList.add(0);
        }

        // Count frequencies
        final int offsetLeafs = 3;
        for (int i = 0; i < data.length; i++) {
            if (subset == null || subset.contains(i)) {
                int previousFreq = treeList.get(data[i][index] + offsetLeafs);
                previousFreq++;
                treeList.set(data[i][index] + offsetLeafs, previousFreq);
            }
        }

        // Init extras
        for (int i = 0; i < numLeafs; i++) {
            treeList.add(-1);
        }

        // Temporary class for nodes
        class TNode {
            IntOpenHashSet children = new IntOpenHashSet();
            int            level    = 0;
            int            offset   = 0;
        }

        final int offsetsExtras = offsetLeafs + numLeafs;
        final IntObjectOpenHashMap<TNode> nodes = new IntObjectOpenHashMap<TNode>();
        final ArrayList<ArrayList<TNode>> levels = new ArrayList<ArrayList<TNode>>();

        // Init levels
        for (int i = 0; i < hierarchy[0].length; i++) {
            levels.add(new ArrayList<TNode>());
        }

        // Build nodes
        // nodeID = value + level * offset makes IDs unique across levels,
        // since encoded values are < offset (the dictionary size of the column)
        int offset = dataAnalyzed.getDictionary().getMapping()[index].length;
        for (int i = 0; i < hierarchy[0].length; i++) {
            for (int j = 0; j < hierarchy.length; j++) {
                final int nodeID = hierarchy[j][i] + i * offset;
                TNode curNode = null;

                if (!nodes.containsKey(nodeID)) {
                    curNode = new TNode();
                    curNode.level = i;
                    nodes.put(nodeID, curNode);
                    final ArrayList<TNode> level = levels.get(curNode.level);
                    level.add(curNode);
                } else {
                    curNode = nodes.get(nodeID);
                }

                if (i > 0) { // first add child
                    curNode.children.add(hierarchy[j][i - 1] + (i - 1) * offset);
                }
            }
        }

        // For all nodes
        for (final ArrayList<TNode> level : levels) {
            for (final TNode node : level) {
                if (node.level > 0) { // only inner nodes
                    node.offset = treeList.size();

                    treeList.add(node.children.size());
                    treeList.add(node.level);

                    // Iterate over the HPPC open-addressing set's internal slots
                    final int[] keys = node.children.keys;
                    final boolean[] allocated = node.children.allocated;
                    for (int i = 0; i < allocated.length; i++) {
                        if (allocated[i]) {
                            // Level-1 children are leaves and referenced via the extras block;
                            // deeper children are referenced by their own node offset
                            treeList.add(node.level == 1 ? keys[i] + offsetsExtras
                                    : nodes.get(keys[i]).offset);
                        }
                    }

                    treeList.add(0); // pos_e
                    treeList.add(0); // neg_e
                }
            }
        }

        final int[] treeArray = new int[treeList.size()];
        int count = 0;
        for (final int val : treeList) {
            treeArray[count++] = val;
        }

        return treeArray;
    }

    /**
     * Returns the tree for the given sensitive attribute, if a generalization hierarchy is associated.
     * The resulting tree can be used to calculate the earth mover's distance with hierarchical ground-distance.
     *
     * @param attribute
     * @return tree
     * @throws IllegalArgumentException if no hierarchy is associated with the attribute
     */
    public int[] getTree(String attribute) {
        if (!hierarchiesSensitive.containsKey(attribute)) {
            throw new IllegalArgumentException("Attribute " + attribute + " is not sensitive");
        }
        final int[][] data = dataAnalyzed.getArray();
        final int index = indexesSensitive.get(attribute);
        return getTree(data, index, hierarchiesSensitive.get(attribute).map);
    }

    /**
     * Encodes the data. Splits the input into the three column groups
     * (generalized / analyzed / static) according to the packed attribute map;
     * identifying columns are dropped. Empty groups yield a Data object with a
     * null array.
     *
     * @param data
     * @param map packed layout: map[i*2]=attribute type, map[i*2+1]=target column
     * @param mapGeneralized
     * @param mapAnalyzed
     * @param mapStatic
     * @param dictionaryGeneralized
     * @param dictionaryAnalyzed
     * @param dictionaryStatic
     * @param headerGeneralized
     * @param headerAnalyzed
     * @param headerStatic
     * @return [dataGeneralized, dataAnalyzed, dataStatic]
     */
    private Data[] encode(final int[][] data,
                          final int[] map,
                          final int[] mapGeneralized,
                          final int[] mapAnalyzed,
                          final int[] mapStatic,
                          final Dictionary dictionaryGeneralized,
                          final Dictionary dictionaryAnalyzed,
                          final Dictionary dictionaryStatic,
                          final String[] headerGeneralized,
                          final String[] headerAnalyzed,
                          final String[] headerStatic) {

        // Parse the dataset
        final int[][] valsGH = headerGeneralized.length == 0 ? null : new int[data.length][];
        final int[][] valsDI = headerAnalyzed.length == 0 ? null : new int[data.length][];
        final int[][] valsIS = headerStatic.length == 0 ? null : new int[data.length][];

        int index = 0;
        for (final int[] tuple : data) {

            // Process a tuple
            final int[] tupleGH = headerGeneralized.length == 0 ? null : new int[headerGeneralized.length];
            final int[] tupleDI = headerAnalyzed.length == 0 ? null : new int[headerAnalyzed.length];
            final int[] tupleIS = headerStatic.length == 0 ? null : new int[headerStatic.length];

            for (int i = 0; i < tuple.length; i++) {
                final int idx = i * 2;
                int aType = map[idx];
                final int iPos = map[idx + 1];
                switch (aType) {
                case AttributeTypeInternal.QUASI_IDENTIFYING_GENERALIZED:
                    tupleGH[iPos] = tuple[i];
                    break;
                case AttributeTypeInternal.IDENTIFYING:
                    // Ignore
                    break;
                case AttributeTypeInternal.INSENSITIVE:
                    tupleIS[iPos] = tuple[i];
                    break;
                case AttributeTypeInternal.QUASI_IDENTIFYING_MICROAGGREGATED:
                    tupleDI[iPos] = tuple[i];
                    break;
                case AttributeTypeInternal.SENSITIVE:
                    tupleDI[iPos] = tuple[i];
                    break;
                }
            }
            if (valsGH != null) valsGH[index] = tupleGH;
            if (valsIS != null) valsIS[index] = tupleIS;
            if (valsDI != null) valsDI[index] = tupleDI;
            index++;
        }

        // Build data object
        final Data[] result = { new Data(valsGH, headerGeneralized, mapGeneralized, dictionaryGeneralized),
                new Data(valsDI, headerAnalyzed, mapAnalyzed, dictionaryAnalyzed),
                new Data(valsIS, headerStatic, mapStatic, dictionaryStatic) };
        return result;
    }
}
/* * Copyright 2018 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.thoughtworks.go.server.service; import com.thoughtworks.go.config.CaseInsensitiveString; import com.thoughtworks.go.config.materials.PackageMaterial; import com.thoughtworks.go.config.materials.PluggableSCMMaterial; import com.thoughtworks.go.config.materials.SubprocessExecutionContext; import com.thoughtworks.go.config.materials.dependency.DependencyMaterial; import com.thoughtworks.go.config.materials.git.GitMaterial; import com.thoughtworks.go.config.materials.git.GitMaterialConfig; import com.thoughtworks.go.config.materials.mercurial.HgMaterial; import com.thoughtworks.go.config.materials.perforce.P4Material; import com.thoughtworks.go.config.materials.svn.SvnMaterial; import com.thoughtworks.go.config.materials.tfs.TfsMaterial; import com.thoughtworks.go.domain.MaterialInstance; import com.thoughtworks.go.domain.MaterialRevision; import com.thoughtworks.go.domain.MaterialRevisions; import com.thoughtworks.go.domain.config.Configuration; import com.thoughtworks.go.domain.materials.*; import com.thoughtworks.go.domain.materials.git.GitMaterialInstance; import com.thoughtworks.go.domain.materials.packagematerial.PackageMaterialRevision; import com.thoughtworks.go.domain.materials.scm.PluggableSCMMaterialRevision; import com.thoughtworks.go.domain.packagerepository.PackageDefinition; import com.thoughtworks.go.domain.packagerepository.PackageRepositoryMother; import 
com.thoughtworks.go.helper.MaterialsMother; import com.thoughtworks.go.i18n.LocalizedMessage; import com.thoughtworks.go.plugin.access.packagematerial.PackageRepositoryExtension; import com.thoughtworks.go.plugin.access.scm.SCMExtension; import com.thoughtworks.go.plugin.access.scm.SCMPropertyConfiguration; import com.thoughtworks.go.plugin.access.scm.material.MaterialPollResult; import com.thoughtworks.go.plugin.access.scm.revision.SCMRevision; import com.thoughtworks.go.plugin.api.material.packagerepository.PackageConfiguration; import com.thoughtworks.go.plugin.api.material.packagerepository.PackageRevision; import com.thoughtworks.go.plugin.api.material.packagerepository.RepositoryConfiguration; import com.thoughtworks.go.security.GoCipher; import com.thoughtworks.go.server.domain.Username; import com.thoughtworks.go.server.persistence.MaterialRepository; import com.thoughtworks.go.server.service.result.LocalizedOperationResult; import com.thoughtworks.go.server.transaction.TransactionTemplate; import com.thoughtworks.go.server.util.Pagination; import com.thoughtworks.go.serverhealth.HealthStateScope; import com.thoughtworks.go.serverhealth.HealthStateType; import org.joda.time.DateTime; import org.junit.Before; import org.junit.Test; import org.junit.experimental.theories.DataPoint; import org.junit.experimental.theories.Theories; import org.junit.experimental.theories.Theory; import org.junit.runner.RunWith; import org.mockito.Mock; import java.io.File; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.Map; import static com.thoughtworks.go.domain.packagerepository.PackageDefinitionMother.create; import static java.util.Arrays.asList; import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; import static org.mockito.Mockito.*; import static org.mockito.MockitoAnnotations.initMocks; @RunWith(Theories.class) public class MaterialServiceTest { private static 
List MODIFICATIONS = new ArrayList<Modification>(); @Mock private MaterialRepository materialRepository; @Mock private GoConfigService goConfigService; @Mock private SecurityService securityService; @Mock private PackageRepositoryExtension packageRepositoryExtension; @Mock private SCMExtension scmExtension; @Mock private TransactionTemplate transactionTemplate; private MaterialService materialService; @Before public void setUp() { initMocks(this); materialService = new MaterialService(materialRepository, goConfigService, securityService, packageRepositoryExtension, scmExtension, transactionTemplate); } @Test public void shouldUnderstandIfMaterialHasModifications() { assertHasModifcation(new MaterialRevisions(new MaterialRevision(new HgMaterial("foo.com", null), new Modification(new Date(), "2", "MOCK_LABEL-12", null))), true); assertHasModifcation(new MaterialRevisions(), false); } @Test public void shouldNotBeAuthorizedToViewAPipeline() { Username pavan = Username.valueOf("pavan"); when(securityService.hasViewPermissionForPipeline(pavan, "pipeline")).thenReturn(false); LocalizedOperationResult operationResult = mock(LocalizedOperationResult.class); materialService.searchRevisions("pipeline", "sha", "search-string", pavan, operationResult); verify(operationResult).forbidden(LocalizedMessage.forbiddenToViewPipeline("pipeline"), HealthStateType.general(HealthStateScope.forPipeline("pipeline"))); } @Test public void shouldReturnTheRevisionsThatMatchTheGivenSearchString() { Username pavan = Username.valueOf("pavan"); when(securityService.hasViewPermissionForPipeline(pavan, "pipeline")).thenReturn(true); LocalizedOperationResult operationResult = mock(LocalizedOperationResult.class); MaterialConfig materialConfig = mock(MaterialConfig.class); when(goConfigService.materialForPipelineWithFingerprint("pipeline", "sha")).thenReturn(materialConfig); List<MatchedRevision> expected = asList(new MatchedRevision("23", "revision", "revision", "user", new DateTime(2009, 10, 10, 
// NOTE(review): this excerpt opens mid-statement — the lines below are the tail of a
// searchRevisions happy-path test whose setup (materialConfig / expected mock wiring)
// begins before this view.
12, 0, 0, 0).toDate(), "comment"));
        when(materialRepository.findRevisionsMatching(materialConfig, "23")).thenReturn(expected);
        assertThat(materialService.searchRevisions("pipeline", "sha", "23", pavan, operationResult), is(expected));
    }

    // Verifies searchRevisions reports a not-found result (with a pipeline-scoped
    // health state) when the fingerprint cannot be resolved for the pipeline.
    @Test
    public void shouldReturnNotFoundIfTheMaterialDoesNotBelongToTheGivenPipeline() {
        Username pavan = Username.valueOf("pavan");
        when(securityService.hasViewPermissionForPipeline(pavan, "pipeline")).thenReturn(true);
        LocalizedOperationResult operationResult = mock(LocalizedOperationResult.class);
        // Simulate the config service failing to resolve the material fingerprint.
        when(goConfigService.materialForPipelineWithFingerprint("pipeline", "sha")).thenThrow(new RuntimeException("Not found"));
        materialService.searchRevisions("pipeline", "sha", "23", pavan, operationResult);
        verify(operationResult).notFound("Pipeline '" + "pipeline" + "' does not contain material with fingerprint '" + "sha" + "'.", HealthStateType.general(HealthStateScope.forPipeline("pipeline")));
    }

    // Theory data points: each pairs a material whose polling methods are stubbed to
    // return the shared MODIFICATIONS fixture with the class the service is expected
    // to resolve for it (see shouldGetLatestModificationsForGivenMaterial below).
    @DataPoint public static RequestDataPoints GIT_LATEST_MODIFICATIONS = new RequestDataPoints(new GitMaterial("url") {
        @Override
        public List<Modification> latestModification(File baseDir, SubprocessExecutionContext execCtx) {
            return (List<Modification>) MODIFICATIONS;
        }

        @Override
        public GitMaterial withShallowClone(boolean value) {
            // Keep the stubbed instance; a real clone would lose the overrides above.
            return this;
        }

        @Override
        public List<Modification> modificationsSince(File baseDir, Revision revision, SubprocessExecutionContext execCtx) {
            return (List<Modification>) MODIFICATIONS;
        }
    }, GitMaterial.class);

    @DataPoint public static RequestDataPoints SVN_LATEST_MODIFICATIONS = new RequestDataPoints(new SvnMaterial("url", "username", "password", true) {
        @Override
        public List<Modification> latestModification(File baseDir, SubprocessExecutionContext execCtx) {
            return (List<Modification>) MODIFICATIONS;
        }

        @Override
        public List<Modification> modificationsSince(File baseDir, Revision revision, SubprocessExecutionContext execCtx) {
            return (List<Modification>) MODIFICATIONS;
        }
    }, SvnMaterial.class);

    @DataPoint public static RequestDataPoints HG_LATEST_MODIFICATIONS = new RequestDataPoints(new HgMaterial("url", null) {
        @Override
        public List<Modification> latestModification(File baseDir, SubprocessExecutionContext execCtx) {
            return (List<Modification>) MODIFICATIONS;
        }

        @Override
        public List<Modification> modificationsSince(File baseDir, Revision revision, SubprocessExecutionContext execCtx) {
            return (List<Modification>) MODIFICATIONS;
        }
    }, HgMaterial.class);

    @DataPoint public static RequestDataPoints TFS_LATEST_MODIFICATIONS = new RequestDataPoints(new TfsMaterial(mock(GoCipher.class)) {
        @Override
        public List<Modification> latestModification(File baseDir, SubprocessExecutionContext execCtx) {
            return (List<Modification>) MODIFICATIONS;
        }

        @Override
        public List<Modification> modificationsSince(File baseDir, Revision revision, SubprocessExecutionContext execCtx) {
            return (List<Modification>) MODIFICATIONS;
        }
    }, TfsMaterial.class);

    @DataPoint public static RequestDataPoints P4_LATEST_MODIFICATIONS = new RequestDataPoints(new P4Material("url", "view", "user") {
        @Override
        public List<Modification> latestModification(File baseDir, SubprocessExecutionContext execCtx) {
            return (List<Modification>) MODIFICATIONS;
        }

        @Override
        public List<Modification> modificationsSince(File baseDir, Revision revision, SubprocessExecutionContext execCtx) {
            return (List<Modification>) MODIFICATIONS;
        }
    }, P4Material.class);

    @DataPoint public static RequestDataPoints DEPENDENCY_LATEST_MODIFICATIONS = new RequestDataPoints(new DependencyMaterial(new CaseInsensitiveString("p1"), new CaseInsensitiveString("s1")) {
        @Override
        public List<Modification> latestModification(File baseDir, SubprocessExecutionContext execCtx) {
            return (List<Modification>) MODIFICATIONS;
        }

        @Override
        public List<Modification> modificationsSince(File baseDir, Revision revision, SubprocessExecutionContext execCtx) {
            return (List<Modification>) MODIFICATIONS;
        }
    }, DependencyMaterial.class);

    // Runs once per @DataPoint: the service must delegate latestModification to the
    // poller registered for the material's class (getMaterialClass is stubbed on a spy).
    @Theory
    public void shouldGetLatestModificationsForGivenMaterial(RequestDataPoints data) {
        MaterialService spy = spy(materialService);
        doReturn(data.klass).when(spy).getMaterialClass(data.material);
        List<Modification> actual = spy.latestModification(data.material, null, null);
        assertThat(actual, is(MODIFICATIONS));
    }

    // Same delegation check for modificationsSince.
    @Theory
    public void shouldGetModificationsSinceARevisionForGivenMaterial(RequestDataPoints data) {
        Revision revision = mock(Revision.class);
        MaterialService spy = spy(materialService);
        doReturn(data.klass).when(spy).getMaterialClass(data.material);
        List<Modification> actual = spy.modificationsSince(data.material, null, revision, null);
        assertThat(actual, is(MODIFICATIONS));
    }

    // An unregistered material type must fail fast with a descriptive message.
    @Test
    public void shouldThrowExceptionWhenPollerForMaterialNotFound() {
        try {
            materialService.latestModification(mock(Material.class), null, null);
            fail("Should have thrown up");
        } catch (RuntimeException e) {
            assertThat(e.getMessage(), is("unknown material type null"));
        }
    }

    // Package materials poll through the package-repository plugin extension.
    @Test
    public void shouldGetLatestModificationForPackageMaterial() {
        PackageMaterial material = new PackageMaterial();
        PackageDefinition packageDefinition = create("id", "package", new Configuration(), PackageRepositoryMother.create("id", "name", "plugin-id", "plugin-version", new Configuration()));
        material.setPackageDefinition(packageDefinition);
        when(packageRepositoryExtension.getLatestRevision(eq("plugin-id"), any(PackageConfiguration.class), any(RepositoryConfiguration.class))).thenReturn(new PackageRevision("blah-123", new Date(), "user"));
        List<Modification> modifications = materialService.latestModification(material, null, null);
        assertThat(modifications.get(0).getRevision(), is("blah-123"));
    }

    @Test
    public void shouldGetModificationSinceAGivenRevision() {
        PackageMaterial material = new PackageMaterial();
        PackageDefinition packageDefinition = create("id", "package", new Configuration(), PackageRepositoryMother.create("id", "name", "plugin-id", "plugin-version", new Configuration()));
        material.setPackageDefinition(packageDefinition);
        when(packageRepositoryExtension.latestModificationSince(eq("plugin-id"), any(PackageConfiguration.class), any(RepositoryConfiguration.class), any(PackageRevision.class))).thenReturn(new PackageRevision("new-revision-456", new Date(), "user"));
        List<Modification> modifications = materialService.modificationsSince(material, null, new PackageMaterialRevision("revision-124", new Date()), null);
        assertThat(modifications.get(0).getRevision(), is("new-revision-456"));
    }

    // Pluggable SCM materials poll through the SCM plugin extension.
    @Test
    public void shouldGetLatestModification_PluggableSCMMaterial() {
        PluggableSCMMaterial pluggableSCMMaterial = MaterialsMother.pluggableSCMMaterial();
        MaterialInstance materialInstance = pluggableSCMMaterial.createMaterialInstance();
        when(materialRepository.findMaterialInstance(any(Material.class))).thenReturn(materialInstance);
        MaterialPollResult materialPollResult = new MaterialPollResult(null, new SCMRevision("blah-123", new Date(), "user", "comment", null, null));
        when(scmExtension.getLatestRevision(any(String.class), any(SCMPropertyConfiguration.class), any(Map.class), any(String.class))).thenReturn(materialPollResult);
        List<Modification> modifications = materialService.latestModification(pluggableSCMMaterial, new File("/tmp/flyweight"), null);
        assertThat(modifications.get(0).getRevision(), is("blah-123"));
    }

    @Test
    public void shouldGetModificationSince_PluggableSCMMaterial() {
        PluggableSCMMaterial pluggableSCMMaterial = MaterialsMother.pluggableSCMMaterial();
        MaterialInstance materialInstance = pluggableSCMMaterial.createMaterialInstance();
        when(materialRepository.findMaterialInstance(any(Material.class))).thenReturn(materialInstance);
        MaterialPollResult materialPollResult = new MaterialPollResult(null, asList(new SCMRevision("new-revision-456", new Date(), "user", "comment", null, null)));
        when(scmExtension.latestModificationSince(any(String.class), any(SCMPropertyConfiguration.class), any(Map.class), any(String.class), any(SCMRevision.class))).thenReturn(materialPollResult);
        PluggableSCMMaterialRevision previouslyKnownRevision = new PluggableSCMMaterialRevision("revision-124", new Date());
        List<Modification> modifications = materialService.modificationsSince(pluggableSCMMaterial, new File("/tmp/flyweight"), previouslyKnownRevision, null);
        assertThat(modifications.get(0).getRevision(), is("new-revision-456"));
    }

    // Pure pass-throughs: the service should delegate pagination queries to the repository.
    @Test
    public void shouldDelegateToMaterialRepository_getTotalModificationsFor() {
        GitMaterialConfig materialConfig = new GitMaterialConfig("http://test.com");
        GitMaterialInstance gitMaterialInstance = new GitMaterialInstance("http://test.com", null, null, "flyweight");
        when(materialRepository.findMaterialInstance(materialConfig)).thenReturn(gitMaterialInstance);
        when(materialRepository.getTotalModificationsFor(gitMaterialInstance)).thenReturn(1L);
        Long totalCount = materialService.getTotalModificationsFor(materialConfig);
        assertThat(totalCount, is(1L));
    }

    @Test
    public void shouldDelegateToMaterialRepository_getModificationsFor() {
        GitMaterialConfig materialConfig = new GitMaterialConfig("http://test.com");
        GitMaterialInstance gitMaterialInstance = new GitMaterialInstance("http://test.com", null, null, "flyweight");
        Pagination pagination = Pagination.pageStartingAt(0, 10, 10);
        Modifications modifications = new Modifications();
        modifications.add(new Modification("user", "comment", "email", new Date(), "revision"));
        when(materialRepository.findMaterialInstance(materialConfig)).thenReturn(gitMaterialInstance);
        when(materialRepository.getModificationsFor(gitMaterialInstance, pagination)).thenReturn(modifications);
        Modifications gotModifications = materialService.getModificationsFor(materialConfig, pagination);
        assertThat(gotModifications, is(modifications));
    }

    // Shared helper for hasModificationFor assertions (name typo kept for byte-identity).
    private void assertHasModifcation(MaterialRevisions materialRevisions, boolean b) {
        HgMaterial hgMaterial = new HgMaterial("foo.com", null);
        when(materialRepository.findLatestModification(hgMaterial)).thenReturn(materialRevisions);
        assertThat(materialService.hasModificationFor(hgMaterial), is(b));
    }

    // Pairs a stubbed material with the class the service should resolve for it;
    // consumed by the @Theory methods above.
    private static class RequestDataPoints<T extends Material> {
        final T material;
        final Class klass;

        public RequestDataPoints(T material, Class klass) {
            this.material = material;
            this.klass = klass;
        }
    }
}
/** Notice of modification as required by the LGPL
 * This file was modified by Gemstone Systems Inc. on
 * $Date$
 **/
// $Id: DEADLOCK.java,v 1.7 2005/08/08 12:45:42 belaban Exp $

package com.gemstone.org.jgroups.protocols;

import com.gemstone.org.jgroups.Address;
import com.gemstone.org.jgroups.Event;
import com.gemstone.org.jgroups.View;
import com.gemstone.org.jgroups.blocks.GroupRequest;
import com.gemstone.org.jgroups.blocks.MethodCall;
import com.gemstone.org.jgroups.stack.RpcProtocol;
import com.gemstone.org.jgroups.util.ExternalStrings;
import com.gemstone.org.jgroups.util.RspList;

import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.Vector;

/**
 * Small AWT frame used to drive the DEADLOCK protocol interactively: shows the
 * current member list and sends a test RPC to the selected member.
 * Note: {@code List} here is {@code java.awt.List} (via the {@code java.awt.*}
 * import), not {@code java.util.List}.
 */
class MyFrame extends Frame {
    private static final long serialVersionUID = -5091554906224946278L;
    final List list=new List();                       // member picker (AWT widget)
    final Label result=new Label("Result: ");         // shows last RPC result
    final Button send=new Button("Send Request");
    final Button quit=new Button("Quit");
    final Panel button_panel=new Panel();
    final Panel main_panel=new Panel();
    DEADLOCK deadlock=null;                           // protocol used to issue the RPC
    Vector members=null;                              // snapshot of the current view's members

    MyFrame(String title, DEADLOCK deadlock) {
        this.deadlock=deadlock;
        setSize(300, 200);
        setTitle(title);
        setBackground(Color.white);
        setFont(new Font("Helvetica", Font.PLAIN, 12));
        setLayout(new BorderLayout());
        main_panel.setLayout(new GridLayout(0, 2));
        main_panel.add(result);
        main_panel.add(list);
        button_panel.add(send);
        button_panel.add(quit);
        add("Center", main_panel);
        add("South", button_panel);
        addEventHandlers();
    }

    /** Wires the Quit (dispose frame) and Send (RPC to selected member) buttons. */
    void addEventHandlers() {
        quit.addActionListener(
                new ActionListener() {
                    public void actionPerformed(ActionEvent e) {dispose();}
                });

        send.addActionListener(
                new ActionListener() {
                    public void actionPerformed(ActionEvent e) {
                        Address dest;
                        int res;
                        int index=-1;

                        index=list.getSelectedIndex();
                        if(index == -1) return;           // nothing selected — ignore click
                        dest=members != null ? (Address)members.elementAt(index) : null;
                        if(dest != null) {
                            res=deadlock.sendRequest(dest);
                            setResult(res);
                        }
                    }
                });
    }

    /** Displays the result of the last RPC in the label. */
    void setResult(int res) {
        result.setText("Result: " + res);
    }

    /** Replaces the displayed member list with the given view members. */
    void setMembers(Vector members) {
        list.removeAll();
        for(int i=0; i < members.size(); i++)
            list.add(members.elementAt(i).toString());
        this.members=members;
    }
}

/**
 * Tests the deadlock detection mechanism of RequestCorrelator.
 */
public class DEADLOCK extends RpcProtocol {
    MyFrame frame=null;

    @Override // GemStoneAddition
    public String getName() {return "DEADLOCK";}

    @Override // GemStoneAddition
    public void start() throws Exception {
        super.start();
        // Enable deadlock detection on the request correlator set up by the superclass.
        if(_corr != null)
            _corr.setDeadlockDetection(true);
        else
            log.error(ExternalStrings.DEADLOCK_CANNOT_SET_DEADLOCK_DETECTION_IN_CORR_AS_IT_IS_NULL_);
        frame=new MyFrame(getName(), this);
        frame.show(); // NOTE(review): Frame.show() is deprecated in favor of setVisible(true)
    }

    @Override // GemStoneAddition
    public void stop() {
        super.stop();
        if(frame != null) {
            frame.dispose();
            frame=null;
        }
    }

    /**
     * Sends a blocking getCombinedResults() RPC to {@code dest}.
     * @return the remote result, or -1 on failure or a non-Integer reply.
     */
    public int sendRequest(Address dest) {
        Object retval;

        try {
            System.out.println("--> getCombinedResults() to " + dest);
            retval=callRemoteMethod(dest, "getCombinedResults", GroupRequest.GET_FIRST, 0);
        }
        catch(Exception e) {
            return -1;
        }
        if(retval != null && retval instanceof Integer)
            return ((Integer)retval).intValue();
        return -1;
    }

    /* ------------------------- Request handler methods ----------------------------- */

    /** Mcasts getResult() to all members (including itself). Returns the sum of all results. */
    public int getCombinedResults() {
        RspList rsp_list;
        Vector results;
        int retval=0;

        System.out.println("<-- getCombinedResults()");

        System.out.println("--> getResult() to " + members);
        MethodCall call = new MethodCall("getResult", new Object[] {}, new String[] {});
        // Calling back into the group from inside a handler is what exercises the
        // correlator's deadlock detection.
        rsp_list=callRemoteMethods(members, call, GroupRequest.GET_ALL, 0);
        results=rsp_list.getResults();
        for(int i=0; i < results.size(); i++)
            retval+=((Integer)results.elementAt(i)).intValue();
        return retval;
    }

    /** Returns a random integer value between 1 and 10 */
    public static int getResult() {
        System.out.println("<-- getResult()");
        return (int)((Math.random() * 10) % 10) + 1;
    }

    /* --------------------- End of Request handler methods -------------------------- */

    /**
     * <b>Callback</b>. Called by superclass when event may be handled.<p>
     * <b>Do not use <code>PassUp</code> in this method as the event is passed up
     * by default by the superclass after this method returns !</b>
     * @return boolean Defaults to true. If false, event will not be passed up the stack.
     */
    @Override // GemStoneAddition
    public boolean handleUpEvent(Event evt) {
        switch(evt.getType()) {
            case Event.TMP_VIEW:
            case Event.VIEW_CHANGE:
                // 'members' is inherited from the protocol superclass; refresh it
                // from the new view and mirror it into the UI.
                Vector new_members=((View)evt.getArg()).getMembers();
                synchronized(members) {
                    members.removeAllElements();
                    if(new_members != null && new_members.size() > 0)
                        for(int i=0; i < new_members.size(); i++)
                            members.addElement(new_members.elementAt(i));
                }
                frame.setMembers(members);
                break;
            case Event.SET_LOCAL_ADDRESS:
                frame.setTitle(frame.getTitle() + ": " + evt.getArg().toString());
                break;
        }
        return true;
    }

    /**
     * <b>Callback</b>. Called by superclass when event may be handled.<p>
     * <b>Do not use <code>PassDown</code> in this method as the event is passed down
     * by default by the superclass after this method returns !</b>
     * @return boolean Defaults to true. If false, event will not be passed down the stack.
     */
    @Override // GemStoneAddition
    public boolean handleDownEvent(Event evt) {
        switch(evt.getType()) {
            case Event.TMP_VIEW:
            case Event.VIEW_CHANGE:
                Vector new_members=((View)evt.getArg()).getMembers();
                synchronized(members) {
                    members.removeAllElements();
                    if(new_members != null && new_members.size() > 0)
                        for(int i=0; i < new_members.size(); i++)
                            members.addElement(new_members.elementAt(i));
                }
                System.out.println("Setting members");
                frame.setMembers(members);
                System.out.println("done");
                break;
        }
        return true;
    }
}
package com.cleeng.api;

import com.cleeng.api.domain.*;
import com.cleeng.api.domain.async.AsyncRequest;

import java.io.IOException;
import java.util.List;

/**
 * Cleeng API. Set of methods to interact with the Cleeng platform.
 * <p>
 * Synchronous methods throw {@link IOException} on transport failure. The
 * {@code *Async} variants accept a batch of {@link AsyncRequest} objects, are
 * executed concurrently, and may additionally throw {@link InterruptedException}.
 */
public interface Cleeng {

    /**
     * @return the HTTP client used to talk to the Cleeng platform
     */
    HttpClient getClient();

    /**
     * Creates a subscription offer.
     * @param offerData domain object representing offer data
     * @return response describing the created offer
     */
    OfferResponse createSubscriptionOffer(SubscriptionOfferData offerData) throws IOException;

    /**
     * Creates subscription offers (async).
     * @param requests collection of AsyncRequest objects
     */
    void createSubscriptionOfferAsync(List<AsyncRequest> requests) throws IOException, InterruptedException;

    /**
     * Updates a subscription offer.
     * @param offerData domain object representing offer data
     * @param offerId offer id
     * @return response describing the updated offer
     */
    OfferResponse updateSubscriptionOffer(SubscriptionOfferData offerData, String offerId) throws IOException;

    /**
     * Updates subscription offers (async).
     * @param requests collection of AsyncRequest objects
     */
    void updateSubscriptionOfferAsync(List<AsyncRequest> requests) throws IOException, InterruptedException;

    /**
     * Creates a single offer.
     * @param offerData domain object representing single offer data
     * @return response describing the created offer
     */
    SingleOfferResponse createSingleOffer(SingleOfferData offerData) throws IOException;

    /**
     * Creates single offers (async).
     * @param requests collection of AsyncRequest objects
     */
    void createSingleOfferAsync(List<AsyncRequest> requests) throws IOException, InterruptedException;

    /**
     * Updates a single offer.
     * @param offerId offer id
     * @param offerData domain object representing single offer data
     * @return response describing the updated offer
     */
    SingleOfferResponse updateSingleOffer(String offerId, SingleOfferData offerData) throws IOException;

    /**
     * Updates single offers (async).
     * @param requests collection of AsyncRequest objects
     */
    void updateSingleOfferAsync(List<AsyncRequest> requests) throws IOException, InterruptedException;

    /**
     * Creates an event offer.
     * @param offerData domain object representing event offer data
     * @return response describing the created offer
     */
    EventOfferResponse createEventOffer(EventOfferData offerData) throws IOException;

    /**
     * Creates event offers (async).
     * @param requests collection of AsyncRequest objects
     */
    void createEventOfferAsync(List<AsyncRequest> requests) throws IOException, InterruptedException;

    /**
     * Updates an event offer.
     * @param offerData domain object representing event offer data
     * @param offerId offer Id
     * @return response describing the updated offer
     */
    EventOfferResponse updateEventOffer(EventOfferData offerData, String offerId) throws IOException;

    /**
     * Updates event offers (async).
     * @param requests collection of AsyncRequest objects
     */
    void updateEventOfferAsync(List<AsyncRequest> requests) throws IOException, InterruptedException;

    /**
     * Creates a rental offer.
     * @param offerData domain object representing rental offer data
     * @return response describing the created offer
     */
    RentalOfferResponse createRentalOffer(RentalOfferData offerData) throws IOException;

    /**
     * Creates rental offers (async).
     * @param requests collection of AsyncRequest objects
     */
    void createRentalOfferAsync(List<AsyncRequest> requests) throws IOException, InterruptedException;

    /**
     * Updates a rental offer.
     * @param offerData domain object representing rental offer data
     * @param offerId offer Id
     * @return response describing the updated offer
     */
    RentalOfferResponse updateRentalOffer(RentalOfferData offerData, String offerId) throws IOException;

    /**
     * Updates rental offers (async).
     * @param requests collection of AsyncRequest objects
     */
    void updateRentalOfferAsync(List<AsyncRequest> requests) throws IOException, InterruptedException;

    /**
     * Creates a pass offer.
     * @param offerData domain object representing pass offer data
     * @return response describing the created offer
     */
    PassOfferResponse createPassOffer(PassOfferData offerData) throws IOException;

    /**
     * Updates a pass offer.
     * @param offerData domain object representing pass offer data
     * @param offerId offer Id
     * @return response describing the updated offer
     */
    OfferResponse updatePassOffer(PassOfferData offerData, String offerId) throws IOException;

    /**
     * Updates pass offers (async).
     * @param requests collection of AsyncRequest objects
     */
    void updatePassOfferAsync(List<AsyncRequest> requests) throws IOException, InterruptedException;

    /**
     * Creates pass offers (async).
     * @param requests collection of AsyncRequest objects
     */
    void createPassOfferAsync(List<AsyncRequest> requests) throws IOException, InterruptedException;

    /**
     * Lists subscription offers.
     * @param criteria domain object representing search criteria
     * @param offset pagination offset
     * @param limit pagination's items per page
     * @return page of subscription offers
     */
    ListSubscriptionOffersResponse listSubscriptionOffers(Criteria criteria, int offset, int limit) throws IOException;

    /**
     * Lists subscription offers (async).
     * @param requests collection of AsyncListRequest objects
     */
    void listSubscriptionOffersAsync(List<AsyncRequest> requests) throws IOException, InterruptedException;

    /**
     * Lists single offers.
     * @param criteria domain object representing search criteria
     * @param offset pagination offset
     * @param limit pagination's items per page
     * @return page of single offers
     */
    ListSingleOffersResponse listSingleOffers(Criteria criteria, int offset, int limit) throws IOException;

    /**
     * Lists single offers (async).
     * @param requests collection of AsyncListRequest objects
     */
    void listSingleOffersAsync(List<AsyncRequest> requests) throws IOException, InterruptedException;

    /**
     * Lists vod offers.
     * @param criteria domain object representing search criteria
     * @param offset pagination offset
     * @param limit pagination's items per page
     * @return page of vod offers
     */
    ListVodOffersResponse listVodOffers(Criteria criteria, int offset, int limit) throws IOException;

    /**
     * Lists vod offers (async).
     * @param requests collection of AsyncListRequest objects
     */
    void listVodOffersAsync(List<AsyncRequest> requests) throws IOException, InterruptedException;

    /**
     * Lists pass offers.
     * @param criteria domain object representing search criteria
     * @param offset pagination offset
     * @param limit pagination's limit
     * @return page of pass offers
     */
    ListPassOffersResponse listPassOffers(Criteria criteria, int offset, int limit) throws IOException;

    /**
     * Lists pass offers (async).
     * @param requests collection of AsyncListRequest objects
     */
    void listPassOffersAsync(List<AsyncRequest> requests) throws IOException, InterruptedException;

    /**
     * Prepares a remote auth.
     * @param customerData domain object representing customer's data
     * @param flowDescription domain object representing flow description
     * @return response containing the remote auth url
     */
    PrepareRemoteAuthResponse prepareRemoteAuth(CustomerData customerData, FlowDescription flowDescription) throws IOException;

    /**
     * Prepares a remote auth (async).
     * @param requests collection of AsyncPrepareRemoteAuthRequest objects
     */
    void prepareRemoteAuthAsync(List<AsyncRequest> requests) throws IOException, InterruptedException;

    /**
     * Generates a customer token.
     * @param customerEmail customer's email
     * @return response containing the token
     */
    TokenResponse generateCustomerToken(String customerEmail) throws IOException;

    /**
     * Generates customer tokens (async).
     * @param requests collection of AsyncTokenRequest objects
     */
    void generateCustomerTokenAsync(List<AsyncRequest> requests) throws IOException, InterruptedException;

    /**
     * Requests user's password reset.
     * @param customerEmail customer's email
     * @return response indicating success or failure
     */
    BooleanResponse requestPasswordReset(String customerEmail) throws IOException;

    /**
     * Requests user's password reset (async).
     * @param requests collection of AsyncRequest objects
     */
    void requestPasswordResetAsync(List<AsyncRequest> requests) throws IOException, InterruptedException;

    /**
     * Updates user's password.
     * @param customerEmail customer's email
     * @param resetPasswordToken token received on email
     * @param newPassword new password
     * @return response indicating success or failure
     */
    BooleanResponse updateCustomerPassword(String customerEmail, String resetPasswordToken, String newPassword) throws IOException;

    /**
     * Updates customer passwords (async).
     * @param requests collection of AsyncRequest objects
     */
    void updateCustomerPasswordAsync(List<AsyncRequest> requests) throws IOException, InterruptedException;

    /**
     * Updates customer's subscription.
     * @param offerId offer Id
     * @param customerEmail customer email
     * @param offerData offer data
     * @return response describing the updated subscription
     */
    UpdateCustomerSubscriptionResponse updateCustomerSubscription(String offerId, String customerEmail, UpdateCustomerSubscriptionOfferData offerData) throws IOException;

    /**
     * Updates customer's subscriptions (async).
     * @param requests collection of AsyncRequest objects
     */
    void updateCustomerSubscriptionAsync(List<AsyncRequest> requests) throws IOException, InterruptedException;

    /**
     * Updates user's email.
     * @param customerEmail customer's email
     * @param newEmail new email
     * @return response indicating success or failure
     */
    BooleanResponse updateCustomerEmail(String customerEmail, String newEmail) throws IOException;

    /**
     * Updates customer emails (async).
     * @param requests collection of AsyncRequest objects
     */
    void updateCustomerEmailAsync(List<AsyncRequest> requests) throws IOException, InterruptedException;

    /**
     * Generates a customer token from facebook.
     * @param facebookId facebook login
     * @return response containing the token
     */
    TokenResponse generateCustomerTokenFromFacebook(String facebookId) throws IOException;

    /**
     * Generates customer tokens from facebook (async).
     * @param requests collection of AsyncTokenRequest objects
     */
    void generateCustomerTokenFromFacebookAsync(List<AsyncRequest> requests) throws IOException, InterruptedException;

    /**
     * Generates a customer token from password.
     * @param password password
     * @param customerEmail customer's email
     * @return response containing the token
     */
    TokenResponse generateCustomerTokenFromPassword(String password, String customerEmail) throws IOException;

    /**
     * Generates customer tokens from passwords (async).
     * @param requests collection of AsyncTokenRequest objects
     */
    void generateCustomerTokenFromPasswordAsync(List<AsyncRequest> requests) throws IOException, InterruptedException;

    /**
     * Gets access status.
     * @param customerToken customer token
     * @param offerId offer Id
     * @param ipAddress IP address
     * @return response describing the customer's access status
     */
    GetAccessStatusResponse getAccessStatus(String customerToken, String offerId, String ipAddress) throws IOException;

    /**
     * Gets access statuses (async).
     * @param requests collection of AsyncGetAccessStatusRequest objects
     */
    void getAccessStatusAsync(List<AsyncRequest> requests) throws IOException, InterruptedException;

    /**
     * Gets accessible tags.
     * @param publisherToken publisher's token
     * @param customerToken customer's token
     * @return response containing the accessible tags
     */
    GetAccessibleTagsResponse getAccessibleTags(String publisherToken, String customerToken) throws IOException;

    /**
     * Gets accessible tags (async).
     * @param requests collection of AsyncGetAccessibleTagsRequest objects
     */
    void getAccessibleTagsAsync(List<AsyncRequest> requests) throws IOException, InterruptedException;

    /**
     * Gets a customer.
     * @param customerToken customer's token
     * @return response containing the customer's data
     */
    GetCustomerResponse getCustomer(String customerToken) throws IOException;

    /**
     * Gets customers (async).
     * @param requests collection of AsyncRequest objects
     */
    void getCustomerAsync(List<AsyncRequest> requests) throws IOException, InterruptedException;

    /**
     * Creates a Vod offer.
     * @param offerData vod offer data
     * @return response describing the created offer
     */
    VodOfferResponse createVodOffer(VodOfferData offerData) throws IOException;

    /**
     * Creates Vod offers (async).
     * @param requests collection of AsyncRequest objects
     */
    void createVodOfferAsync(List<AsyncRequest> requests) throws IOException, InterruptedException;

    /**
     * Gets a Vod offer.
     * @param offerId an id of an offer
     * @return response describing the offer
     */
    VodOfferResponse getVodOffer(String offerId) throws IOException;

    /**
     * Gets Vod offers (async).
     * @param requests collection of AsyncRequest objects
     */
    void getVodOfferAsync(List<AsyncRequest> requests) throws IOException, InterruptedException;

    /**
     * Updates a Vod offer.
     * @param offerId offer id
     * @param offerData vod offer data
     * @return response describing the updated offer
     */
    VodOfferResponse updateVodOffer(String offerId, VodOfferData offerData) throws IOException;

    /**
     * Updates Vod offer(s) (async).
     * @param requests collection of AsyncRequest objects
     */
    void updateVodOfferAsync(List<AsyncRequest> requests) throws IOException, InterruptedException;

    /**
     * Generates a checkout url.
     * @param customerEmail customer's email
     * @param flowDescription domain object representing flow description
     * @return response containing the checkout url
     */
    UrlResponse generateCheckoutUrl(String customerEmail, FlowDescription flowDescription) throws IOException;

    /**
     * Generates checkout urls (async).
     * @param requests collection of AsyncRequest objects
     */
    void generateCheckoutUrlAsync(List<AsyncRequest> requests) throws IOException, InterruptedException;

    /**
     * Registers a customer.
     * @param data customer data
     * @return response containing the new customer's token
     */
    TokenResponse registerCustomer(CustomerData data) throws IOException;

    /**
     * Registers customers (async).
     * @param requests collection of AsyncRequest objects
     */
    void registerCustomerAsync(List<AsyncRequest> requests) throws IOException, InterruptedException;

    /**
     * Generates my account url.
     * @param customerEmail customer email
     * @param modules list of modules
     * @return response containing the account url
     */
    UrlResponse generateMyAccountUrl(String customerEmail, List<String> modules) throws IOException;

    /**
     * Generates my account urls (async).
     * @param requests collection of AsyncRequest objects
     */
    void generateMyAccountUrlAsync(List<AsyncRequest> requests) throws IOException, InterruptedException;

    /**
     * Lists payment details.
     * @param userEmail user's email
     * @return response containing the payment details
     */
    PaymentDetailsResponse listPaymentDetails(String userEmail) throws IOException;

    /**
     * Lists payment details (async).
     * @param requests collection of AsyncRequest objects
     */
    void listPaymentDetailsAsync(List<AsyncRequest> requests) throws IOException, InterruptedException;

    /**
     * Deletes payment details.
     * @param paymentDetailsId Id of payment details
     * @return response indicating success or failure
     */
    BooleanResponse deletePaymentDetails(String paymentDetailsId) throws IOException;

    /**
     * Deletes payment details (async).
     * @param requests collection of AsyncRequest objects
     */
    void deletePaymentDetailsAsync(List<AsyncRequest> requests) throws IOException, InterruptedException;

    /**
     * Checks if trial is allowed.
     * @param customerEmail customer's email
     * @param offerId offer Id
     * @return response indicating whether a trial is allowed
     */
    BooleanResponse isTrialAllowed(String customerEmail, String offerId) throws IOException;

    /**
     * Checks if trial is allowed (async).
     * @param requests collection of AsyncRequest objects
     */
    void isTrialAllowedAsync(List<AsyncRequest> requests) throws IOException, InterruptedException;

    /**
     * Gets offerIds by video id.
     * @param videoId video Id
     * @return response containing the matching offer ids
     */
    ListOfferIdsByVideoIdResponse listOfferIdsByVideoId(String videoId) throws IOException;

    /**
     * Gets offerIds by video id (async).
     * @param requests collection of AsyncRequest objects
     */
    void listOfferIdsByVideoIdAsync(List<AsyncRequest> requests) throws IOException, InterruptedException;

    /**
     * Lists customer subscriptions.
     * @param customerEmail customer's email
     * @param offset pagination offset
     * @param limit pagination limit
     * @return page of the customer's subscriptions
     */
    ListCustomerSubscriptionsResponse listCustomerSubscriptions(String customerEmail, int offset, int limit) throws IOException;

    /**
     * Lists customer subscriptions (async).
     * @param requests collection of AsyncRequest objects
     */
    void listCustomerSubscriptionsAsync(List<AsyncRequest> requests) throws IOException, InterruptedException;

    /**
     * Checks whether access to an offer is granted for a device.
     * @param customerToken customer token
     * @param offerId offer id
     * @param deviceId device id
     * @param deviceType device type
     * @return response describing the device's access status
     */
    GetAccessStatusForDeviceResponse getAccessStatusForDevice(String customerToken, String offerId, String deviceId, String deviceType) throws IOException;

    /**
     * Checks whether access to an offer is granted for a device (async).
     * @param requests collection of AsyncRequest objects
     */
    void getAccessStatusForDeviceAsync(List<AsyncRequest> requests) throws IOException, InterruptedException;

    /**
     * Invokes a batch request (async).
     * @param batch object containing a collection of individual requests
     */
    void invokeBatchAsync(BatchAsyncRequest batch) throws IOException, InterruptedException;

    /**
     * Invokes a batch request.
     * @param batch object containing a collection of individual requests
     * @return combined response for the batch
     */
    BatchResponse invokeBatch(BatchRequest batch) throws IOException;

    /**
     * Updates Cleeng Capture data for given broadcaster and customer.
     * @param userId user's Id
     * @param data object containing personal data
     * @return response indicating success or failure
     */
    BooleanResponse updateBroadcasterSpecificPersonalDataWithCaptureAnswers(Integer userId, PersonalData data) throws IOException;

    /**
     * Updates Cleeng Capture data for given broadcaster and customer in an asynchronous way.
     * @param requests collection of AsyncRequest objects
     */
    void updateBroadcasterSpecificPersonalDataWithCaptureAnswersAsync(List<AsyncRequest> requests) throws IOException, InterruptedException;

    /**
     * Fetches customer data with additional broadcaster specific information.
     * @param userId user's Id
     * @return response containing the personal data
     */
    PersonalDataResponse fetchBroadcasterSpecificPersonalDataWithCaptureAnswers(Integer userId) throws IOException;

    /**
     * Fetches customer data with additional broadcaster specific information in an asynchronous way.
     * @param requests collection of AsyncRequest objects
     */
    void fetchBroadcasterSpecificPersonalDataWithCaptureAnswersAsync(List<AsyncRequest> requests) throws IOException, InterruptedException;

    /**
     * Saves capture questions.
     * @param questions a list of questions
     * @return response indicating success or failure
     */
    BooleanResponse saveCaptureQuestions(List<Question> questions) throws IOException;

    /**
     * Saves capture questions in an asynchronous way.
     * @param requests collection of AsyncRequest objects
     */
    void saveCaptureQuestionsAsync(List<AsyncRequest> requests) throws IOException, InterruptedException;

    /**
     * Fetches capture questions.
     * @return response containing the capture questions
     */
    CaptureQuestionResponse fetchCaptureQuestions() throws IOException;

    /**
     * Fetches capture questions in an asynchronous way.
     * @param requests collection of AsyncRequest objects
     */
    void fetchCaptureQuestionsAsync(List<AsyncRequest> requests) throws IOException, InterruptedException;
}
package org.spongycastle.crypto.prng.drbg;

import java.util.Hashtable;

import org.spongycastle.crypto.Digest;
import org.spongycastle.crypto.prng.EntropySource;
import org.spongycastle.util.Arrays;
import org.spongycastle.util.Integers;

/**
 * A SP800-90A Hash DRBG.
 */
public class HashSP800DRBG
    implements SP80090DRBG
{
    // 0x01 prefix used in reseed seed material; also the "+1" operand for hashgen.
    private final static byte[] ONE = { 0x01 };

    // Maximum number of generate() calls between reseeds (2^47).
    private final static long RESEED_MAX = 1L << (48 - 1);
    // Maximum number of bits a single generate() call may request (2^18).
    private final static int MAX_BITS_REQUEST = 1 << (19 - 1);

    // Digest algorithm name -> seedlen in bits (SP 800-90A Table 2).
    private final static Hashtable seedlens = new Hashtable();

    static
    {
        seedlens.put("SHA-1", Integers.valueOf(440));
        seedlens.put("SHA-224", Integers.valueOf(440));
        seedlens.put("SHA-256", Integers.valueOf(440));
        seedlens.put("SHA-512/256", Integers.valueOf(440));
        seedlens.put("SHA-512/224", Integers.valueOf(440));
        seedlens.put("SHA-384", Integers.valueOf(888));
        seedlens.put("SHA-512", Integers.valueOf(888));
    }

    private Digest        _digest;
    private byte[]        _V;                // working-state value V
    private byte[]        _C;                // working-state constant C
    private long          _reseedCounter;    // requests since last (re)seed
    private EntropySource _entropySource;
    private int           _securityStrength; // in bits
    private int           _seedLength;       // seedlen in bits

    /**
     * Construct a SP800-90A Hash DRBG.
     * <p>
     * Minimum entropy requirement is the security strength requested.
     * </p>
     * @param digest source digest to use for DRB stream.
     * @param securityStrength security strength required (in bits)
     * @param entropySource source of entropy to use for seeding/reseeding.
     * @param personalizationString personalization string to distinguish this DRBG (may be null).
     * @param nonce nonce to further distinguish this DRBG (may be null).
     * @throws IllegalArgumentException if the digest cannot support the strength, or the
     *         entropy source provides fewer bits than the strength requires.
     */
    public HashSP800DRBG(Digest digest, int securityStrength, EntropySource entropySource, byte[] personalizationString, byte[] nonce)
    {
        if (securityStrength > Utils.getMaxSecurityStrength(digest))
        {
            throw new IllegalArgumentException("Requested security strength is not supported by the derivation function");
        }

        if (entropySource.entropySize() < securityStrength)
        {
            throw new IllegalArgumentException("Not enough entropy for security strength required");
        }

        _digest = digest;
        _entropySource = entropySource;
        _securityStrength = securityStrength;
        _seedLength = ((Integer)seedlens.get(digest.getAlgorithmName())).intValue();

        // Instantiate algorithm (SP 800-90A 10.1.1.2):
        // 1. seed_material = entropy_input || nonce || personalization_string.
        // 2. seed = Hash_df (seed_material, seedlen).
        // 3. V = seed.
        // 4. C = Hash_df ((0x00 || V), seedlen). Comment: Precede V with a byte
        //    of zeros.
        // 5. reseed_counter = 1.
        // 6. Return V, C, and reseed_counter as the initial_working_state

        byte[] entropy = getEntropy();
        byte[] seedMaterial = Arrays.concatenate(entropy, nonce, personalizationString);
        byte[] seed = Utils.hash_df(_digest, seedMaterial, _seedLength);

        _V = seed;
        // subV[0] is the 0x00 prefix required by step 4 (new arrays are zero-filled).
        byte[] subV = new byte[_V.length + 1];
        System.arraycopy(_V, 0, subV, 1, _V.length);
        _C = Utils.hash_df(_digest, subV, _seedLength);

        _reseedCounter = 1;
    }

    /**
     * Return the block size (in bits) of the DRBG.
     *
     * @return the number of bits produced on each internal round of the DRBG.
     */
    public int getBlockSize()
    {
        return _digest.getDigestSize() * 8;
    }

    /**
     * Populate a passed in array with random data.
     *
     * @param output output array for generated bits.
     * @param additionalInput additional input to be added to the DRBG in this step.
     * @param predictionResistant true if a reseed should be forced, false otherwise.
     *
     * @return number of bits generated, -1 if a reseed required.
     */
    public int generate(byte[] output, byte[] additionalInput, boolean predictionResistant)
    {
        // Hash_DRBG generate process (SP 800-90A 10.1.1.4):
        // 1. If reseed_counter > reseed_interval, then return an indication that a
        //    reseed is required.
        // 2. If (additional_input != Null), then do
        //    2.1 w = Hash (0x02 || V || additional_input).
        //    2.2 V = (V + w) mod 2^seedlen.
        // 3. (returned_bits) = Hashgen (requested_number_of_bits, V).
        // 4. H = Hash (0x03 || V).
        // 5. V = (V + H + C + reseed_counter) mod 2^seedlen.
        // 6. reseed_counter = reseed_counter + 1.
        // 7. Return SUCCESS, returned_bits, and the new values of V, C, and
        //    reseed_counter for the new_working_state.
        int numberOfBits = output.length*8;

        if (numberOfBits > MAX_BITS_REQUEST)
        {
            throw new IllegalArgumentException("Number of bits per request limited to " + MAX_BITS_REQUEST);
        }

        // 1.
        if (_reseedCounter > RESEED_MAX)
        {
            return -1;
        }

        if (predictionResistant)
        {
            reseed(additionalInput);
            // additional input was consumed by the reseed, so it must not be mixed in again.
            additionalInput = null;
        }

        // 2.
        if (additionalInput != null)
        {
            byte[] newInput = new byte[1 + _V.length + additionalInput.length];
            newInput[0] = 0x02;
            System.arraycopy(_V, 0, newInput, 1, _V.length);
            // TODO: inOff / inLength
            System.arraycopy(additionalInput, 0, newInput, 1 + _V.length, additionalInput.length);
            byte[] w = hash(newInput);

            addTo(_V, w);
        }

        // 3.
        byte[] rv = hashgen(_V, numberOfBits);

        // 4.
        byte[] subH = new byte[_V.length + 1];
        System.arraycopy(_V, 0, subH, 1, _V.length);
        subH[0] = 0x03;

        byte[] H = hash(subH);

        // 5. V = (V + H + C + reseed_counter) mod 2^seedlen, one addend at a time.
        addTo(_V, H);
        addTo(_V, _C);

        // reseed_counter serialized big-endian into 4 bytes before adding.
        byte[] c = new byte[4];
        c[0] = (byte)(_reseedCounter >> 24);
        c[1] = (byte)(_reseedCounter >> 16);
        c[2] = (byte)(_reseedCounter >> 8);
        c[3] = (byte)_reseedCounter;

        addTo(_V, c);

        _reseedCounter++;

        System.arraycopy(rv, 0, output, 0, output.length);

        return numberOfBits;
    }

    // Fetch entropy from the source, enforcing that at least securityStrength bits
    // (rounded up to whole bytes) were actually delivered.
    private byte[] getEntropy()
    {
        byte[] entropy = _entropySource.getEntropy();

        if (entropy.length < (_securityStrength + 7) / 8)
        {
            throw new IllegalStateException("Insufficient entropy provided by entropy source");
        }

        return entropy;
    }

    // this will always add the shorter length byte array mathematically to the
    // longer length byte array.
    // be careful....
    //
    // Big-endian add-with-carry of 'shorter' into 'longer', modulo 2^(8*longer.length).
    // 'longer' is modified in place; indices run 1..length from the least significant byte.
    private void addTo(byte[] longer, byte[] shorter)
    {
        int carry = 0;
        // add overlapping low-order bytes
        for (int i=1;i <= shorter.length; i++) // warning: 1-based from the array tail
        {
            int res = (longer[longer.length-i] & 0xff) + (shorter[shorter.length-i] & 0xff) + carry;
            carry = (res > 0xff) ? 1 : 0;
            longer[longer.length-i] = (byte)res;
        }

        // propagate any remaining carry through the high-order bytes of 'longer'
        for (int i=shorter.length+1;i <= longer.length; i++) // warning: 1-based from the array tail
        {
            int res = (longer[longer.length-i] & 0xff) + carry;
            carry = (res > 0xff) ? 1 : 0;
            longer[longer.length-i] = (byte)res;
        }
    }

    /**
     * Reseed the DRBG.
     *
     * @param additionalInput additional input to be added to the DRBG in this step.
     */
    public void reseed(byte[] additionalInput)
    {
        // Hash_DRBG reseed process (SP 800-90A 10.1.1.3):
        // 1. seed_material = 0x01 || V || entropy_input || additional_input.
        // 2. seed = Hash_df (seed_material, seedlen).
        // 3. V = seed.
        // 4. C = Hash_df ((0x00 || V), seedlen).
        // 5. reseed_counter = 1.
        // 6. Return V, C, and reseed_counter for the new_working_state.
        //
        // Comment: Precede with a byte of all zeros.
        byte[] entropy = getEntropy();
        byte[] seedMaterial = Arrays.concatenate(ONE, _V, entropy, additionalInput);
        byte[] seed = Utils.hash_df(_digest, seedMaterial, _seedLength);

        _V = seed;
        byte[] subV = new byte[_V.length + 1];
        subV[0] = 0x00;
        System.arraycopy(_V, 0, subV, 1, _V.length);
        _C = Utils.hash_df(_digest, subV, _seedLength);

        _reseedCounter = 1;
    }

    // Single-shot hash of 'input' with the configured digest.
    private byte[] hash(byte[] input)
    {
        byte[] hash = new byte[_digest.getDigestSize()];
        doHash(input, hash);
        return hash;
    }

    // Hash 'input' into the caller-provided 'output' buffer (digest-size bytes).
    private void doHash(byte[] input, byte[] output)
    {
        _digest.update(input, 0, input.length);
        _digest.doFinal(output, 0);
    }

    // Hashgen process (SP 800-90A 10.1.1.4, step 3 helper):
    // 1. m = [requested_number_of_bits / outlen]
    // 2. data = V.
    // 3. W = the Null string.
    // 4. For i = 1 to m
    //    4.1 wi = Hash (data).
    //    4.2 W = W || wi.
    //    4.3 data = (data + 1) mod 2^seedlen.
    // 5. returned_bits = Leftmost (requested_no_of_bits) bits of W.
    private byte[] hashgen(byte[] input, int lengthInBits)
    {
        int digestSize = _digest.getDigestSize();
        int m = (lengthInBits / 8) / digestSize;

        byte[] data = new byte[input.length];
        System.arraycopy(input, 0, data, 0, input.length);

        byte[] W = new byte[lengthInBits / 8];

        byte[] dig = new byte[_digest.getDigestSize()];
        // i runs 0..m inclusive; the final partial block is truncated via bytesToCopy.
        for (int i = 0; i <= m; i++)
        {
            doHash(data, dig);

            int bytesToCopy = ((W.length - i * dig.length) > dig.length)
                    ? dig.length
                    : (W.length - i * dig.length);
            System.arraycopy(dig, 0, W, i * dig.length, bytesToCopy);

            addTo(data, ONE);
        }

        return W;
    }
}
/*
 * //******************************************************************
 * //
 * // Copyright 2016 Samsung Electronics All Rights Reserved.
 * //
 * //-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
 * //
 * // Licensed under the Apache License, Version 2.0 (the "License");
 * // you may not use this file except in compliance with the License.
 * // You may obtain a copy of the License at
 * //
 * //      http://www.apache.org/licenses/LICENSE-2.0
 * //
 * // Unless required by applicable law or agreed to in writing, software
 * // distributed under the License is distributed on an "AS IS" BASIS,
 * // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * // See the License for the specific language governing permissions and
 * // limitations under the License.
 * //
 * //-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
 */
package org.iotivity.cloud.rdserver.resources.presence;

import java.io.ByteArrayOutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;

import org.iotivity.cloud.base.device.Device;
import org.iotivity.cloud.base.exception.ServerException.InternalServerErrorException;
import org.iotivity.cloud.base.protocols.IRequest;
import org.iotivity.cloud.base.protocols.MessageBuilder;
import org.iotivity.cloud.base.protocols.enums.ContentFormat;
import org.iotivity.cloud.base.protocols.enums.ResponseStatus;
import org.iotivity.cloud.rdserver.Constants;
import org.iotivity.cloud.rdserver.db.DBManager;
import org.iotivity.cloud.util.Cbor;
import org.iotivity.cloud.util.Log;

import com.fasterxml.jackson.core.JsonEncoding;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.dataformat.cbor.CBORFactory;
import com.fasterxml.jackson.dataformat.cbor.CBORGenerator;

/**
 *
 * This class provides a set of APIs handle requests about presence
 *
 */
public class PresenceManager {
    // Eagerly-created singleton instance.
    private static PresenceManager mPresenceManager = new PresenceManager();

    private Cbor<HashMap<String, Object>> mCbor = new Cbor<>();

    // Pairs a subscribing device with the observe request it registered with,
    // so notifications can be built as responses to that original request.
    private class PresenceSubscriber {
        PresenceSubscriber(Device subscriber, IRequest request) {
            mSubscriber = subscriber;
            mRequest = request;
        }

        public Device   mSubscriber;
        public IRequest mRequest;
    }

    // Bookkeeping for one presence flavor (device presence or resource presence).
    // NOTE(review): plain HashMaps with no synchronization — presumably all access
    // is confined to a single request-handling thread; confirm before reuse.
    private class PresenceInfo {
        PresenceInfo() {
            mSubscriber = new HashMap<>();
            mSubscribedDevices = new HashMap<>();
            mSequenceNumber = new HashMap<>();
        }

        // di , token, Subscriber list
        private HashMap<String, HashMap<String, PresenceSubscriber>> mSubscriber;
        // token, di list
        private HashMap<String, List<String>>                        mSubscribedDevices;
        // token -> next CoAP observe sequence number
        private HashMap<String, Long>                                mSequenceNumber;
    }

    private PresenceInfo mDevicePresence   = null;
    private PresenceInfo mResourcePresence = null;

    private PresenceManager() {
        mDevicePresence = new PresenceInfo();
        mResourcePresence = new PresenceInfo();
    }

    /**
     * API to return PresenceManager object
     *
     * @return PresenceManager object
     */
    public static PresenceManager getInstance() {
        return mPresenceManager;
    }

    /**
     * API to add observer
     *
     * @param srcDevice
     *            channel information
     * @param request
     *            request message
     * @param deviceIdList
     *            subscribed device list
     * @param presenceType
     *            device presence or resource presence
     */
    public void subscribePresence(Device srcDevice, IRequest request,
            List<String> deviceIdList, String presenceType) {
        PresenceInfo presenceInfo = getPresenceInfo(presenceType);

        // Register this request as a subscriber of every listed device,
        // creating the per-device subscriber map on first use.
        for (String deviceId : deviceIdList) {
            HashMap<String, PresenceSubscriber> subscribers = presenceInfo.mSubscriber
                    .get(deviceId);

            if (subscribers == null) {
                subscribers = new HashMap<>();
                presenceInfo.mSubscriber.put(deviceId, subscribers);
            }

            subscribers.put(request.getRequestId(),
                    new PresenceSubscriber(srcDevice, request));
        }

        presenceInfo.mSubscribedDevices.put(request.getRequestId(),
                deviceIdList);
        // Observe sequence numbers start at 1 for each new subscription.
        presenceInfo.mSequenceNumber.put(request.getRequestId(), (long) 1);
    }

    /**
     * API to remove observer
     *
     * @param request
     *            request message
     * @param deviceIdList
     *            unsubscribed device list
     * @param presenceType
     *            device presence or resource presence
     */
    public void unsubscribePresence(IRequest request,
            List<String> deviceIdList, String presenceType) {
        PresenceInfo presenceInfo = getPresenceInfo(presenceType);

        // NOTE: mSubscribedDevices / mSequenceNumber entries are not removed here —
        // only the per-device subscriber registrations. Presumably intentional; verify.
        for (String deviceId : deviceIdList) {
            HashMap<String, PresenceSubscriber> subscribers = presenceInfo.mSubscriber
                    .get(deviceId);

            if (subscribers == null) {
                continue;
            }

            subscribers.remove(request.getRequestId());
        }
    }

    /**
     * API for notifying to observers about device presence
     *
     * @param deviceId
     *            device id
     */
    public void notifyToObservers(String deviceId) {
        HashMap<String, PresenceSubscriber> tokenNSubscribers = mDevicePresence.mSubscriber
                .get(deviceId);

        if (tokenNSubscribers != null) {
            // One payload is shared by all subscribers of this device.
            byte[] payload = makeResponsePayload(Arrays.asList(deviceId));

            for (PresenceSubscriber subscriber : tokenNSubscribers.values()) {
                subscriber.mSubscriber.sendResponse(
                        MessageBuilder.createResponse(subscriber.mRequest,
                                ResponseStatus.CONTENT,
                                ContentFormat.APPLICATION_CBOR, payload));
            }
        }
    }

    /**
     * API to make response payload about device presence
     *
     * @param deviceList
     *            device id list
     * @return payload data
     */
    public byte[] makeResponsePayload(List<String> deviceList) {
        HashMap<String, Object> getPayload = new HashMap<>();
        ArrayList<HashMap<String, Object>> prsList = new ArrayList<>();

        for (String deviceId : deviceList) {
            HashMap<String, Object> payloadSegment = new HashMap<>();
            String deviceState = getDeviceState(deviceId);

            payloadSegment.put(Constants.DEVICE_ID, deviceId);
            if (deviceState != null) {
                payloadSegment.put(Constants.PRESENCE_STATE, deviceState);
            } else {
                // No DB record for this device -> report it as off.
                payloadSegment.put(Constants.PRESENCE_STATE,
                        Constants.PRESENCE_OFF);
            }
            prsList.add(payloadSegment);
        }
        getPayload.put(Constants.PRESENCE_LIST, prsList);
        Log.i("Device presence observe response : " + getPayload.toString());

        return mCbor.encodingPayloadToCbor(getPayload);
    }

    // Look up the persisted presence state for a device; null when no record exists.
    private String getDeviceState(String deviceId) {
        HashMap<String, Object> condition = new HashMap<>();
        condition.put(Constants.DEVICE_ID, deviceId);

        String state = null;
        ArrayList<HashMap<String, Object>> readRecords = DBManager.getInstance()
                .selectRecord(Constants.PRESENCE_TABLE, condition);

        if (!readRecords.isEmpty()
                && readRecords.get(0).get(Constants.PRESENCE_STATE) != null) {
            state = readRecords.get(0).get(Constants.PRESENCE_STATE)
                    .toString();
        }

        return state;
    }

    // Map a presence-type string onto its bookkeeping object; returns null for
    // unrecognized types (callers would then NPE — assumed validated upstream).
    private PresenceInfo getPresenceInfo(String presenceType) {
        PresenceInfo presenceInfo = null;
        switch (presenceType) {
            case Constants.DEVICE_PRESENCE:
                presenceInfo = mDevicePresence;
                break;
            case Constants.RESOURCE_PRESENCE:
                presenceInfo = mResourcePresence;
                break;
            default:
        }
        return presenceInfo;
    }

    /**
     * API for notifying to observers about resource presence
     *
     * @param resourceInfo
     *            resource information
     */
    public void notifyToObservers(
            ArrayList<HashMap<String, Object>> resourceInfo) {
        if (resourceInfo.isEmpty()) {
            return;
        }

        // All entries are assumed to belong to the device named in the first record.
        Object obj = resourceInfo.get(0).get(Constants.DEVICE_ID);
        if (obj == null) {
            return;
        }
        String deviceId = obj.toString();

        HashMap<String, PresenceSubscriber> tokenNSubscribers = mResourcePresence.mSubscriber
                .get(deviceId);

        if (tokenNSubscribers != null) {
            // A separate response per subscriber per resource: each carries that
            // subscriber's own observe sequence number.
            for (PresenceSubscriber subscriber : tokenNSubscribers.values()) {
                for (HashMap<String, Object> resource : resourceInfo) {
                    subscriber.mSubscriber.sendResponse(
                            MessageBuilder.createResponse(subscriber.mRequest,
                                    ResponseStatus.CONTENT,
                                    ContentFormat.APPLICATION_CBOR,
                                    makeResponsePayload(
                                            subscriber.mRequest.getRequestId(),
                                            resource)));
                }
            }
        }
    }

    // Build a CBOR notification for one resource; bumps the subscription's
    // sequence number as a side effect.
    // NOTE(review): mSequenceNumber.get(requestId) is unboxed without a null
    // check — a missing entry would throw NPE (caught below and rethrown as
    // an encoding error). Confirm subscribers are always registered first.
    private byte[] makeResponsePayload(String requestId,
            HashMap<String, Object> resource) {

        ByteArrayOutputStream out = new ByteArrayOutputStream();
        CBORFactory f = new CBORFactory();
        try {
            JsonGenerator gen = f.createGenerator(out, JsonEncoding.UTF8);
            gen.writeStartObject();
            long sequenceId = mResourcePresence.mSequenceNumber.get(requestId);
            gen.writeNumberField(Constants.NON, sequenceId);
            mResourcePresence.mSequenceNumber.put(requestId, sequenceId + 1);
            gen.writeNumberField(Constants.RESOURCE_TTL, Long.parseLong(
                    checkPayload(resource, Constants.RESOURCE_TTL).toString()));
            gen.writeFieldName(Constants.TRIGGER);
            // 224 (0xE0) is the CBOR simple-value major-type prefix; the trigger
            // value is folded in as the raw low bits.
            ((CBORGenerator) gen).writeRaw((byte) (224
                    + (byte) (checkPayload(resource, Constants.TRIGGER))));
            gen.writeStringField(Constants.RESOURCE_TYPE,
                    checkPayload(resource, Constants.RESOURCE_TYPE).toString());
            gen.writeStringField(Constants.HREF,
                    checkPayload(resource, Constants.HREF).toString());
            gen.writeEndObject();
            gen.close();
        } catch (Exception e) {
            throw new InternalServerErrorException(
                    "notification payload cbor encoding error");
        }

        return out.toByteArray();
    }

    // Fetch a required key from the resource map, failing loudly when absent.
    private Object checkPayload(HashMap<String, Object> resource, String key) {
        Object obj = resource.get(key);
        if (obj == null) {
            throw new InternalServerErrorException(
                    "property (" + key + ") is null");
        }
        return obj;
    }

    /**
     * API to update device state
     *
     * @param payload
     *            payload included device state
     */
    public void updateDevicePresence(HashMap<String, Object> payload) {
        DBManager.getInstance().insertAndReplaceRecord(Constants.PRESENCE_TABLE,
                payload);
    }
}
/*
 * Copyright 2000-2015 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.jetbrains.python.psi;

import com.google.common.collect.Iterables;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.text.LineTokenizer;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiWhiteSpace;
import com.intellij.psi.codeStyle.CodeStyleSettings;
import com.intellij.psi.codeStyle.CodeStyleSettingsManager;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.Function;
import com.intellij.util.containers.ContainerUtil;
import com.jetbrains.python.PythonFileType;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;

import java.util.Collections;
import java.util.List;

/**
 * Contains various methods for manipulation on indentation found in arbitrary text and individual lines:
 * <ul>
 * <li>calculating actual and expected indentation</li>
 * <li>finding common indentation of several lines</li>
 * <li>replacing and removing indentation of multiple lines</li>
 * </ul>
 *
 * It indented to be used primarily when one needs to modify content of Python files on document level and preserve valid block structure.
 * Note that in most scenarios accurate indentation consistent with the code style settings is provided by automatic formatting pass
 * that is performed each time you modify PSI tree directly.
 *
 * @author Mikhail Golubev
 */
public class PyIndentUtil {
  @NonNls public static final String TWO_SPACES = "  ";
  @NonNls public static final String FOUR_SPACES = "    ";

  // Utility class: not instantiable.
  private PyIndentUtil() {
  }

  /**
   * Returns indentation size as number of characters <tt>' '</tt> and <tt>'\t'</tt> in the beginning of a line.
   * It doesn't perform any expansion of tabs.
   */
  public static int getLineIndentSize(@NotNull CharSequence line) {
    int stop;
    for (stop = 0; stop < line.length(); stop++) {
      final char c = line.charAt(stop);
      if (!(c == ' ' || c == '\t')) {
        break;
      }
    }
    return stop;
  }

  /**
   * Returns the leading whitespace (spaces and tabs) of the given line.
   */
  @NotNull
  public static String getLineIndent(@NotNull String line) {
    return line.substring(0, getLineIndentSize(line));
  }

  /**
   * Useful version of {@link #getLineIndent(String)} for custom character sequences like {@link com.jetbrains.python.toolbox.Substring}.
   */
  @NotNull
  public static CharSequence getLineIndent(@NotNull CharSequence line) {
    return line.subSequence(0, getLineIndentSize(line));
  }

  /**
   * Returns the actual indentation of the statement list containing {@code anchor}, taken from the
   * whitespace preceding it; falls back to {@link #getExpectedElementIndent(PsiElement)} when the
   * block is empty or has no usable whitespace.
   */
  @NotNull
  public static String getElementIndent(@NotNull PsiElement anchor) {
    if (anchor instanceof PsiFile) {
      return "";
    }
    final PyStatementList statementList = PsiTreeUtil.getParentOfType(anchor, PyStatementList.class, false);
    if (statementList == null) {
      return "";
    }
    final PsiElement prevSibling = statementList.getPrevSibling();
    final String whitespace = prevSibling instanceof PsiWhiteSpace ? prevSibling.getText() : "";
    final int i = whitespace.lastIndexOf("\n");
    if (i >= 0 && statementList.getStatements().length != 0) {
      return whitespace.substring(i + 1);
    }
    else {
      return getExpectedElementIndent(anchor);
    }
  }

  /**
   * Computes the indentation {@code anchor} should have according to the code style settings:
   * parent block's indent plus one indent step.
   */
  @NotNull
  public static String getExpectedElementIndent(@NotNull PsiElement anchor) {
    final String indentStep = getIndentFromSettings(anchor.getProject());
    final PyStatementList parentBlock = PsiTreeUtil.getParentOfType(anchor, PyStatementList.class, true);
    if (parentBlock != null) {
      return getElementIndent(parentBlock) + indentStep;
    }
    return anchor instanceof PyStatementList ? indentStep : "";
  }

  /**
   * Same as {@link #getExpectedElementIndent(PsiElement)} but measured in characters:
   * nesting depth times the configured indent size.
   */
  public static int getExpectedElementIndentSize(@NotNull PsiElement anchor) {
    int depth = 0;
    PyStatementList block = PsiTreeUtil.getParentOfType(anchor, PyStatementList.class, false);
    while (block != null) {
      depth += 1;
      block = PsiTreeUtil.getParentOfType(block, PyStatementList.class);
    }
    return depth * getIndentSizeFromSettings(anchor.getProject());
  }

  /**
   * Returns indentation size configured in the Python code style settings.
   *
   * @see #getIndentFromSettings(Project)
   */
  public static int getIndentSizeFromSettings(@NotNull Project project) {
    final CodeStyleSettings codeStyleSettings = CodeStyleSettingsManager.getInstance(project).getCurrentSettings();
    final CodeStyleSettings.IndentOptions indentOptions = codeStyleSettings.getIndentOptions(PythonFileType.INSTANCE);
    return indentOptions.INDENT_SIZE;
  }

  /**
   * Returns indentation configured in the Python code style settings as plain space character repeated number times specified there.
   * Note that it doesn't take into account usage of tab characters that might be configured there as well.
   *
   * @see #getIndentSizeFromSettings(Project)
   */
  @NotNull
  public static String getIndentFromSettings(@NotNull Project project) {
    return StringUtil.repeatSymbol(' ', getIndentSizeFromSettings(project));
  }

  /**
   * Strips the common indentation of the given lines (see {@link #findCommonIndent(Iterable, boolean)}).
   */
  @NotNull
  public static List<String> removeCommonIndent(@NotNull Iterable<String> lines, boolean ignoreFirstLine) {
    return changeIndent(lines, ignoreFirstLine, "");
  }

  /**
   * String-based convenience overload of {@link #removeCommonIndent(Iterable, boolean)}.
   */
  @NotNull
  public static String removeCommonIndent(@NotNull String s, boolean ignoreFirstLine) {
    final List<String> trimmed = removeCommonIndent(LineTokenizer.tokenizeIntoList(s, false), ignoreFirstLine);
    return StringUtil.join(trimmed, "\n");
  }

  /**
   * String-based convenience overload of {@link #changeIndent(Iterable, boolean, String)}.
   */
  @NotNull
  public static String changeIndent(@NotNull String s, boolean ignoreFirstLine, String newIndent) {
    final List<String> trimmed = changeIndent(LineTokenizer.tokenizeIntoList(s, false), ignoreFirstLine, newIndent);
    return StringUtil.join(trimmed, "\n");
  }

  /**
   * Note that all empty lines will be trimmed regardless of their actual indentation.
   */
  @NotNull
  public static List<String> changeIndent(@NotNull Iterable<String> lines, boolean ignoreFirstLine, final String newIndent) {
    // Common indent is computed first; for an empty iterable it is "" and we return below.
    final String oldIndent = findCommonIndent(lines, ignoreFirstLine);
    if (Iterables.isEmpty(lines)) {
      return Collections.emptyList();
    }

    final List<String> result = ContainerUtil.map(Iterables.skip(lines, ignoreFirstLine ? 1 : 0), new Function<String, String>() {
      @Override
      public String fun(String line) {
        if (StringUtil.isEmptyOrSpaces(line)) {
          return "";
        }
        else {
          // Safe: findCommonIndent() guarantees oldIndent is a prefix of every non-empty line.
          return newIndent + line.substring(oldIndent.length());
        }
      }
    });
    if (ignoreFirstLine) {
      return ContainerUtil.prepend(result, Iterables.get(lines, 0));
    }
    return result;
  }

  /**
   * Finds maximum common indentation of the given lines. Indentation of empty lines and lines containing only whitespaces is ignored unless
   * they're the only lines provided. In the latter case common indentation for such lines is returned. If mix of tabs and spaces was used
   * for indentation and any two of lines taken into account contain incompatible combination of these symbols, i.e. it's impossible to
   * decide which one can be used as prefix for another, empty string is returned.
   *
   * @param ignoreFirstLine whether the first line should be considered (useful for multiline string literals)
   */
  @NotNull
  public static String findCommonIndent(@NotNull Iterable<String> lines, boolean ignoreFirstLine) {
    String minIndent = null;
    boolean allLinesEmpty = true;
    if (Iterables.isEmpty(lines)) {
      return "";
    }
    boolean hasBadEmptyLineIndent = false;
    for (String line : Iterables.skip(lines, ignoreFirstLine ? 1 : 0)) {
      final boolean lineEmpty = StringUtil.isEmptyOrSpaces(line);
      // Once a non-empty line has been seen, empty lines no longer participate.
      if (lineEmpty && !allLinesEmpty) {
        continue;
      }
      final String indent = getLineIndent(line);
      // Adopt the candidate when it's the first one, the first non-empty one,
      // or a (shorter or equal) prefix of the current minimum.
      if (minIndent == null || (!lineEmpty && allLinesEmpty) || minIndent.startsWith(indent)) {
        minIndent = indent;
      }
      else if (!indent.startsWith(minIndent)) {
        // Incompatible tab/space mixes: fatal for real lines, remembered for empty ones.
        if (lineEmpty) {
          hasBadEmptyLineIndent = true;
        }
        else {
          return "";
        }
      }
      allLinesEmpty &= lineEmpty;
    }
    if (allLinesEmpty && hasBadEmptyLineIndent) {
      return "";
    }
    return StringUtil.notNullize(minIndent);
  }

  /**
   * Returns the leading whitespace of the given document line.
   */
  @NotNull
  public static String getLineIndent(@NotNull Document document, int lineNumber) {
    final TextRange lineRange = TextRange.create(document.getLineStartOffset(lineNumber), document.getLineEndOffset(lineNumber));
    final String line = document.getText(lineRange);
    return getLineIndent(line);
  }
}
/*
 * Copyright (c) 2008-2016 Haulmont.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package com.haulmont.cuba.web.gui.components;

import com.haulmont.cuba.core.global.MetadataTools;
import com.haulmont.cuba.gui.components.CaptionMode;
import com.haulmont.cuba.gui.components.OptionsGroup;
import com.haulmont.cuba.gui.components.data.meta.EntityValueSource;
import com.haulmont.cuba.gui.components.data.meta.OptionsBinding;
import com.haulmont.cuba.gui.components.data.Options;
import com.haulmont.cuba.gui.components.data.options.OptionsBinder;
import com.haulmont.cuba.web.widgets.CubaOptionGroup;
import com.haulmont.cuba.web.widgets.client.optiongroup.OptionGroupOrientation;
import com.vaadin.v7.data.util.IndexedContainer;
import org.springframework.context.ApplicationContext;

import javax.inject.Inject;
import java.util.*;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import static com.google.common.base.Preconditions.checkNotNull;

/**
 * Web implementation of {@link OptionsGroup} backed by a Vaadin {@link CubaOptionGroup}.
 *
 * @param <V> value type held by the field (a single item or a collection in multi-select mode)
 * @param <I> option item type
 */
public class WebOptionsGroup<V, I> extends WebAbstractField<CubaOptionGroup, V>
        implements OptionsGroup<V, I> {

    protected MetadataTools metadataTools;
    protected ApplicationContext applicationContext;

    // Active binding between the options source and this component; null when unbound.
    protected OptionsBinding<I> optionsBinding;

    // Optional user-supplied caption function; falls back to metadata formatting when null.
    protected Function<? super I, String> optionCaptionProvider;

    @SuppressWarnings("unchecked")
    public WebOptionsGroup() {
        component = createComponent();
        component.setContainerDataSource(new IndexedContainer());
        component.setItemCaptionGenerator(o -> generateItemCaption((I) o));
        attachListener(component);
    }

    /**
     * Factory method for the underlying Vaadin widget; override to customize.
     */
    protected CubaOptionGroup createComponent() {
        return new CubaOptionGroup();
    }

    /**
     * Formats an item using entity metadata when the value source is entity-based,
     * otherwise via the generic metadata formatter.
     */
    protected String generateDefaultItemCaption(I item) {
        if (valueBinding != null && valueBinding.getSource() instanceof EntityValueSource) {
            EntityValueSource entityValueSource = (EntityValueSource) valueBinding.getSource();
            return metadataTools.format(item, entityValueSource.getMetaPropertyPath().getMetaProperty());
        }
        return metadataTools.format(item);
    }

    /**
     * Caption shown for an option item: the custom provider when set, the metadata
     * default otherwise; null items produce a null caption.
     */
    protected String generateItemCaption(I item) {
        if (item == null) {
            return null;
        }

        if (optionCaptionProvider != null) {
            return optionCaptionProvider.apply(item);
        }

        return generateDefaultItemCaption(item);
    }

    @Inject
    public void setMetadataTools(MetadataTools metadataTools) {
        this.metadataTools = metadataTools;
    }

    @Inject
    public void setApplicationContext(ApplicationContext applicationContext) {
        this.applicationContext = applicationContext;
    }

    @Override
    public boolean isMultiSelect() {
        return component.isMultiSelect();
    }

    @Override
    public void setMultiSelect(boolean multiselect) {
        component.setMultiSelect(multiselect);
    }

    @Override
    public Orientation getOrientation() {
        switch (component.getOrientation()) {
            case HORIZONTAL:
                return Orientation.HORIZONTAL;
            case VERTICAL:
                return Orientation.VERTICAL;
            default:
                // Defensive: the widget enum only has the two values above.
                throw new RuntimeException("Unsupported orientation of OptionGroup");
        }
    }

    @Override
    public void setOrientation(Orientation orientation) {
        checkNotNull(orientation, "Orientation must not be null");

        if (orientation == Orientation.HORIZONTAL) {
            component.setOrientation(OptionGroupOrientation.HORIZONTAL);
        } else {
            component.setOrientation(OptionGroupOrientation.VERTICAL);
        }
    }

    /**
     * In multi-select mode converts the widget's raw {@code Set} selection to the model
     * collection type expected by the value source (List or Set, preserving option order);
     * single-select values are delegated to the superclass.
     */
    @SuppressWarnings("unchecked")
    @Override
    protected V convertToModel(Object componentRawValue) {
        if (isMultiSelect()) {
            Set collectionValue = (Set) componentRawValue;

            List<I> itemIds = getCurrentItems();

            // Iterate options (not the raw Set) so selection order follows option order.
            Stream<I> selectedItemsStream = itemIds.stream()
                    .filter(collectionValue::contains);

            if (valueBinding != null) {
                Class<V> targetType = valueBinding.getSource().getType();

                if (List.class.isAssignableFrom(targetType)) {
                    return (V) selectedItemsStream.collect(Collectors.toList());
                }

                if (Set.class.isAssignableFrom(targetType)) {
                    return (V) selectedItemsStream.collect(Collectors.toCollection(LinkedHashSet::new));
                }
            }

            return (V) selectedItemsStream.collect(Collectors.toCollection(LinkedHashSet::new));
        }

        return super.convertToModel(componentRawValue);
    }

    /**
     * Current option items as held by the widget's container, in display order.
     */
    @SuppressWarnings("unchecked")
    protected List<I> getCurrentItems() {
        IndexedContainer container = (IndexedContainer) component.getContainerDataSource();
        return (List<I>) container.getItemIds();
    }

    /**
     * In multi-select mode a List model value is presented as an ordered Set (the widget
     * works with Sets); other values are delegated to the superclass.
     */
    @SuppressWarnings("unchecked")
    @Override
    protected Object convertToPresentation(V modelValue) {
        if (isMultiSelect()) {
            if (modelValue instanceof List) {
                return new LinkedHashSet<I>((Collection<? extends I>) modelValue);
            }
        }

        return super.convertToPresentation(modelValue);
    }

    @Override
    public void setLookupSelectHandler(Consumer selectHandler) {
        // do nothing
    }

    @Override
    public Collection getLookupSelectedItems() {
        Object value = getValue();
        return (value instanceof Collection)
                ? (Collection) value
                : Collections.singleton(value);
    }

    @Override
    public Options<I> getOptions() {
        return optionsBinding != null ? optionsBinding.getSource() : null;
    }

    @Override
    public void setOptions(Options<I> options) {
        // Tear down any previous binding before installing the new one.
        if (this.optionsBinding != null) {
            this.optionsBinding.unbind();
            this.optionsBinding = null;
        }

        if (options != null) {
            OptionsBinder optionsBinder = applicationContext.getBean(OptionsBinder.NAME, OptionsBinder.class);
            this.optionsBinding = optionsBinder.bind(options, this, this::setItemsToPresentation);
            this.optionsBinding.activate();
        }
    }

    @Override
    protected void setValueToPresentation(Object value) {
        component.setValueIgnoreReadOnly(value);
    }

    // Replaces the widget container with the freshly streamed option items.
    protected void setItemsToPresentation(Stream<I> options) {
        List<I> itemIds = options.collect(Collectors.toList());
        component.setContainerDataSource(new IndexedContainer(itemIds));
    }

    @Override
    public void setOptionCaptionProvider(Function<? super I, String> optionCaptionProvider) {
        this.optionCaptionProvider = optionCaptionProvider;
    }

    @Override
    public Function<? super I, String> getOptionCaptionProvider() {
        return optionCaptionProvider;
    }

    @Override
    public CaptionMode getCaptionMode() {
        // vaadin8
        return CaptionMode.ITEM;
    }

    @Override
    public void setCaptionMode(CaptionMode captionMode) {
        // vaadin8
    }

    @Override
    public String getCaptionProperty() {
        // vaadin8
        return null;
    }

    @Override
    public void setCaptionProperty(String captionProperty) {
        // vaadin8
    }

    @Override
    public void focus() {
        component.focus();
    }

    @Override
    public int getTabIndex() {
        return component.getTabIndex();
    }

    @Override
    public void setTabIndex(int tabIndex) {
        component.setTabIndex(tabIndex);
    }
}
/* * Copyright 2015 Torridity. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package de.tor.tribes.util.xml; import java.io.*; import java.util.*; import javax.xml.parsers.*; import org.apache.log4j.Logger; import org.jdom.*; import org.jdom.input.*; import org.jaxen.*; import org.jaxen.dom.*; import org.jaxen.jdom.*; import org.w3c.dom.DOMException; import org.w3c.dom.Node; import org.w3c.dom.NodeList; public class JaxenUtils { private static Logger logger = Logger.getLogger("XMLUtils"); /** Creates a new instance of JaxenUtil. * Never created externally! */ private JaxenUtils() { } /** Convert a DOM document into a JDOM document. * @param pDOM an org.w3c.dom.Document * @return an org.jdom.Document */ public static org.jdom.Document getDocument(org.w3c.dom.Document pDOM) { return new DOMBuilder().build(pDOM); } /** Get a JDOM document from an InputStream. * @param pInputStream an InputStream * @return an org.jdom.Document */ public static org.jdom.Document getDocument(InputStream pInputStream) throws Exception { return new SAXBuilder().build(pInputStream); } /** Get a JDOM document from a String representation. 
* @param pDocument a String containing an XML Document * @return an org.jdom.Document */ public static org.jdom.Document getDocument(String pDocument) throws Exception { return new SAXBuilder().build(new StringReader(pDocument)); } public static org.jdom.Document getDocument(File xmlFile) throws Exception { return new SAXBuilder().build(xmlFile); } public static org.w3c.dom.Document getW3CDocument(InputStream pInputStream) throws Exception { DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); DocumentBuilder builder = factory.newDocumentBuilder(); return builder.parse(pInputStream); } public static org.w3c.dom.Document getW3CDocument(File xmlFile) throws Exception { DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); DocumentBuilder builder = factory.newDocumentBuilder(); return builder.parse(xmlFile); } public static List getNodes(org.jdom.Document document, String xPath, Namespace[] context) { return getNodes(document.getRootElement(), xPath, context); } public static List getNodes(org.jdom.Document document, String xPath) { return getNodes(document.getRootElement(), xPath, null); } public static List getNodes(org.jdom.Element element, String xPath, Namespace[] context) { return getList(element, xPath, context); } public static List getNodes(org.jdom.Element element, String xPath) { return getList(element, xPath, null); } public static List getAttributes(org.jdom.Document document, String xPath, Namespace[] context) { return getAttributes(document.getRootElement(), xPath, context); } public static List getAttributes(org.jdom.Document document, String xPath) { return getAttributes(document.getRootElement(), xPath, null); } public static List getAttributes(org.jdom.Element element, String xPath, Namespace[] context) { return getList(element, xPath, context); } public static List getAttributes(org.jdom.Element element, String xPath) { return getList(element, xPath, null); } /** Get the value of the available first node. 
* @return the value of the first available node */ public static String getNodeValue(org.jdom.Document document, String xPath) { return getNodeValue(document, xPath, null); } /** Get the value of the first available node. * @return the value of the first available node */ public static String getNodeValue(org.jdom.Element element, String xPath) { return getNodeValue(element, xPath, null); } /** Get the value of the available first node. * @return the value of the first available node */ public static String getNodeValue(org.jdom.Document document, String xPath, Namespace[] context) { return getNodeValue(document.getRootElement(), xPath, context); } /** Get the value of the first available node. * @return the value of the first available node */ public static String getNodeValue(org.jdom.Element element, String xPath, Namespace[] context) { List list; String getNodeValue; // list = getList(element, xPath, context); if (list.size() == 0) { getNodeValue = null; } else { getNodeValue = ((Element) list.get(0)).getTextTrim(); } return getNodeValue; } public static String[] getNodesValues(org.jdom.Document document, String xPath, Namespace[] context) { return getNodesValues(document.getRootElement(), xPath, context); } public static String[] getNodesValues(org.jdom.Element element, String xPath, Namespace[] context) { int index; String[] values; List result = getList(element, xPath, context); values = new String[result.size()]; for (index = 0; index < result.size(); index++) { values[index] = ((Element) result.get(index)).getTextTrim(); } return values; } public static String[] getAttributesValues(org.jdom.Document document, String xPath, Namespace[] context) { return getAttributesValues(document.getRootElement(), xPath, context); } public static String[] getAttributesValues(org.jdom.Element element, String xPath, Namespace[] context) { int index; String[] values; List result = getList(element, xPath, context); values = new String[result.size()]; for (index = 0; index < 
result.size(); index++) { values[index] = ((Attribute) result.get(index)).getValue(); } return values; } public static String getAttributeValue(org.jdom.Element element, String xPath, Namespace[] context) { return getAttributesValues(element, xPath, context)[0]; } public static String getAttributeValue(org.jdom.Document document, String xPath, Namespace[] context) { return getAttributesValues(document, xPath, context)[0]; } public static String getAttributeValue(org.jdom.Document document, String xPath) { return getAttributesValues(document, xPath, null)[0]; } public static String getAttributeValue(org.jdom.Element element, String xPath) { String[] list; String getAttributeValue; // list = getAttributesValues(element, xPath, null); if (list.length == 0) { getAttributeValue = null; } else { getAttributeValue = list[0].trim(); } return getAttributeValue; } public static String[] getValues(org.jdom.Document document, String xPath, Namespace[] context) { return getValues(document.getRootElement(), xPath, context); } public static String[] getValues(org.jdom.Element element, String xPath, Namespace[] context) { int index; String[] values; List result = getList(element, xPath, context); values = new String[result.size()]; Object item; for (index = 0; index < result.size(); index++) { item = result.get(index); if (item instanceof Attribute) { values[index] = ((Attribute) item).getValue(); } else if (item instanceof Element) { values[index] = ((Element) item).getTextNormalize(); } else { values[index] = "unknown type!?: " + item.getClass().toString(); } } return values; } protected static List getList(org.jdom.Element element, String xPath, Namespace[] context) { List result = null; int index; String[] values; try { XPath filter = createFilter(xPath, context); result = filter.selectNodes(element); } catch (JaxenException je) { logger.error("Jaxen Exception", je); } return result; } protected static XPath createFilter(String xPath, Namespace[] context) throws JaxenException 
{ XPath filter = new JDOMXPath(xPath); if (context != null) { for (Namespace aContext : context) { filter.addNamespace(aContext.getPrefix(), aContext.getURI()); } } return filter; } public static List getNodes(org.w3c.dom.Document document, String xPath, Namespace[] context) { return getNodes(document.getDocumentElement(), xPath, context); } public static List getNodes(org.w3c.dom.Document document, String xPath) { return getNodes(document.getDocumentElement(), xPath, null); } public static List getNodes(org.w3c.dom.Element element, String xPath, Namespace[] context) { return getList(element, xPath, context); } public static List getNodes(org.w3c.dom.Element element, String xPath) { return getList(element, xPath, null); } public static List getAttributes(org.w3c.dom.Document document, String xPath, Namespace[] context) { return getAttributes(document.getDocumentElement(), xPath, context); } public static List getAttributes(org.w3c.dom.Document document, String xPath) { return getAttributes(document.getDocumentElement(), xPath, null); } public static List getAttributes(org.w3c.dom.Element element, String xPath, Namespace[] context) { return getList(element, xPath, context); } public static List getAttributes(org.w3c.dom.Element element, String xPath) { return getList(element, xPath, null); } public static String[] getNodesValues(org.w3c.dom.Document document, String xPath, Namespace[] context) { return getNodesValues(document.getDocumentElement(), xPath, context); } public static String[] getNodesValues(org.w3c.dom.Element element, String xPath, Namespace[] context) { int index; int node; StringBuffer buffer; String[] values; NodeList nodeList; List result = getList(element, xPath, context); values = new String[result.size()]; for (index = 0; index < result.size(); index++) { try { buffer = new StringBuffer(); nodeList = ((org.w3c.dom.Element) result.get(index)).getChildNodes(); for (node = 0; node < nodeList.getLength(); node++) { if 
(nodeList.item(node).getNodeType() == Node.TEXT_NODE) { buffer.append(nodeList.item(node).getNodeValue()); } } values[index] = buffer.toString().trim(); } catch (DOMException de) { values[index] = "An Exception occured!"; } } return values; } public static String[] getAttributesValues(org.w3c.dom.Document document, String xPath, Namespace[] context) { return getAttributesValues(document.getDocumentElement(), xPath, context); } public static String[] getAttributesValues(org.w3c.dom.Element element, String xPath, Namespace[] context) { int index; String[] values; List result = getList(element, xPath, context); values = new String[result.size()]; for (index = 0; index < result.size(); index++) { values[index] = ((org.w3c.dom.Attr) result.get(index)).getValue(); } return values; } public static String getAttributeValue(org.w3c.dom.Element element, String xPath, Namespace[] context) { return getAttributesValues(element, xPath, context)[0]; } public static String getAttributeValue(org.w3c.dom.Document document, String xPath, Namespace[] context) { return getAttributesValues(document, xPath, context)[0]; } public static String getAttributeValue(org.w3c.dom.Document document, String xPath) { return getAttributesValues(document, xPath, null)[0]; } public static String getAttributeValue(org.w3c.dom.Element element, String xPath) { return getAttributesValues(element, xPath, null)[0]; } public static String[] getValues(org.w3c.dom.Document document, String xPath, Namespace[] context) { return getValues(document.getDocumentElement(), xPath, context); } public static String[] getValues(org.w3c.dom.Element element, String xPath, Namespace[] context) { int index; int node; StringBuffer buffer; String[] values; NodeList nodeList; List result = getList(element, xPath, context); values = new String[result.size()]; Object item; for (index = 0; index < result.size(); index++) { item = result.get(index); if (item instanceof org.w3c.dom.Attr) { values[index] = ((org.w3c.dom.Attr) 
item).getValue(); } else if (item instanceof org.w3c.dom.Element) { try { buffer = new StringBuffer(); nodeList = ((org.w3c.dom.Element) item).getChildNodes(); for (node = 0; node < nodeList.getLength(); node++) { if (nodeList.item(node).getNodeType() == Node.TEXT_NODE) { buffer.append(nodeList.item(node).getNodeValue()); } } values[index] = buffer.toString().trim(); } catch (DOMException de) { values[index] = "An Exception occured!"; } } else { values[index] = "unknown type!?: " + item.getClass().toString(); } } return values; } protected static List getList(org.w3c.dom.Element element, String xPath, Namespace[] context) { List result = null; int index; String[] values; try { XPath filter = createW3CFilter(xPath, context); result = filter.selectNodes(element); } catch (JaxenException je) { je.printStackTrace(); } return result; } protected static XPath createW3CFilter(String xPath, Namespace[] context) throws JaxenException { XPath filter = new DOMXPath(xPath); if (context != null) { for (Namespace aContext : context) { filter.addNamespace(aContext.getPrefix(), aContext.getURI()); } } return filter; } }
package com.crowdin.cli.client; import com.crowdin.cli.client.models.HttpExceptionBuilder; import com.crowdin.cli.utils.LanguageBuilder; import com.crowdin.client.core.http.HttpClient; import com.crowdin.client.core.http.impl.json.JacksonJsonTransformer; import com.crowdin.client.core.model.ClientConfig; import com.crowdin.client.core.model.Credentials; import com.crowdin.client.core.model.DownloadLink; import com.crowdin.client.core.model.DownloadLinkResponseObject; import com.crowdin.client.core.model.PatchRequest; import com.crowdin.client.languages.model.LanguageResponseList; import com.crowdin.client.languages.model.LanguageResponseObject; import com.crowdin.client.projectsgroups.model.Project; import com.crowdin.client.projectsgroups.model.ProjectResponseObject; import com.crowdin.client.projectsgroups.model.ProjectSettings; import com.crowdin.client.sourcefiles.model.AddBranchRequest; import com.crowdin.client.sourcefiles.model.AddDirectoryRequest; import com.crowdin.client.sourcefiles.model.AddFileRequest; import com.crowdin.client.sourcefiles.model.Branch; import com.crowdin.client.sourcefiles.model.BranchResponseList; import com.crowdin.client.sourcefiles.model.BranchResponseObject; import com.crowdin.client.sourcefiles.model.Directory; import com.crowdin.client.sourcefiles.model.DirectoryResponseList; import com.crowdin.client.sourcefiles.model.DirectoryResponseObject; import com.crowdin.client.sourcefiles.model.File; import com.crowdin.client.sourcefiles.model.FileInfoResponseList; import com.crowdin.client.sourcefiles.model.FileResponseObject; import com.crowdin.client.sourcefiles.model.UpdateFileRequest; import com.crowdin.client.sourcestrings.model.AddSourceStringRequest; import com.crowdin.client.sourcestrings.model.SourceString; import com.crowdin.client.sourcestrings.model.SourceStringResponseList; import com.crowdin.client.sourcestrings.model.SourceStringResponseObject; import com.crowdin.client.storage.model.Storage; import 
com.crowdin.client.storage.model.StorageResponseObject; import com.crowdin.client.translations.model.BuildProjectTranslationRequest; import com.crowdin.client.translations.model.ProjectBuild; import com.crowdin.client.translations.model.ProjectBuildResponseObject; import com.crowdin.client.translations.model.UploadTranslationsRequest; import com.crowdin.client.translations.model.UploadTranslationsResponse; import com.crowdin.client.translations.model.UploadTranslationsResponseObject; import com.crowdin.client.translationstatus.model.LanguageProgressResponseList; import org.apache.commons.io.IOUtils; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.mockito.Mockito; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; public class CrowdinProjectClientTest { private HttpClient httpClientMock; private ProjectClient client; private static final String preUrl = "https://testme.crowdin.com"; private static final String url = "https://testme.crowdin.com/api/v2"; private static final long projectId = 42; private static final long fileId = 82; private static final String languageId = "uk"; private static final long buildId = 62; private static final long stringId = 52; private static final String downloadUrl = "https://downloadme.crowdin.com"; private static final String 
downloadUrlMalformed = "https"; private static final String listFilesUrl = String.format("%s/projects/%d/files", url, projectId); private static final String listDirectoriesUrl = String.format("%s/projects/%d/directories", url, projectId); private static final String listBranchesUrl = String.format("%s/projects/%d/branches", url, projectId); private static final String listSupportedLanguagesUrl = String.format("%s/languages", url); private static final String getProjectUrl = String.format("%s/projects/%d", url, projectId); private static final String getProjectProgressUrl = String.format("%s/projects/%d/languages/progress", url, projectId); private static final String addBranchUrl = String.format("%s/projects/%d/branches", url, projectId); private static final String uploadStorageUrl = String.format("%s/storages", url); private static final String addDirectoryUrl = String.format("%s/projects/%d/directories", url, projectId); private static final String updateSourceUrl = String.format("%s/projects/%d/files/%d", url, projectId, fileId); private static final String addSourceUrl = String.format("%s/projects/%d/files", url, projectId); private static final String uploadTranslationsUrl = String.format("%s/projects/%d/translations/%s", url, projectId, languageId); private static final String startBuildingTranslationsUrl = String.format("%s/projects/%d/translations/builds", url, projectId); private static final String checkBuildingTranslationUrl = String.format("%s/projects/%d/translations/builds/%d", url, projectId, buildId); private static final String downloadBuildUrl = String.format("%s/projects/%d/translations/builds/%d/download", url, projectId, buildId); private static final String addSourceStringUrl = String.format("%s/projects/%d/strings", url, projectId); private static final String listSourceStringUrl = String.format("%s/projects/%d/strings", url, projectId); private static final String deleteSourceStringUrl = String.format("%s/projects/%d/strings/%d", url, 
projectId, stringId); private static final String editSourceStringUrl = String.format("%s/projects/%d/strings/%d", url, projectId, stringId); @BeforeEach public void init() { Credentials creds = new Credentials("VeryBigToken", "TestingCompany", preUrl); httpClientMock = mock(HttpClient.class); ClientConfig clientConfig = ClientConfig.builder() .jsonTransformer(new JacksonJsonTransformer()) .httpClient(httpClientMock) .build(); com.crowdin.client.Client internalClient = new com.crowdin.client.Client(creds, clientConfig); client = new CrowdinProjectClient(internalClient, 42); } @Test public void testDownloadProjectFull() { Project project = new Project() {{ setId(projectId); setTargetLanguages(Arrays.asList(LanguageBuilder.ENG.build())); }}; List<LanguageResponseObject> supportedLangs = Arrays.asList( new LanguageResponseObject() {{ setData(LanguageBuilder.ENG.build()); }}, new LanguageResponseObject() {{ setData(LanguageBuilder.UKR.build()); }} ); List<FileResponseObject> files = Arrays.asList( new FileResponseObject() {{ setData(new File()); }} ); List<DirectoryResponseObject> directories = Arrays.asList( new DirectoryResponseObject() {{ setData(new Directory()); }} ); List<BranchResponseObject> branches = Arrays.asList( new BranchResponseObject() {{ setData(new Branch()); }} ); ProjectResponseObject projectResponse = new ProjectResponseObject() {{ setData(project); }}; LanguageResponseList langsResponse = new LanguageResponseList() {{ setData(supportedLangs); }}; FileInfoResponseList filesResponse = new FileInfoResponseList() {{ setData(files); }}; DirectoryResponseList directoriesResponse = new DirectoryResponseList() {{ setData(directories); }}; BranchResponseList branchesResponse = new BranchResponseList() {{ setData(branches); }}; when(httpClientMock.get(eq(getProjectUrl), any(), eq(ProjectResponseObject.class))) .thenReturn(projectResponse); when(httpClientMock.get(eq(listSupportedLanguagesUrl), any(), eq(LanguageResponseList.class))) 
.thenReturn(langsResponse); when(httpClientMock.get(eq(listFilesUrl), any(), eq(FileInfoResponseList.class))) .thenReturn(filesResponse); when(httpClientMock.get(eq(listDirectoriesUrl), any(), eq(DirectoryResponseList.class))) .thenReturn(directoriesResponse); when(httpClientMock.get(eq(listBranchesUrl), any(), eq(BranchResponseList.class))) .thenReturn(branchesResponse); CrowdinProject crowdinProject = client.downloadFullProject(); assertEquals(1, crowdinProject.getProjectLanguages(false).size()); assertEquals(2, crowdinProject.getSupportedLanguages().size()); assertTrue(crowdinProject.findLanguageById("ua", false).isPresent()); assertFalse(crowdinProject.findLanguageById("ua", true).isPresent()); verify(httpClientMock).get(eq(getProjectUrl), any(), eq(ProjectResponseObject.class)); verify(httpClientMock).get(eq(listSupportedLanguagesUrl), any(), eq(LanguageResponseList.class)); verify(httpClientMock).get(eq(listFilesUrl), any(), eq(FileInfoResponseList.class)); verify(httpClientMock).get(eq(listDirectoriesUrl), any(), eq(DirectoryResponseList.class)); verify(httpClientMock).get(eq(listBranchesUrl), any(), eq(BranchResponseList.class)); verifyNoMoreInteractions(httpClientMock); } @Test public void testDownloadProjectWithLangs() { Project project = new Project() {{ setId(projectId); setTargetLanguages(Arrays.asList(LanguageBuilder.ENG.build())); }}; List<LanguageResponseObject> supportedLangs = Arrays.asList( new LanguageResponseObject() {{ setData(LanguageBuilder.ENG.build()); }}, new LanguageResponseObject() {{ setData(LanguageBuilder.UKR.build()); }} ); ProjectResponseObject projectResponse = new ProjectResponseObject() {{ setData(project); }}; LanguageResponseList langsResponse = new LanguageResponseList() {{ setData(supportedLangs); }}; when(httpClientMock.get(eq(getProjectUrl), any(), eq(ProjectResponseObject.class))) .thenReturn(projectResponse); when(httpClientMock.get(eq(listSupportedLanguagesUrl), any(), eq(LanguageResponseList.class))) 
.thenReturn(langsResponse); CrowdinProject crowdinProject = client.downloadProjectWithLanguages(); assertEquals(1, crowdinProject.getProjectLanguages(false).size()); verify(httpClientMock).get(eq(getProjectUrl), any(), eq(ProjectResponseObject.class)); verify(httpClientMock).get(eq(listSupportedLanguagesUrl), any(), eq(LanguageResponseList.class)); verifyNoMoreInteractions(httpClientMock); } @Test public void testDownloadProjectInfoManagerAccess() { Project project = new ProjectSettings() {{ setId(projectId); setTargetLanguageIds(Arrays.asList("en", "ua")); setLanguageMapping(new HashMap<>()); }}; ProjectResponseObject response = new ProjectResponseObject() {{ setData(project); }}; when(httpClientMock.get(eq(getProjectUrl), any(), eq(ProjectResponseObject.class))) .thenReturn(response); CrowdinProjectInfo projectInfo = client.downloadProjectInfo(); assertTrue(projectInfo.isManagerAccess()); assertFalse(projectInfo.getInContextLanguage().isPresent()); verify(httpClientMock).get(eq(getProjectUrl), any(), eq(ProjectResponseObject.class)); verifyNoMoreInteractions(httpClientMock); } @Test public void testDownloadProjectInfoManagerAccessWithInContext() { Project project = new ProjectSettings() {{ setId(projectId); setTargetLanguageIds(Arrays.asList("uk", "ua")); setLanguageMapping(new HashMap<>()); setInContext(true); setInContextPseudoLanguage(LanguageBuilder.ENG.build()); }}; ProjectResponseObject response = new ProjectResponseObject() {{ setData(project); }}; when(httpClientMock.get(eq(getProjectUrl), any(), eq(ProjectResponseObject.class))) .thenReturn(response); CrowdinProjectInfo projectInfo = client.downloadProjectInfo(); assertTrue(projectInfo.getInContextLanguage().isPresent()); verify(httpClientMock).get(eq(getProjectUrl), any(), eq(ProjectResponseObject.class)); verifyNoMoreInteractions(httpClientMock); } @Test public void testDownloadProjectInfoTranslatorAccess() { Project project = new Project() {{ setId(projectId); setTargetLanguageIds(Arrays.asList("uk", 
"ua")); }}; ProjectResponseObject response = new ProjectResponseObject() {{ setData(project); }}; when(httpClientMock.get(eq(getProjectUrl), any(), eq(ProjectResponseObject.class))) .thenReturn(response); CrowdinProjectInfo projectInfo = client.downloadProjectInfo(); assertFalse(projectInfo.isManagerAccess()); verify(httpClientMock).get(eq(getProjectUrl), any(), eq(ProjectResponseObject.class)); verifyNoMoreInteractions(httpClientMock); } @Test public void testGetProjectProgress() { LanguageProgressResponseList response = new LanguageProgressResponseList() {{ setData(new ArrayList<>()); }}; when(httpClientMock.get(eq(getProjectProgressUrl), any(), eq(LanguageProgressResponseList.class))) .thenReturn(response); client.getProjectProgress(languageId); verify(httpClientMock).get(eq(getProjectProgressUrl), any(), eq(LanguageProgressResponseList.class)); verifyNoMoreInteractions(httpClientMock); } @Test public void testAddBranch() { AddBranchRequest request = new AddBranchRequest(); BranchResponseObject response = new BranchResponseObject() {{ setData(new Branch()); }}; when(httpClientMock.post(eq(addBranchUrl), any(), any(), eq(BranchResponseObject.class))) .thenReturn(response); client.addBranch(request); verify(httpClientMock).post(eq(addBranchUrl), any(), any(), eq(BranchResponseObject.class)); verifyNoMoreInteractions(httpClientMock); } @Test public void testUploadStorage() throws IOException { InputStream requestData = IOUtils.toInputStream("Something to send", "UTF-8"); StorageResponseObject response = new StorageResponseObject() {{ setData(new Storage()); }}; when(httpClientMock.post(eq(uploadStorageUrl), any(), any(), eq(StorageResponseObject.class))) .thenReturn(response); client.uploadStorage("filename", requestData); verify(httpClientMock).post(eq(uploadStorageUrl), any(), any(), eq(StorageResponseObject.class)); verifyNoMoreInteractions(httpClientMock); } @Test public void testAddDirectory() throws ResponseException { AddDirectoryRequest request = new 
AddDirectoryRequest(); DirectoryResponseObject response = new DirectoryResponseObject() {{ setData(new Directory()); }}; when(httpClientMock.post(eq(addDirectoryUrl), any(), any(), eq(DirectoryResponseObject.class))) .thenReturn(response); client.addDirectory(request); verify(httpClientMock).post(eq(addDirectoryUrl), any(), any(), eq(DirectoryResponseObject.class)); verifyNoMoreInteractions(httpClientMock); } @Test public void testAddDirectoryThrows() throws ResponseException { AddDirectoryRequest request = new AddDirectoryRequest(); when(httpClientMock.post(eq(addDirectoryUrl), any(), any(), eq(DirectoryResponseObject.class))) .thenThrow(HttpExceptionBuilder.build("unknown", "problem")); assertThrows(RuntimeException.class, () -> client.addDirectory(request)); verify(httpClientMock).post(eq(addDirectoryUrl), any(), any(), eq(DirectoryResponseObject.class)); verifyNoMoreInteractions(httpClientMock); } @Test public void testUpdateSource() throws ResponseException { FileResponseObject response = new FileResponseObject() {{ setData(new File()); }}; UpdateFileRequest request = new UpdateFileRequest(); request.setStorageId(100L); when(httpClientMock.put(eq(updateSourceUrl), any(), any(), eq(FileResponseObject.class))) .thenThrow(HttpExceptionBuilder.build("-", "File from storage with id #" + request.getStorageId() + " was not found")) .thenReturn(response); client.updateSource(fileId, request); verify(httpClientMock, times(2)).put(eq(updateSourceUrl), any(), any(), eq(FileResponseObject.class)); verifyNoMoreInteractions(httpClientMock); } @Test public void testAddSource() throws ResponseException { FileResponseObject response = new FileResponseObject() {{ setData(new File()); }}; AddFileRequest request = new AddFileRequest(); request.setStorageId(100L); when(httpClientMock.post(eq(addSourceUrl), any(), any(), eq(FileResponseObject.class))) .thenThrow(HttpExceptionBuilder.build("-", "File from storage with id #" + request.getStorageId() + " was not found")) 
.thenReturn(response); client.addSource(request); verify(httpClientMock, times(2)).post(eq(addSourceUrl), any(), any(), eq(FileResponseObject.class)); verifyNoMoreInteractions(httpClientMock); } @Test public void testUploadTranslations() throws ResponseException { UploadTranslationsResponseObject response = new UploadTranslationsResponseObject() {{ setData(new UploadTranslationsResponse()); }}; when(httpClientMock.post(eq(uploadTranslationsUrl), any(), any(), eq(UploadTranslationsResponseObject.class))) .thenReturn(response); UploadTranslationsRequest request = new UploadTranslationsRequest(); client.uploadTranslations(languageId, request); verify(httpClientMock).post(eq(uploadTranslationsUrl), any(), any(), eq(UploadTranslationsResponseObject.class)); verifyNoMoreInteractions(httpClientMock); } @Test public void testUploadTranslationsWithRepeat() throws ResponseException { UploadTranslationsResponseObject response = new UploadTranslationsResponseObject() {{ setData(new UploadTranslationsResponse()); }}; UploadTranslationsRequest request = new UploadTranslationsRequest(); request.setStorageId(100L); when(httpClientMock.post(eq(uploadTranslationsUrl), any(), any(), eq(UploadTranslationsResponseObject.class))) .thenThrow(HttpExceptionBuilder.build("-", "File from storage with id #" + request.getStorageId() + " was not found")) .thenReturn(response); client.uploadTranslations(languageId, request); verify(httpClientMock, times(2)).post(eq(uploadTranslationsUrl), any(), any(), eq(UploadTranslationsResponseObject.class)); verifyNoMoreInteractions(httpClientMock); } @Test public void testStartBuildingTranslation() { ProjectBuildResponseObject response = new ProjectBuildResponseObject() {{ setData(new ProjectBuild()); }}; when(httpClientMock.post(eq(startBuildingTranslationsUrl), any(), any(), eq(ProjectBuildResponseObject.class))) .thenReturn(response); BuildProjectTranslationRequest request = new BuildProjectTranslationRequest() { }; 
client.startBuildingTranslation(request); verify(httpClientMock).post(eq(startBuildingTranslationsUrl), any(), any(), eq(ProjectBuildResponseObject.class)); verifyNoMoreInteractions(httpClientMock); } @Test public void testCheckBuildingTranslation() { ProjectBuildResponseObject response = new ProjectBuildResponseObject() {{ setData(new ProjectBuild()); }}; when(httpClientMock.get(eq(checkBuildingTranslationUrl), any(), eq(ProjectBuildResponseObject.class))) .thenReturn(response); client.checkBuildingTranslation(this.buildId); verify(httpClientMock).get(eq(checkBuildingTranslationUrl), any(), eq(ProjectBuildResponseObject.class)); verifyNoMoreInteractions(httpClientMock); } @Test public void testDownloadBuild() { DownloadLinkResponseObject response = new DownloadLinkResponseObject() {{ setData(new DownloadLink() {{ setUrl(downloadUrl); }} ); }}; when(httpClientMock.get(eq(downloadBuildUrl), any(), eq(DownloadLinkResponseObject.class))) .thenReturn(response); client.downloadBuild(this.buildId); verify(httpClientMock).get(eq(downloadBuildUrl), any(), eq(DownloadLinkResponseObject.class)); verifyNoMoreInteractions(httpClientMock); } @Test public void testDownloadBuildMalformed() { DownloadLinkResponseObject response = new DownloadLinkResponseObject() {{ setData(new DownloadLink() {{ setUrl(downloadUrlMalformed); }} ); }}; when(httpClientMock.get(eq(downloadBuildUrl), any(), eq(DownloadLinkResponseObject.class))) .thenReturn(response); assertThrows(RuntimeException.class, () -> client.downloadBuild(this.buildId)); verify(httpClientMock).get(eq(downloadBuildUrl), any(), eq(DownloadLinkResponseObject.class)); verifyNoMoreInteractions(httpClientMock); } @Test public void testAddSourceString() { SourceStringResponseObject response = new SourceStringResponseObject() {{ setData(new SourceString()); }}; when(httpClientMock.post(eq(addSourceStringUrl), any(), any(), eq(SourceStringResponseObject.class))) .thenReturn(response); AddSourceStringRequest request = new 
AddSourceStringRequest(); client.addSourceString(request); verify(httpClientMock).post(eq(addSourceStringUrl), any(), any(), eq(SourceStringResponseObject.class)); verifyNoMoreInteractions(httpClientMock); } @Test public void testListSourceString() { SourceStringResponseList response = new SourceStringResponseList() {{ setData(new ArrayList<>()); }}; when(httpClientMock.get(eq(listSourceStringUrl), any(), eq(SourceStringResponseList.class))) .thenReturn(response); client.listSourceString(fileId, null, null, "FiLtEr", null); verify(httpClientMock).get(eq(listSourceStringUrl), any(), eq(SourceStringResponseList.class)); verifyNoMoreInteractions(httpClientMock); } @Test public void testDeleteSourceString() { client.deleteSourceString(stringId); verify(httpClientMock).delete(eq(deleteSourceStringUrl), any(), eq(Void.class)); verifyNoMoreInteractions(httpClientMock); } @Test public void testEditSourceString() { List<PatchRequest> request = new ArrayList<>(); SourceStringResponseObject response = new SourceStringResponseObject() {{ setData(new SourceString()); }}; when(httpClientMock.patch(eq(editSourceStringUrl), eq(request), any(), eq(SourceStringResponseObject.class))) .thenReturn(response); client.editSourceString(stringId, request); verify(httpClientMock).patch(eq(editSourceStringUrl), eq(request), any(), eq(SourceStringResponseObject.class)); verifyNoMoreInteractions(httpClientMock); } @Test public void testHttpBadRequestException() { Mockito.doThrow(HttpExceptionBuilder.buildBadRequest("some-key", "987", "Some error")) .when(httpClientMock) .delete(eq(deleteSourceStringUrl), any(), eq(Void.class)); assertThrows(RuntimeException.class, () -> client.deleteSourceString(stringId)); verify(httpClientMock).delete(eq(deleteSourceStringUrl), any(), eq(Void.class)); verifyNoMoreInteractions(httpClientMock); } @Test public void testHttpExceptionUnknown() { Mockito.doThrow(HttpExceptionBuilder.build("unknown", "unknown exception")) .when(httpClientMock) 
.delete(eq(deleteSourceStringUrl), any(), eq(Void.class)); assertThrows(RuntimeException.class, () -> client.deleteSourceString(stringId)); verify(httpClientMock).delete(eq(deleteSourceStringUrl), any(), eq(Void.class)); verifyNoMoreInteractions(httpClientMock); } @Test public void testRuntimeException() { Mockito.doThrow(new RuntimeException("must be rethrown")) .when(httpClientMock) .delete(eq(deleteSourceStringUrl), any(), eq(Void.class)); assertThrows(RuntimeException.class, () -> client.deleteSourceString(stringId)); verify(httpClientMock).delete(eq(deleteSourceStringUrl), any(), eq(Void.class)); verifyNoMoreInteractions(httpClientMock); } }
package xdb.dom.impl; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import javax.xml.xpath.XPathExpressionException; import net.sf.saxon.om.DocumentInfo; import net.sf.saxon.om.NodeInfo; import net.sf.saxon.type.Type; import org.w3c.dom.Attr; import org.w3c.dom.CDATASection; import org.w3c.dom.Comment; import org.w3c.dom.DOMConfiguration; import org.w3c.dom.DOMException; import org.w3c.dom.DOMImplementation; import org.w3c.dom.Document; import org.w3c.dom.DocumentFragment; import org.w3c.dom.DocumentType; import org.w3c.dom.Element; import org.w3c.dom.EntityReference; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.w3c.dom.ProcessingInstruction; import org.w3c.dom.Text; import xdb.config.ModelConfigManager; import xdb.config.ModelConfigManager.Key; import xdb.dom.CustomFunctions; import xdb.dom.XPathEngine; /** * Implementation of Document class. * * @author ps142237 */ public class DocumentImpl extends NodeImpl implements Document, DocumentInfo { public static final DOMImplementationImpl IMPLEMENTATION = new DOMImplementationImpl(); private NodeList childNodes = NodeListImpl.EMPTY_LIST; private Map<String, String> nodeNames; private Map<String, Element> elementById = new HashMap<String, Element>(); private volatile Map<String, Map<String, List<Node>>> keyMap = Collections.emptyMap(); private final int documentNumber; /** * Constructor. * * @param position * Node position within document. 
*/ public DocumentImpl(int position) { super(position); documentNumber = getConfiguration().getDocumentNumberAllocator().allocateDocumentNumber(); } @Override public Node adoptNode(Node source) throws DOMException { throw DOMImplementationImpl.notSupportedErr(); } @Override public Attr createAttribute(String name) throws DOMException { throw DOMImplementationImpl.notSupportedErr(); } @Override public Attr createAttributeNS(String namespaceURI, String qualifiedName) throws DOMException { throw DOMImplementationImpl.notSupportedErr(); } @Override public CDATASection createCDATASection(String data) throws DOMException { throw DOMImplementationImpl.notSupportedErr(); } @Override public Comment createComment(String data) { throw DOMImplementationImpl.notSupportedErr(); } @Override public DocumentFragment createDocumentFragment() { throw DOMImplementationImpl.notSupportedErr(); } @Override public Element createElement(String tagName) throws DOMException { throw DOMImplementationImpl.notSupportedErr(); } @Override public Element createElementNS(String namespaceURI, String qualifiedName) throws DOMException { throw DOMImplementationImpl.notSupportedErr(); } @Override public EntityReference createEntityReference(String name) throws DOMException { throw DOMImplementationImpl.notSupportedErr(); } @Override public ProcessingInstruction createProcessingInstruction(String target, String data) throws DOMException { throw DOMImplementationImpl.notSupportedErr(); } @Override public Text createTextNode(String data) { throw DOMImplementationImpl.notSupportedErr(); } @Override public DocumentType getDoctype() { return null; } @Override public Element getDocumentElement() { return (Element) this.getFirstChild(); } @Override public String getDocumentURI() { return null; } @Override public DOMConfiguration getDomConfig() { throw DOMImplementationImpl.notSupportedErr(); } @Override public Element getElementById(String id) { return elementById.get(id); } @Override public NodeList 
getElementsByTagName(String tagname) { String name = getCanonicalName(tagname); if (name == null) { return NodeListImpl.EMPTY_LIST; } List<Node> list = new LinkedList<Node>(); ElementBase.addByTagName(getChildNodes(), name, list); return NodeListImpl.toNodeList(list); } @Override public NodeList getElementsByTagNameNS(String namespaceURI, String localName) { return getElementsByTagName(localName); } @Override public DOMImplementation getImplementation() { return IMPLEMENTATION; } @Override public String getInputEncoding() { return null; } @Override public boolean getStrictErrorChecking() { return false; } @Override public String getXmlEncoding() { return null; } @Override public boolean getXmlStandalone() { return false; } @Override public String getXmlVersion() { return "1.0"; } @Override public Node importNode(Node importedNode, boolean deep) throws DOMException { if (!deep || !(importedNode instanceof ElementBase)) { throw DOMImplementationImpl.badImportErr(); } return importNode((ElementBase) importedNode, this); } private ElementBase importNode(ElementBase element, Node parent) throws DOMException { String name = getOrSetCanonicalName(element.getNodeName()); int position = element.getPosition(); ElementBase result = null; if (element instanceof TextElementImpl) { String text = element.getTextContent(); result = new TextElementImpl(position, name, parent, text); } else if (element instanceof ElementImpl) { result = new ElementImpl(position, name, parent); } NamedNodeMapImpl atts = (NamedNodeMapImpl) element.getAttributes(); String[][] rawAtts = atts.getRawAttributes(); String[][] newAtts = new String[rawAtts.length][2]; for (int i = 0; i < rawAtts.length; i++) { String[] att = rawAtts[i]; String attName = getOrSetCanonicalName(att[0]); String[] newAtt = { attName, att[1] }; newAtts[i] = newAtt; } result.setAttributes(new NamedNodeMapImpl(result, newAtts)); if (element instanceof ElementImpl) { ElementImpl resultImpl = (ElementImpl) result; NodeList children = 
element.getChildNodes(); for (int i = 0; i < children.getLength(); i++) { ElementBase child = (ElementBase) children.item(i); child = importNode(child, result); resultImpl.addChild(child); } resultImpl.fixChildren(); } return result; } @Override public void normalizeDocument() { } @Override public Node renameNode(Node n, String namespaceURI, String qualifiedName) throws DOMException { throw DOMImplementationImpl.notSupportedErr(); } @Override public void setDocumentURI(String documentURI) { } @Override public void setStrictErrorChecking(boolean strictErrorChecking) { } @Override public void setXmlStandalone(boolean xmlStandalone) throws DOMException { } @Override public void setXmlVersion(String xmlVersion) throws DOMException { } @Override public short getNodeType() { return Node.DOCUMENT_NODE; } @Override public String getNodeValue() throws DOMException { return null; } @Override public String getNodeName() { return "#document"; } @Override public NodeList getChildNodes() { return this.childNodes; } @Override public boolean hasChildNodes() { return true; } @Override public Node getParentNode() { return null; } @Override public String getTextContent() throws DOMException { return null; } @Override public Node getPreviousSibling() { return null; } @Override public Node getNextSibling() { return null; } /** * Get the canonical name string for elements and attributes. This string is the same object used for the node name * of strings and elements in the document, suitable for comparison using "==" instead of "String.equals()". Using * the canonical name string instead of an otherwise equal string can significantly improve performance. * * TODO: Deprecate and replace with optimized logic based on Saxon name-pool. * * @param name * The name. * @return The canonical name string. 
*/ public String getCanonicalName(String name) { return this.nodeNames.get(name); } private String getOrSetCanonicalName(String name) { String canonical = this.nodeNames.get(name); if (canonical == null) { this.nodeNames.put(name, name); canonical = name; } return canonical; } /** * Get elements by a key from the indexes. * * @param key * The key/index name. * @param value * The lookup value. * @return The list of elements. */ public List<Node> getElementsFromKey(String key, String value) { Map<String, List<Node>> index = keyMap.get(key); if (index == null) { return Collections.emptyList(); } List<Node> matches = index.get(value); if (matches == null) { return Collections.emptyList(); } return matches; } /** * Build the index for key lookups. * * @throws javax.xml.xpath.XPathExpressionException * For errors. */ public void buildIndex() throws XPathExpressionException { Element element = getDocumentElement(); CustomFunctions.setHashcodeMap(indexHashcodes(element)); Map<String, Map<String, List<Node>>> map = new HashMap<String, Map<String, List<Node>>>(); indexElement(element, map); keyMap = map; } private Map<String, String> indexHashcodes(Element root) { Map<String, String> hashcodeMap = new HashMap<String, String>(); Set<String> pageIds = new HashSet<String>(); NodeList words = root.getElementsByTagName("word"); // Initialize hashcodes for elements with page ids for (int i = 0; i < words.getLength(); i++) { Element word = (Element) words.item(i); String pageId = word.getAttribute("page-id"); if (pageId.length() > 0) { String key = CustomFunctions.hashKey(word.getAttribute("l"), word.getAttribute("v")); hashcodeMap.put(key, pageId); pageIds.add(pageId); } } // Initialize hashcodes for elements without page ids for (int i = 0; i < words.getLength(); i++) { Element word = (Element) words.item(i); if (word.getAttribute("page-id").length() == 0) { String key = CustomFunctions.hashKey(word.getAttribute("l"), word.getAttribute("v")); String pageId = 
CustomFunctions.hashValue(key); while (pageIds.contains(pageId)) { pageId = String.valueOf(Long.valueOf(pageId) + 1); } hashcodeMap.put(key, pageId); pageIds.add(pageId); } } return hashcodeMap; } void indexElement(Element element, Map<String, Map<String, List<Node>>> map) throws XPathExpressionException { String elementName = element.getNodeName(); List<Key> keys = ModelConfigManager.instance().getKeys(elementName); for (Key key : keys) { Map<String, List<Node>> index = map.get(key.getName()); if (index == null) { index = new HashMap<String, List<Node>>(); map.put(key.getName(), index); } if (key.getMatch().equals(elementName)) { updateIndex(index, element, key, false); } } if (element instanceof ElementImpl) { NodeList children = element.getChildNodes(); for (int i = 0; i < children.getLength(); i++) { Element child = (Element) children.item(i); indexElement(child, map); } } } private void updateIndex(Map<String, List<Node>> index, Element element, Key key, boolean sort) throws XPathExpressionException { if ("string".equals(key.getType())) { String id = XPathEngine.stringQuery(element, key.getXpath()); if (id != null && id.length() > 0) { putInIndex(index, element, id, sort); } } else if ("string-list".equals(key.getType())) { String ids = XPathEngine.stringQuery(element, key.getXpath()).trim(); if (ids != null && ids.length() > 0) { String[] idList = ids.split(" "); for (String id : idList) { putInIndex(index, element, id, sort); } } } else { List<Node> nodes = XPathEngine.query(element, key.getXpath()); for (Node node : nodes) { String id = node.getTextContent(); if (id != null && id.length() > 0) { putInIndex(index, element, id, sort); } } } } private void putInIndex(Map<String, List<Node>> index, Element element, String id, boolean sort) { List<Node> list = index.get(id); if (list == null) { list = new LinkedList<Node>(); index.put(id, list); } list.add(element); if (sort) { Collections.sort(list, NODE_COMPARATOR); } } /** * Recalculate node positions in 
document order. Should be called after incorporating new nodes into the DOM, but * before re-indexing. */ public void reposition() { reposition((ElementBase) getDocumentElement(), 1); } void reposition(ElementBase element, int position) { element.setPosition(position++); if (element instanceof ElementImpl) { position += element.getAttributes().getLength() * 2; NodeList children = element.getChildNodes(); for (int i = 0; i < children.getLength(); i++) { reposition((ElementBase) children.item(i), position); } } else { position++; // For text node } } /** * Reindex a given element. * * @param element * The element. * @throws XPathExpressionException * For errors. */ public void reindex(Element element) throws XPathExpressionException { this.elementById.put(element.getAttribute("id"), element); String elementName = element.getNodeName(); List<Key> keys = ModelConfigManager.instance().getKeys(elementName); for (Key key : keys) { Map<String, List<Node>> index = keyMap.get(key.getName()); if (index == null) { index = new HashMap<String, List<Node>>(); keyMap.put(key.getName(), index); } if (key.getMatch().equals(elementName)) { updateIndex(index, element, key, true); } } } /** * Remove the given elements from the index. * * @param elements * The elements. 
*/ public void unindex(Element[] elements) { for (Map<String, List<Node>> index : keyMap.values()) { for (List<Node> list : index.values()) { Iterator<Node> i = list.iterator(); while (i.hasNext()) { Element element = (Element) i.next(); for (Element unindex : elements) { if (element == unindex) { i.remove(); } } } } } } @Override void addChild(Node child) { if (!(childNodes instanceof NodeListMutable)) { childNodes = new NodeListMutable(); } ((NodeListMutable) childNodes).add(child); } @Override void fixChildren() { if (childNodes instanceof NodeListMutable) { childNodes = ((NodeListMutable) childNodes).fixed(); } } void setNodeNames(Set<String> nodeNames) { this.nodeNames = new HashMap<String, String>(nodeNames.size()); for (String name : nodeNames) { this.nodeNames.put(name, name); } } void setElementById(Map<String, Element> elementById) { this.elementById = elementById; } private static final Comparator<Node> NODE_COMPARATOR = new Comparator<Node>() { public int compare(Node o1, Node o2) { if (o1.isSameNode(o2)) { return 0; } else if ((o1.compareDocumentPosition(o2) & Node.DOCUMENT_POSITION_FOLLOWING) > 0) { return 1; } else { return -1; } } }; @Override public int getNodeKind() { return Type.DOCUMENT; } @Override public int getDocumentNumber() { return documentNumber; } @Override public NodeInfo selectID(String id) { return (NodeImpl) getElementById(id); } @Override public Iterator<?> getUnparsedEntityNames() { return Collections.EMPTY_LIST.iterator(); } @Override public String[] getUnparsedEntity(String name) { return null; } }
/* ************************************************************************ LEBAH PORTAL FRAMEWORK Copyright (C) 2007 Shamsul Bahrin * ************************************************************************ */ /* ************************************************************************ LEBAH PORTAL FRAMEWORK Copyright (C) 2007 Shamsul Bahrin * ************************************************************************ */ package lebah.app; import java.io.IOException; import java.io.InputStream; import java.net.MalformedURLException; import java.net.URL; import javax.servlet.http.HttpSession; import lebah.portal.HtmlModuleData; import lebah.portal.db.CustomClass; import lebah.portal.db.RegisterModule; import org.apache.velocity.Template; /** * @author Shamsul Bahrin Abd Mutalib * @version 1.01 */ public class HtmlContentModule2 extends lebah.portal.velocity.VTemplate implements lebah.portal.HtmlContainer { private String strUrl = ""; private String moduleTitle = ""; //set the url public void setUrl(String strUrl) { this.strUrl = strUrl; } public Template doTemplate() throws Exception { HttpSession session = request.getSession(); String userId = (String) session.getAttribute("_portal_login"); context.put("userId", userId); String isLogin = (String) session.getAttribute("_portal_islogin"); String moduleId = getId(); context.put("isLogin", "true".equals(isLogin) ? 
Boolean.TRUE : Boolean.FALSE); context.put("formname", getId().replace(" ", "")); String submit = getParam("command"); String formname = getParam(getId()); if ( !formname.equals(getId()) ) submit = ""; if ( "changeProperties".equals(submit)) { strUrl = getParam("url"); moduleTitle = getParam("moduleTitle"); RegisterModule.updateHtmlLocation(moduleId, strUrl); } else if ( "deleteModule".equals(submit)) { String tab_id = (String) session.getAttribute("_tab_id"); RegisterModule.deleteUserModule(tab_id, moduleId, userId); } context.put("url", strUrl); //String moduleTitle = CustomClass.getCustomTitle(getId()); //context.put("moduleTitle", moduleTitle); try { doJob(session); } catch ( Exception e ) { context.put("stringbuffer", e.getMessage()); } Template template = engine.getTemplate("vtl/url/urlcontent.vm"); //realease for gc context.put("stringbuffer", null); return template; } private void doJob(HttpSession session) throws Exception { String http = request.getRequestURL().toString().substring(0, request.getRequestURL().toString().indexOf("://") + 3); URL url = null; try { if (strUrl.indexOf(http)== -1 ) { String hostURL = (String) session.getAttribute("_portal_server"); String appName = (String) session.getAttribute("_portal_appname"); if ( strUrl.charAt(0) == '/' ) { strUrl = http + hostURL + strUrl; } else { strUrl = http + hostURL + "/" + appName + "/" + strUrl; } } url = new URL(strUrl); //---strUrl must be HTML file, (.htm or .html) and must not contains <FRAMSET if ( strUrl.length() < 8 ) throw new Exception("Invalid URL"); //take the last 3 chars or the last 4 chars of the strUrl String last4 = strUrl.substring(strUrl.length()-4); String last5 = strUrl.substring(strUrl.length()-5); /* if ( !last4.equals(".htm") && !last5.equals(".html") ) throw new Exception("The link must be to an html file!"); */ //in the url, look for last "/" int last_bslash = strUrl.lastIndexOf("/"); //and get this url String url2 = strUrl.substring(0, last_bslash); InputStream content = 
(InputStream) url.getContent(); StringBuffer buf = new StringBuffer(); int ch = 0, prevchar = 0; while ( (ch = content.read() ) != - 1) { buf.append((char) ch); } //remove <style> String str = buf.toString().toLowerCase(); if ( (str.indexOf("<style") > -1) && (str.indexOf("</style>") > -1) ) { String str1 = buf.substring(0, str.indexOf("<style")); String str2 = buf.substring(str.indexOf("</style>") + "</style>".length()); buf = new StringBuffer(str1).append(str2); } //look for <img src //replacetoFullURL(buf, "<img src", url2); //replacetoFullURL(buf, "<embed src", url2); replacetoFullURL(buf, "<a href", url2); //replacetoFullURL(buf, "<img src", url2); replacetoFullURL(buf, "src=", url2); context.put("stringbuffer", buf); } catch( MalformedURLException e1) { throw new Exception("MalformedURLException: " + e1.getMessage()); } catch ( IOException e2 ) { throw new Exception("IOException: " + e2.getMessage()); } } private void replacetoFullURL(StringBuffer contentBuffer, String str_tag, String url2) { String http = request.getRequestURL().toString().substring(0, request.getRequestURL().toString().indexOf("://") + 3); String str = contentBuffer.toString(); int pos_start = 0; while (true) { //infinite loop!! 
int tag_open = str.indexOf(str_tag, pos_start); if ( tag_open == -1 ) break; //get out of this loop when no more img src //start from here, look for ">" StringBuffer sb = new StringBuffer(); int cnt = 0; for ( cnt = tag_open; cnt < str.length(); cnt++ ) { sb.append(str.charAt(cnt)); if ( str.charAt(cnt) == '>' ) break; } int tag_close = cnt; //analyze //find first "=" str = sb.toString(); int first_eq = str.indexOf("="); //take left hand side of "=" and trim it String eq_left = str.substring(0, first_eq).trim(); //add target="_new" before the '>' in the eq_right, but only for http if ( "<a ".equals(eq_left.substring(0, "<a ".length()) ) ) { eq_left = "<a target=\"_new\" " + eq_left.substring("<a ".length()); } //take right hand side of "=" and trim it String eq_right = str.substring(first_eq + 1).trim(); //get the first "/" after http://, this url is root String url_root = url2; int first_bslash = url2.indexOf("/", http.length() + 1); if ( first_bslash > 0 ) url_root = url2.substring(0, first_bslash); if ( eq_right.length() > http.length()+1) { if ( eq_right.charAt(0) == '\"' ) { //must not start with http:// and mailto: if ( !http.equals(eq_right.substring(1, http.length()+1)) && !"mailto:".equals(eq_right.substring(1, "mailto:".length()+1))) { //eg: "/image/home.gif" if ( eq_right.startsWith("\"/") ) { eq_right = "\"" + url_root + eq_right.substring(1); } else { eq_right = "\"" + url2 + "/" + eq_right.substring(1); } } } else { if ( !http.equals(eq_right.substring(0, http.length()+1)) && !"mailto:".equals(eq_right.substring(0, "mailto:".length()+1))) { //eg: "/image/home.gif" if ( eq_right.startsWith("\"/") ) { eq_right = url_root + eq_right.substring(0); } else { eq_right = url2 + "/" + eq_right; } } } } //construct back the whole string str = eq_left + "=" + eq_right; pos_start = tag_open + str.length(); //now replace in the buf contentBuffer.replace(tag_open, tag_close+1, str); str = contentBuffer.toString(); } } void changeProperties(String userId, String id, 
String url, String moduleTitle) throws Exception { HtmlModuleData.update(userId, id, url, moduleTitle); } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.beam.sdk.io; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import com.google.common.collect.ImmutableList; import java.io.IOException; import java.util.List; import java.util.NoSuchElementException; import java.util.Objects; import org.apache.beam.sdk.coders.AvroCoder; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.DefaultCoder; import org.apache.beam.sdk.coders.VarLongCoder; import org.apache.beam.sdk.io.UnboundedSource.UnboundedReader; import org.apache.beam.sdk.metrics.Counter; import org.apache.beam.sdk.metrics.SourceMetrics; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.transforms.SerializableFunction; import org.apache.beam.sdk.values.PCollection; import org.joda.time.Duration; import org.joda.time.Instant; /** * Most users should use {@link GenerateSequence} instead. * * <p>A source that produces longs. When used as a {@link BoundedSource}, {@link CountingSource} * starts at {@code 0} and counts up to a specified maximum. 
When used as an {@link * UnboundedSource}, it counts up to {@link Long#MAX_VALUE} and then never produces more output. (In * practice, this limit should never be reached.) * * <p>The bounded {@link CountingSource} is implemented based on {@link OffsetBasedSource} and * {@link OffsetBasedSource.OffsetBasedReader}, so it performs efficient initial splitting and it * supports dynamic work rebalancing. * * <p>To produce a bounded source, use {@link #createSourceForSubrange(long, long)}. To produce an * unbounded source, use {@link #createUnboundedFrom(long)}. */ public class CountingSource { /** * Creates a {@link BoundedSource} that will produce the specified number of elements, * from {@code 0} to {@code numElements - 1}. * * @deprecated use {@link GenerateSequence} instead */ @Deprecated public static BoundedSource<Long> upTo(long numElements) { checkArgument(numElements >= 0, "numElements (%s) must be greater than or equal to 0", numElements); return new BoundedCountingSource(0, numElements); } /** * Creates a {@link BoundedSource} that will produce elements * starting from {@code startIndex} (inclusive) to {@code endIndex} (exclusive). * If {@code startIndex == endIndex}, then no elements will be produced. */ static BoundedSource<Long> createSourceForSubrange(long startIndex, long endIndex) { checkArgument(endIndex >= startIndex, "endIndex (%s) must be greater than or equal to startIndex (%s)", endIndex, startIndex); return new BoundedCountingSource(startIndex, endIndex); } /** * Create a new {@link UnboundedCountingSource}. */ // package-private to return a typed UnboundedCountingSource rather than the UnboundedSource type. static UnboundedCountingSource createUnboundedFrom(long start) { return new UnboundedCountingSource(start, 1, 1L, Duration.ZERO, new NowTimestampFn()); } /** * Creates an {@link UnboundedSource} that will produce numbers starting from {@code 0} up to * {@link Long#MAX_VALUE}. 
* * <p>After {@link Long#MAX_VALUE}, the source never produces more output. (In practice, this * limit should never be reached.) * * <p>Elements in the resulting {@link PCollection PCollection&lt;Long&gt;} will have timestamps * corresponding to processing time at element generation, provided by {@link Instant#now}. * * @deprecated use {@link GenerateSequence} instead */ @Deprecated public static UnboundedSource<Long, CounterMark> unbounded() { return unboundedWithTimestampFn(new NowTimestampFn()); } /** * Creates an {@link UnboundedSource} that will produce numbers starting from {@code 0} up to * {@link Long#MAX_VALUE}, with element timestamps supplied by the specified function. * * <p>After {@link Long#MAX_VALUE}, the source never produces more output. (In practice, this * limit should never be reached.) * * <p>Note that the timestamps produced by {@code timestampFn} may not decrease. * * @deprecated use {@link GenerateSequence} and call * {@link GenerateSequence#withTimestampFn(SerializableFunction)} instead */ @Deprecated public static UnboundedSource<Long, CounterMark> unboundedWithTimestampFn( SerializableFunction<Long, Instant> timestampFn) { return new UnboundedCountingSource(0, 1, 1L, Duration.ZERO, timestampFn); } ///////////////////////////////////////////////////////////////////////////////////////////// /** Prevent instantiation. */ private CountingSource() {} /** * A function that returns {@link Instant#now} as the timestamp for each generated element. */ static class NowTimestampFn implements SerializableFunction<Long, Instant> { @Override public Instant apply(Long input) { return Instant.now(); } @Override public boolean equals(Object other) { return other instanceof NowTimestampFn; } @Override public int hashCode() { return getClass().hashCode(); } } /** * An implementation of {@link CountingSource} that produces a bounded {@link PCollection}. 
* It is implemented on top of {@link OffsetBasedSource} (with associated reader * {@link BoundedCountingReader}) and performs efficient initial splitting and supports dynamic * work rebalancing. */ private static class BoundedCountingSource extends OffsetBasedSource<Long> { /** * Creates a {@link BoundedCountingSource} that generates the numbers in the specified * {@code [start, end)} range. */ public BoundedCountingSource(long start, long end) { super(start, end, 1 /* can be split every 1 offset */); } //////////////////////////////////////////////////////////////////////////////////////////// @Override public long getBytesPerOffset() { return 8; } @Override public long getMaxEndOffset(PipelineOptions options) throws Exception { return getEndOffset(); } @Override public OffsetBasedSource<Long> createSourceForSubrange(long start, long end) { return new BoundedCountingSource(start, end); } @Override public org.apache.beam.sdk.io.BoundedSource.BoundedReader<Long> createReader( PipelineOptions options) throws IOException { return new BoundedCountingReader(this); } @Override public Coder<Long> getOutputCoder() { return VarLongCoder.of(); } @Override public boolean equals(Object other) { if (!(other instanceof BoundedCountingSource)) { return false; } BoundedCountingSource that = (BoundedCountingSource) other; return this.getStartOffset() == that.getStartOffset() && this.getEndOffset() == that.getEndOffset(); } @Override public int hashCode() { return Objects.hash(this.getStartOffset(), (int) this.getEndOffset()); } } /** * The reader associated with {@link BoundedCountingSource}. 
* * @see BoundedCountingSource */ private static class BoundedCountingReader extends OffsetBasedSource.OffsetBasedReader<Long> { private long current; private final Counter elementsRead = SourceMetrics.elementsRead(); public BoundedCountingReader(OffsetBasedSource<Long> source) { super(source); } @Override protected long getCurrentOffset() throws NoSuchElementException { return current; } @Override public synchronized long getSplitPointsRemaining() { return Math.max(0, getCurrentSource().getEndOffset() - current); } @Override public synchronized BoundedCountingSource getCurrentSource() { return (BoundedCountingSource) super.getCurrentSource(); } @Override public Long getCurrent() throws NoSuchElementException { return current; } @Override protected boolean startImpl() throws IOException { current = getCurrentSource().getStartOffset(); return true; } @Override protected boolean advanceImpl() throws IOException { elementsRead.inc(); current++; return true; } @Override public void close() throws IOException {} } /** * An implementation of {@link CountingSource} that produces an unbounded {@link PCollection}. */ static class UnboundedCountingSource extends UnboundedSource<Long, CounterMark> { /** The first number (>= 0) generated by this {@link UnboundedCountingSource}. */ private final long start; /** The interval between numbers generated by this {@link UnboundedCountingSource}. */ private final long stride; /** The number of elements to produce each period. */ private final long elementsPerPeriod; /** The time between producing numbers from this {@link UnboundedCountingSource}. */ private final Duration period; /** The function used to produce timestamps for the generated elements. */ private final SerializableFunction<Long, Instant> timestampFn; /** * Creates an {@link UnboundedSource} that will produce numbers starting from {@code 0} up to * {@link Long#MAX_VALUE}, with element timestamps supplied by the specified function. 
* * <p>After {@link Long#MAX_VALUE}, the source never produces more output. (In practice, this * limit should never be reached.) * * <p>Note that the timestamps produced by {@code timestampFn} may not decrease. */ private UnboundedCountingSource( long start, long stride, long elementsPerPeriod, Duration period, SerializableFunction<Long, Instant> timestampFn) { this.start = start; this.stride = stride; checkArgument( elementsPerPeriod > 0L, "Must produce at least one element per period, got %s", elementsPerPeriod); this.elementsPerPeriod = elementsPerPeriod; checkArgument( period.getMillis() >= 0L, "Must have a non-negative period length, got %s", period); this.period = period; this.timestampFn = timestampFn; } /** * Returns an {@link UnboundedCountingSource} like this one with the specified period. Elements * will be produced with an interval between them equal to the period. */ public UnboundedCountingSource withRate(long elementsPerPeriod, Duration period) { return new UnboundedCountingSource(start, stride, elementsPerPeriod, period, timestampFn); } /** * Returns an {@link UnboundedCountingSource} like this one where the timestamp of output * elements are supplied by the specified function. * * <p>Note that timestamps produced by {@code timestampFn} may not decrease. */ public UnboundedCountingSource withTimestampFn( SerializableFunction<Long, Instant> timestampFn) { checkNotNull(timestampFn); return new UnboundedCountingSource(start, stride, elementsPerPeriod, period, timestampFn); } /** * Splits an unbounded source {@code desiredNumSplits} ways by giving each split every * {@code desiredNumSplits}th element that this {@link UnboundedCountingSource} * produces. 
* * <p>E.g., if a source produces all even numbers {@code [0, 2, 4, 6, 8, ...)} and we want to * split into 3 new sources, then the new sources will produce numbers that are 6 apart and * are offset at the start by the original stride: {@code [0, 6, 12, ...)}, * {@code [2, 8, 14, ...)}, and {@code [4, 10, 16, ...)}. */ @Override public List<? extends UnboundedSource<Long, CountingSource.CounterMark>> split( int desiredNumSplits, PipelineOptions options) throws Exception { // Using Javadoc example, stride 2 with 3 splits becomes stride 6. long newStride = stride * desiredNumSplits; ImmutableList.Builder<UnboundedCountingSource> splits = ImmutableList.builder(); for (int i = 0; i < desiredNumSplits; ++i) { // Starts offset by the original stride. Using Javadoc example, this generates starts of // 0, 2, and 4. splits.add( new UnboundedCountingSource( start + i * stride, newStride, elementsPerPeriod, period, timestampFn)); } return splits.build(); } @Override public UnboundedReader<Long> createReader( PipelineOptions options, CounterMark checkpointMark) { return new UnboundedCountingReader(this, checkpointMark); } @Override public Coder<CountingSource.CounterMark> getCheckpointMarkCoder() { return AvroCoder.of(CountingSource.CounterMark.class); } @Override public void validate() {} @Override public Coder<Long> getOutputCoder() { return VarLongCoder.of(); } public boolean equals(Object other) { if (!(other instanceof UnboundedCountingSource)) { return false; } UnboundedCountingSource that = (UnboundedCountingSource) other; return this.start == that.start && this.stride == that.stride && this.elementsPerPeriod == that.elementsPerPeriod && Objects.equals(this.period, that.period) && Objects.equals(this.timestampFn, that.timestampFn); } public int hashCode() { return Objects.hash(start, stride, elementsPerPeriod, period, timestampFn); } } /** * The reader associated with {@link UnboundedCountingSource}. 
* @see UnboundedCountingSource
 */
private static class UnboundedCountingReader extends UnboundedReader<Long> {
  private UnboundedCountingSource source;
  // Last value emitted (or start - stride before the first emit; see constructor).
  private long current;
  private Instant currentTimestamp;
  // Wall-clock instant the original reader first started; restored from the checkpoint so the
  // rate limit in expectedValue() stays anchored across restarts.
  private Instant firstStarted;

  private final Counter elementsRead = SourceMetrics.elementsRead();

  public UnboundedCountingReader(UnboundedCountingSource source, CounterMark mark) {
    this.source = source;
    if (mark == null) {
      // Because we have not emitted an element yet, and start() calls advance, we need to
      // "un-advance" so that start() produces the correct output.
      this.current = source.start - source.stride;
    } else {
      this.current = mark.getLastEmitted();
      this.firstStarted = mark.getStartTime();
    }
  }

  @Override
  public boolean start() throws IOException {
    // firstStarted is only null when there was no checkpoint to restore it from.
    if (firstStarted == null) {
      this.firstStarted = Instant.now();
    }
    return advance();
  }

  @Override
  public boolean advance() throws IOException {
    // Overflow-safe check that (current + source.stride) <= LONG.MAX_VALUE. Else, stop producing.
    if (Long.MAX_VALUE - source.stride < current) {
      return false;
    }
    long nextValue = current + source.stride;
    // Rate limiting: do not emit values ahead of what elapsed time allows.
    if (expectedValue() < nextValue) {
      return false;
    }
    elementsRead.inc();
    current = nextValue;
    currentTimestamp = source.timestampFn.apply(current);
    return true;
  }

  // Largest value currently permitted by the configured rate (elementsPerPeriod per period,
  // measured from firstStarted). A zero-length period means "unthrottled".
  private long expectedValue() {
    if (source.period.getMillis() == 0L) {
      return Long.MAX_VALUE;
    }
    double periodsElapsed =
        (Instant.now().getMillis() - firstStarted.getMillis())
            / (double) source.period.getMillis();
    return (long) (source.elementsPerPeriod * periodsElapsed);
  }

  @Override
  public Instant getWatermark() {
    // Watermark is the timestamp of the last emitted element.
    return source.timestampFn.apply(current);
  }

  @Override
  public CounterMark getCheckpointMark() {
    return new CounterMark(current, firstStarted);
  }

  @Override
  public UnboundedSource<Long, CounterMark> getCurrentSource() {
    return source;
  }

  @Override
  public Long getCurrent() throws NoSuchElementException {
    return current;
  }

  @Override
  public Instant getCurrentTimestamp() throws NoSuchElementException {
    return currentTimestamp;
  }

  @Override
  public void close() throws IOException {}

  @Override
  public long getSplitBacklogBytes() {
    long expected = expectedValue();
    // 8 bytes (one long) per not-yet-emitted element of this split, i.e. every
    // source.stride-th value between current and expected.
    return Math.max(0L, 8 * (expected - current) / source.stride);
  }
}

/**
 * The checkpoint for an unbounded {@link CountingSource} is simply the last value produced. The
 * associated source object encapsulates the information needed to produce the next value.
 */
@DefaultCoder(AvroCoder.class)
public static class CounterMark implements UnboundedSource.CheckpointMark {
  /** The last value emitted. */
  private final long lastEmitted;
  // Instant the original reader started; carried through checkpoints (see UnboundedCountingReader).
  private final Instant startTime;

  /**
   * Creates a checkpoint mark reflecting the last emitted value.
   */
  public CounterMark(long lastEmitted, Instant startTime) {
    this.lastEmitted = lastEmitted;
    this.startTime = startTime;
  }

  /**
   * Returns the last value emitted by the reader.
   */
  public long getLastEmitted() {
    return lastEmitted;
  }

  /**
   * Returns the time the reader was started.
   */
  public Instant getStartTime() {
    return startTime;
  }

  /////////////////////////////////////////////////////////////////////////////////////

  // No-arg constructor required by AvroCoder reflection; never called directly.
  @SuppressWarnings("unused") // For AvroCoder
  private CounterMark() {
    this.lastEmitted = 0L;
    this.startTime = Instant.now();
  }

  @Override
  public void finalizeCheckpoint() throws IOException {}
}
}
/********************************************************************************** * $URL$ * $Id$ *********************************************************************************** * * Copyright (c) 2004, 2005, 2006, 2007, 2008, 2009 The Sakai Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ECL-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * **********************************************************************************/ package org.sakaiproject.tool.assessment.facade; import static org.sakaiproject.tool.assessment.facade.ItemHashUtil.ALL_HASH_BACKFILLABLE_ITEM_IDS_HQL; import static org.sakaiproject.tool.assessment.facade.ItemHashUtil.ID_PARAMS_PLACEHOLDER; import static org.sakaiproject.tool.assessment.facade.ItemHashUtil.ITEMS_BY_ID_HQL; import static org.sakaiproject.tool.assessment.facade.ItemHashUtil.TOTAL_HASH_BACKFILLABLE_ITEM_COUNT_HQL; import static org.sakaiproject.tool.assessment.facade.ItemHashUtil.TOTAL_ITEM_COUNT_HQL; import java.util.ArrayList; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import org.hibernate.Query; import org.sakaiproject.tool.assessment.data.dao.assessment.ItemAttachment; import org.sakaiproject.tool.assessment.data.dao.assessment.ItemData; import org.sakaiproject.tool.assessment.data.dao.assessment.ItemMetaData; import org.sakaiproject.tool.assessment.data.ifc.assessment.AssessmentIfc; import org.sakaiproject.tool.assessment.data.ifc.assessment.ItemDataIfc; import 
org.sakaiproject.tool.assessment.data.ifc.assessment.ItemMetaDataIfc; import org.sakaiproject.tool.assessment.data.ifc.assessment.SectionDataIfc; import org.sakaiproject.tool.assessment.integration.helper.ifc.TagServiceHelper; import org.sakaiproject.tool.assessment.osid.shared.impl.IdImpl; import org.sakaiproject.tool.assessment.services.assessment.AssessmentService; import org.springframework.dao.DataAccessException; import org.springframework.orm.hibernate4.HibernateCallback; import org.springframework.orm.hibernate4.support.HibernateDaoSupport; import lombok.Setter; import lombok.extern.slf4j.Slf4j; @Slf4j public class ItemFacadeQueries extends HibernateDaoSupport implements ItemFacadeQueriesAPI { @Setter private ItemHashUtil itemHashUtil; public IdImpl getItemId(String id){ return new IdImpl(id); } public IdImpl getItemId(Long id){ return new IdImpl(id); } public IdImpl getItemId(long id){ return new IdImpl(id); } public List list() { return getHibernateTemplate().find("from ItemData"); } public void show(Long itemId) { getHibernateTemplate().load(ItemData.class, itemId); } public ItemFacade getItem(Long itemId, String agent) { return getItem(itemId); } public void deleteItem(Long itemId, String agent) { ItemData item = getHibernateTemplate().get(ItemData.class, itemId); // get list of attachment in item if (item != null) { AssessmentService service = new AssessmentService(); List itemAttachmentList = service.getItemResourceIdList(item); service.deleteResources(itemAttachmentList); SectionDataIfc section = item.getSection(); // section might be null if you are deleting an item created inside a pool, that's not linked to any assessment. 
if (section != null) { section.getItemSet().remove(item); } getHibernateTemplate().delete(item); } } public void deleteItemContent(Long itemId, String agent) { ItemData item = getHibernateTemplate().get(ItemData.class, itemId); if (item != null) { item.getItemTextSet().clear(); item.getItemMetaDataSet().clear(); item.getItemFeedbackSet().clear(); getHibernateTemplate().merge(item); } } public void deleteItemMetaData(final Long itemId, final String label) { // delete metadata by label ItemData item = getHibernateTemplate().get(ItemData.class, itemId); List<ItemMetaDataIfc> itemmetadatalist = (List<ItemMetaDataIfc>) getHibernateTemplate() .findByNamedParam("from ItemMetaData imd where imd.item.itemId = :id and imd.label = :label", new String[] {"id", "label"}, new Object[] {itemId, label}); item.getItemMetaDataSet().removeAll(itemmetadatalist); getHibernateTemplate().merge(item); } public void addItemMetaData(Long itemId, String label, String value) { ItemData item = (ItemData) getHibernateTemplate().get(ItemData.class, itemId); if (item != null) { log.debug("**Id = {}, **score = {}, **grade = {}, **CorrectFeedback is lazy = {}, **Objective not lazy = {}", item.getItemId(), item.getScore(), item.getGrade(), item.getCorrectItemFeedback(), item.getItemMetaDataByLabel("ITEM_OBJECTIVE") ); item.getItemMetaDataSet().add(new ItemMetaData(item, label, value)); getHibernateTemplate().merge(item); } } public ItemFacade saveItem(ItemFacade item) throws DataFacadeException { List<ItemFacade> list = new ArrayList<>(1); list.add(item); list = saveItems(list); return list.isEmpty() ? 
null : list.get(0); } public void removeItemAttachment(Long itemAttachmentId) { ItemAttachment itemAttachment = getHibernateTemplate().load(ItemAttachment.class, itemAttachmentId); ItemDataIfc item = itemAttachment.getItem(); if (item != null) { item.getItemAttachmentSet().remove(itemAttachment); getHibernateTemplate().merge(item); } } public List<ItemFacade> saveItems(final List<ItemFacade> items) throws DataFacadeException { log.debug("Persist items: {}", items); try { for (ItemFacade item : items) { ItemDataIfc itemData = item.getData(); itemData.setLastModifiedDate(new Date()); itemData.setLastModifiedBy(AgentFacade.getAgentString()); itemData.setHash(itemHashUtil.hashItem(itemData)); itemData = getHibernateTemplate().merge(itemData); item.setData(itemData); item.setItemId(itemData.getItemId()); if (itemData.getSection() != null) { AssessmentIfc assessment = itemData.getSection().getAssessment(); assessment.setLastModifiedBy(AgentFacade.getAgentString()); assessment.setLastModifiedDate(new Date()); getHibernateTemplate().merge(assessment); } } return items; } catch (Exception e) { log.warn("Could not save items, {}", e.getMessage(), e); return Collections.emptyList(); } } private static final Map<String,String> BACKFILL_HASHES_HQL = new HashMap<String,String>() {{ this.put(TOTAL_ITEM_COUNT_HQL, "select count(*) from ItemData"); this.put(TOTAL_HASH_BACKFILLABLE_ITEM_COUNT_HQL, "select count(*) from ItemData as item where item.hash is null"); this.put(ALL_HASH_BACKFILLABLE_ITEM_IDS_HQL, "select item.id from ItemData as item where item.hash is null"); this.put(ITEMS_BY_ID_HQL, "select item from ItemData as item where item.id in (" + ID_PARAMS_PLACEHOLDER + ")"); }}; @Override public BackfillItemHashResult backfillItemHashes(int batchSize) { return itemHashUtil.backfillItemHashes( batchSize, BACKFILL_HASHES_HQL, ItemData.class, i -> { final String hash = itemHashUtil.hashItemUnchecked(i); i.setHash(hash); return i; }, getHibernateTemplate()); } public ItemFacade 
getItem(Long itemId) { ItemData item = null; try { item = (ItemData) getHibernateTemplate().load(ItemData.class, itemId); } catch (DataAccessException e) { log.warn("unable to retrieve item " + itemId + " due to:", e); return null; } return new ItemFacade(item); } public Boolean itemExists(Long itemId) { try { if (getHibernateTemplate().get(ItemData.class, itemId)==null){ return false; }else{ return true; } } catch (Exception e) { return false; } } public Map<String, ItemFacade> getItemsByHash(String hash) { List<ItemData> list1 = (List<ItemData>) getHibernateTemplate() .findByNamedParam("from ItemData where hash = :hash", "hash", hash); Map<String, ItemFacade> itemFacadeMap = new HashMap(); for (int i = 0; i < list1.size(); i++) { ItemData a = (ItemData) list1.get(i); ItemFacade f = new ItemFacade(a); itemFacadeMap.put(f.getItemIdString(),f); } return itemFacadeMap; } public Map<String, ItemFacade> getItemsByKeyword(final String keyword) { final HibernateCallback<List<ItemData>> hcb = session -> { Query q = session.createQuery("select ab from ItemData ab, ItemText itext where itext.item=ab and itext.text like :text"); q.setString("text", keyword); return q.list(); }; List<ItemData> list1 = getHibernateTemplate().execute(hcb); final HibernateCallback<List<ItemData>> hcb2 = session -> { Query q = session.createQuery("select distinct ab from ItemData ab, Answer answer where answer.item=ab and answer.text like :text"); q.setString("text", keyword); return q.list(); }; List<ItemData> list2 = getHibernateTemplate().execute(hcb2); final HibernateCallback<List<ItemData>> hcb3 = session -> { Query q = session.createQuery("select ab from ItemData ab, ItemMetaData md where md.item=ab and md.entry like :keyword and md.label = :label"); q.setString("keyword", keyword); q.setString("label", "KEYWORD"); return q.list(); }; List<ItemData> list3 = getHibernateTemplate().execute(hcb3); final HibernateCallback<List<ItemData>> hcb4 = session -> { Query q = session.createQuery("select 
ab from ItemData ab where ab.instruction like :keyword"); q.setString("keyword", keyword); return q.list(); }; List<ItemData> list4 = getHibernateTemplate().execute(hcb4); Map<String, ItemFacade> itemfacadeMap = new HashMap(); for (int i = 0; i < list1.size(); i++) { ItemData a = list1.get(i); ItemFacade f = new ItemFacade(a); itemfacadeMap.put(f.getItemIdString(),f); } for (int i = 0; i < list2.size(); i++) { ItemData a = list2.get(i); ItemFacade f = new ItemFacade(a); itemfacadeMap.put(f.getItemIdString(),f); } for (int i = 0; i < list3.size(); i++) { ItemData a = list3.get(i); ItemFacade f = new ItemFacade(a); itemfacadeMap.put(f.getItemIdString(),f); } for (int i = 0; i < list4.size(); i++) { ItemData a = list4.get(i); ItemFacade f = new ItemFacade(a); itemfacadeMap.put(f.getItemIdString(),f); } log.debug("Search for keyword, found: " + itemfacadeMap.size()); return itemfacadeMap; } /* * This API is for linear access to create a dummy record to indicate the student * has taken action on the item (question). Therefore, we just need one itemTextId * for recording - use the first one (index 0). 
*/ public Long getItemTextId(final Long publishedItemId) { final HibernateCallback<List<Long>> hcb = session -> { Query q = session.createQuery("select i.id from PublishedItemText i where i.item.itemId = :id"); q.setLong("id", publishedItemId); return q.list(); }; List<Long> list = getHibernateTemplate().execute(hcb); log.debug("list.size() = " + list.size()); Long itemTextId = -1l; if (!list.isEmpty()) itemTextId = list.get(0); log.debug("itemTextId" + itemTextId); return itemTextId; } @Override public void updateItemTagBindingsHavingTag(TagServiceHelper.TagView tagView) { // TODO when we add item search indexing, this is going to have to change to // first read in all the affected item IDs so we can generate events for each // (similar to what we do in the tag service) getHibernateTemplate().bulkUpdate("update ItemTag it " + "set it.tagLabel = ?, it.tagCollectionId = ?, it.tagCollectionName = ? " + "where it.tagId = ?", tagView.tagLabel, tagView.tagCollectionId, tagView.tagCollectionName, tagView.tagId); } @Override public void deleteItemTagBindingsHavingTagId(String tagId) { // TODO when we add item search indexing, this is going to have to change to // first read in all the affected item IDs so we can generate events for each // (similar to what we do in the tag service) getHibernateTemplate().bulkUpdate("delete ItemTag it where it.tagId = ?", tagId); } @Override public void updateItemTagBindingsHavingTagCollection(TagServiceHelper.TagCollectionView tagCollectionView) { // TODO when we add item search indexing, this is going to have to change to // first read in all the affected item IDs so we can generate events for each // (similar to what we do in the tag service) getHibernateTemplate().bulkUpdate("update ItemTag it " + "set it.tagCollectionName = ? 
" + "where it.tagCollectionId = ?", tagCollectionView.tagCollectionName, tagCollectionView.tagCollectionId); } @Override public void deleteItemTagBindingsHavingTagCollectionId(String tagCollectionId) { // TODO when we add item search indexing, this is going to have to change to // first read in all the affected item IDs so we can generate events for each // (similar to what we do in the tag service) getHibernateTemplate().bulkUpdate("delete ItemTag it where it.tagCollectionId = ?", tagCollectionId); } @Override public List<Long> getItemsIdsByHash(String hash) { final HibernateCallback<List<Long>> hcb = session -> { Query q = session.createQuery("select ab.itemId from ItemData ab where ab.hash = ? "); q.setString(0, hash); return q.list(); }; List<Long> list1 = getHibernateTemplate().execute(hcb); return list1; } @Override public Long getAssessmentId(Long itemId) { final HibernateCallback<List<Long>> hcb = session -> { Query q = session.createQuery("select s.assessment.assessmentBaseId from SectionData s, ItemData i where s.id = i.section AND i.itemId = ?"); q.setLong(0, itemId); return q.list(); }; List<Long> list1 = getHibernateTemplate().execute(hcb); if (list1.isEmpty()){ return -1L; }else{ return (Long) list1.get(0); } } }
/* Copyright 2016 Goldman Sachs. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.gs.fw.common.mithra.test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.gs.fw.common.mithra.MithraManagerProvider; import com.gs.fw.common.mithra.MithraTransaction; import com.gs.fw.common.mithra.finder.Operation; import com.gs.fw.common.mithra.test.domain.*; import java.sql.Timestamp; import java.text.SimpleDateFormat; public class TestPeerToPeerMithraTestCase extends PeerToPeerMithraServerTestCase { public static final Logger logger = LoggerFactory.getLogger(TestPeerToPeerMithraTestCase.class); private static final SimpleDateFormat timestampFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS"); protected Class[] getRestrictedClassList() { return new Class[] { Order.class, OrderItem.class, ExchangeRate.class, Employee.class, Product.class, TinyBalance.class, Contract.class }; } public void testPeerToPeerInsert() throws Exception { int updateClassCount = OrderFinder.getMithraObjectPortal().getPerClassUpdateCountHolder().getUpdateCount(); int orderId = 999999; Operation op = OrderFinder.orderId().eq(orderId); Order order0 = OrderFinder.findOne(op); assertNull(order0); this.getRemoteSlaveVm().executeMethod("peerInsertOrder", new Class[]{int.class}, new Object[]{new Integer(orderId)}); waitForMessages(updateClassCount, OrderFinder.getMithraObjectPortal()); Order order1 = OrderFinder.findOne(op); assertNotNull(order1); } public void testBatchInsertNotification() throws Exception { int 
updateClassCount = OrderFinder.getMithraObjectPortal().getPerClassUpdateCountHolder().getUpdateCount(); Operation op = OrderFinder.orderId().greaterThanEquals(990000); OrderList list0 = new OrderList(op); assertEquals(0, list0.size()); this.getRemoteSlaveVm().executeMethod("peerInsertOrderList", new Class[]{int.class, int.class}, new Object[]{new Integer(997000), new Integer(1500)}); waitForMessages(updateClassCount, OrderFinder.getMithraObjectPortal()); OrderList list1 = new OrderList(op); assertEquals(1500, list1.size()); updateClassCount = OrderFinder.getMithraObjectPortal().getPerClassUpdateCountHolder().getUpdateCount(); this.getRemoteSlaveVm().executeMethod("peerInsertOrderList", new Class[]{int.class, int.class}, new Object[]{new Integer(998500), new Integer(1500)}); waitForMessages(updateClassCount, OrderFinder.getMithraObjectPortal()); OrderList list2 = new OrderList(op); assertEquals(3000, list2.size()); } public void testInsertNotificationWithCompoundPrimaryKey() throws Exception { int updateClassCount = ExchangeRateFinder.getMithraObjectPortal().getPerClassUpdateCountHolder().getUpdateCount(); Timestamp ts = new Timestamp(System.currentTimeMillis()); Operation op = ExchangeRateFinder.acmapCode().eq("A"); op = op.and(ExchangeRateFinder.source().eq(11)); op = op.and(ExchangeRateFinder.currency().eq("USD")); op = op.and(ExchangeRateFinder.date().eq(ts)); ExchangeRateList list0 = new ExchangeRateList(op); assertEquals(0, list0.size()); this.getRemoteSlaveVm().executeMethod("peerInsertExchangeRate", new Class[]{String.class, String.class, int.class, Timestamp.class, double.class}, new Object[]{"A", "USD", new Integer(11), ts, new Double(1.40)}); waitForMessages(updateClassCount, ExchangeRateFinder.getMithraObjectPortal()); ExchangeRateList list1 = new ExchangeRateList(op); assertEquals(1, list1.size()); } public void testUpdateNotificationWithCompoundPrimaryKey() throws Exception { int updateClassCount = 
ExchangeRateFinder.getMithraObjectPortal().getPerClassUpdateCountHolder().getUpdateCount(); Timestamp ts = new Timestamp(timestampFormat.parse("2004-09-30 18:30:00.0").getTime()); Operation op = ExchangeRateFinder.acmapCode().eq("A"); op = op.and(ExchangeRateFinder.source().eq(10)); op = op.and(ExchangeRateFinder.currency().eq("USD")); op = op.and(ExchangeRateFinder.date().eq(ts)); ExchangeRate exchangeRate0 = ExchangeRateFinder.findOne(op); assertNotNull(exchangeRate0); assertEquals(1.0, exchangeRate0.getExchangeRate(), 0.0); this.getRemoteSlaveVm().executeMethod("peerUpdateExchangeRate", new Class[]{String.class, String.class, int.class, Timestamp.class, double.class}, new Object[]{"A", "USD", new Integer(10), ts, new Double(1.40)}); waitForMessages(updateClassCount, ExchangeRateFinder.getMithraObjectPortal()); ExchangeRate exchangeRate1 = ExchangeRateFinder.findOne(op); assertNotNull(exchangeRate1); assertEquals(1.40, exchangeRate1.getExchangeRate(), 0.0); } public void testBatchInsertDatedObjects() throws Exception { int updateClassCount = TinyBalanceFinder.getMithraObjectPortal().getPerClassUpdateCountHolder().getUpdateCount(); Operation op = TinyBalanceFinder.acmapCode().eq("A"); op = op.and(TinyBalanceFinder.balanceId().greaterThan(1000)); op = op.and(TinyBalanceFinder.businessDate().eq(InfinityTimestamp.getParaInfinity())); op = op.and(TinyBalanceFinder.processingDate().eq(InfinityTimestamp.getParaInfinity())); TinyBalanceList list = new TinyBalanceList(op); assertEquals(0, list.size()); this.getRemoteSlaveVm().executeMethod("peerInsertTinyBalanceList", new Class[]{String.class, int.class, int.class}, new Object[]{"A", new Integer(1001), new Integer(1250)}); waitForMessages(updateClassCount, TinyBalanceFinder.getMithraObjectPortal()); TinyBalanceList list1 = new TinyBalanceList(op); assertEquals(1250, list1.size()); } public void testDatedObjectInsert() throws Exception { int updateClassCount = 
TinyBalanceFinder.getMithraObjectPortal().getPerClassUpdateCountHolder().getUpdateCount(); int balanceId = 1235; String sourceAttribute = "B"; Timestamp businessDate0 = new Timestamp(timestampFormat.parse("2005-12-05 12:00:00.0").getTime()); Operation op0 = TinyBalanceFinder.acmapCode().eq(sourceAttribute); op0 = op0.and(TinyBalanceFinder.balanceId().eq(balanceId)); op0 = op0.and(TinyBalanceFinder.businessDate().eq(businessDate0)); TinyBalanceList list0 = new TinyBalanceList(op0); assertEquals(0, list0.size()); this.getRemoteSlaveVm().executeMethod("serverInsertNewTinyBalance", new Class[]{String.class, int.class, Timestamp.class}, new Object[]{sourceAttribute, new Integer(balanceId), businessDate0}); waitForMessages(updateClassCount, TinyBalanceFinder.getMithraObjectPortal()); TinyBalanceList list1 = new TinyBalanceList(op0); assertEquals(1, list1.size()); } // class com.gs.fw.common.mithra.test.domain.TinyBalance // balanceId,quantity,businessDateFrom,businessDateTo,processingDateFrom,processingDateTo // 1234,100.00,"2005-12-01 18:30:00.0","9999-12-01 23:59:00.0","2005-12-01 19:30:00.0","2005-12-15 18:49:00.0" // 1234,100.00,"2005-12-01 18:30:00.0","2005-12-15 18:30:00.0","2005-12-15 18:49:00.0","9999-12-01 23:59:00.0" // 1234,200.00,"2005-12-15 18:30:00.0","9999-12-01 23:59:00.0","2005-12-15 18:30:00.0","9999-12-01 23:59:00.0" public void testDatedObjectUpdate() throws Exception { int updateClassCount = TinyBalanceFinder.getMithraObjectPortal().getPerClassUpdateCountHolder().getUpdateCount(); int balanceId = 1234; String sourceAttribute = "B"; Timestamp businessDate0 = new Timestamp(timestampFormat.parse("2005-12-10 12:00:00.0").getTime()); Operation op0 = TinyBalanceFinder.acmapCode().eq(sourceAttribute); op0 = op0.and(TinyBalanceFinder.balanceId().eq(balanceId)); op0 = op0.and(TinyBalanceFinder.businessDate().eq(businessDate0)); TinyBalanceList list0 = new TinyBalanceList(op0); assertEquals(1, list0.size()); TinyBalance balance0 = (TinyBalance)list0.get(0); 
assertEquals(1234, balance0.getBalanceId()); assertEquals(100.00, balance0.getQuantity(),0); this.getRemoteSlaveVm().executeMethod("serverUpdateTinyBalance", new Class[]{String.class, int.class, Timestamp.class, double.class}, new Object[]{sourceAttribute, new Integer(balanceId), businessDate0, new Double(150.00)}); waitForMessages(updateClassCount, TinyBalanceFinder.getMithraObjectPortal()); TinyBalanceList list1 = new TinyBalanceList(op0); assertEquals(1, list1.size()); TinyBalance balance1 = (TinyBalance)list0.get(0); assertEquals(1234, balance1.getBalanceId()); assertEquals(150.00, balance1.getQuantity(),0); } public void testDatedObjectIncrement() throws Exception { int updateClassCount = TinyBalanceFinder.getMithraObjectPortal().getPerClassUpdateCountHolder().getUpdateCount(); int balanceId = 1234; String sourceAttribute = "B"; Timestamp businessDate0 = new Timestamp(timestampFormat.parse("2005-12-10 12:00:00.0").getTime()); Operation op0 = TinyBalanceFinder.acmapCode().eq(sourceAttribute); op0 = op0.and(TinyBalanceFinder.balanceId().eq(balanceId)); op0 = op0.and(TinyBalanceFinder.businessDate().eq(businessDate0)); TinyBalanceList list0 = new TinyBalanceList(op0); assertEquals(1, list0.size()); TinyBalance balance0 = (TinyBalance)list0.get(0); assertEquals(1234, balance0.getBalanceId()); assertEquals(100.00, balance0.getQuantity(),0); this.getRemoteSlaveVm().executeMethod("serverIncrementTinyBalance", new Class[]{String.class, int.class, Timestamp.class, double.class}, new Object[]{sourceAttribute, new Integer(balanceId), businessDate0, new Double(150.00)}); waitForMessages(updateClassCount, TinyBalanceFinder.getMithraObjectPortal()); TinyBalanceList list1 = new TinyBalanceList(op0); assertEquals(1, list1.size()); TinyBalance balance1 = (TinyBalance)list0.get(0); assertEquals(1234, balance1.getBalanceId()); assertEquals(250.00, balance1.getQuantity(),0); } public void testDatedObjectAdjustments() throws Exception { //Insert initial balance for balance id 999 
of 100.00 on 1/1/2005 int updateClassCount = TinyBalanceFinder.getMithraObjectPortal().getPerClassUpdateCountHolder().getUpdateCount(); Timestamp currentTime = new Timestamp(System.currentTimeMillis()); Timestamp businessDate0 = new Timestamp(timestampFormat.parse("2005-12-05 12:00:00.0").getTime()); Timestamp businessDate1 = new Timestamp(timestampFormat.parse("2005-12-12 12:00:00.0").getTime()); Timestamp businessDate2 = new Timestamp(timestampFormat.parse("2005-12-16 12:00:00.0").getTime()); String sourceAttribute = "B"; int balanceId = 1234; //Get the balance on 12/05/2005 for balance 1234 Operation operation0 = TinyBalanceFinder.acmapCode().eq(sourceAttribute) .and(TinyBalanceFinder.balanceId().eq(balanceId)) .and(TinyBalanceFinder.businessDate().eq(businessDate0)) .and(TinyBalanceFinder.processingDate().eq(InfinityTimestamp.getParaInfinity())); TinyBalanceList list0 = new TinyBalanceList(operation0); assertEquals(1, list0.size()); TinyBalance balance0 = (TinyBalance)list0.get(0); assertEquals(100.00, balance0.getQuantity(), 0); //Get the balance on 12/12/2005 for balance 1234 Operation operation1 = TinyBalanceFinder.acmapCode().eq(sourceAttribute) .and(TinyBalanceFinder.balanceId().eq(balanceId)) .and(TinyBalanceFinder.businessDate().eq(businessDate1)) .and(TinyBalanceFinder.processingDate().eq(InfinityTimestamp.getParaInfinity())); TinyBalanceList list1 = new TinyBalanceList(operation1); assertEquals(1, list1.size()); TinyBalance balance1 = (TinyBalance)list1.get(0); assertEquals(100.00, balance1.getQuantity(), 0); //Get the balance on 12/16/2005 for balance 1234 Operation operation2 = TinyBalanceFinder.acmapCode().eq(sourceAttribute) .and(TinyBalanceFinder.balanceId().eq(balanceId)) .and(TinyBalanceFinder.businessDate().eq(businessDate2)) .and(TinyBalanceFinder.processingDate().eq(InfinityTimestamp.getParaInfinity())); TinyBalanceList list2 = new TinyBalanceList(operation2); assertEquals(1, list2.size()); TinyBalance balance2 = (TinyBalance)list2.get(0); 
assertEquals(200.00, balance2.getQuantity(), 0); //Get the balance on currentTime as of 12/16/2005 for balance 1234 Operation operation3 = TinyBalanceFinder.acmapCode().eq(sourceAttribute) .and(TinyBalanceFinder.balanceId().eq(balanceId)) .and(TinyBalanceFinder.businessDate().eq(businessDate2)) .and(TinyBalanceFinder.processingDate().eq(currentTime)); TinyBalanceList list3 = new TinyBalanceList(operation3); assertEquals(1, list3.size()); TinyBalance balance3 = (TinyBalance)list3.get(0); assertEquals(200.00, balance3.getQuantity(), 0); //Get the current balance Operation operation4 = TinyBalanceFinder.acmapCode().eq(sourceAttribute) .and(TinyBalanceFinder.balanceId().eq(balanceId)) .and(TinyBalanceFinder.businessDate().eq(InfinityTimestamp.getParaInfinity())) .and(TinyBalanceFinder.processingDate().eq(InfinityTimestamp.getParaInfinity())); TinyBalanceList list4 = new TinyBalanceList(operation4); assertEquals(1, list4.size()); TinyBalance balance4 = (TinyBalance)list4.get(0); assertEquals(200.00, balance4.getQuantity(), 0); //Oooppsss We just found out a trade that was done on 12/10/2005 that increased the balance by 50 Timestamp businessDate3 = new Timestamp(timestampFormat.parse("2005-12-10 18:30:00.0").getTime()); this.getRemoteSlaveVm().executeMethod("serverIncrementTinyBalance", new Class[]{String.class, int.class, Timestamp.class, double.class}, new Object[]{sourceAttribute, new Integer(balanceId), businessDate3, new Double(50)}); waitForMessages(updateClassCount, TinyBalanceFinder.getMithraObjectPortal()); //Get the balance on 12/05/2005 for balance 1234 TinyBalanceList list0a = new TinyBalanceList(operation0); assertEquals(1, list0a.size()); TinyBalance balance0a = (TinyBalance)list0.get(0); assertEquals(100.00, balance0a.getQuantity(), 0); //Get the balance on 12/12/2005 for balance 1234 TinyBalanceList list1a = new TinyBalanceList(operation1); assertEquals(1, list1a.size()); TinyBalance balance1a = (TinyBalance)list1a.get(0); assertEquals(150.00, 
balance1a.getQuantity(), 0); //Get the balance on 12/16/2005 for balance 1234 TinyBalanceList list2a = new TinyBalanceList(operation2); assertEquals(1, list2a.size()); TinyBalance balance2a = (TinyBalance)list2a.get(0); assertEquals(250.00, balance2a.getQuantity(), 0); //Get the balance on currentTime as of 12/16/2005 for balance 1234 TinyBalanceList list3a = new TinyBalanceList(operation3); assertEquals(1, list3a.size()); TinyBalance balance3a = (TinyBalance)list3a.get(0); assertEquals(200.00, balance3a.getQuantity(), 0); //Get the current balance TinyBalanceList list4a = new TinyBalanceList(operation4); assertEquals(1, list4a.size()); TinyBalance balance4a= (TinyBalance)list4a.get(0); assertEquals(250.00, balance4a.getQuantity(), 0); } public void peerInsertOrder(int orderId) { Order order = new Order(); order.setNullablePrimitiveAttributesToNull(); order.setOrderId(orderId); order.insert(); logger.info("Peer Inserted Order with ID "+orderId); } public void peerIncrementPreviousTinyBalance(String sourceAttribute, int balanceId, Timestamp businessDate, double newQuantity) { MithraTransaction tx = MithraManagerProvider.getMithraManager().startOrContinueTransaction(); TinyBalance tinyBalance = TinyBalanceFinder.findOne(TinyBalanceFinder.acmapCode().eq(sourceAttribute).and(TinyBalanceFinder.balanceId().eq(balanceId).and(TinyBalanceFinder.businessDate().eq(businessDate).and(TinyBalanceFinder.processingDate().eq(InfinityTimestamp.getParaInfinity()))))); tinyBalance.incrementQuantity(newQuantity); tx.commit(); } public void peerInsertOrderList(int initialOrderId, int listSize) { OrderList list = new OrderList(); Order order = null; for (int i = 0; i < listSize; i++) { order = new Order(); order.setOrderId(initialOrderId+i); order.setDescription("TestOrder"); list.add(order); } list.insertAll(); } public void peerInsertNewTinyBalance(String sourceAttribute, int balanceId, Timestamp businessDate) { MithraTransaction tx = 
MithraManagerProvider.getMithraManager().startOrContinueTransaction(); TinyBalance balance = new TinyBalance(businessDate); balance.setAcmapCode(sourceAttribute); balance.setBalanceId(balanceId); balance.insert(); tx.commit(); } public void peerInsertTinyBalanceList(String sourceAttribute, int initialBalanceId, int listSize) { TinyBalanceList list = new TinyBalanceList(); for(int i = 0; i < listSize; i++) { TinyBalance balance = new TinyBalance(new Timestamp(System.currentTimeMillis())); balance.setAcmapCode(sourceAttribute); balance.setBalanceId(initialBalanceId+i); balance.setQuantity(100.00+i); list.add(balance); } list.insertAll(); } public void serverInsertNewTinyBalance(String sourceAttribute, int balanceId, Timestamp businessDate) { MithraTransaction tx = MithraManagerProvider.getMithraManager().startOrContinueTransaction(); TinyBalance balance = new TinyBalance(businessDate); balance.setAcmapCode(sourceAttribute); balance.setBalanceId(balanceId); balance.insert(); tx.commit(); } public void serverUpdateTinyBalance(String sourceAttribute, int balanceId, Timestamp businessDate, double newQuantity) { MithraTransaction tx = MithraManagerProvider.getMithraManager().startOrContinueTransaction(); TinyBalance tinyBalance = TinyBalanceFinder.findOne(TinyBalanceFinder.acmapCode().eq(sourceAttribute).and(TinyBalanceFinder.balanceId().eq(balanceId).and(TinyBalanceFinder.businessDate().eq(businessDate).and(TinyBalanceFinder.processingDate().eq(InfinityTimestamp.getParaInfinity()))))); tinyBalance.setQuantity(newQuantity); tx.commit(); } public void serverIncrementUntilTinyBalance(String sourceAttribute, int balanceId, Timestamp businessDate, double newQuantity, Timestamp untilTimestamp) { MithraTransaction tx = MithraManagerProvider.getMithraManager().startOrContinueTransaction(); TinyBalance tinyBalance = 
TinyBalanceFinder.findOne(TinyBalanceFinder.acmapCode().eq(sourceAttribute).and(TinyBalanceFinder.balanceId().eq(balanceId).and(TinyBalanceFinder.businessDate().eq(businessDate).and(TinyBalanceFinder.processingDate().eq(InfinityTimestamp.getParaInfinity()))))); tinyBalance.incrementQuantityUntil(newQuantity, untilTimestamp); tx.commit(); } public void serverIncrementTinyBalance(String sourceAttribute, int balanceId, Timestamp businessDate, double newQuantity) { MithraTransaction tx = MithraManagerProvider.getMithraManager().startOrContinueTransaction(); TinyBalance tinyBalance = TinyBalanceFinder.findOne(TinyBalanceFinder.acmapCode().eq(sourceAttribute).and(TinyBalanceFinder.balanceId().eq(balanceId).and(TinyBalanceFinder.businessDate().eq(businessDate).and(TinyBalanceFinder.processingDate().eq(InfinityTimestamp.getParaInfinity()))))); tinyBalance.incrementQuantity(newQuantity); tx.commit(); } public void peerInsertExchangeRate(String acmapCode, String currency, int source, Timestamp date, double rate) { ExchangeRate erate = new ExchangeRate(); erate.setAcmapCode(acmapCode); erate.setCurrency(currency); erate.setSource(source); erate.setDate(date); erate.setExchangeRate(rate); erate.insert(); } public void peerUpdateExchangeRate(String acmapCode, String currency, int source, Timestamp date, double rate) { Operation op = ExchangeRateFinder.acmapCode().eq(acmapCode); op = op.and(ExchangeRateFinder.currency().eq(currency)); op = op.and(ExchangeRateFinder.source().eq(source)); op = op.and(ExchangeRateFinder.date().eq(date)); ExchangeRate erate = ExchangeRateFinder.findOne(op); erate.setExchangeRate(rate); } }
/* * Copyright (c) 2014 Simon Robinson * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package qr.cloud.qrpedia; import qr.cloud.util.BackDetectorRelativeLayout; import qr.cloud.util.QRCloudDatabase; import qr.cloud.util.QRCloudProvider; import qr.cloud.util.QRCloudUtils; import qr.cloud.util.Typefaces; import android.app.AlertDialog; import android.content.ContentValues; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.os.Bundle; import android.text.Editable; import android.text.TextUtils; import android.text.TextWatcher; import android.view.inputmethod.InputMethodManager; import android.widget.EditText; import android.widget.Toast; import com.actionbarsherlock.app.ActionBar; import com.actionbarsherlock.app.SherlockFragmentActivity; import com.actionbarsherlock.view.Menu; import com.actionbarsherlock.view.MenuItem; public class SavedTextEditorActivity extends SherlockFragmentActivity { EditText mMessageText; boolean initialMessageSet; int mMaxMessageLength; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_message_editor); // set up tabs and action bar hints ActionBar actionBar = getSupportActionBar(); actionBar.setDisplayShowTitleEnabled(true); actionBar.setDisplayHomeAsUpEnabled(true); actionBar.setDisplayUseLogoEnabled(true); BackDetectorRelativeLayout.setSearchActivity(this); mMessageText = (EditText) 
findViewById(R.id.message_text); mMessageText.setTypeface(Typefaces.get(SavedTextEditorActivity.this, getString(R.string.default_font))); // load an initial message, if requested final Intent launchIntent = getIntent(); if (launchIntent != null) { String action = launchIntent.getAction(); String type = launchIntent.getType(); if (Intent.ACTION_SEND.equals(action) && "text/plain".equals(type)) { String sharedText = launchIntent.getStringExtra(Intent.EXTRA_TEXT); if (sharedText != null) { mMessageText.append(sharedText); // no need to check length - EditText does so for us initialMessageSet = true; } } else { String initialMessage = launchIntent.getStringExtra(QRCloudUtils.DATABASE_PROP_MESSAGE); if (initialMessage != null) { mMessageText.append(initialMessage); // no need to check length - EditText does so for us initialMessageSet = true; } } } // set up the message length in the title bar mMaxMessageLength = getResources().getInteger(R.integer.max_message_characters); actionBar.setTitle(getString(R.string.title_clipping_editor, (mMaxMessageLength - mMessageText.getText() .length()))); } @Override protected void onResume() { super.onResume(); if (mMessageText != null) { // we want to get notifications when the text is changed (but after adding existing text in onCreate) mMessageText.addTextChangedListener(mTextWatcher); } } @Override protected void onPause() { if (mMessageText != null) { // we don't want to get the notification that the text was removed from the window on pause or destroy mMessageText.removeTextChangedListener(mTextWatcher); } super.onPause(); } @Override public void onBackPressed() { hideKeyboard(); if (mMessageText.length() > 0 && mMessageText.getText().toString().trim().length() > 0) { AlertDialog.Builder builder = new AlertDialog.Builder(SavedTextEditorActivity.this); builder.setTitle(R.string.new_message); builder.setMessage(R.string.save_message); builder.setIcon(android.R.drawable.ic_dialog_alert); 
builder.setNegativeButton(R.string.btn_discard, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int whichButton) { finish(); } }); builder.setPositiveButton(R.string.btn_save, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int whichButton) { saveMessage(); } }); builder.show(); return; } super.onBackPressed(); } private void hideKeyboard() { if (mMessageText != null) { InputMethodManager inputMethodManager = (InputMethodManager) getSystemService(Context.INPUT_METHOD_SERVICE); inputMethodManager.hideSoftInputFromWindow(mMessageText.getWindowToken(), 0); } } @Override public boolean onCreateOptionsMenu(Menu menu) { getSupportMenuInflater().inflate(R.menu.activity_clipping_editor, menu); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { switch (item.getItemId()) { case android.R.id.home: onBackPressed(); return true; case R.id.menu_save_clipping: saveMessage(); return true; default: return super.onOptionsItemSelected(item); } } private void saveMessage() { String messageText = mMessageText.getText().toString(); if (TextUtils.isEmpty(messageText)) { return; } if (messageText.trim().length() <= 0) { return; } mMessageText.setEnabled(false); // insert into the database ContentValues messageData = new ContentValues(); messageData.put(QRCloudDatabase.COL_MESSAGE, messageText); messageData.put(QRCloudDatabase.COL_DATE, System.currentTimeMillis()); getContentResolver().insert(QRCloudProvider.CONTENT_URI_MESSAGES, messageData); if (initialMessageSet) { // if we launched from an existing item make sure they know the message was saved Toast.makeText(SavedTextEditorActivity.this, R.string.item_added_to_clippings, Toast.LENGTH_SHORT).show(); } finish(); } private TextWatcher mTextWatcher = new TextWatcher() { @Override public void onTextChanged(CharSequence s, int start, int before, int count) { 
getSupportActionBar().setTitle(getString(R.string.title_clipping_editor, (mMaxMessageLength - s.length()))); } @Override public void beforeTextChanged(CharSequence s, int start, int count, int after) { } @Override public void afterTextChanged(Editable s) { } }; }
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.dialogflow.v2beta1; import static io.grpc.MethodDescriptor.generateFullMethodName; /** * * * <pre> * Service for managing [Agents][google.cloud.dialogflow.v2beta1.Agent]. * </pre> */ @javax.annotation.Generated( value = "by gRPC proto compiler", comments = "Source: google/cloud/dialogflow/v2beta1/agent.proto") @io.grpc.stub.annotations.GrpcGenerated public final class AgentsGrpc { private AgentsGrpc() {} public static final String SERVICE_NAME = "google.cloud.dialogflow.v2beta1.Agents"; // Static method descriptors that strictly reflect the proto. 
private static volatile io.grpc.MethodDescriptor< com.google.cloud.dialogflow.v2beta1.GetAgentRequest, com.google.cloud.dialogflow.v2beta1.Agent> getGetAgentMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "GetAgent", requestType = com.google.cloud.dialogflow.v2beta1.GetAgentRequest.class, responseType = com.google.cloud.dialogflow.v2beta1.Agent.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.cloud.dialogflow.v2beta1.GetAgentRequest, com.google.cloud.dialogflow.v2beta1.Agent> getGetAgentMethod() { io.grpc.MethodDescriptor< com.google.cloud.dialogflow.v2beta1.GetAgentRequest, com.google.cloud.dialogflow.v2beta1.Agent> getGetAgentMethod; if ((getGetAgentMethod = AgentsGrpc.getGetAgentMethod) == null) { synchronized (AgentsGrpc.class) { if ((getGetAgentMethod = AgentsGrpc.getGetAgentMethod) == null) { AgentsGrpc.getGetAgentMethod = getGetAgentMethod = io.grpc.MethodDescriptor .<com.google.cloud.dialogflow.v2beta1.GetAgentRequest, com.google.cloud.dialogflow.v2beta1.Agent> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GetAgent")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.dialogflow.v2beta1.GetAgentRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.dialogflow.v2beta1.Agent.getDefaultInstance())) .setSchemaDescriptor(new AgentsMethodDescriptorSupplier("GetAgent")) .build(); } } } return getGetAgentMethod; } private static volatile io.grpc.MethodDescriptor< com.google.cloud.dialogflow.v2beta1.SetAgentRequest, com.google.cloud.dialogflow.v2beta1.Agent> getSetAgentMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "SetAgent", requestType = com.google.cloud.dialogflow.v2beta1.SetAgentRequest.class, responseType = 
com.google.cloud.dialogflow.v2beta1.Agent.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.cloud.dialogflow.v2beta1.SetAgentRequest, com.google.cloud.dialogflow.v2beta1.Agent> getSetAgentMethod() { io.grpc.MethodDescriptor< com.google.cloud.dialogflow.v2beta1.SetAgentRequest, com.google.cloud.dialogflow.v2beta1.Agent> getSetAgentMethod; if ((getSetAgentMethod = AgentsGrpc.getSetAgentMethod) == null) { synchronized (AgentsGrpc.class) { if ((getSetAgentMethod = AgentsGrpc.getSetAgentMethod) == null) { AgentsGrpc.getSetAgentMethod = getSetAgentMethod = io.grpc.MethodDescriptor .<com.google.cloud.dialogflow.v2beta1.SetAgentRequest, com.google.cloud.dialogflow.v2beta1.Agent> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "SetAgent")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.dialogflow.v2beta1.SetAgentRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.dialogflow.v2beta1.Agent.getDefaultInstance())) .setSchemaDescriptor(new AgentsMethodDescriptorSupplier("SetAgent")) .build(); } } } return getSetAgentMethod; } private static volatile io.grpc.MethodDescriptor< com.google.cloud.dialogflow.v2beta1.DeleteAgentRequest, com.google.protobuf.Empty> getDeleteAgentMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "DeleteAgent", requestType = com.google.cloud.dialogflow.v2beta1.DeleteAgentRequest.class, responseType = com.google.protobuf.Empty.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.cloud.dialogflow.v2beta1.DeleteAgentRequest, com.google.protobuf.Empty> getDeleteAgentMethod() { io.grpc.MethodDescriptor< com.google.cloud.dialogflow.v2beta1.DeleteAgentRequest, com.google.protobuf.Empty> getDeleteAgentMethod; if 
((getDeleteAgentMethod = AgentsGrpc.getDeleteAgentMethod) == null) { synchronized (AgentsGrpc.class) { if ((getDeleteAgentMethod = AgentsGrpc.getDeleteAgentMethod) == null) { AgentsGrpc.getDeleteAgentMethod = getDeleteAgentMethod = io.grpc.MethodDescriptor .<com.google.cloud.dialogflow.v2beta1.DeleteAgentRequest, com.google.protobuf.Empty> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "DeleteAgent")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.dialogflow.v2beta1.DeleteAgentRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.protobuf.Empty.getDefaultInstance())) .setSchemaDescriptor(new AgentsMethodDescriptorSupplier("DeleteAgent")) .build(); } } } return getDeleteAgentMethod; } private static volatile io.grpc.MethodDescriptor< com.google.cloud.dialogflow.v2beta1.SearchAgentsRequest, com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse> getSearchAgentsMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "SearchAgents", requestType = com.google.cloud.dialogflow.v2beta1.SearchAgentsRequest.class, responseType = com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.cloud.dialogflow.v2beta1.SearchAgentsRequest, com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse> getSearchAgentsMethod() { io.grpc.MethodDescriptor< com.google.cloud.dialogflow.v2beta1.SearchAgentsRequest, com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse> getSearchAgentsMethod; if ((getSearchAgentsMethod = AgentsGrpc.getSearchAgentsMethod) == null) { synchronized (AgentsGrpc.class) { if ((getSearchAgentsMethod = AgentsGrpc.getSearchAgentsMethod) == null) { AgentsGrpc.getSearchAgentsMethod = getSearchAgentsMethod = io.grpc.MethodDescriptor 
.<com.google.cloud.dialogflow.v2beta1.SearchAgentsRequest, com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "SearchAgents")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.dialogflow.v2beta1.SearchAgentsRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse .getDefaultInstance())) .setSchemaDescriptor(new AgentsMethodDescriptorSupplier("SearchAgents")) .build(); } } } return getSearchAgentsMethod; } private static volatile io.grpc.MethodDescriptor< com.google.cloud.dialogflow.v2beta1.TrainAgentRequest, com.google.longrunning.Operation> getTrainAgentMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "TrainAgent", requestType = com.google.cloud.dialogflow.v2beta1.TrainAgentRequest.class, responseType = com.google.longrunning.Operation.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.cloud.dialogflow.v2beta1.TrainAgentRequest, com.google.longrunning.Operation> getTrainAgentMethod() { io.grpc.MethodDescriptor< com.google.cloud.dialogflow.v2beta1.TrainAgentRequest, com.google.longrunning.Operation> getTrainAgentMethod; if ((getTrainAgentMethod = AgentsGrpc.getTrainAgentMethod) == null) { synchronized (AgentsGrpc.class) { if ((getTrainAgentMethod = AgentsGrpc.getTrainAgentMethod) == null) { AgentsGrpc.getTrainAgentMethod = getTrainAgentMethod = io.grpc.MethodDescriptor .<com.google.cloud.dialogflow.v2beta1.TrainAgentRequest, com.google.longrunning.Operation> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "TrainAgent")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( 
com.google.cloud.dialogflow.v2beta1.TrainAgentRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.longrunning.Operation.getDefaultInstance())) .setSchemaDescriptor(new AgentsMethodDescriptorSupplier("TrainAgent")) .build(); } } } return getTrainAgentMethod; } private static volatile io.grpc.MethodDescriptor< com.google.cloud.dialogflow.v2beta1.ExportAgentRequest, com.google.longrunning.Operation> getExportAgentMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "ExportAgent", requestType = com.google.cloud.dialogflow.v2beta1.ExportAgentRequest.class, responseType = com.google.longrunning.Operation.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.cloud.dialogflow.v2beta1.ExportAgentRequest, com.google.longrunning.Operation> getExportAgentMethod() { io.grpc.MethodDescriptor< com.google.cloud.dialogflow.v2beta1.ExportAgentRequest, com.google.longrunning.Operation> getExportAgentMethod; if ((getExportAgentMethod = AgentsGrpc.getExportAgentMethod) == null) { synchronized (AgentsGrpc.class) { if ((getExportAgentMethod = AgentsGrpc.getExportAgentMethod) == null) { AgentsGrpc.getExportAgentMethod = getExportAgentMethod = io.grpc.MethodDescriptor .<com.google.cloud.dialogflow.v2beta1.ExportAgentRequest, com.google.longrunning.Operation> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "ExportAgent")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.dialogflow.v2beta1.ExportAgentRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.longrunning.Operation.getDefaultInstance())) .setSchemaDescriptor(new AgentsMethodDescriptorSupplier("ExportAgent")) .build(); } } } return getExportAgentMethod; } private static volatile io.grpc.MethodDescriptor< 
com.google.cloud.dialogflow.v2beta1.ImportAgentRequest, com.google.longrunning.Operation> getImportAgentMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "ImportAgent", requestType = com.google.cloud.dialogflow.v2beta1.ImportAgentRequest.class, responseType = com.google.longrunning.Operation.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.cloud.dialogflow.v2beta1.ImportAgentRequest, com.google.longrunning.Operation> getImportAgentMethod() { io.grpc.MethodDescriptor< com.google.cloud.dialogflow.v2beta1.ImportAgentRequest, com.google.longrunning.Operation> getImportAgentMethod; if ((getImportAgentMethod = AgentsGrpc.getImportAgentMethod) == null) { synchronized (AgentsGrpc.class) { if ((getImportAgentMethod = AgentsGrpc.getImportAgentMethod) == null) { AgentsGrpc.getImportAgentMethod = getImportAgentMethod = io.grpc.MethodDescriptor .<com.google.cloud.dialogflow.v2beta1.ImportAgentRequest, com.google.longrunning.Operation> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "ImportAgent")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.dialogflow.v2beta1.ImportAgentRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.longrunning.Operation.getDefaultInstance())) .setSchemaDescriptor(new AgentsMethodDescriptorSupplier("ImportAgent")) .build(); } } } return getImportAgentMethod; } private static volatile io.grpc.MethodDescriptor< com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest, com.google.longrunning.Operation> getRestoreAgentMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "RestoreAgent", requestType = com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest.class, responseType = com.google.longrunning.Operation.class, methodType = 
io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest, com.google.longrunning.Operation> getRestoreAgentMethod() { io.grpc.MethodDescriptor< com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest, com.google.longrunning.Operation> getRestoreAgentMethod; if ((getRestoreAgentMethod = AgentsGrpc.getRestoreAgentMethod) == null) { synchronized (AgentsGrpc.class) { if ((getRestoreAgentMethod = AgentsGrpc.getRestoreAgentMethod) == null) { AgentsGrpc.getRestoreAgentMethod = getRestoreAgentMethod = io.grpc.MethodDescriptor .<com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest, com.google.longrunning.Operation> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "RestoreAgent")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.longrunning.Operation.getDefaultInstance())) .setSchemaDescriptor(new AgentsMethodDescriptorSupplier("RestoreAgent")) .build(); } } } return getRestoreAgentMethod; } private static volatile io.grpc.MethodDescriptor< com.google.cloud.dialogflow.v2beta1.GetValidationResultRequest, com.google.cloud.dialogflow.v2beta1.ValidationResult> getGetValidationResultMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "GetValidationResult", requestType = com.google.cloud.dialogflow.v2beta1.GetValidationResultRequest.class, responseType = com.google.cloud.dialogflow.v2beta1.ValidationResult.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.cloud.dialogflow.v2beta1.GetValidationResultRequest, com.google.cloud.dialogflow.v2beta1.ValidationResult> getGetValidationResultMethod() { io.grpc.MethodDescriptor< 
com.google.cloud.dialogflow.v2beta1.GetValidationResultRequest, com.google.cloud.dialogflow.v2beta1.ValidationResult> getGetValidationResultMethod; if ((getGetValidationResultMethod = AgentsGrpc.getGetValidationResultMethod) == null) { synchronized (AgentsGrpc.class) { if ((getGetValidationResultMethod = AgentsGrpc.getGetValidationResultMethod) == null) { AgentsGrpc.getGetValidationResultMethod = getGetValidationResultMethod = io.grpc.MethodDescriptor .<com.google.cloud.dialogflow.v2beta1.GetValidationResultRequest, com.google.cloud.dialogflow.v2beta1.ValidationResult> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName( generateFullMethodName(SERVICE_NAME, "GetValidationResult")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.dialogflow.v2beta1.GetValidationResultRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.dialogflow.v2beta1.ValidationResult .getDefaultInstance())) .setSchemaDescriptor( new AgentsMethodDescriptorSupplier("GetValidationResult")) .build(); } } } return getGetValidationResultMethod; } /** Creates a new async stub that supports all call types for the service */ public static AgentsStub newStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<AgentsStub> factory = new io.grpc.stub.AbstractStub.StubFactory<AgentsStub>() { @java.lang.Override public AgentsStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new AgentsStub(channel, callOptions); } }; return AgentsStub.newStub(factory, channel); } /** * Creates a new blocking-style stub that supports unary and streaming output calls on the service */ public static AgentsBlockingStub newBlockingStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<AgentsBlockingStub> factory = new io.grpc.stub.AbstractStub.StubFactory<AgentsBlockingStub>() { @java.lang.Override public AgentsBlockingStub 
newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new AgentsBlockingStub(channel, callOptions); } }; return AgentsBlockingStub.newStub(factory, channel); } /** Creates a new ListenableFuture-style stub that supports unary calls on the service */ public static AgentsFutureStub newFutureStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<AgentsFutureStub> factory = new io.grpc.stub.AbstractStub.StubFactory<AgentsFutureStub>() { @java.lang.Override public AgentsFutureStub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new AgentsFutureStub(channel, callOptions); } }; return AgentsFutureStub.newStub(factory, channel); } /** * * * <pre> * Service for managing [Agents][google.cloud.dialogflow.v2beta1.Agent]. * </pre> */ public abstract static class AgentsImplBase implements io.grpc.BindableService { /** * * * <pre> * Retrieves the specified agent. * </pre> */ public void getAgent( com.google.cloud.dialogflow.v2beta1.GetAgentRequest request, io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.v2beta1.Agent> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getGetAgentMethod(), responseObserver); } /** * * * <pre> * Creates/updates the specified agent. * Note: You should always train an agent prior to sending it queries. See the * [training * documentation](https://cloud.google.com/dialogflow/es/docs/training). * </pre> */ public void setAgent( com.google.cloud.dialogflow.v2beta1.SetAgentRequest request, io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.v2beta1.Agent> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getSetAgentMethod(), responseObserver); } /** * * * <pre> * Deletes the specified agent. 
* </pre> */ public void deleteAgent( com.google.cloud.dialogflow.v2beta1.DeleteAgentRequest request, io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getDeleteAgentMethod(), responseObserver); } /** * * * <pre> * Returns the list of agents. * Since there is at most one conversational agent per project, this method is * useful primarily for listing all agents across projects the caller has * access to. One can achieve that with a wildcard project collection id "-". * Refer to [List * Sub-Collections](https://cloud.google.com/apis/design/design_patterns#list_sub-collections). * </pre> */ public void searchAgents( com.google.cloud.dialogflow.v2beta1.SearchAgentsRequest request, io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getSearchAgentsMethod(), responseObserver); } /** * * * <pre> * Trains the specified agent. * This method is a [long-running * operation](https://cloud.google.com/dialogflow/es/docs/how/long-running-operations). * The returned `Operation` type has the following method-specific fields: * - `metadata`: An empty [Struct * message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#struct) * - `response`: An [Empty * message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#empty) * Note: You should always train an agent prior to sending it queries. See the * [training * documentation](https://cloud.google.com/dialogflow/es/docs/training). * </pre> */ public void trainAgent( com.google.cloud.dialogflow.v2beta1.TrainAgentRequest request, io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getTrainAgentMethod(), responseObserver); } /** * * * <pre> * Exports the specified agent to a ZIP file. 
* This method is a [long-running * operation](https://cloud.google.com/dialogflow/es/docs/how/long-running-operations). * The returned `Operation` type has the following method-specific fields: * - `metadata`: An empty [Struct * message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#struct) * - `response`: [ExportAgentResponse][google.cloud.dialogflow.v2beta1.ExportAgentResponse] * </pre> */ public void exportAgent( com.google.cloud.dialogflow.v2beta1.ExportAgentRequest request, io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getExportAgentMethod(), responseObserver); } /** * * * <pre> * Imports the specified agent from a ZIP file. * Uploads new intents and entity types without deleting the existing ones. * Intents and entity types with the same name are replaced with the new * versions from [ImportAgentRequest][google.cloud.dialogflow.v2beta1.ImportAgentRequest]. After the import, the imported draft * agent will be trained automatically (unless disabled in agent settings). * However, once the import is done, training may not be completed yet. Please * call [TrainAgent][google.cloud.dialogflow.v2beta1.Agents.TrainAgent] and wait for the operation it returns in order to train * explicitly. * This method is a [long-running * operation](https://cloud.google.com/dialogflow/es/docs/how/long-running-operations). * The returned `Operation` type has the following method-specific fields: * - `metadata`: An empty [Struct * message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#struct) * - `response`: An [Empty * message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#empty) * The operation only tracks when importing is complete, not when it is done * training. * Note: You should always train an agent prior to sending it queries. 
See the * [training * documentation](https://cloud.google.com/dialogflow/es/docs/training). * </pre> */ public void importAgent( com.google.cloud.dialogflow.v2beta1.ImportAgentRequest request, io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getImportAgentMethod(), responseObserver); } /** * * * <pre> * Restores the specified agent from a ZIP file. * Replaces the current agent version with a new one. All the intents and * entity types in the older version are deleted. After the restore, the * restored draft agent will be trained automatically (unless disabled in * agent settings). However, once the restore is done, training may not be * completed yet. Please call [TrainAgent][google.cloud.dialogflow.v2beta1.Agents.TrainAgent] and wait for the operation it * returns in order to train explicitly. * This method is a [long-running * operation](https://cloud.google.com/dialogflow/es/docs/how/long-running-operations). * The returned `Operation` type has the following method-specific fields: * - `metadata`: An empty [Struct * message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#struct) * - `response`: An [Empty * message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#empty) * The operation only tracks when restoring is complete, not when it is done * training. * Note: You should always train an agent prior to sending it queries. See the * [training * documentation](https://cloud.google.com/dialogflow/es/docs/training). * </pre> */ public void restoreAgent( com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest request, io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getRestoreAgentMethod(), responseObserver); } /** * * * <pre> * Gets agent validation result. 
Agent validation is performed during * training time and is updated automatically when training is completed. * </pre> */ public void getValidationResult( com.google.cloud.dialogflow.v2beta1.GetValidationResultRequest request, io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.v2beta1.ValidationResult> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getGetValidationResultMethod(), responseObserver); } @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() { return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor()) .addMethod( getGetAgentMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.cloud.dialogflow.v2beta1.GetAgentRequest, com.google.cloud.dialogflow.v2beta1.Agent>(this, METHODID_GET_AGENT))) .addMethod( getSetAgentMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.cloud.dialogflow.v2beta1.SetAgentRequest, com.google.cloud.dialogflow.v2beta1.Agent>(this, METHODID_SET_AGENT))) .addMethod( getDeleteAgentMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.cloud.dialogflow.v2beta1.DeleteAgentRequest, com.google.protobuf.Empty>(this, METHODID_DELETE_AGENT))) .addMethod( getSearchAgentsMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.cloud.dialogflow.v2beta1.SearchAgentsRequest, com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse>( this, METHODID_SEARCH_AGENTS))) .addMethod( getTrainAgentMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.cloud.dialogflow.v2beta1.TrainAgentRequest, com.google.longrunning.Operation>(this, METHODID_TRAIN_AGENT))) .addMethod( getExportAgentMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.cloud.dialogflow.v2beta1.ExportAgentRequest, com.google.longrunning.Operation>(this, METHODID_EXPORT_AGENT))) .addMethod( getImportAgentMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new 
MethodHandlers< com.google.cloud.dialogflow.v2beta1.ImportAgentRequest, com.google.longrunning.Operation>(this, METHODID_IMPORT_AGENT))) .addMethod( getRestoreAgentMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest, com.google.longrunning.Operation>(this, METHODID_RESTORE_AGENT))) .addMethod( getGetValidationResultMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.cloud.dialogflow.v2beta1.GetValidationResultRequest, com.google.cloud.dialogflow.v2beta1.ValidationResult>( this, METHODID_GET_VALIDATION_RESULT))) .build(); } } /** * * * <pre> * Service for managing [Agents][google.cloud.dialogflow.v2beta1.Agent]. * </pre> */ public static final class AgentsStub extends io.grpc.stub.AbstractAsyncStub<AgentsStub> { private AgentsStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected AgentsStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new AgentsStub(channel, callOptions); } /** * * * <pre> * Retrieves the specified agent. * </pre> */ public void getAgent( com.google.cloud.dialogflow.v2beta1.GetAgentRequest request, io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.v2beta1.Agent> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getGetAgentMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Creates/updates the specified agent. * Note: You should always train an agent prior to sending it queries. See the * [training * documentation](https://cloud.google.com/dialogflow/es/docs/training). 
* </pre> */ public void setAgent( com.google.cloud.dialogflow.v2beta1.SetAgentRequest request, io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.v2beta1.Agent> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getSetAgentMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Deletes the specified agent. * </pre> */ public void deleteAgent( com.google.cloud.dialogflow.v2beta1.DeleteAgentRequest request, io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getDeleteAgentMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Returns the list of agents. * Since there is at most one conversational agent per project, this method is * useful primarily for listing all agents across projects the caller has * access to. One can achieve that with a wildcard project collection id "-". * Refer to [List * Sub-Collections](https://cloud.google.com/apis/design/design_patterns#list_sub-collections). * </pre> */ public void searchAgents( com.google.cloud.dialogflow.v2beta1.SearchAgentsRequest request, io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getSearchAgentsMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Trains the specified agent. * This method is a [long-running * operation](https://cloud.google.com/dialogflow/es/docs/how/long-running-operations). * The returned `Operation` type has the following method-specific fields: * - `metadata`: An empty [Struct * message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#struct) * - `response`: An [Empty * message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#empty) * Note: You should always train an agent prior to sending it queries. 
See the * [training * documentation](https://cloud.google.com/dialogflow/es/docs/training). * </pre> */ public void trainAgent( com.google.cloud.dialogflow.v2beta1.TrainAgentRequest request, io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getTrainAgentMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Exports the specified agent to a ZIP file. * This method is a [long-running * operation](https://cloud.google.com/dialogflow/es/docs/how/long-running-operations). * The returned `Operation` type has the following method-specific fields: * - `metadata`: An empty [Struct * message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#struct) * - `response`: [ExportAgentResponse][google.cloud.dialogflow.v2beta1.ExportAgentResponse] * </pre> */ public void exportAgent( com.google.cloud.dialogflow.v2beta1.ExportAgentRequest request, io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getExportAgentMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Imports the specified agent from a ZIP file. * Uploads new intents and entity types without deleting the existing ones. * Intents and entity types with the same name are replaced with the new * versions from [ImportAgentRequest][google.cloud.dialogflow.v2beta1.ImportAgentRequest]. After the import, the imported draft * agent will be trained automatically (unless disabled in agent settings). * However, once the import is done, training may not be completed yet. Please * call [TrainAgent][google.cloud.dialogflow.v2beta1.Agents.TrainAgent] and wait for the operation it returns in order to train * explicitly. * This method is a [long-running * operation](https://cloud.google.com/dialogflow/es/docs/how/long-running-operations). 
* The returned `Operation` type has the following method-specific fields: * - `metadata`: An empty [Struct * message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#struct) * - `response`: An [Empty * message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#empty) * The operation only tracks when importing is complete, not when it is done * training. * Note: You should always train an agent prior to sending it queries. See the * [training * documentation](https://cloud.google.com/dialogflow/es/docs/training). * </pre> */ public void importAgent( com.google.cloud.dialogflow.v2beta1.ImportAgentRequest request, io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getImportAgentMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Restores the specified agent from a ZIP file. * Replaces the current agent version with a new one. All the intents and * entity types in the older version are deleted. After the restore, the * restored draft agent will be trained automatically (unless disabled in * agent settings). However, once the restore is done, training may not be * completed yet. Please call [TrainAgent][google.cloud.dialogflow.v2beta1.Agents.TrainAgent] and wait for the operation it * returns in order to train explicitly. * This method is a [long-running * operation](https://cloud.google.com/dialogflow/es/docs/how/long-running-operations). * The returned `Operation` type has the following method-specific fields: * - `metadata`: An empty [Struct * message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#struct) * - `response`: An [Empty * message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#empty) * The operation only tracks when restoring is complete, not when it is done * training. 
* Note: You should always train an agent prior to sending it queries. See the * [training * documentation](https://cloud.google.com/dialogflow/es/docs/training). * </pre> */ public void restoreAgent( com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest request, io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getRestoreAgentMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Gets agent validation result. Agent validation is performed during * training time and is updated automatically when training is completed. * </pre> */ public void getValidationResult( com.google.cloud.dialogflow.v2beta1.GetValidationResultRequest request, io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.v2beta1.ValidationResult> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getGetValidationResultMethod(), getCallOptions()), request, responseObserver); } } /** * * * <pre> * Service for managing [Agents][google.cloud.dialogflow.v2beta1.Agent]. * </pre> */ public static final class AgentsBlockingStub extends io.grpc.stub.AbstractBlockingStub<AgentsBlockingStub> { private AgentsBlockingStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected AgentsBlockingStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new AgentsBlockingStub(channel, callOptions); } /** * * * <pre> * Retrieves the specified agent. * </pre> */ public com.google.cloud.dialogflow.v2beta1.Agent getAgent( com.google.cloud.dialogflow.v2beta1.GetAgentRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getGetAgentMethod(), getCallOptions(), request); } /** * * * <pre> * Creates/updates the specified agent. * Note: You should always train an agent prior to sending it queries. 
See the * [training * documentation](https://cloud.google.com/dialogflow/es/docs/training). * </pre> */ public com.google.cloud.dialogflow.v2beta1.Agent setAgent( com.google.cloud.dialogflow.v2beta1.SetAgentRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getSetAgentMethod(), getCallOptions(), request); } /** * * * <pre> * Deletes the specified agent. * </pre> */ public com.google.protobuf.Empty deleteAgent( com.google.cloud.dialogflow.v2beta1.DeleteAgentRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getDeleteAgentMethod(), getCallOptions(), request); } /** * * * <pre> * Returns the list of agents. * Since there is at most one conversational agent per project, this method is * useful primarily for listing all agents across projects the caller has * access to. One can achieve that with a wildcard project collection id "-". * Refer to [List * Sub-Collections](https://cloud.google.com/apis/design/design_patterns#list_sub-collections). * </pre> */ public com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse searchAgents( com.google.cloud.dialogflow.v2beta1.SearchAgentsRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getSearchAgentsMethod(), getCallOptions(), request); } /** * * * <pre> * Trains the specified agent. * This method is a [long-running * operation](https://cloud.google.com/dialogflow/es/docs/how/long-running-operations). * The returned `Operation` type has the following method-specific fields: * - `metadata`: An empty [Struct * message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#struct) * - `response`: An [Empty * message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#empty) * Note: You should always train an agent prior to sending it queries. See the * [training * documentation](https://cloud.google.com/dialogflow/es/docs/training). 
* </pre> */ public com.google.longrunning.Operation trainAgent( com.google.cloud.dialogflow.v2beta1.TrainAgentRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getTrainAgentMethod(), getCallOptions(), request); } /** * * * <pre> * Exports the specified agent to a ZIP file. * This method is a [long-running * operation](https://cloud.google.com/dialogflow/es/docs/how/long-running-operations). * The returned `Operation` type has the following method-specific fields: * - `metadata`: An empty [Struct * message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#struct) * - `response`: [ExportAgentResponse][google.cloud.dialogflow.v2beta1.ExportAgentResponse] * </pre> */ public com.google.longrunning.Operation exportAgent( com.google.cloud.dialogflow.v2beta1.ExportAgentRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getExportAgentMethod(), getCallOptions(), request); } /** * * * <pre> * Imports the specified agent from a ZIP file. * Uploads new intents and entity types without deleting the existing ones. * Intents and entity types with the same name are replaced with the new * versions from [ImportAgentRequest][google.cloud.dialogflow.v2beta1.ImportAgentRequest]. After the import, the imported draft * agent will be trained automatically (unless disabled in agent settings). * However, once the import is done, training may not be completed yet. Please * call [TrainAgent][google.cloud.dialogflow.v2beta1.Agents.TrainAgent] and wait for the operation it returns in order to train * explicitly. * This method is a [long-running * operation](https://cloud.google.com/dialogflow/es/docs/how/long-running-operations). 
* The returned `Operation` type has the following method-specific fields: * - `metadata`: An empty [Struct * message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#struct) * - `response`: An [Empty * message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#empty) * The operation only tracks when importing is complete, not when it is done * training. * Note: You should always train an agent prior to sending it queries. See the * [training * documentation](https://cloud.google.com/dialogflow/es/docs/training). * </pre> */ public com.google.longrunning.Operation importAgent( com.google.cloud.dialogflow.v2beta1.ImportAgentRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getImportAgentMethod(), getCallOptions(), request); } /** * * * <pre> * Restores the specified agent from a ZIP file. * Replaces the current agent version with a new one. All the intents and * entity types in the older version are deleted. After the restore, the * restored draft agent will be trained automatically (unless disabled in * agent settings). However, once the restore is done, training may not be * completed yet. Please call [TrainAgent][google.cloud.dialogflow.v2beta1.Agents.TrainAgent] and wait for the operation it * returns in order to train explicitly. * This method is a [long-running * operation](https://cloud.google.com/dialogflow/es/docs/how/long-running-operations). * The returned `Operation` type has the following method-specific fields: * - `metadata`: An empty [Struct * message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#struct) * - `response`: An [Empty * message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#empty) * The operation only tracks when restoring is complete, not when it is done * training. * Note: You should always train an agent prior to sending it queries. 
See the * [training * documentation](https://cloud.google.com/dialogflow/es/docs/training). * </pre> */ public com.google.longrunning.Operation restoreAgent( com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getRestoreAgentMethod(), getCallOptions(), request); } /** * * * <pre> * Gets agent validation result. Agent validation is performed during * training time and is updated automatically when training is completed. * </pre> */ public com.google.cloud.dialogflow.v2beta1.ValidationResult getValidationResult( com.google.cloud.dialogflow.v2beta1.GetValidationResultRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getGetValidationResultMethod(), getCallOptions(), request); } } /** * * * <pre> * Service for managing [Agents][google.cloud.dialogflow.v2beta1.Agent]. * </pre> */ public static final class AgentsFutureStub extends io.grpc.stub.AbstractFutureStub<AgentsFutureStub> { private AgentsFutureStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected AgentsFutureStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new AgentsFutureStub(channel, callOptions); } /** * * * <pre> * Retrieves the specified agent. * </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.cloud.dialogflow.v2beta1.Agent> getAgent(com.google.cloud.dialogflow.v2beta1.GetAgentRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getGetAgentMethod(), getCallOptions()), request); } /** * * * <pre> * Creates/updates the specified agent. * Note: You should always train an agent prior to sending it queries. See the * [training * documentation](https://cloud.google.com/dialogflow/es/docs/training). 
* </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.cloud.dialogflow.v2beta1.Agent> setAgent(com.google.cloud.dialogflow.v2beta1.SetAgentRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getSetAgentMethod(), getCallOptions()), request); } /** * * * <pre> * Deletes the specified agent. * </pre> */ public com.google.common.util.concurrent.ListenableFuture<com.google.protobuf.Empty> deleteAgent(com.google.cloud.dialogflow.v2beta1.DeleteAgentRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getDeleteAgentMethod(), getCallOptions()), request); } /** * * * <pre> * Returns the list of agents. * Since there is at most one conversational agent per project, this method is * useful primarily for listing all agents across projects the caller has * access to. One can achieve that with a wildcard project collection id "-". * Refer to [List * Sub-Collections](https://cloud.google.com/apis/design/design_patterns#list_sub-collections). * </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse> searchAgents(com.google.cloud.dialogflow.v2beta1.SearchAgentsRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getSearchAgentsMethod(), getCallOptions()), request); } /** * * * <pre> * Trains the specified agent. * This method is a [long-running * operation](https://cloud.google.com/dialogflow/es/docs/how/long-running-operations). * The returned `Operation` type has the following method-specific fields: * - `metadata`: An empty [Struct * message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#struct) * - `response`: An [Empty * message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#empty) * Note: You should always train an agent prior to sending it queries. 
See the * [training * documentation](https://cloud.google.com/dialogflow/es/docs/training). * </pre> */ public com.google.common.util.concurrent.ListenableFuture<com.google.longrunning.Operation> trainAgent(com.google.cloud.dialogflow.v2beta1.TrainAgentRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getTrainAgentMethod(), getCallOptions()), request); } /** * * * <pre> * Exports the specified agent to a ZIP file. * This method is a [long-running * operation](https://cloud.google.com/dialogflow/es/docs/how/long-running-operations). * The returned `Operation` type has the following method-specific fields: * - `metadata`: An empty [Struct * message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#struct) * - `response`: [ExportAgentResponse][google.cloud.dialogflow.v2beta1.ExportAgentResponse] * </pre> */ public com.google.common.util.concurrent.ListenableFuture<com.google.longrunning.Operation> exportAgent(com.google.cloud.dialogflow.v2beta1.ExportAgentRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getExportAgentMethod(), getCallOptions()), request); } /** * * * <pre> * Imports the specified agent from a ZIP file. * Uploads new intents and entity types without deleting the existing ones. * Intents and entity types with the same name are replaced with the new * versions from [ImportAgentRequest][google.cloud.dialogflow.v2beta1.ImportAgentRequest]. After the import, the imported draft * agent will be trained automatically (unless disabled in agent settings). * However, once the import is done, training may not be completed yet. Please * call [TrainAgent][google.cloud.dialogflow.v2beta1.Agents.TrainAgent] and wait for the operation it returns in order to train * explicitly. * This method is a [long-running * operation](https://cloud.google.com/dialogflow/es/docs/how/long-running-operations). 
* The returned `Operation` type has the following method-specific fields: * - `metadata`: An empty [Struct * message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#struct) * - `response`: An [Empty * message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#empty) * The operation only tracks when importing is complete, not when it is done * training. * Note: You should always train an agent prior to sending it queries. See the * [training * documentation](https://cloud.google.com/dialogflow/es/docs/training). * </pre> */ public com.google.common.util.concurrent.ListenableFuture<com.google.longrunning.Operation> importAgent(com.google.cloud.dialogflow.v2beta1.ImportAgentRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getImportAgentMethod(), getCallOptions()), request); } /** * * * <pre> * Restores the specified agent from a ZIP file. * Replaces the current agent version with a new one. All the intents and * entity types in the older version are deleted. After the restore, the * restored draft agent will be trained automatically (unless disabled in * agent settings). However, once the restore is done, training may not be * completed yet. Please call [TrainAgent][google.cloud.dialogflow.v2beta1.Agents.TrainAgent] and wait for the operation it * returns in order to train explicitly. * This method is a [long-running * operation](https://cloud.google.com/dialogflow/es/docs/how/long-running-operations). * The returned `Operation` type has the following method-specific fields: * - `metadata`: An empty [Struct * message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#struct) * - `response`: An [Empty * message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#empty) * The operation only tracks when restoring is complete, not when it is done * training. 
 * Note: You should always train an agent prior to sending it queries. See the
 * [training
 * documentation](https://cloud.google.com/dialogflow/es/docs/training).
 * </pre>
 */
public com.google.common.util.concurrent.ListenableFuture<com.google.longrunning.Operation>
    restoreAgent(com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest request) {
  return io.grpc.stub.ClientCalls.futureUnaryCall(
      getChannel().newCall(getRestoreAgentMethod(), getCallOptions()), request);
}

/**
 *
 *
 * <pre>
 * Gets agent validation result. Agent validation is performed during
 * training time and is updated automatically when training is completed.
 * </pre>
 */
public com.google.common.util.concurrent.ListenableFuture<
        com.google.cloud.dialogflow.v2beta1.ValidationResult>
    getValidationResult(
        com.google.cloud.dialogflow.v2beta1.GetValidationResultRequest request) {
  return io.grpc.stub.ClientCalls.futureUnaryCall(
      getChannel().newCall(getGetValidationResultMethod(), getCallOptions()), request);
}
}

// Integer ids for each RPC of the Agents service. bindService() pairs each id
// with its request/response types when constructing a MethodHandlers instance,
// and MethodHandlers.invoke() switches on the id to route an incoming call to
// the matching AgentsImplBase method.
private static final int METHODID_GET_AGENT = 0;
private static final int METHODID_SET_AGENT = 1;
private static final int METHODID_DELETE_AGENT = 2;
private static final int METHODID_SEARCH_AGENTS = 3;
private static final int METHODID_TRAIN_AGENT = 4;
private static final int METHODID_EXPORT_AGENT = 5;
private static final int METHODID_IMPORT_AGENT = 6;
private static final int METHODID_RESTORE_AGENT = 7;
private static final int METHODID_GET_VALIDATION_RESULT = 8;

/**
 * Dispatch adapter that forwards a single RPC (identified by {@code methodId})
 * to the corresponding method on {@code serviceImpl}. One instance is created
 * per RPC in {@code bindService()}. It implements all four call-shape
 * interfaces, but only the unary {@code invoke} is reachable for this service.
 */
private static final class MethodHandlers<Req, Resp>
    implements io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
        io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
        io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
        io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
  // Target service implementation and the id of the RPC this handler serves.
  private final AgentsImplBase serviceImpl;
  private final int methodId;

  MethodHandlers(AgentsImplBase serviceImpl, int methodId) {
    this.serviceImpl = serviceImpl;
    this.methodId = methodId;
  }

  @java.lang.Override
  @java.lang.SuppressWarnings("unchecked")
public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) { switch (methodId) { case METHODID_GET_AGENT: serviceImpl.getAgent( (com.google.cloud.dialogflow.v2beta1.GetAgentRequest) request, (io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.v2beta1.Agent>) responseObserver); break; case METHODID_SET_AGENT: serviceImpl.setAgent( (com.google.cloud.dialogflow.v2beta1.SetAgentRequest) request, (io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.v2beta1.Agent>) responseObserver); break; case METHODID_DELETE_AGENT: serviceImpl.deleteAgent( (com.google.cloud.dialogflow.v2beta1.DeleteAgentRequest) request, (io.grpc.stub.StreamObserver<com.google.protobuf.Empty>) responseObserver); break; case METHODID_SEARCH_AGENTS: serviceImpl.searchAgents( (com.google.cloud.dialogflow.v2beta1.SearchAgentsRequest) request, (io.grpc.stub.StreamObserver< com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse>) responseObserver); break; case METHODID_TRAIN_AGENT: serviceImpl.trainAgent( (com.google.cloud.dialogflow.v2beta1.TrainAgentRequest) request, (io.grpc.stub.StreamObserver<com.google.longrunning.Operation>) responseObserver); break; case METHODID_EXPORT_AGENT: serviceImpl.exportAgent( (com.google.cloud.dialogflow.v2beta1.ExportAgentRequest) request, (io.grpc.stub.StreamObserver<com.google.longrunning.Operation>) responseObserver); break; case METHODID_IMPORT_AGENT: serviceImpl.importAgent( (com.google.cloud.dialogflow.v2beta1.ImportAgentRequest) request, (io.grpc.stub.StreamObserver<com.google.longrunning.Operation>) responseObserver); break; case METHODID_RESTORE_AGENT: serviceImpl.restoreAgent( (com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest) request, (io.grpc.stub.StreamObserver<com.google.longrunning.Operation>) responseObserver); break; case METHODID_GET_VALIDATION_RESULT: serviceImpl.getValidationResult( (com.google.cloud.dialogflow.v2beta1.GetValidationResultRequest) request, 
(io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.v2beta1.ValidationResult>) responseObserver); break; default: throw new AssertionError(); } } @java.lang.Override @java.lang.SuppressWarnings("unchecked") public io.grpc.stub.StreamObserver<Req> invoke( io.grpc.stub.StreamObserver<Resp> responseObserver) { switch (methodId) { default: throw new AssertionError(); } } } private abstract static class AgentsBaseDescriptorSupplier implements io.grpc.protobuf.ProtoFileDescriptorSupplier, io.grpc.protobuf.ProtoServiceDescriptorSupplier { AgentsBaseDescriptorSupplier() {} @java.lang.Override public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() { return com.google.cloud.dialogflow.v2beta1.AgentProto.getDescriptor(); } @java.lang.Override public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() { return getFileDescriptor().findServiceByName("Agents"); } } private static final class AgentsFileDescriptorSupplier extends AgentsBaseDescriptorSupplier { AgentsFileDescriptorSupplier() {} } private static final class AgentsMethodDescriptorSupplier extends AgentsBaseDescriptorSupplier implements io.grpc.protobuf.ProtoMethodDescriptorSupplier { private final String methodName; AgentsMethodDescriptorSupplier(String methodName) { this.methodName = methodName; } @java.lang.Override public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() { return getServiceDescriptor().findMethodByName(methodName); } } private static volatile io.grpc.ServiceDescriptor serviceDescriptor; public static io.grpc.ServiceDescriptor getServiceDescriptor() { io.grpc.ServiceDescriptor result = serviceDescriptor; if (result == null) { synchronized (AgentsGrpc.class) { result = serviceDescriptor; if (result == null) { serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME) .setSchemaDescriptor(new AgentsFileDescriptorSupplier()) .addMethod(getGetAgentMethod()) .addMethod(getSetAgentMethod()) 
.addMethod(getDeleteAgentMethod()) .addMethod(getSearchAgentsMethod()) .addMethod(getTrainAgentMethod()) .addMethod(getExportAgentMethod()) .addMethod(getImportAgentMethod()) .addMethod(getRestoreAgentMethod()) .addMethod(getGetValidationResultMethod()) .build(); } } } return result; } }
/* * The MIT License (MIT) * * Copyright (c) 2015 Piasy * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/ package com.github.piasy.handywidgets.centertitlesidebuttonbar; import android.content.Context; import android.content.res.ColorStateList; import android.content.res.TypedArray; import android.os.Build; import android.support.annotation.ColorInt; import android.support.annotation.DrawableRes; import android.support.annotation.NonNull; import android.util.AttributeSet; import android.util.TypedValue; import android.view.Gravity; import android.view.View; import android.view.ViewGroup; import android.widget.Button; import android.widget.ImageButton; import android.widget.RelativeLayout; import android.widget.TextView; import com.github.piasy.handywidgets.clearableedittext.ClearableEditText; import com.github.piasy.handywidgets.clearableedittext.OnEditorActionDoneListener; import com.github.piasy.handywidgets.clearableedittext.OnTextChangedListener; import com.transitionseverywhere.Fade; import com.transitionseverywhere.Slide; import com.transitionseverywhere.TransitionManager; import com.transitionseverywhere.TransitionSet; import rx.Observable; /** * Created by Piasy{github.com/Piasy} on 15/8/27. 
*/ public final class CenterTitleSideButtonBar extends RelativeLayout implements View.OnClickListener { private int mLayoutHeight = 44; private boolean mHasLeftButton = false; private int mLeftButtonId = -1; private boolean mLeftButtonShownDefault = true; private boolean mLeftButtonAsText = false; private String mLeftButtonText = "Left"; private ColorStateList mLeftButtonTextColor = null; private int mLeftButtonTextSize = 20; private @DrawableRes int mLeftButtonSrc = 0; private @DrawableRes int mLeftButtonBg = 0; private boolean mHasRightButton = false; private int mRightButtonId = -1; private boolean mRightButtonShownDefault = false; private boolean mRightButtonAsText = false; private String mRightButtonText = "Right"; private ColorStateList mRightButtonTextColor = null; private int mRightButtonTextSize = 20; private @DrawableRes int mRightButtonSrc = 0; private @DrawableRes int mRightButtonBg = 0; private int mCloseSearchViewId = -1; private boolean mRightButtonAsSearchView = false; private boolean mSearchViewDefaultShown = false; private @DrawableRes int mSearchViewBg = 0; private int mSearchViewHeight = ViewGroup.LayoutParams.MATCH_PARENT; private int mSearchViewMarginLeft = 0; private int mSearchViewMarginRight = 0; private String mCloseSearchViewText = ""; private ColorStateList mCloseSearchViewTextColor; private int mCloseSearchViewTextSize = 20; private boolean mHasTitle = true; private int mTitleId = -1; private String mTitle = ""; private @ColorInt int mTitleColor = 0xFF333333; private int mTitleSize = 20; private int mTitleGravity = 0; private boolean mHasDivider = false; private int mDividerId = -1; private int mDividerHeight = 2; private @ColorInt int mDividerColor = 0x19FFFFFF; private ImageButton mLeftImageButton = null; private Button mLeftButton = null; private ImageButton mRightImageButton = null; private Button mRightButton = null; private TextView mTitleTextView = null; private ClearableEditText mClearableEditText; private Button 
mCloseSearchButton; private View mDivider = null; private OnClickListener mLeftButtonClickListener; private OnClickListener mRightButtonClickListener; public CenterTitleSideButtonBar(@NonNull Context context) { this(context, null, 0); } public CenterTitleSideButtonBar(@NonNull Context context, AttributeSet attrs) { this(context, attrs, 0); } public CenterTitleSideButtonBar(@NonNull Context context, AttributeSet attrs, int defStyleAttr) { super(context, attrs, defStyleAttr); getLayoutAttrs(context, attrs); initAttrs(context, attrs, defStyleAttr); initChild(context, attrs, defStyleAttr); } private void getLayoutAttrs(@NonNull Context context, AttributeSet attrs) { int[] systemAttrs = { android.R.attr.layout_height }; TypedArray a = context.obtainStyledAttributes(attrs, systemAttrs); mLayoutHeight = a.getDimensionPixelSize(0, ViewGroup.LayoutParams.WRAP_CONTENT); a.recycle(); } private void initAttrs(@NonNull Context context, AttributeSet attrs, int defStyleAttr) { TypedArray a = context.getTheme() .obtainStyledAttributes(attrs, R.styleable.CenterTitleSideButtonBar, defStyleAttr, 0); mHasLeftButton = a.getBoolean(R.styleable.CenterTitleSideButtonBar_hasLeftButton, false); mLeftButtonId = a.getResourceId(R.styleable.CenterTitleSideButtonBar_leftButtonId, -1); mLeftButtonShownDefault = a.getBoolean(R.styleable.CenterTitleSideButtonBar_leftButtonShownDefault, true); mLeftButtonAsText = a.getBoolean(R.styleable.CenterTitleSideButtonBar_leftButtonAsText, false); mLeftButtonText = a.getString(R.styleable.CenterTitleSideButtonBar_leftButtonText); mLeftButtonTextColor = a.getColorStateList(R.styleable.CenterTitleSideButtonBar_leftButtonTextColor); mLeftButtonTextSize = (int) a.getDimension(R.styleable.CenterTitleSideButtonBar_leftButtonTextSize, 20); mLeftButtonSrc = a.getResourceId(R.styleable.CenterTitleSideButtonBar_leftButtonSrc, 0); mLeftButtonBg = a.getResourceId(R.styleable.CenterTitleSideButtonBar_leftButtonBg, 0); mHasRightButton = 
a.getBoolean(R.styleable.CenterTitleSideButtonBar_hasRightButton, false); mRightButtonId = a.getResourceId(R.styleable.CenterTitleSideButtonBar_rightButtonId, -1); mRightButtonShownDefault = a.getBoolean(R.styleable.CenterTitleSideButtonBar_rightButtonShownDefault, false); mRightButtonAsText = a.getBoolean(R.styleable.CenterTitleSideButtonBar_rightButtonAsText, false); mRightButtonText = a.getString(R.styleable.CenterTitleSideButtonBar_rightButtonText); mRightButtonTextColor = a.getColorStateList(R.styleable.CenterTitleSideButtonBar_rightButtonTextColor); mRightButtonTextSize = (int) a.getDimension(R.styleable.CenterTitleSideButtonBar_rightButtonTextSize, 20); mRightButtonSrc = a.getResourceId(R.styleable.CenterTitleSideButtonBar_rightButtonSrc, 0); mRightButtonBg = a.getResourceId(R.styleable.CenterTitleSideButtonBar_rightButtonBg, 0); mRightButtonAsSearchView = a.getBoolean(R.styleable.CenterTitleSideButtonBar_rightButtonAsSearchView, false); if (mRightButtonAsSearchView) { mCloseSearchViewId = a.getResourceId(R.styleable.CenterTitleSideButtonBar_closeSearchViewId, -1); mSearchViewDefaultShown = a.getBoolean(R.styleable.CenterTitleSideButtonBar_searchViewDefaultShown, false); mSearchViewBg = a.getResourceId(R.styleable.CenterTitleSideButtonBar_searchViewBg, 0); mSearchViewHeight = a.getDimensionPixelSize(R.styleable.CenterTitleSideButtonBar_searchViewHeight, ViewGroup.LayoutParams.MATCH_PARENT); mSearchViewMarginLeft = a.getDimensionPixelSize( R.styleable.CenterTitleSideButtonBar_searchViewMarginLeft, 0); mSearchViewMarginRight = a.getDimensionPixelSize( R.styleable.CenterTitleSideButtonBar_searchViewMarginRight, 0); mCloseSearchViewText = a.getString(R.styleable.CenterTitleSideButtonBar_closeSearchViewText); mCloseSearchViewTextColor = a.getColorStateList( R.styleable.CenterTitleSideButtonBar_closeSearchViewTextColor); mCloseSearchViewTextSize = (int) a.getDimension( R.styleable.CenterTitleSideButtonBar_closeSearchViewTextSize, 20); } mHasTitle = 
a.getBoolean(R.styleable.CenterTitleSideButtonBar_hasTitle, true); mTitleId = a.getResourceId(R.styleable.CenterTitleSideButtonBar_titleId, -1); mTitle = a.getString(R.styleable.CenterTitleSideButtonBar_centerTitle); mTitleColor = a.getColor(R.styleable.CenterTitleSideButtonBar_centerTitleTextColor, 0xFF333333); mTitleSize = (int) a.getDimension(R.styleable.CenterTitleSideButtonBar_centerTitleTextSize, 20); mTitleGravity = a.getInteger(R.styleable.CenterTitleSideButtonBar_centerTitleTextGravity, 0); mHasDivider = a.getBoolean(R.styleable.CenterTitleSideButtonBar_hasDivider, false); mDividerId = a.getResourceId(R.styleable.CenterTitleSideButtonBar_dividerId, -1); mDividerColor = a.getColor(R.styleable.CenterTitleSideButtonBar_dividerColor, 0x19FFFFFF); mDividerHeight = a.getDimensionPixelSize(R.styleable.CenterTitleSideButtonBar_dividerHeight, 2); a.recycle(); } private void initChild(Context context, AttributeSet attrs, int defStyleAttr) { if (mHasLeftButton) { LayoutParams params; if (mLayoutHeight != ViewGroup.LayoutParams.WRAP_CONTENT) { params = new LayoutParams(mLayoutHeight, mLayoutHeight); } else { params = new LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT); } params.addRule(CENTER_VERTICAL); if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) { params.addRule(ALIGN_PARENT_START); } else { params.addRule(ALIGN_PARENT_LEFT); } if (mLeftButtonAsText) { mLeftButton = new Button(context); mLeftButton.setId(mLeftButtonId); params.width = ViewGroup.LayoutParams.WRAP_CONTENT; mLeftButton.setLayoutParams(params); if (mLeftButtonBg != 0) { mLeftButton.setBackgroundResource(mLeftButtonBg); } mLeftButton.setText(mLeftButtonText); if (mLeftButtonTextColor != null) { mLeftButton.setTextColor(mLeftButtonTextColor); } mLeftButton.setTextSize(TypedValue.COMPLEX_UNIT_PX, mLeftButtonTextSize); mLeftButton.setOnClickListener(this); if (!mLeftButtonShownDefault) { mLeftButton.setVisibility(INVISIBLE); mIsLeftButtonShown = 
false; } addView(mLeftButton); } else if (mLeftButtonBg != 0 || mLeftButtonSrc != 0) { mLeftImageButton = new ImageButton(context); mLeftImageButton.setId(mLeftButtonId); mLeftImageButton.setLayoutParams(params); if (mLeftButtonBg != 0) { mLeftImageButton.setBackgroundResource(mLeftButtonBg); } if (mLeftButtonSrc != 0) { mLeftImageButton.setImageResource(mLeftButtonSrc); } mLeftImageButton.setOnClickListener(this); if (!mLeftButtonShownDefault) { mLeftImageButton.setVisibility(INVISIBLE); mIsLeftButtonShown = false; } addView(mLeftImageButton); } } if (mHasRightButton) { LayoutParams params; if (mLayoutHeight != ViewGroup.LayoutParams.WRAP_CONTENT) { params = new LayoutParams(mLayoutHeight, mLayoutHeight); } else { params = new LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT); } params.addRule(CENTER_VERTICAL); if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) { params.addRule(ALIGN_PARENT_END); } else { params.addRule(ALIGN_PARENT_RIGHT); } if (mRightButtonAsText) { mRightButton = new Button(context); mRightButton.setId(mRightButtonId); params.width = ViewGroup.LayoutParams.WRAP_CONTENT; mRightButton.setLayoutParams(params); if (mRightButtonBg != 0) { mRightButton.setBackgroundResource(mRightButtonBg); } mRightButton.setText(mRightButtonText); if (mRightButtonTextColor != null) { mRightButton.setTextColor(mRightButtonTextColor); } mRightButton.setTextSize(TypedValue.COMPLEX_UNIT_PX, mRightButtonTextSize); mRightButton.setOnClickListener(this); if (!mRightButtonShownDefault) { mRightButton.setVisibility(INVISIBLE); mIsRightButtonShown = false; } addView(mRightButton); } else if (mRightButtonBg != 0 || mRightButtonSrc != 0) { mRightImageButton = new ImageButton(context); mRightImageButton.setId(mRightButtonId); mRightImageButton.setLayoutParams(params); if (mRightButtonBg != 0) { mRightImageButton.setBackgroundResource(mRightButtonBg); } if (mRightButtonSrc != 0) { 
mRightImageButton.setImageResource(mRightButtonSrc); } mRightImageButton.setOnClickListener(this); if (!mRightButtonShownDefault) { mRightImageButton.setVisibility(INVISIBLE); mIsRightButtonShown = false; } addView(mRightImageButton); } } if (mHasTitle) { mTitleTextView = new TextView(context); mTitleTextView.setId(mTitleId); LayoutParams params = new LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT); if (mLeftImageButton != null) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) { params.addRule(END_OF, mLeftImageButton.getId()); } else { params.addRule(RIGHT_OF, mLeftImageButton.getId()); } } else if (mLeftButton != null) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) { params.addRule(END_OF, mLeftButton.getId()); } else { params.addRule(RIGHT_OF, mLeftButton.getId()); } } if (mRightImageButton != null) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) { params.addRule(START_OF, mRightImageButton.getId()); } else { params.addRule(LEFT_OF, mRightImageButton.getId()); } } else if (mRightButton != null) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) { params.addRule(START_OF, mRightButton.getId()); } else { params.addRule(LEFT_OF, mRightButton.getId()); } } mTitleTextView.setLayoutParams(params); mTitleTextView.setText(mTitle); mTitleTextView.setTextSize(TypedValue.COMPLEX_UNIT_PX, mTitleSize); mTitleTextView.setTextColor(mTitleColor); if (mTitleGravity == 1 && ((mLeftButton == null && mLeftImageButton == null) || ((mLeftButton != null || mLeftImageButton != null) && mLeftButtonId != -1))) { // left mTitleTextView.setGravity( (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH ? 
Gravity.START : Gravity.LEFT) | Gravity.CENTER_VERTICAL); } else if (mTitleGravity == 2 && ((mRightButton == null && mRightImageButton == null) || ((mRightButton != null || mRightImageButton != null) && mRightButtonId != -1))) { // right mTitleTextView.setGravity( (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH ? Gravity.END : Gravity.RIGHT) | Gravity.CENTER_VERTICAL); } else { mTitleTextView.setGravity(Gravity.CENTER); } addView(mTitleTextView); } if (mHasDivider) { mDivider = new View(context); mDivider.setId(mDividerId); LayoutParams params; params = new LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, mDividerHeight); params.addRule(ALIGN_PARENT_BOTTOM); mDivider.setLayoutParams(params); mDivider.setBackgroundColor(mDividerColor); addView(mDivider); } if (mRightButtonAsSearchView) { mClearableEditText = new ClearableEditText(context, attrs, defStyleAttr); mClearableEditText.setBackgroundResource(mSearchViewBg); mCloseSearchButton = new Button(context); mCloseSearchButton.setId(mCloseSearchViewId); LayoutParams params1 = new LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, mSearchViewHeight); if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) { params1.addRule(START_OF, mCloseSearchButton.getId()); } else { params1.addRule(LEFT_OF, mCloseSearchButton.getId()); } params1.leftMargin = mSearchViewMarginLeft; params1.rightMargin = mSearchViewMarginRight; params1.addRule(CENTER_VERTICAL); mClearableEditText.setLayoutParams(params1); mCloseSearchButton.setText(mCloseSearchViewText); mCloseSearchButton.setTextColor(mCloseSearchViewTextColor); mCloseSearchButton.setTextSize(TypedValue.COMPLEX_UNIT_PX, mCloseSearchViewTextSize); LayoutParams params; if (mLayoutHeight != ViewGroup.LayoutParams.WRAP_CONTENT) { params = new LayoutParams(mLayoutHeight, mLayoutHeight); } else { params = new LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT); } params.addRule(CENTER_VERTICAL); if (Build.VERSION.SDK_INT >= 
Build.VERSION_CODES.JELLY_BEAN_MR1) { params.addRule(ALIGN_PARENT_END); } else { params.addRule(ALIGN_PARENT_RIGHT); } params.width = ViewGroup.LayoutParams.WRAP_CONTENT; mCloseSearchButton.setLayoutParams(params); mCloseSearchButton.setBackgroundResource(0); if (mSearchViewDefaultShown) { showSearchView(); } else { mClearableEditText.setVisibility(GONE); mCloseSearchButton.setVisibility(GONE); } mCloseSearchButton.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { hideSearchView(); } }); addView(mClearableEditText); addView(mCloseSearchButton); } } public void showSearchView() { setEnterSearchAnimation(); if (mLeftButton != null) { mLeftButton.setVisibility(GONE); } if (mLeftImageButton != null) { mLeftImageButton.setVisibility(GONE); } if (mRightButton != null) { mRightButton.setVisibility(GONE); } if (mRightImageButton != null) { mRightImageButton.setVisibility(GONE); } if (mTitleTextView != null) { mTitleTextView.setVisibility(GONE); } mClearableEditText.showKeyboard(); mClearableEditText.setVisibility(VISIBLE); mCloseSearchButton.setVisibility(VISIBLE); } private void setEnterSearchAnimation() { TransitionSet transitionSet = new TransitionSet(); if (mClearableEditText != null) { transitionSet.addTransition( new Slide(Gravity.TOP).addTarget(mClearableEditText).setDuration(150)); } if (mCloseSearchButton != null) { transitionSet.addTransition(new Fade(Fade.IN).addTarget(mCloseSearchButton)); } if (mLeftButton != null) { transitionSet.addTransition(new Fade(Fade.OUT).addTarget(mLeftButton)); } if (mLeftImageButton != null) { transitionSet.addTransition(new Fade(Fade.OUT).addTarget(mLeftImageButton)); } if (mTitleTextView != null) { transitionSet.addTransition(new Fade(Fade.OUT).addTarget(mTitleTextView).setDuration( 150)); } if (mRightButton != null) { transitionSet.addTransition(new Fade(Fade.OUT).addTarget(mRightButton)); } if (mRightImageButton != null) { transitionSet.addTransition(new 
Fade(Fade.OUT).addTarget(mRightImageButton)); } TransitionManager.beginDelayedTransition(this, transitionSet); } public void hideSearchView() { setLeaveSearchAnimation(); if (mIsLeftButtonShown) { if (mLeftButton != null) { mLeftButton.setVisibility(VISIBLE); } else if (mLeftImageButton != null) { mLeftImageButton.setVisibility(VISIBLE); } } if (mIsRightButtonShown) { if (mRightButton != null) { mRightButton.setVisibility(VISIBLE); } else if (mRightImageButton != null) { mRightImageButton.setVisibility(VISIBLE); } } if (mTitleTextView != null) { mTitleTextView.setVisibility(VISIBLE); } mClearableEditText.hideKeyboard(); mClearableEditText.setText(""); mClearableEditText.setVisibility(GONE); mCloseSearchButton.setVisibility(GONE); } private void setLeaveSearchAnimation() { TransitionManager.beginDelayedTransition(this); } @Override public void onClick(@NonNull View v) { if (mLeftButton != null && v == mLeftButton && mLeftButtonClickListener != null) { mLeftButtonClickListener.onClick(v); } else if (mLeftImageButton != null && v == mLeftImageButton && mLeftButtonClickListener != null) { mLeftButtonClickListener.onClick(v); } else if (mRightButton != null && v == mRightButton) { if (mRightButtonClickListener != null) { mRightButtonClickListener.onClick(v); } if (mRightButtonAsSearchView) { if (mClearableEditText.getVisibility() == VISIBLE) { hideSearchView(); } else { showSearchView(); } } } else if (mRightImageButton != null && v == mRightImageButton) { if (mRightButtonClickListener != null) { mRightButtonClickListener.onClick(v); } if (mRightButtonAsSearchView) { if (mClearableEditText.getVisibility() == VISIBLE) { hideSearchView(); } else { showSearchView(); } } } } public void setLeftButtonOnClickListener(OnClickListener listener) { mLeftButtonClickListener = listener; } public void setRightButtonOnClickListener(OnClickListener listener) { mRightButtonClickListener = listener; } public void showLeftButton() { mIsLeftButtonShown = true; if (mLeftButton != null) { 
mLeftButton.setVisibility(VISIBLE); } if (mLeftImageButton != null) { mLeftImageButton.setVisibility(VISIBLE); } } private boolean mIsLeftButtonShown = true; public void hideLeftButton() { mIsLeftButtonShown = false; if (mLeftButton != null) { mLeftButton.setVisibility(GONE); } if (mLeftImageButton != null) { mLeftImageButton.setVisibility(GONE); } } public boolean leftButtonShown() { return (mLeftButton != null && mLeftButton.getVisibility() == VISIBLE) || (mLeftImageButton != null && mLeftImageButton.getVisibility() == VISIBLE); } private boolean mIsRightButtonShown = true; public void showRightButton() { mIsRightButtonShown = true; if (mRightButton != null) { mRightButton.setVisibility(VISIBLE); } if (mRightImageButton != null) { mRightImageButton.setVisibility(VISIBLE); } } public void hideRightButton() { mIsRightButtonShown = false; if (mRightButton != null) { mRightButton.setVisibility(GONE); } if (mRightImageButton != null) { mRightImageButton.setVisibility(GONE); } } public boolean rightButtonShown() { return (mRightButton != null && mRightButton.getVisibility() == VISIBLE) || (mRightImageButton != null && mRightImageButton.getVisibility() == VISIBLE); } public void setTitle(String title) { if (mTitleTextView != null) { mTitleTextView.setText(title); mTitle = title; } } public void enableLeftButton() { if (mLeftButton != null) { mLeftButton.setEnabled(true); } if (mLeftImageButton != null) { mLeftImageButton.setEnabled(true); } } public void disableLeftButton() { if (mLeftButton != null) { mLeftButton.setEnabled(false); } if (mLeftImageButton != null) { mLeftImageButton.setEnabled(false); } } public void enableRightButton() { if (mRightButton != null) { mRightButton.setEnabled(true); } if (mRightImageButton != null) { mRightImageButton.setEnabled(true); } } public void disableRightButton() { if (mRightButton != null) { mRightButton.setEnabled(false); } if (mRightImageButton != null) { mRightImageButton.setEnabled(false); } } public Observable<CharSequence> 
searchQueryChanges() { if (mClearableEditText == null) { throw new IllegalStateException("No search view configured!"); } return mClearableEditText.textChanges(); } public Observable<Integer> editorActions() { if (mClearableEditText == null) { throw new IllegalStateException("No search view configured!"); } return mClearableEditText.editorActions(); } public void setOnQueryChangedListener(OnTextChangedListener onQueryChangedListener) { if (mClearableEditText == null) { throw new IllegalStateException("No search view configured!"); } mClearableEditText.setOnTextChangedListener(onQueryChangedListener); } public void setOnEditorActionDoneListener( OnEditorActionDoneListener onEditorActionDoneListener) { if (mClearableEditText == null) { throw new IllegalStateException("No search view configured!"); } mClearableEditText.setOnEditorActionDoneListener(onEditorActionDoneListener); } }
package atarigamer; /** * <p>Title: Atari Gamer</p> * <p>Description: Atari Video Game Console Development Environment</p> * <p>Copyright: Copyright (c) 2002</p> * <p>Company: Atari Aaron</p> * @author Aaron.Bergstrom@ndsu.nodak.edu * @version 1.0 */ import javax.swing.*; import java.lang.*; import java.awt.event.*; import javax.swing.event.*; import java.beans.*; import java.awt.*; import java.awt.image.*; public class CC2600 extends JInternalFrame{ private int notifier = -1; protected Color[][] ntsc = new Color[16][8]; protected Color[][] pal = new Color[16][8]; protected Color[][] secam = new Color[16][8]; protected int col = 0; protected int lum = 0; protected ImageIcon nii; protected ImageIcon pii; protected ImageIcon sii; protected Image ni; protected Image pi; protected Image si; protected int pixels[]; protected JButton obutton = new JButton(); protected int cf = 0; protected CC2600 cc; protected DevProject dp; protected JDesktopPane jdp; protected JPanel card; protected CJPanels[] cjp = new CJPanels[3]; protected String[] tvType = {"NTSC","PAL","SECAM"}; protected CardLayout cl = new CardLayout(0,0); protected PixelGrabber[] pgs = new PixelGrabber[3]; protected JButton[] tvChoice = new JButton[3]; protected JMenu options = new JMenu("Options"); protected JMenuBar mbar = new JMenuBar(); private ColorPalettes cp; public CC2600(DevProject dp) { this.cc = this; this.dp = dp; this.cp = dp.cp; this.jdp = this.dp.jdp; // createNTSCColors(); // createPALColors(); // createSECAMColors(); setupFrame(); loadPalettes(); } protected void setupFrame(){ // mbar.add(options); // this.setJMenuBar(mbar); this.setFrameIcon(new ImageIcon(ASMEditor.class.getResource("color_fi.gif"))); this.setSize(276,194); this.setResizable(false); this.setIconifiable(true); this.setClosable(true); this.getContentPane().setLayout(new BorderLayout(1,1)); JPanel menu = new JPanel(new FlowLayout(FlowLayout.CENTER,0,0)); for(int i=0;i<3;i++){ tvChoice[i] = new JButton(); 
tvChoice[i].setBackground(new Color(204,153,153)); tvChoice[i].setMargin(new Insets(0,0,0,0)); tvChoice[i].setBorder(new javax.swing.border.LineBorder(Color.BLACK,1,true)); menu.add(tvChoice[i]); menu.add(new JLabel(" ")); } tvChoice[0].setText(" NTSC "); tvChoice[1].setText(" PAL "); tvChoice[2].setText(" SECAM "); tvChoice[0].addActionListener(new ActionListener(){ public void actionPerformed(ActionEvent e){ setColorType(0); } }); tvChoice[1].addActionListener(new ActionListener(){ public void actionPerformed(ActionEvent e){ setColorType(1); } }); tvChoice[2].addActionListener(new ActionListener(){ public void actionPerformed(ActionEvent e){ setColorType(2); } }); this.getContentPane().add("North", menu); JLabel gs = new JLabel("Based on color charts compiled by Glenn Saunders"); gs.setForeground(Color.BLACK); this.getContentPane().add("South",gs); setNewTitle(); obutton.setIcon(this.getFrameIcon()); obutton.setToolTipText(cc.getTitle()); obutton.setMargin(new Insets(0,0,0,0)); obutton.setBorderPainted(false); obutton.setForeground(Color.BLACK); obutton.addActionListener(new ActionListener(){ public void actionPerformed(ActionEvent e){ try{ cc.setIcon(false); }catch(PropertyVetoException pve){}; cc.setVisible(true); cc.revalidate(); } }); this.setDefaultCloseOperation(WindowConstants.DO_NOTHING_ON_CLOSE); this.addInternalFrameListener(new InternalFrameListener(){ public void internalFrameDeactivated(InternalFrameEvent e){ } public void internalFrameActivated(InternalFrameEvent e){ } public void internalFrameDeiconified(InternalFrameEvent e){ dp.grd.icontool.remove(obutton); } public void internalFrameIconified(InternalFrameEvent e){ obutton.setToolTipText(cc.getTitle()); cc.setVisible(false); dp.grd.icontool.add(obutton,2); } public void internalFrameClosed(InternalFrameEvent e){ } public void internalFrameClosing(InternalFrameEvent e){ cc.setVisible(false); } public void internalFrameOpened(InternalFrameEvent e){ } }); /* jdp.add(this); JPanel info = new 
JPanel(new GridLayout(2,1,0,0)); card = new JPanel(cl); cjp[0] = new CJPanels(ntsc,tvType[0],this); cjp[1] = new CJPanels(pal,tvType[1],this); cjp[2] = new CJPanels(secam,tvType[2],this); for(int i=0;i<3;i++){ card.add(cjp[i].tp,tvType[i]); } info.add(card); this.getContentPane().add("Center",info); */ } protected void setColorType(int ct){ // this.cf = ct; dp.cPalette = ct; cp.setDrawImage(dp.cPalette); setNewTitle(); // this.obutton.setToolTipText(this.getTitle()); // cl.show(card,tvType[cf]); } protected void setNewTitle(){ switch(dp.cPalette){ case 1: this.setTitle("2600 Color Chooser - PAL"); break; case 2: this.setTitle("2600 Color Chooser - SECAM"); break; default: this.setTitle("2600 Color Chooser - NTSC"); break; } //cp.repaint(); revalidate(); } protected void loadPalettes(){ this.getContentPane().add("Center", cp); overRideClose(); jdp.add(this); this.setVisible(false); /* nii = new ImageIcon(CC2600.class.getResource("ntsc.jpg")); pii = new ImageIcon(CC2600.class.getResource("pal.jpg")); sii = new ImageIcon(CC2600.class.getResource("secam.jpg")); ni = nii.getImage(); pi = pii.getImage(); si = sii.getImage(); */ } private void overRideClose(){ this.setDefaultCloseOperation(WindowConstants.DO_NOTHING_ON_CLOSE); this.addInternalFrameListener(new InternalFrameAdapter(){ public void internalFrameClosing(InternalFrameEvent e){ setVisible(false); } }); } protected void createNTSCColors(){ pixels = new int[nii.getIconWidth() * nii.getIconHeight()]; pgs[0] = new PixelGrabber(ni,0,0,nii.getIconWidth(),nii.getIconHeight(),pixels,0,nii.getIconWidth()); try{ pgs[0].grabPixels(); }catch(InterruptedException ie){} col = 12; lum = 11; for(int i=0;i<8;i++){ setColorLine(i,ntsc,ni); } lum = 11; } protected void createPALColors(){ pixels = new int[pii.getIconWidth() * pii.getIconHeight()]; pgs[1] = new PixelGrabber(pi,0,0,pii.getIconWidth(),pii.getIconHeight(),pixels,0,pii.getIconWidth()); try{ pgs[1].grabPixels(); }catch(InterruptedException ie){} col = 12; lum = 11; 
for(int i=0;i<8;i++){ setColorLine(i,pal,pi); } lum = 11; } protected void createSECAMColors(){ pixels = new int[sii.getIconWidth() * sii.getIconHeight()]; pgs[2] = new PixelGrabber(si,0,0,sii.getIconWidth(),sii.getIconHeight(),pixels,0,sii.getIconWidth()); try{ pgs[2].grabPixels(); }catch(InterruptedException ie){} col = 12; lum = 11; for(int i=0;i<8;i++){ setColorLine(i,secam,si); } lum = 11; } protected void setColorLine(int row, Color[][] palette, Image sImage){ for(int i=0;i<16;i++){ palette[i][row] = getPixelColor(lum,col,sImage); col = col+24; } col = 12; lum = lum+12; } protected Color getVCSColor(String value){ String lumString = value.substring(0,1); String colString = value.substring(1); lumString = (String)cjp[cf].y.get(lumString); colString = (String)cjp[cf].x.get(colString); int lumInt = Integer.parseInt(lumString); int colInt = Integer.parseInt(colString); Color tColor = cjp[cf].getPixelColor(lumInt,colInt,cjp[cf].bi); return tColor; } public Color getPixelColor(int row, int column, Image sImage){ int pixel = pixels[row * sImage.getWidth(this) + column]; int alpha = (pixel >> 24) & 0xff; int red = (pixel >> 16) & 0xff; int green = (pixel >> 8) & 0xff; int blue = pixel & 0xff; Color c = new Color(red, green, blue, alpha); return c; } public void setNotifier(int notifier){ this.notifier = notifier; } public int getNotifier(){ return notifier; } }
/**
 *
 * Copyright (c) 2006-2016, Speedment, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); You may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at:
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package com.github.pyknic.reactor;

import com.speedment.field.ComparableField;
import com.speedment.internal.logging.Logger;
import com.speedment.internal.logging.LoggerManager;
import com.speedment.manager.Manager;
import static java.util.Collections.newSetFromMap;
import static java.util.Collections.unmodifiableList;
import java.util.List;
import static java.util.Objects.requireNonNull;
import java.util.Set;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Consumer;
import static java.util.stream.Collectors.toList;

/**
 * A reactor is an object that polls the database for changes at a particular
 * interval, and if changes were found, notifies a set of listeners.
 *
 * @author Emil Forslund
 * @param <ENTITY> the entity type
 */
public final class Reactor<ENTITY> {

    private final static Logger LOGGER = LoggerManager.getLogger(Reactor.class);

    // The timer owns the polling thread; cancelling it stops the reactor.
    private final Timer timer;

    /**
     *
     * @param timer the timer that is polling the database
     */
    private Reactor(Timer timer) {
        this.timer = requireNonNull(timer);
    }

    /**
     * Stops the reactor from polling the database.
     */
    public void stop() {
        timer.cancel();
    }

    /**
     * Creates a new reactor builder for the specified {@link Manager} by using
     * the specified field to identify which events refer to the same entity.
     * This is normally different from the primary key.
     *
     * @param <ENTITY> the entity type
     * @param manager the manager to use
     * @param idField the field that identifies which entity is referred to in
     *                the event
     * @return the new builder
     */
    public static <ENTITY> Builder<ENTITY> builder(
            Manager<ENTITY> manager,
            ComparableField<ENTITY, Long, Long> idField) {
        return new Builder<>(manager, idField);
    }

    /**
     * Builder class for creating new {@link Reactor} instances.
     *
     * @param <ENTITY> the entity type to react on
     */
    public final static class Builder<ENTITY> {

        private final Manager<ENTITY> manager;
        private final ComparableField<ENTITY, Long, Long> idField;
        // Concurrent set so listeners can be registered from any thread.
        private final Set<Consumer<List<ENTITY>>> listeners;
        private long interval;
        private long limit;

        /**
         * Initiates the builder with default values.
         *
         * @param manager the manager to use for database polling
         * @param idField the field that identifies which entity is referred to
         *                in the event
         */
        private Builder(
                Manager<ENTITY> manager,
                ComparableField<ENTITY, Long, Long> idField) {
            this.manager = requireNonNull(manager);
            this.idField = requireNonNull(idField);
            this.listeners = newSetFromMap(new ConcurrentHashMap<>());
            this.interval = 1000; // in milliseconds.
            this.limit = 100;
        }

        /**
         * Adds a listener to the reactor being built. The listener will be
         * notified each time new rows are loaded.
         *
         * @param listener the new listener
         * @return a reference to this builder
         */
        public Builder<ENTITY> withListener(Consumer<List<ENTITY>> listener) {
            listeners.add(listener);
            return this;
        }

        /**
         * Sets the interval for which the database will be polled for changes.
         * The interval is specified in milliseconds.
         * <p>
         * This setting is optional. If it is not specified, an interval of 1000
         * milliseconds will be used.
         *
         * @param millis the interval in milliseconds
         * @return a reference to this builder
         */
        public Builder<ENTITY> withInterval(long millis) {
            this.interval = millis;
            return this;
        }

        /**
         * Sets the maximum amount of rows that might be loaded at once from
         * the database. Setting a limit might be a good way to prevent the
         * reactor from clogging up the system during load.
         * <p>
         * This setting is optional. If it is not specified, the limit will be
         * 100 elements per load.
         *
         * @param count the maximum amount of rows that might be loaded at once
         * @return a reference to this builder
         */
        public Builder<ENTITY> withLimit(long count) {
            this.limit = count;
            return this;
        }

        /**
         * Builds and starts this reactor. When this method is called, the
         * reactor will start polling the database at the specified interval.
         * The returned instance could be ignored, but it might be good to hold
         * on to it since that is the only way to stop the reactor once started.
         *
         * @return the running reactor
         */
        public Reactor<ENTITY> build() {
            final Timer timer = new Timer();
            final TimerTask task = new TimerTask() {
                @Override
                public void run() {
                    // NOTE(review): 'last' is re-initialized to MIN_VALUE on
                    // every scheduled run, so each poll re-reads the whole
                    // table from the beginning (in pages of 'limit'), not just
                    // rows added since the previous poll. Confirm this is the
                    // intended semantics before changing it.
                    final AtomicLong last = new AtomicLong(Long.MIN_VALUE);
                    final String managerName = manager.getTable().getName();
                    final String fieldName = idField.getIdentifier()
                        .columnName();

                    while (true) {
                        // Page through rows with id greater than the last one
                        // seen, notifying every listener per page.
                        final List<ENTITY> added = unmodifiableList(
                            manager.stream()
                                .filter(idField.greaterThan(last.get()))
                                .limit(limit)
                                .collect(toList())
                        );

                        if (added.isEmpty()) {
                            break;
                        } else {
                            final ENTITY lastEntity = added.get(added.size() - 1);
                            last.set(idField.get(lastEntity));

                            listeners.forEach(
                                listener -> listener.accept(added)
                            );

                            LOGGER.debug(String.format(
                                "Downloaded %d row(s) from %s. Latest %s: %d.",
                                added.size(),
                                managerName,
                                fieldName,
                                last.get()
                            ));
                        }
                    }
                }
            };

            timer.scheduleAtFixedRate(task, 0, interval);
            return new Reactor<>(timer);
        }
    }
}
/* * Copyright 2013-2017 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.data.elasticsearch.core.query; import static org.apache.commons.lang.RandomStringUtils.*; import static org.hamcrest.Matchers.*; import static org.junit.Assert.*; import static org.springframework.data.elasticsearch.utils.IndexBuilder.*; import java.util.ArrayList; import java.util.List; import org.junit.Before; import org.junit.Ignore; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; import org.springframework.data.elasticsearch.core.ElasticsearchTemplate; import org.springframework.data.elasticsearch.entities.SampleEntity; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; /** * @author Rizwan Idrees * @author Mohsin Husen */ @RunWith(SpringJUnit4ClassRunner.class) @ContextConfiguration("classpath:elasticsearch-template-test.xml") public class CriteriaQueryTests { @Autowired private ElasticsearchTemplate elasticsearchTemplate; @Before public void before() { elasticsearchTemplate.deleteIndex(SampleEntity.class); elasticsearchTemplate.createIndex(SampleEntity.class); elasticsearchTemplate.putMapping(SampleEntity.class); elasticsearchTemplate.refresh(SampleEntity.class); } @Test public void shouldPerformAndOperation() { // given String documentId = 
randomNumeric(5); SampleEntity sampleEntity = new SampleEntity(); sampleEntity.setId(documentId); sampleEntity.setMessage("some test message"); sampleEntity.setVersion(System.currentTimeMillis()); IndexQuery indexQuery = new IndexQuery(); indexQuery.setId(documentId); indexQuery.setObject(sampleEntity); elasticsearchTemplate.index(indexQuery); elasticsearchTemplate.refresh(SampleEntity.class); CriteriaQuery criteriaQuery = new CriteriaQuery(new Criteria("message").contains("test").and("message") .contains("some")); // when SampleEntity sampleEntity1 = elasticsearchTemplate.queryForObject(criteriaQuery, SampleEntity.class); // then assertThat(sampleEntity1, is(notNullValue())); } @Ignore("DATAES-30") @Test public void shouldPerformOrOperation() { // given List<IndexQuery> indexQueries = new ArrayList<>(); // first document String documentId = randomNumeric(5); SampleEntity sampleEntity1 = new SampleEntity(); sampleEntity1.setId(documentId); sampleEntity1.setMessage("some message"); sampleEntity1.setVersion(System.currentTimeMillis()); IndexQuery indexQuery1 = new IndexQuery(); indexQuery1.setId(documentId); indexQuery1.setObject(sampleEntity1); indexQueries.add(indexQuery1); // second document String documentId2 = randomNumeric(5); SampleEntity sampleEntity2 = new SampleEntity(); sampleEntity2.setId(documentId2); sampleEntity2.setMessage("test message"); sampleEntity2.setVersion(System.currentTimeMillis()); IndexQuery indexQuery2 = new IndexQuery(); indexQuery2.setId(documentId2); indexQuery2.setObject(sampleEntity2); indexQueries.add(indexQuery2); elasticsearchTemplate.bulkIndex(indexQueries); elasticsearchTemplate.refresh(SampleEntity.class); CriteriaQuery criteriaQuery = new CriteriaQuery(new Criteria("message").contains("some").or("message") .contains("test")); // when Page<SampleEntity> page = elasticsearchTemplate.queryForPage(criteriaQuery, SampleEntity.class); // then assertThat(page, is(notNullValue())); assertThat(page.getTotalElements(), 
is(greaterThanOrEqualTo(1L))); } @Test public void shouldPerformAndOperationWithinCriteria() { // given List<IndexQuery> indexQueries = new ArrayList<>(); // first document String documentId = randomNumeric(5); SampleEntity sampleEntity = new SampleEntity(); sampleEntity.setId(documentId); sampleEntity.setMessage("some message"); sampleEntity.setVersion(System.currentTimeMillis()); IndexQuery indexQuery = new IndexQuery(); indexQuery.setId(documentId); indexQuery.setObject(sampleEntity); indexQueries.add(indexQuery); elasticsearchTemplate.bulkIndex(indexQueries); elasticsearchTemplate.refresh(SampleEntity.class); CriteriaQuery criteriaQuery = new CriteriaQuery(new Criteria().and(new Criteria("message").contains("some"))); // when Page<SampleEntity> page = elasticsearchTemplate.queryForPage(criteriaQuery, SampleEntity.class); // then assertThat(page, is(notNullValue())); assertThat(page.getTotalElements(), is(greaterThanOrEqualTo(1L))); } @Ignore("DATAES-30") @Test public void shouldPerformOrOperationWithinCriteria() { // given List<IndexQuery> indexQueries = new ArrayList<>(); // first document String documentId = randomNumeric(5); SampleEntity sampleEntity = new SampleEntity(); sampleEntity.setId(documentId); sampleEntity.setMessage("some message"); sampleEntity.setVersion(System.currentTimeMillis()); IndexQuery indexQuery = new IndexQuery(); indexQuery.setId(documentId); indexQuery.setObject(sampleEntity); indexQueries.add(indexQuery); elasticsearchTemplate.bulkIndex(indexQueries); elasticsearchTemplate.refresh(SampleEntity.class); CriteriaQuery criteriaQuery = new CriteriaQuery(new Criteria().or(new Criteria("message").contains("some"))); // when Page<SampleEntity> page = elasticsearchTemplate.queryForPage(criteriaQuery, SampleEntity.class); // then assertThat(page, is(notNullValue())); assertThat(page.getTotalElements(), is(greaterThanOrEqualTo(1L))); } @Test public void shouldPerformIsOperation() { // given List<IndexQuery> indexQueries = new ArrayList<>(); // 
first document String documentId = randomNumeric(5); SampleEntity sampleEntity = new SampleEntity(); sampleEntity.setId(documentId); sampleEntity.setMessage("some message"); sampleEntity.setVersion(System.currentTimeMillis()); IndexQuery indexQuery = new IndexQuery(); indexQuery.setId(documentId); indexQuery.setObject(sampleEntity); indexQueries.add(indexQuery); elasticsearchTemplate.bulkIndex(indexQueries); elasticsearchTemplate.refresh(SampleEntity.class); CriteriaQuery criteriaQuery = new CriteriaQuery(new Criteria("message").is("some message")); // when Page<SampleEntity> page = elasticsearchTemplate.queryForPage(criteriaQuery, SampleEntity.class); // then assertThat("message", is(criteriaQuery.getCriteria().getField().getName())); assertThat(page.getTotalElements(), is(greaterThanOrEqualTo(1L))); } @Test public void shouldPerformMultipleIsOperations() { // given List<IndexQuery> indexQueries = new ArrayList<>(); // first document String documentId = randomNumeric(5); SampleEntity sampleEntity1 = new SampleEntity(); sampleEntity1.setId(documentId); sampleEntity1.setMessage("some message"); sampleEntity1.setVersion(System.currentTimeMillis()); IndexQuery indexQuery1 = new IndexQuery(); indexQuery1.setId(documentId); indexQuery1.setObject(sampleEntity1); indexQueries.add(indexQuery1); // second document String documentId2 = randomNumeric(5); SampleEntity sampleEntity2 = new SampleEntity(); sampleEntity2.setId(documentId2); sampleEntity2.setMessage("test message"); sampleEntity2.setVersion(System.currentTimeMillis()); IndexQuery indexQuery2 = new IndexQuery(); indexQuery2.setId(documentId2); indexQuery2.setObject(sampleEntity2); indexQueries.add(indexQuery2); elasticsearchTemplate.bulkIndex(indexQueries); elasticsearchTemplate.refresh(SampleEntity.class); CriteriaQuery criteriaQuery = new CriteriaQuery(new Criteria("message").is("some message")); // when Page<SampleEntity> page = elasticsearchTemplate.queryForPage(criteriaQuery, SampleEntity.class); // then 
assertThat("message", is(criteriaQuery.getCriteria().getField().getName())); assertThat(page.getTotalElements(), is(equalTo(1L))); } @Test public void shouldPerformEndsWithOperation() { // given List<IndexQuery> indexQueries = new ArrayList<>(); // first document String documentId = randomNumeric(5); SampleEntity sampleEntity1 = new SampleEntity(); sampleEntity1.setId(documentId); sampleEntity1.setMessage("some message"); sampleEntity1.setVersion(System.currentTimeMillis()); IndexQuery indexQuery1 = new IndexQuery(); indexQuery1.setId(documentId); indexQuery1.setObject(sampleEntity1); indexQueries.add(indexQuery1); // second document String documentId2 = randomNumeric(5); SampleEntity sampleEntity2 = new SampleEntity(); sampleEntity2.setId(documentId2); sampleEntity2.setMessage("test message end"); sampleEntity2.setVersion(System.currentTimeMillis()); IndexQuery indexQuery2 = new IndexQuery(); indexQuery2.setId(documentId2); indexQuery2.setObject(sampleEntity2); indexQueries.add(indexQuery2); elasticsearchTemplate.bulkIndex(indexQueries); elasticsearchTemplate.refresh(SampleEntity.class); Criteria criteria = new Criteria("message").endsWith("end"); CriteriaQuery criteriaQuery = new CriteriaQuery(criteria); // when SampleEntity sampleEntity = elasticsearchTemplate.queryForObject(criteriaQuery, SampleEntity.class); // then assertThat("message", is(criteriaQuery.getCriteria().getField().getName())); assertThat(sampleEntity, is(notNullValue())); } @Test public void shouldPerformStartsWithOperation() { // given List<IndexQuery> indexQueries = new ArrayList<>(); // first document String documentId = randomNumeric(5); SampleEntity sampleEntity1 = new SampleEntity(); sampleEntity1.setId(documentId); sampleEntity1.setMessage("start some message"); sampleEntity1.setVersion(System.currentTimeMillis()); IndexQuery indexQuery1 = new IndexQuery(); indexQuery1.setId(documentId); indexQuery1.setObject(sampleEntity1); indexQueries.add(indexQuery1); // second document String 
documentId2 = randomNumeric(5); SampleEntity sampleEntity2 = new SampleEntity(); sampleEntity2.setId(documentId2); sampleEntity2.setMessage("test message"); sampleEntity2.setVersion(System.currentTimeMillis()); IndexQuery indexQuery2 = new IndexQuery(); indexQuery2.setId(documentId2); indexQuery2.setObject(sampleEntity2); indexQueries.add(indexQuery2); elasticsearchTemplate.bulkIndex(indexQueries); elasticsearchTemplate.refresh(SampleEntity.class); Criteria criteria = new Criteria("message").startsWith("start"); CriteriaQuery criteriaQuery = new CriteriaQuery(criteria); // when SampleEntity sampleEntity = elasticsearchTemplate.queryForObject(criteriaQuery, SampleEntity.class); // then assertThat("message", is(criteriaQuery.getCriteria().getField().getName())); assertThat(sampleEntity, is(notNullValue())); } @Test public void shouldPerformContainsOperation() { // given List<IndexQuery> indexQueries = new ArrayList<>(); // first document String documentId = randomNumeric(5); SampleEntity sampleEntity1 = new SampleEntity(); sampleEntity1.setId(documentId); sampleEntity1.setMessage("contains some message"); sampleEntity1.setVersion(System.currentTimeMillis()); IndexQuery indexQuery1 = new IndexQuery(); indexQuery1.setId(documentId); indexQuery1.setObject(sampleEntity1); indexQueries.add(indexQuery1); // second document String documentId2 = randomNumeric(5); SampleEntity sampleEntity2 = new SampleEntity(); sampleEntity2.setId(documentId2); sampleEntity2.setMessage("test message"); sampleEntity2.setVersion(System.currentTimeMillis()); IndexQuery indexQuery2 = new IndexQuery(); indexQuery2.setId(documentId2); indexQuery2.setObject(sampleEntity2); indexQueries.add(indexQuery2); elasticsearchTemplate.bulkIndex(indexQueries); elasticsearchTemplate.refresh(SampleEntity.class); CriteriaQuery criteriaQuery = new CriteriaQuery(new Criteria("message").contains("contains")); // when SampleEntity sampleEntity = elasticsearchTemplate.queryForObject(criteriaQuery, 
SampleEntity.class); // then assertThat("message", is(criteriaQuery.getCriteria().getField().getName())); assertThat(sampleEntity, is(notNullValue())); } @Test public void shouldExecuteExpression() { // given List<IndexQuery> indexQueries = new ArrayList<>(); // first document String documentId = randomNumeric(5); SampleEntity sampleEntity1 = new SampleEntity(); sampleEntity1.setId(documentId); sampleEntity1.setMessage("elasticsearch search"); sampleEntity1.setVersion(System.currentTimeMillis()); IndexQuery indexQuery1 = new IndexQuery(); indexQuery1.setId(documentId); indexQuery1.setObject(sampleEntity1); indexQueries.add(indexQuery1); // second document String documentId2 = randomNumeric(5); SampleEntity sampleEntity2 = new SampleEntity(); sampleEntity2.setId(documentId2); sampleEntity2.setMessage("test message"); sampleEntity2.setVersion(System.currentTimeMillis()); IndexQuery indexQuery2 = new IndexQuery(); indexQuery2.setId(documentId2); indexQuery2.setObject(sampleEntity2); indexQueries.add(indexQuery2); elasticsearchTemplate.bulkIndex(indexQueries); elasticsearchTemplate.refresh(SampleEntity.class); CriteriaQuery criteriaQuery = new CriteriaQuery(new Criteria("message").expression("+elasticsearch || test")); // when SampleEntity sampleEntity = elasticsearchTemplate.queryForObject(criteriaQuery, SampleEntity.class); // then assertThat("message", is(criteriaQuery.getCriteria().getField().getName())); assertThat(sampleEntity, is(notNullValue())); } @Test public void shouldExecuteCriteriaChain() { // given List<IndexQuery> indexQueries = new ArrayList<>(); // first document String documentId = randomNumeric(5); SampleEntity sampleEntity1 = new SampleEntity(); sampleEntity1.setId(documentId); sampleEntity1.setMessage("some message search"); sampleEntity1.setVersion(System.currentTimeMillis()); IndexQuery indexQuery1 = new IndexQuery(); indexQuery1.setId(documentId); indexQuery1.setObject(sampleEntity1); indexQueries.add(indexQuery1); // second document String 
documentId2 = randomNumeric(5); SampleEntity sampleEntity2 = new SampleEntity(); sampleEntity2.setId(documentId2); sampleEntity2.setMessage("test test message"); sampleEntity2.setVersion(System.currentTimeMillis()); IndexQuery indexQuery2 = new IndexQuery(); indexQuery2.setId(documentId2); indexQuery2.setObject(sampleEntity2); indexQueries.add(indexQuery2); elasticsearchTemplate.bulkIndex(indexQueries); elasticsearchTemplate.refresh(SampleEntity.class); CriteriaQuery criteriaQuery = new CriteriaQuery(new Criteria("message").startsWith("some").endsWith("search") .contains("message").is("some message search")); // when SampleEntity sampleEntity = elasticsearchTemplate.queryForObject(criteriaQuery, SampleEntity.class); // then assertThat("message", is(criteriaQuery.getCriteria().getField().getName())); assertThat(sampleEntity, is(notNullValue())); } @Test public void shouldPerformIsNotOperation() { // given List<IndexQuery> indexQueries = new ArrayList<>(); // first document String documentId = randomNumeric(5); SampleEntity sampleEntity1 = new SampleEntity(); sampleEntity1.setId(documentId); sampleEntity1.setMessage("bar"); sampleEntity1.setVersion(System.currentTimeMillis()); IndexQuery indexQuery1 = new IndexQuery(); indexQuery1.setId(documentId); indexQuery1.setObject(sampleEntity1); indexQueries.add(indexQuery1); // second document String documentId2 = randomNumeric(5); SampleEntity sampleEntity2 = new SampleEntity(); sampleEntity2.setId(documentId2); sampleEntity2.setMessage("foo"); sampleEntity2.setVersion(System.currentTimeMillis()); IndexQuery indexQuery2 = new IndexQuery(); indexQuery2.setId(documentId2); indexQuery2.setObject(sampleEntity2); indexQueries.add(indexQuery2); elasticsearchTemplate.bulkIndex(indexQueries); elasticsearchTemplate.refresh(SampleEntity.class); CriteriaQuery criteriaQuery = new CriteriaQuery(new Criteria("message").is("foo").not()); // when Page<SampleEntity> page = elasticsearchTemplate.queryForPage(criteriaQuery, 
SampleEntity.class); // then assertTrue(criteriaQuery.getCriteria().isNegating()); assertThat(page, is(notNullValue())); assertFalse(page.iterator().next().getMessage().contains("foo")); } @Test public void shouldPerformBetweenOperation() { // given List<IndexQuery> indexQueries = new ArrayList<>(); // first document String documentId = randomNumeric(5); SampleEntity sampleEntity1 = new SampleEntity(); sampleEntity1.setId(documentId); sampleEntity1.setRate(100); sampleEntity1.setMessage("bar"); sampleEntity1.setVersion(System.currentTimeMillis()); IndexQuery indexQuery1 = new IndexQuery(); indexQuery1.setId(documentId); indexQuery1.setObject(sampleEntity1); indexQueries.add(indexQuery1); // second document String documentId2 = randomNumeric(5); SampleEntity sampleEntity2 = new SampleEntity(); sampleEntity2.setId(documentId2); sampleEntity2.setRate(200); sampleEntity2.setMessage("foo"); sampleEntity2.setVersion(System.currentTimeMillis()); IndexQuery indexQuery2 = new IndexQuery(); indexQuery2.setId(documentId2); indexQuery2.setObject(sampleEntity2); indexQueries.add(indexQuery2); elasticsearchTemplate.bulkIndex(indexQueries); elasticsearchTemplate.refresh(SampleEntity.class); CriteriaQuery criteriaQuery = new CriteriaQuery(new Criteria("rate").between(100, 150)); // when SampleEntity sampleEntity = elasticsearchTemplate.queryForObject(criteriaQuery, SampleEntity.class); // then assertThat(sampleEntity, is(notNullValue())); } @Test public void shouldPerformBetweenOperationWithoutUpperBound() { // given List<IndexQuery> indexQueries = new ArrayList<>(); // first document String documentId = randomNumeric(5); SampleEntity sampleEntity1 = new SampleEntity(); sampleEntity1.setId(documentId); sampleEntity1.setRate(300); sampleEntity1.setMessage("bar"); sampleEntity1.setVersion(System.currentTimeMillis()); IndexQuery indexQuery1 = new IndexQuery(); indexQuery1.setId(documentId); indexQuery1.setObject(sampleEntity1); indexQueries.add(indexQuery1); // second document String 
documentId2 = randomNumeric(5);
		// Second document: rate=400 falls below the lower bound used in the query below,
		// so only the first document is expected to match.
		SampleEntity sampleEntity2 = new SampleEntity();
		sampleEntity2.setId(documentId2);
		sampleEntity2.setRate(400);
		sampleEntity2.setMessage("foo");
		sampleEntity2.setVersion(System.currentTimeMillis());
		IndexQuery indexQuery2 = new IndexQuery();
		indexQuery2.setId(documentId2);
		indexQuery2.setObject(sampleEntity2);
		indexQueries.add(indexQuery2);

		elasticsearchTemplate.bulkIndex(indexQueries);
		// Refresh so the freshly indexed documents are visible to the search below.
		elasticsearchTemplate.refresh(SampleEntity.class);
		// Range with an open upper bound: rate >= 350.
		CriteriaQuery criteriaQuery = new CriteriaQuery(new Criteria("rate").between(350, null));

		// when
		Page<SampleEntity> page = elasticsearchTemplate.queryForPage(criteriaQuery, SampleEntity.class);

		// then
		assertThat(page, is(notNullValue()));
		assertThat(page.getTotalElements(), is(greaterThanOrEqualTo(1L)));
	}

	/**
	 * Verifies that a {@code between} criteria with a {@code null} lower bound behaves
	 * as an open-ended "less than or equal" range query.
	 */
	@Test
	public void shouldPerformBetweenOperationWithoutLowerBound() {
		// given
		List<IndexQuery> indexQueries = new ArrayList<>();
		// first document (rate=500, inside the range's upper bound of 550)
		String documentId = randomNumeric(5);
		SampleEntity sampleEntity1 = new SampleEntity();
		sampleEntity1.setId(documentId);
		sampleEntity1.setRate(500);
		sampleEntity1.setMessage("bar");
		sampleEntity1.setVersion(System.currentTimeMillis());
		IndexQuery indexQuery1 = new IndexQuery();
		indexQuery1.setId(documentId);
		indexQuery1.setObject(sampleEntity1);
		indexQueries.add(indexQuery1);

		// second document (rate=600, above the upper bound; should not match)
		String documentId2 = randomNumeric(5);
		SampleEntity sampleEntity2 = new SampleEntity();
		sampleEntity2.setId(documentId2);
		sampleEntity2.setRate(600);
		sampleEntity2.setMessage("foo");
		sampleEntity2.setVersion(System.currentTimeMillis());
		IndexQuery indexQuery2 = new IndexQuery();
		indexQuery2.setId(documentId2);
		indexQuery2.setObject(sampleEntity2);
		indexQueries.add(indexQuery2);

		elasticsearchTemplate.bulkIndex(indexQueries);
		elasticsearchTemplate.refresh(SampleEntity.class);
		// Range with an open lower bound: rate <= 550.
		CriteriaQuery criteriaQuery = new CriteriaQuery(new Criteria("rate").between(null, 550));

		// when
		Page<SampleEntity> page = elasticsearchTemplate.queryForPage(criteriaQuery, SampleEntity.class);

		// then
		assertThat(page, is(notNullValue()));
		assertThat(page.getTotalElements(), is(greaterThanOrEqualTo(1L)));
	}

	/**
	 * Verifies the {@code lessThanEqual} criteria operation: only documents whose
	 * {@code rate} is <= the given threshold should match.
	 */
	@Test
	public void shouldPerformLessThanEqualOperation() {
		// given
		List<IndexQuery> indexQueries = new ArrayList<>();
		// first document (rate=700, at/below the 750 threshold; expected to match)
		String documentId = randomNumeric(5);
		SampleEntity sampleEntity1 = new SampleEntity();
		sampleEntity1.setId(documentId);
		sampleEntity1.setRate(700);
		sampleEntity1.setMessage("bar");
		sampleEntity1.setVersion(System.currentTimeMillis());
		IndexQuery indexQuery1 = new IndexQuery();
		indexQuery1.setId(documentId);
		indexQuery1.setObject(sampleEntity1);
		indexQueries.add(indexQuery1);

		// second document (rate=800, above the threshold; should not match)
		String documentId2 = randomNumeric(5);
		SampleEntity sampleEntity2 = new SampleEntity();
		sampleEntity2.setId(documentId2);
		sampleEntity2.setRate(800);
		sampleEntity2.setMessage("foo");
		sampleEntity2.setVersion(System.currentTimeMillis());
		IndexQuery indexQuery2 = new IndexQuery();
		indexQuery2.setId(documentId2);
		indexQuery2.setObject(sampleEntity2);
		indexQueries.add(indexQuery2);

		elasticsearchTemplate.bulkIndex(indexQueries);
		elasticsearchTemplate.refresh(SampleEntity.class);
		CriteriaQuery criteriaQuery = new CriteriaQuery(new Criteria("rate").lessThanEqual(750));

		// when
		Page<SampleEntity> page = elasticsearchTemplate.queryForPage(criteriaQuery, SampleEntity.class);

		// then
		assertThat(page, is(notNullValue()));
		assertThat(page.getTotalElements(), is(greaterThanOrEqualTo(1L)));
	}

	/**
	 * Verifies the {@code greaterThanEqual} criteria operation: only documents whose
	 * {@code rate} is >= the given threshold should match.
	 */
	@Test
	public void shouldPerformGreaterThanEquals() {
		// given
		List<IndexQuery> indexQueries = new ArrayList<>();
		// first document (rate=900, below the 950 threshold; should not match)
		String documentId = randomNumeric(5);
		SampleEntity sampleEntity1 = new SampleEntity();
		sampleEntity1.setId(documentId);
		sampleEntity1.setRate(900);
		sampleEntity1.setMessage("bar");
		sampleEntity1.setVersion(System.currentTimeMillis());
		IndexQuery indexQuery1 = new IndexQuery();
		indexQuery1.setId(documentId);
		indexQuery1.setObject(sampleEntity1);
		indexQueries.add(indexQuery1);

		// second document (rate=1000, at/above the threshold; expected to match)
		String documentId2 = randomNumeric(5);
		SampleEntity sampleEntity2 = new SampleEntity();
		sampleEntity2.setId(documentId2);
		sampleEntity2.setRate(1000);
		sampleEntity2.setMessage("foo");
		sampleEntity2.setVersion(System.currentTimeMillis());
		IndexQuery indexQuery2 = new IndexQuery();
		indexQuery2.setId(documentId2);
		indexQuery2.setObject(sampleEntity2);
		indexQueries.add(indexQuery2);

		elasticsearchTemplate.bulkIndex(indexQueries);
		elasticsearchTemplate.refresh(SampleEntity.class);
		CriteriaQuery criteriaQuery = new CriteriaQuery(new Criteria("rate").greaterThanEqual(950));

		// when
		Page<SampleEntity> page = elasticsearchTemplate.queryForPage(criteriaQuery, SampleEntity.class);

		// then
		assertThat(page, is(notNullValue()));
		assertThat(page.getTotalElements(), is(greaterThanOrEqualTo(1L)));
	}

	/**
	 * Verifies that a {@code boost} can be attached to a contains-criteria without
	 * breaking query execution. Only result presence is asserted, not scoring order.
	 */
	@Test
	public void shouldPerformBoostOperation() {
		// given
		List<IndexQuery> indexQueries = new ArrayList<>();
		// first document (message contains both terms)
		String documentId = randomNumeric(5);
		SampleEntity sampleEntity1 = new SampleEntity();
		sampleEntity1.setId(documentId);
		sampleEntity1.setRate(700);
		sampleEntity1.setMessage("bar foo");
		sampleEntity1.setVersion(System.currentTimeMillis());
		IndexQuery indexQuery1 = new IndexQuery();
		indexQuery1.setId(documentId);
		indexQuery1.setObject(sampleEntity1);
		indexQueries.add(indexQuery1);

		// second document (message contains only the boosted term)
		String documentId2 = randomNumeric(5);
		SampleEntity sampleEntity2 = new SampleEntity();
		sampleEntity2.setId(documentId2);
		sampleEntity2.setRate(800);
		sampleEntity2.setMessage("foo");
		sampleEntity2.setVersion(System.currentTimeMillis());
		IndexQuery indexQuery2 = new IndexQuery();
		indexQuery2.setId(documentId2);
		indexQuery2.setObject(sampleEntity2);
		indexQueries.add(indexQuery2);

		elasticsearchTemplate.bulkIndex(indexQueries);
		elasticsearchTemplate.refresh(SampleEntity.class);
		CriteriaQuery criteriaQuery = new CriteriaQuery(new Criteria("message").contains("foo").boost(1));

		// when
		Page<SampleEntity> page = elasticsearchTemplate.queryForPage(criteriaQuery, SampleEntity.class);

		// then
		assertThat(page.getTotalElements(), is(greaterThanOrEqualTo(1L)));
	}

	/**
	 * Verifies {@code setMinScore}: only the document matching BOTH OR'd criteria
	 * ("ab" matches "a" and "b") accumulates a score >= 2.0 and is returned.
	 */
	@Test
	public void shouldReturnDocumentAboveMinimalScoreGivenCriteria() {
		// given
		List<IndexQuery> indexQueries = new ArrayList<>();
		indexQueries.add(buildIndex(SampleEntity.builder().id("1").message("ab").build()));
		indexQueries.add(buildIndex(SampleEntity.builder().id("2").message("bc").build()));
		indexQueries.add(buildIndex(SampleEntity.builder().id("3").message("ac").build()));
		elasticsearchTemplate.bulkIndex(indexQueries);
		elasticsearchTemplate.refresh(SampleEntity.class);

		// when
		CriteriaQuery criteriaQuery = new CriteriaQuery(new Criteria("message").contains("a").or(new Criteria("message").contains("b")));
		// Each matching OR branch contributes to the score; 2.0 keeps only "ab".
		// NOTE(review): assumes each contains-clause scores ~1.0 — TODO confirm against the query builder.
		criteriaQuery.setMinScore(2.0F);
		Page<SampleEntity> page = elasticsearchTemplate.queryForPage(criteriaQuery, SampleEntity.class);

		// then
		assertThat(page.getTotalElements(), is(1L));
		assertThat(page.getContent().get(0).getMessage(), is("ab"));
	}
}
// Copyright 2016 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.buildeventservice; import static com.google.common.truth.Truth.assertThat; import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; import com.google.common.base.Function; import com.google.common.collect.FluentIterable; import com.google.common.eventbus.EventBus; import com.google.devtools.build.lib.authandtls.AuthAndTLSOptions; import com.google.devtools.build.lib.buildeventstream.PathConverter; import com.google.devtools.build.lib.buildeventstream.transports.BinaryFormatFileTransport; import com.google.devtools.build.lib.buildeventstream.transports.BuildEventStreamOptions; import com.google.devtools.build.lib.buildeventstream.transports.JsonFormatFileTransport; import com.google.devtools.build.lib.buildeventstream.transports.TextFormatFileTransport; import com.google.devtools.build.lib.events.EventHandler; import com.google.devtools.build.lib.events.Reporter; import com.google.devtools.build.lib.runtime.BlazeModule.ModuleEnvironment; import com.google.devtools.build.lib.runtime.BuildEventStreamer; import com.google.devtools.build.lib.runtime.Command; import com.google.devtools.build.lib.util.Clock; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.common.options.Options; import com.google.devtools.common.options.OptionsBase; import 
com.google.devtools.common.options.OptionsParser;
import com.google.devtools.common.options.OptionsProvider;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;

/**
 * Tests {@link BuildEventServiceModule}: verifies which build-event transports
 * (text/binary/JSON file, BES backend) the module wires into the
 * {@link BuildEventStreamer} for a given set of command-line options.
 */
@RunWith(JUnit4.class)
public class BazelBuildEventServiceModuleTest {

  /** Maps any object to its runtime class; used to assert on transport types. */
  private static final Function<Object, Class<?>> GET_CLASS =
      new Function<Object, Class<?>>() {
        @Override
        public Class<?> apply(Object o) {
          return o.getClass();
        }
      };

  /** Trivial converter that renders a Path as its plain path string. */
  private static final PathConverter PATH_CONVERTER =
      new PathConverter() {
        @Override
        public String apply(Path path) {
          return path.getPathString();
        }
      };

  private Reporter reporter;
  private BuildEventServiceOptions besOptions;

  @Rule public TemporaryFolder tmp = new TemporaryFolder();

  @Mock public BuildEventStreamOptions bepOptions;
  @Mock public OptionsProvider optionsProvider;
  @Mock public ModuleEnvironment moduleEnvironment;
  @Mock public EventHandler commandLineReporter;
  @Mock public EventBus eventBus;
  @Mock public Clock clock;
  @Mock public Command command;

  /** Wires mocks and stubs the options provider with default option instances. */
  @Before
  public void initMocks() {
    MockitoAnnotations.initMocks(this);
    // Reporter is final and thus can't be mocked.
    reporter = new Reporter(eventBus);
    besOptions = Options.getDefaults(BuildEventServiceOptions.class);
    when(optionsProvider.getOptions(BuildEventStreamOptions.class)).thenReturn(bepOptions);
    when(optionsProvider.getOptions(BuildEventServiceOptions.class)).thenReturn(besOptions);
    when(optionsProvider.getOptions(AuthAndTLSOptions.class))
        .thenReturn(Options.getDefaults(AuthAndTLSOptions.class));
  }

  /** Fails the test if any stubbing was misused during the test body. */
  @After
  public void validateMocks() {
    Mockito.validateMockitoUsage();
  }

  /** The module must expose parseable build-event stream options for the command. */
  @Test
  public void testReturnsBuildEventStreamerOptions() throws Exception {
    BazelBuildEventServiceModule module = new BazelBuildEventServiceModule();
    Iterable<Class<? extends OptionsBase>> commandOptions = module.getCommandOptions(command);
    assertThat(commandOptions).isNotEmpty();
    OptionsParser optionsParser = OptionsParser.newOptionsParser(commandOptions);
    optionsParser.parse(
        "--experimental_build_event_text_file", "/tmp/foo.txt",
        "--experimental_build_event_binary_file", "/tmp/foo.bin",
        "--experimental_build_event_json_file", "/tmp/foo.json");
    BuildEventStreamOptions options = optionsParser.getOptions(BuildEventStreamOptions.class);
    assertThat(options.getBuildEventTextFile()).isEqualTo("/tmp/foo.txt");
    assertThat(options.getBuildEventBinaryFile()).isEqualTo("/tmp/foo.bin");
    assertThat(options.getBuildEventJsonFile()).isEqualTo("/tmp/foo.json");
  }

  /** A text-file option alone must yield exactly one TextFormatFileTransport. */
  @Test
  public void testCreatesStreamerForTextFormatFileTransport() throws Exception {
    when(bepOptions.getBuildEventTextFile()).thenReturn(tmp.newFile().getAbsolutePath());
    BazelBuildEventServiceModule module = new BazelBuildEventServiceModule();
    BuildEventStreamer buildEventStreamer = module.tryCreateStreamer(optionsProvider,
        commandLineReporter, moduleEnvironment, clock, PATH_CONVERTER, reporter, "foo", "bar");
    assertThat(buildEventStreamer).isNotNull();
    // The module must not abort or otherwise touch the environment on success.
    verifyNoMoreInteractions(moduleEnvironment);
    assertThat(FluentIterable.from(buildEventStreamer.getTransports()).transform(GET_CLASS))
        .containsExactly(TextFormatFileTransport.class);
  }

  /** A binary-file option alone must yield exactly one BinaryFormatFileTransport. */
  @Test
  public void testCreatesStreamerForBinaryFormatFileTransport() throws Exception {
    when(bepOptions.getBuildEventBinaryFile()).thenReturn(tmp.newFile().getAbsolutePath());
    BazelBuildEventServiceModule module = new BazelBuildEventServiceModule();
    BuildEventStreamer buildEventStreamer = module.tryCreateStreamer(optionsProvider,
        commandLineReporter, moduleEnvironment, clock, PATH_CONVERTER, reporter, "foo", "bar");
    assertThat(buildEventStreamer).isNotNull();
    verifyNoMoreInteractions(moduleEnvironment);
    assertThat(FluentIterable.from(buildEventStreamer.getTransports()).transform(GET_CLASS))
        .containsExactly(BinaryFormatFileTransport.class);
  }

  /** A JSON-file option alone must yield exactly one JsonFormatFileTransport. */
  @Test
  public void testCreatesStreamerForJsonFormatFileTransport() throws Exception {
    when(bepOptions.getBuildEventJsonFile()).thenReturn(tmp.newFile().getAbsolutePath());
    BazelBuildEventServiceModule module = new BazelBuildEventServiceModule();
    BuildEventStreamer buildEventStreamer = module.tryCreateStreamer(optionsProvider,
        commandLineReporter, moduleEnvironment, clock, PATH_CONVERTER, reporter, "foo", "bar");
    assertThat(buildEventStreamer).isNotNull();
    verifyNoMoreInteractions(moduleEnvironment);
    assertThat(FluentIterable.from(buildEventStreamer.getTransports()).transform(GET_CLASS))
        .containsExactly(JsonFormatFileTransport.class);
  }

  /**
   * Configuring a BES backend must produce a streamer; the backend does not need
   * to be reachable at streamer-creation time.
   */
  @Test
  public void testCreatesStreamerForBesTransport() throws Exception {
    besOptions.besBackend = "does.not.exist:1234";
    BazelBuildEventServiceModule module = new BazelBuildEventServiceModule();
    BuildEventStreamer buildEventStreamer = module.tryCreateStreamer(optionsProvider,
        commandLineReporter, moduleEnvironment, clock, PATH_CONVERTER, reporter, "foo", "bar");
    assertThat(buildEventStreamer).isNotNull();
  }

  /** All four transport options together must yield one transport of each kind. */
  @Test
  public void testCreatesStreamerForAllTransports() throws Exception {
    when(bepOptions.getBuildEventTextFile()).thenReturn(tmp.newFile().getAbsolutePath());
    when(bepOptions.getBuildEventBinaryFile()).thenReturn(tmp.newFile().getAbsolutePath());
    when(bepOptions.getBuildEventJsonFile()).thenReturn(tmp.newFile().getAbsolutePath());
    besOptions.besBackend = "does.not.exist:1234";
    BazelBuildEventServiceModule module = new BazelBuildEventServiceModule();
    BuildEventStreamer buildEventStreamer = module.tryCreateStreamer(optionsProvider,
        commandLineReporter, moduleEnvironment, clock, PATH_CONVERTER, reporter, "foo", "bar");
    assertThat(buildEventStreamer).isNotNull();
    verifyNoMoreInteractions(moduleEnvironment);
    assertThat(FluentIterable.from(buildEventStreamer.getTransports()).transform(GET_CLASS))
        .containsExactly(TextFormatFileTransport.class, BinaryFormatFileTransport.class,
            JsonFormatFileTransport.class, BuildEventServiceTransport.class);
  }

  /** With no transport configured, tryCreateStreamer must return null. */
  @Test
  public void testDoesNotCreatesStreamerWithoutTransports() throws Exception {
    BazelBuildEventServiceModule module = new BazelBuildEventServiceModule();
    BuildEventStreamer buildEventStreamer = module.tryCreateStreamer(optionsProvider,
        commandLineReporter, moduleEnvironment, clock, PATH_CONVERTER, reporter, "foo", "bar");
    assertThat(buildEventStreamer).isNull();
  }
}
package org.apache.lucene.codecs.asserting; /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.io.IOException; import java.util.Iterator; import java.util.NoSuchElementException; import org.apache.lucene.codecs.DocValuesConsumer; import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.DocValuesProducer; import org.apache.lucene.codecs.lucene45.Lucene45DocValuesFormat; import org.apache.lucene.index.AssertingAtomicReader; import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.SegmentReadState; import org.apache.lucene.index.SegmentWriteState; import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.FixedBitSet; import org.apache.lucene.util.OpenBitSet; import org.apache.lucene.util.RamUsageEstimator; /** * Just like {@link Lucene45DocValuesFormat} but with additional asserts. 
*/
public class AssertingDocValuesFormat extends DocValuesFormat {
  // All real encoding/decoding is delegated to this format; this class only
  // wraps producers/consumers with validation asserts.
  private final DocValuesFormat in = new Lucene45DocValuesFormat();

  public AssertingDocValuesFormat() {
    super("Asserting");
  }

  /** Wraps the delegate's consumer so every added field is validated against maxDoc. */
  @Override
  public DocValuesConsumer fieldsConsumer(SegmentWriteState state) throws IOException {
    DocValuesConsumer consumer = in.fieldsConsumer(state);
    assert consumer != null;
    return new AssertingDocValuesConsumer(consumer, state.segmentInfo.getDocCount());
  }

  /** Wraps the delegate's producer so every returned doc-values instance is validated. */
  @Override
  public DocValuesProducer fieldsProducer(SegmentReadState state) throws IOException {
    assert state.fieldInfos.hasDocValues();
    DocValuesProducer producer = in.fieldsProducer(state);
    assert producer != null;
    return new AssertingDocValuesProducer(producer, state.segmentInfo.getDocCount());
  }

  /**
   * Write-side wrapper: checks the Iterables handed to each add*Field method
   * (element counts, ordering, ord ranges, re-iterability) before delegating.
   */
  static class AssertingDocValuesConsumer extends DocValuesConsumer {
    private final DocValuesConsumer in;   // delegate consumer
    private final int maxDoc;             // expected number of per-document values

    AssertingDocValuesConsumer(DocValuesConsumer in, int maxDoc) {
      this.in = in;
      this.maxDoc = maxDoc;
    }

    @Override
    public void addNumericField(FieldInfo field, Iterable<Number> values) throws IOException {
      // One value per document is required; null entries are permitted here.
      int count = 0;
      for (Number v : values) {
        count++;
      }
      assert count == maxDoc;
      // Second pass: the Iterable must be re-iterable and well-behaved.
      checkIterator(values.iterator(), maxDoc, true);
      in.addNumericField(field, values);
    }

    @Override
    public void addBinaryField(FieldInfo field, Iterable<BytesRef> values) throws IOException {
      int count = 0;
      for (BytesRef b : values) {
        // null means "missing"; non-null refs must be internally consistent.
        assert b == null || b.isValid();
        count++;
      }
      assert count == maxDoc;
      checkIterator(values.iterator(), maxDoc, true);
      in.addBinaryField(field, values);
    }

    @Override
    public void addSortedField(FieldInfo field, Iterable<BytesRef> values, Iterable<Number> docToOrd) throws IOException {
      // Terms must be non-null, valid, and strictly increasing.
      int valueCount = 0;
      BytesRef lastValue = null;
      for (BytesRef b : values) {
        assert b != null;
        assert b.isValid();
        if (valueCount > 0) {
          assert b.compareTo(lastValue) > 0;
        }
        // deepCopyOf: the producer may reuse the BytesRef between iterations.
        lastValue = BytesRef.deepCopyOf(b);
        valueCount++;
      }
      assert valueCount <= maxDoc;

      // Each doc maps to an ord in [-1, valueCount); -1 means "missing".
      // Every ord must be referenced by at least one document.
      FixedBitSet seenOrds = new FixedBitSet(valueCount);
      int count = 0;
      for (Number v : docToOrd) {
        assert v != null;
        int ord = v.intValue();
        assert ord >= -1 && ord < valueCount;
        if (ord >= 0) {
          seenOrds.set(ord);
        }
        count++;
      }
      assert count == maxDoc;
      assert seenOrds.cardinality() == valueCount;
      checkIterator(values.iterator(), valueCount, false);
      checkIterator(docToOrd.iterator(), maxDoc, false);
      in.addSortedField(field, values, docToOrd);
    }

    @Override
    public void addSortedSetField(FieldInfo field, Iterable<BytesRef> values, Iterable<Number> docToOrdCount, Iterable<Number> ords) throws IOException {
      // Terms must be non-null, valid, and strictly increasing.
      long valueCount = 0;
      BytesRef lastValue = null;
      for (BytesRef b : values) {
        assert b != null;
        assert b.isValid();
        if (valueCount > 0) {
          assert b.compareTo(lastValue) > 0;
        }
        lastValue = BytesRef.deepCopyOf(b);
        valueCount++;
      }

      // Walk docToOrdCount and ords in lockstep: each doc consumes `count` ords,
      // which must be in range and strictly increasing within the doc.
      int docCount = 0;
      long ordCount = 0;
      OpenBitSet seenOrds = new OpenBitSet(valueCount);
      Iterator<Number> ordIterator = ords.iterator();
      for (Number v : docToOrdCount) {
        assert v != null;
        int count = v.intValue();
        assert count >= 0;
        docCount++;
        ordCount += count;
        long lastOrd = -1;
        for (int i = 0; i < count; i++) {
          Number o = ordIterator.next();
          assert o != null;
          long ord = o.longValue();
          assert ord >= 0 && ord < valueCount;
          assert ord > lastOrd : "ord=" + ord + ",lastOrd=" + lastOrd;
          seenOrds.set(ord);
          lastOrd = ord;
        }
      }
      // The ord stream must be fully consumed, and every term referenced.
      assert ordIterator.hasNext() == false;
      assert docCount == maxDoc;
      assert seenOrds.cardinality() == valueCount;
      checkIterator(values.iterator(), valueCount, false);
      checkIterator(docToOrdCount.iterator(), maxDoc, false);
      checkIterator(ords.iterator(), ordCount, false);
      in.addSortedSetField(field, values, docToOrdCount, ords);
    }

    @Override
    public void close() throws IOException {
      in.close();
    }
  }

  /**
   * Norms-specific consumer: only numeric fields are legal; all other add*Field
   * calls indicate a codec bug and throw IllegalStateException.
   */
  static class AssertingNormsConsumer extends DocValuesConsumer {
    private final DocValuesConsumer in;   // delegate consumer
    private final int maxDoc;             // expected number of per-document norms

    AssertingNormsConsumer(DocValuesConsumer in, int maxDoc) {
      this.in = in;
      this.maxDoc = maxDoc;
    }

    @Override
    public void addNumericField(FieldInfo field, Iterable<Number> values) throws IOException {
      // Norms must supply exactly one non-null value per document.
      int count = 0;
      for (Number v : values) {
        assert v != null;
        count++;
      }
      assert count == maxDoc;
      checkIterator(values.iterator(), maxDoc, false);
      in.addNumericField(field, values);
    }

    @Override
    public void close() throws IOException {
      in.close();
    }

    @Override
    public void addBinaryField(FieldInfo field, Iterable<BytesRef> values) throws IOException {
      throw new IllegalStateException();
    }

    @Override
    public void addSortedField(FieldInfo field, Iterable<BytesRef> values, Iterable<Number> docToOrd) throws IOException {
      throw new IllegalStateException();
    }

    @Override
    public void addSortedSetField(FieldInfo field, Iterable<BytesRef> values, Iterable<Number> docToOrdCount, Iterable<Number> ords) throws IOException {
      throw new IllegalStateException();
    }
  }

  /**
   * Asserts that an iterator yields exactly {@code expectedSize} elements
   * (non-null unless {@code allowNull}), rejects {@code remove()}, and throws
   * NoSuchElementException once exhausted.
   */
  private static <T> void checkIterator(Iterator<T> iterator, long expectedSize, boolean allowNull) {
    for (long i = 0; i < expectedSize; i++) {
      boolean hasNext = iterator.hasNext();
      assert hasNext;
      T v = iterator.next();
      assert allowNull || v != null;
      // A doc-values iterator must be read-only.
      try {
        iterator.remove();
        throw new AssertionError("broken iterator (supports remove): " + iterator);
      } catch (UnsupportedOperationException expected) {
        // ok
      }
    }
    assert !iterator.hasNext();
    // Exhausted iterators must fail loudly, not return garbage.
    try {
      iterator.next();
      throw new AssertionError("broken iterator (allows next() when hasNext==false) " + iterator);
    } catch (NoSuchElementException expected) {
      // ok
    }
  }

  /**
   * Read-side wrapper: asserts the requested field actually has the matching
   * doc-values type, then wraps the returned instance in an Asserting* view.
   */
  static class AssertingDocValuesProducer extends DocValuesProducer {
    private final DocValuesProducer in;   // delegate producer
    private final int maxDoc;             // segment doc count, used to bound reads

    AssertingDocValuesProducer(DocValuesProducer in, int maxDoc) {
      this.in = in;
      this.maxDoc = maxDoc;
    }

    @Override
    public NumericDocValues getNumeric(FieldInfo field) throws IOException {
      // Numeric doc values also back norms, hence the getNormType alternative.
      assert field.getDocValuesType() == FieldInfo.DocValuesType.NUMERIC ||
             field.getNormType() == FieldInfo.DocValuesType.NUMERIC;
      NumericDocValues values = in.getNumeric(field);
      assert values != null;
      return new AssertingAtomicReader.AssertingNumericDocValues(values, maxDoc);
    }

    @Override
    public BinaryDocValues getBinary(FieldInfo field) throws IOException {
      assert field.getDocValuesType() == FieldInfo.DocValuesType.BINARY;
      BinaryDocValues values = in.getBinary(field);
      assert values != null;
      return new AssertingAtomicReader.AssertingBinaryDocValues(values, maxDoc);
    }

    @Override
    public SortedDocValues getSorted(FieldInfo field) throws IOException {
      assert field.getDocValuesType() == FieldInfo.DocValuesType.SORTED;
      SortedDocValues values = in.getSorted(field);
      assert values != null;
      return new AssertingAtomicReader.AssertingSortedDocValues(values, maxDoc);
    }

    @Override
    public SortedSetDocValues getSortedSet(FieldInfo field) throws IOException {
      assert field.getDocValuesType() == FieldInfo.DocValuesType.SORTED_SET;
      SortedSetDocValues values = in.getSortedSet(field);
      assert values != null;
      return new AssertingAtomicReader.AssertingSortedSetDocValues(values, maxDoc);
    }

    @Override
    public Bits getDocsWithField(FieldInfo field) throws IOException {
      assert field.getDocValuesType() != null;
      Bits bits = in.getDocsWithField(field);
      assert bits != null;
      assert bits.length() == maxDoc;
      return new AssertingAtomicReader.AssertingBits(bits);
    }

    @Override
    public void close() throws IOException {
      in.close();
    }

    @Override
    public long ramBytesUsed() {
      return in.ramBytesUsed();
    }
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.codec.prefixtree.encode.column; import java.io.IOException; import java.io.OutputStream; import java.util.ArrayList; import java.util.List; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta; import org.apache.hadoop.hbase.codec.prefixtree.encode.other.ColumnNodeType; import org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize.Tokenizer; import org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize.TokenizerNode; import org.apache.hadoop.hbase.util.CollectionUtils; import org.apache.hadoop.hbase.util.vint.UFIntTool; import com.google.common.collect.Lists; /** * <p> * Takes the tokenized family or qualifier data and flattens it into a stream of bytes. The family * section is written after the row section, and qualifier section after family section. * </p> * The family and qualifier tries, or "column tries", are structured differently than the row trie. * The trie cannot be reassembled without external data about the offsets of the leaf nodes, and * these external pointers are stored in the nubs and leaves of the row trie. 
For each cell in a row, the row trie contains a list of offsets into the column
 * sections (along with pointers to timestamps and other per-cell fields). These offsets point to
 * the last column node/token that comprises the column name. To assemble the column name, the trie
 * is traversed in reverse (right to left), with the rightmost tokens pointing to the start of
 * their "parent" node, which is the node to the left.
 * <p>
 * This choice was made to reduce the size of the column trie by storing the minimum amount of
 * offset data. As a result, to find a specific qualifier within a row, you must do a binary
 * search of the column nodes, reassembling each one as you search. Future versions of the
 * PrefixTree might encode the columns in both a forward and reverse trie, which would convert
 * binary searches into more efficient trie searches, beneficial for wide rows.
 * </p>
 */
@InterfaceAudience.Private
public class ColumnSectionWriter {

  public static final int EXPECTED_NUBS_PLUS_LEAVES = 100;

  /****************** fields ****************************/

  private PrefixTreeBlockMeta blockMeta;      // block-level metadata updated with widths/offsets
  private ColumnNodeType nodeType;            // FAMILY, QUALIFIER, or TAGS section
  private Tokenizer tokenizer;                // trie of the column names to be flattened
  private int numBytes = 0;                   // total serialized size, computed by compile()
  private ArrayList<TokenizerNode> nonLeaves; // inner trie nodes, filled by compilerInternals()
  private ArrayList<TokenizerNode> leaves;    // leaf trie nodes
  private ArrayList<TokenizerNode> allNodes;  // nonLeaves followed by leaves
  private ArrayList<ColumnNodeWriter> columnNodeWriters; // one writer per node, same order
  private List<Integer> outputArrayOffsets;   // byte offset of each node in the output

  /*********************** construct *********************/

  public ColumnSectionWriter() {
    this.nonLeaves = Lists.newArrayList();
    this.leaves = Lists.newArrayList();
    this.outputArrayOffsets = Lists.newArrayList();
  }

  public ColumnSectionWriter(PrefixTreeBlockMeta blockMeta, Tokenizer builder,
      ColumnNodeType nodeType) {
    this();// init collections
    reconstruct(blockMeta, builder, nodeType);
  }

  /** Re-points this (reusable) writer at a new block/tokenizer without reallocating. */
  public void reconstruct(PrefixTreeBlockMeta blockMeta, Tokenizer builder,
      ColumnNodeType nodeType) {
    this.blockMeta = blockMeta;
    this.tokenizer = builder;
    this.nodeType = nodeType;
  }

  /** Clears per-block state so the writer can be reused for the next block. */
  public void reset() {
    numBytes = 0;
    nonLeaves.clear();
    leaves.clear();
    outputArrayOffsets.clear();
  }

  /****************** methods *******************************/

  /**
   * Records the section's max element length in the block metadata (families are
   * capped at Byte.MAX_VALUE and need no record), then flattens the trie.
   *
   * @return this, for chaining
   */
  public ColumnSectionWriter compile() {
    if (this.nodeType == ColumnNodeType.FAMILY) {
      // do nothing. max family length fixed at Byte.MAX_VALUE
    } else if (this.nodeType == ColumnNodeType.QUALIFIER) {
      blockMeta.setMaxQualifierLength(tokenizer.getMaxElementLength());
    } else {
      blockMeta.setMaxTagsLength(tokenizer.getMaxElementLength());
    }
    compilerInternals();
    return this;
  }

  /**
   * Flattens the tokenizer trie into an ordered list of ColumnNodeWriters,
   * determines the smallest parent-offset integer width that can address the
   * whole section, and assigns each node its byte offset in the output.
   */
  protected void compilerInternals() {
    tokenizer.setNodeFirstInsertionIndexes();
    // Non-leaves first, then leaves; allNodes preserves this order.
    tokenizer.appendNodes(nonLeaves, true, false);
    tokenizer.appendNodes(leaves, false, true);
    allNodes = Lists.newArrayListWithCapacity(nonLeaves.size() + leaves.size());
    allNodes.addAll(nonLeaves);
    allNodes.addAll(leaves);

    columnNodeWriters = Lists.newArrayListWithCapacity(CollectionUtils.nullSafeSize(allNodes));
    for (int i = 0; i < allNodes.size(); ++i) {
      TokenizerNode node = allNodes.get(i);
      columnNodeWriters.add(new ColumnNodeWriter(blockMeta, node, this.nodeType));
    }

    // leaf widths are known at this point, so add them up
    int totalBytesWithoutOffsets = 0;
    for (int i = allNodes.size() - 1; i >= 0; --i) {
      ColumnNodeWriter columnNodeWriter = columnNodeWriters.get(i);
      // leaves store all but their first token byte
      totalBytesWithoutOffsets += columnNodeWriter.getWidthUsingPlaceholderForOffsetWidth(0);
    }

    // figure out how wide our offset FInts are: widening the offsets grows the
    // section, which may in turn require wider offsets, so iterate to a fixed point.
    int parentOffsetWidth = 0;
    while (true) {
      ++parentOffsetWidth;
      int numBytesFinder = totalBytesWithoutOffsets + parentOffsetWidth * allNodes.size();
      if (numBytesFinder < UFIntTool.maxValueForNumBytes(parentOffsetWidth)) {
        numBytes = numBytesFinder;
        break;
      }// it fits
    }
    if (this.nodeType == ColumnNodeType.FAMILY) {
      blockMeta.setFamilyOffsetWidth(parentOffsetWidth);
    } else if (this.nodeType == ColumnNodeType.QUALIFIER) {
      blockMeta.setQualifierOffsetWidth(parentOffsetWidth);
    } else {
      blockMeta.setTagsOffsetWidth(parentOffsetWidth);
    }

    // Second pass: assign each node its final byte offset and point it at its
    // parent's offset (roots point at 0). Parents precede children in allNodes,
    // so the parent's offset is already set when a child needs it.
    int forwardIndex = 0;
    for (int i = 0; i < allNodes.size(); ++i) {
      TokenizerNode node = allNodes.get(i);
      ColumnNodeWriter columnNodeWriter = columnNodeWriters.get(i);
      int fullNodeWidth = columnNodeWriter
          .getWidthUsingPlaceholderForOffsetWidth(parentOffsetWidth);
      node.setOutputArrayOffset(forwardIndex);
      columnNodeWriter.setTokenBytes(node.getToken());
      if (node.isRoot()) {
        columnNodeWriter.setParentStartPosition(0);
      } else {
        columnNodeWriter.setParentStartPosition(node.getParent().getOutputArrayOffset());
      }
      forwardIndex += fullNodeWidth;
    }

    tokenizer.appendOutputArrayOffsets(outputArrayOffsets);
  }

  /**
   * Serializes all compiled column nodes, in order, to the given stream.
   * Must be called after {@link #compile()}.
   */
  public void writeBytes(OutputStream os) throws IOException {
    for (ColumnNodeWriter columnNodeWriter : columnNodeWriters) {
      columnNodeWriter.writeBytes(os);
    }
  }

  /************* get/set **************************/

  public ArrayList<ColumnNodeWriter> getColumnNodeWriters() {
    return columnNodeWriters;
  }

  public int getNumBytes() {
    return numBytes;
  }

  /** Byte offset of the node at the given sorted index within this section. */
  public int getOutputArrayOffset(int sortedIndex) {
    return outputArrayOffsets.get(sortedIndex);
  }

  public ArrayList<TokenizerNode> getNonLeaves() {
    return nonLeaves;
  }

  public ArrayList<TokenizerNode> getLeaves() {
    return leaves;
  }
}
/* * Copyright 2012-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot.autoconfigure.freemarker; import java.io.StringWriter; import java.util.EnumSet; import java.util.Locale; import java.util.Map; import jakarta.servlet.DispatcherType; import jakarta.servlet.http.HttpServletRequest; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Test; import org.springframework.boot.autoconfigure.ImportAutoConfiguration; import org.springframework.boot.autoconfigure.context.PropertyPlaceholderAutoConfiguration; import org.springframework.boot.test.util.TestPropertyValues; import org.springframework.boot.web.servlet.FilterRegistrationBean; import org.springframework.boot.web.servlet.context.AnnotationConfigServletWebApplicationContext; import org.springframework.boot.web.servlet.filter.OrderedCharacterEncodingFilter; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; import org.springframework.mock.web.MockHttpServletRequest; import org.springframework.mock.web.MockHttpServletResponse; import org.springframework.mock.web.MockServletContext; import org.springframework.web.servlet.View; import org.springframework.web.servlet.resource.ResourceUrlEncodingFilter; import org.springframework.web.servlet.support.RequestContext; import 
org.springframework.web.servlet.view.AbstractTemplateViewResolver; import org.springframework.web.servlet.view.freemarker.FreeMarkerConfig; import org.springframework.web.servlet.view.freemarker.FreeMarkerConfigurer; import org.springframework.web.servlet.view.freemarker.FreeMarkerViewResolver; import static org.assertj.core.api.Assertions.assertThat; /** * Tests for {@link FreeMarkerAutoConfiguration} Servlet support. * * @author Andy Wilkinson * @author Kazuki Shimizu */ class FreeMarkerAutoConfigurationServletIntegrationTests { private AnnotationConfigServletWebApplicationContext context; @AfterEach void close() { if (this.context != null) { this.context.close(); } } @Test void defaultConfiguration() { load(); assertThat(this.context.getBean(FreeMarkerViewResolver.class)).isNotNull(); assertThat(this.context.getBean(FreeMarkerConfigurer.class)).isNotNull(); assertThat(this.context.getBean(FreeMarkerConfig.class)).isNotNull(); assertThat(this.context.getBean(freemarker.template.Configuration.class)).isNotNull(); } @Test void defaultViewResolution() throws Exception { load(); MockHttpServletResponse response = render("home"); String result = response.getContentAsString(); assertThat(result).contains("home"); assertThat(response.getContentType()).isEqualTo("text/html;charset=UTF-8"); } @Test void customContentType() throws Exception { load("spring.freemarker.contentType:application/json"); MockHttpServletResponse response = render("home"); String result = response.getContentAsString(); assertThat(result).contains("home"); assertThat(response.getContentType()).isEqualTo("application/json;charset=UTF-8"); } @Test void customPrefix() throws Exception { load("spring.freemarker.prefix:prefix/"); MockHttpServletResponse response = render("prefixed"); String result = response.getContentAsString(); assertThat(result).contains("prefixed"); } @Test void customSuffix() throws Exception { load("spring.freemarker.suffix:.freemarker"); MockHttpServletResponse response = 
render("suffixed"); String result = response.getContentAsString(); assertThat(result).contains("suffixed"); } @Test void customTemplateLoaderPath() throws Exception { load("spring.freemarker.templateLoaderPath:classpath:/custom-templates/"); MockHttpServletResponse response = render("custom"); String result = response.getContentAsString(); assertThat(result).contains("custom"); } @Test void disableCache() { load("spring.freemarker.cache:false"); assertThat(this.context.getBean(FreeMarkerViewResolver.class).getCacheLimit()).isEqualTo(0); } @Test void allowSessionOverride() { load("spring.freemarker.allow-session-override:true"); AbstractTemplateViewResolver viewResolver = this.context.getBean(FreeMarkerViewResolver.class); assertThat(viewResolver).hasFieldOrPropertyWithValue("allowSessionOverride", true); } @SuppressWarnings("deprecation") @Test void customFreeMarkerSettings() { load("spring.freemarker.settings.boolean_format:yup,nope"); assertThat(this.context.getBean(FreeMarkerConfigurer.class).getConfiguration().getSetting("boolean_format")) .isEqualTo("yup,nope"); } @Test void renderTemplate() throws Exception { load(); FreeMarkerConfigurer freemarker = this.context.getBean(FreeMarkerConfigurer.class); StringWriter writer = new StringWriter(); freemarker.getConfiguration().getTemplate("message.ftlh").process(new DataModel(), writer); assertThat(writer.toString()).contains("Hello World"); } @Test void registerResourceHandlingFilterDisabledByDefault() { load(); assertThat(this.context.getBeansOfType(FilterRegistrationBean.class)).isEmpty(); } @Test void registerResourceHandlingFilterOnlyIfResourceChainIsEnabled() { load("spring.web.resources.chain.enabled:true"); FilterRegistrationBean<?> registration = this.context.getBean(FilterRegistrationBean.class); assertThat(registration.getFilter()).isInstanceOf(ResourceUrlEncodingFilter.class); assertThat(registration).hasFieldOrPropertyWithValue("dispatcherTypes", EnumSet.of(DispatcherType.REQUEST, 
DispatcherType.ERROR)); } @Test @SuppressWarnings("rawtypes") void registerResourceHandlingFilterWithOtherRegistrationBean() { // gh-14897 load(FilterRegistrationOtherConfiguration.class, "spring.web.resources.chain.enabled:true"); Map<String, FilterRegistrationBean> beans = this.context.getBeansOfType(FilterRegistrationBean.class); assertThat(beans).hasSize(2); FilterRegistrationBean registration = beans.values().stream() .filter((r) -> r.getFilter() instanceof ResourceUrlEncodingFilter).findFirst().get(); assertThat(registration).hasFieldOrPropertyWithValue("dispatcherTypes", EnumSet.of(DispatcherType.REQUEST, DispatcherType.ERROR)); } @Test @SuppressWarnings("rawtypes") void registerResourceHandlingFilterWithResourceRegistrationBean() { // gh-14926 load(FilterRegistrationResourceConfiguration.class, "spring.web.resources.chain.enabled:true"); Map<String, FilterRegistrationBean> beans = this.context.getBeansOfType(FilterRegistrationBean.class); assertThat(beans).hasSize(1); FilterRegistrationBean registration = beans.values().stream() .filter((r) -> r.getFilter() instanceof ResourceUrlEncodingFilter).findFirst().get(); assertThat(registration).hasFieldOrPropertyWithValue("dispatcherTypes", EnumSet.of(DispatcherType.INCLUDE)); } private void load(String... env) { load(BaseConfiguration.class, env); } private void load(Class<?> config, String... 
env) { this.context = new AnnotationConfigServletWebApplicationContext(); this.context.setServletContext(new MockServletContext()); TestPropertyValues.of(env).applyTo(this.context); this.context.register(config); this.context.refresh(); } private MockHttpServletResponse render(String viewName) throws Exception { FreeMarkerViewResolver resolver = this.context.getBean(FreeMarkerViewResolver.class); View view = resolver.resolveViewName(viewName, Locale.UK); assertThat(view).isNotNull(); HttpServletRequest request = new MockHttpServletRequest(); request.setAttribute(RequestContext.WEB_APPLICATION_CONTEXT_ATTRIBUTE, this.context); MockHttpServletResponse response = new MockHttpServletResponse(); view.render(null, request, response); return response; } @Configuration(proxyBeanMethods = false) @ImportAutoConfiguration({ FreeMarkerAutoConfiguration.class, PropertyPlaceholderAutoConfiguration.class }) static class BaseConfiguration { } @Configuration(proxyBeanMethods = false) @Import(BaseConfiguration.class) static class FilterRegistrationResourceConfiguration { @Bean FilterRegistrationBean<ResourceUrlEncodingFilter> filterRegistration() { FilterRegistrationBean<ResourceUrlEncodingFilter> bean = new FilterRegistrationBean<>( new ResourceUrlEncodingFilter()); bean.setDispatcherTypes(EnumSet.of(DispatcherType.INCLUDE)); return bean; } } @Configuration(proxyBeanMethods = false) @Import(BaseConfiguration.class) static class FilterRegistrationOtherConfiguration { @Bean FilterRegistrationBean<OrderedCharacterEncodingFilter> filterRegistration() { return new FilterRegistrationBean<>(new OrderedCharacterEncodingFilter()); } } public static class DataModel { public String getGreeting() { return "Hello World"; } } }
/* * Copyright 2012 Matt Corallo. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.usecredits.credits.store; import net.usecredits.credits.core.*; import com.google.common.collect.Lists; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.math.BigInteger; import java.sql.*; import java.util.LinkedList; import java.util.List; // Originally written for Apache Derby, but its DELETE (and general) performance was awful /** * A full pruned block store using the H2 pure-java embedded database. * * Note that because of the heavy delete load on the database, during IBD, * you may see the database files grow quite large (around 1.5G). * H2 automatically frees some space at shutdown, so close()ing the database * decreases the space usage somewhat (to only around 1.3G). 
*/ public class H2FullPrunedBlockStore implements FullPrunedBlockStore { private static final Logger log = LoggerFactory.getLogger(H2FullPrunedBlockStore.class); private Sha256Hash chainHeadHash; private StoredBlock chainHeadBlock; private Sha256Hash verifiedChainHeadHash; private StoredBlock verifiedChainHeadBlock; private NetworkParameters params; private ThreadLocal<Connection> conn; private List<Connection> allConnections; private String connectionURL; private int fullStoreDepth; static final String driver = "org.h2.Driver"; static final String CREATE_SETTINGS_TABLE = "CREATE TABLE settings ( " + "name VARCHAR(32) NOT NULL CONSTRAINT settings_pk PRIMARY KEY," + "value BLOB" + ")"; static final String CHAIN_HEAD_SETTING = "chainhead"; static final String VERIFIED_CHAIN_HEAD_SETTING = "verifiedchainhead"; static final String CREATE_HEADERS_TABLE = "CREATE TABLE headers ( " + "hash BINARY(28) NOT NULL CONSTRAINT headers_pk PRIMARY KEY," + "chainWork BLOB NOT NULL," + "height INT NOT NULL," + "header BLOB NOT NULL," + "wasUndoable BOOL NOT NULL" + ")"; static final String CREATE_UNDOABLE_TABLE = "CREATE TABLE undoableBlocks ( " + "hash BINARY(28) NOT NULL CONSTRAINT undoableBlocks_pk PRIMARY KEY," + "height INT NOT NULL," + "txOutChanges BLOB," + "transactions BLOB" + ")"; static final String CREATE_UNDOABLE_TABLE_INDEX = "CREATE INDEX heightIndex ON undoableBlocks (height)"; static final String CREATE_OPEN_OUTPUT_INDEX_TABLE = "CREATE TABLE openOutputsIndex (" + "hash BINARY(32) NOT NULL CONSTRAINT openOutputsIndex_pk PRIMARY KEY," + "height INT NOT NULL," + "id BIGINT NOT NULL AUTO_INCREMENT" + ")"; static final String CREATE_OPEN_OUTPUT_TABLE = "CREATE TABLE openOutputs (" + "id BIGINT NOT NULL," + "index INT NOT NULL," + "value BLOB NOT NULL," + "scriptBytes BLOB NOT NULL," + "PRIMARY KEY (id, index)," + "CONSTRAINT openOutputs_fk FOREIGN KEY (id) REFERENCES openOutputsIndex(id)" + ")"; /** * Creates a new H2FullPrunedBlockStore * @param params A copy of the 
NetworkParameters used * @param dbName The path to the database on disk * @param fullStoreDepth The number of blocks of history stored in full (something like 1000 is pretty safe) * @throws BlockStoreException if the database fails to open for any reason */ public H2FullPrunedBlockStore(NetworkParameters params, String dbName, int fullStoreDepth) throws BlockStoreException { this.params = params; this.fullStoreDepth = fullStoreDepth; connectionURL = "jdbc:h2:" + dbName + ";create=true"; conn = new ThreadLocal<Connection>(); allConnections = new LinkedList<Connection>(); try { Class.forName(driver); log.info(driver + " loaded. "); } catch (java.lang.ClassNotFoundException e) { log.error("check CLASSPATH for H2 jar ", e); } maybeConnect(); try { // Create tables if needed if (!tableExists("settings")) createTables(); initFromDatabase(); } catch (SQLException e) { throw new BlockStoreException(e); } } /** * Creates a new H2FullPrunedBlockStore with the given cache size * @param params A copy of the NetworkParameters used * @param dbName The path to the database on disk * @param fullStoreDepth The number of blocks of history stored in full (something like 1000 is pretty safe) * @param cacheSize The number of kilobytes to dedicate to H2 Cache (the default value of 16MB (16384) is a safe bet * to achieve good performance/cost when importing blocks from disk, past 32MB makes little sense, * and below 4MB sees a sharp drop in performance) * @throws BlockStoreException if the database fails to open for any reason */ public H2FullPrunedBlockStore(NetworkParameters params, String dbName, int fullStoreDepth, int cacheSize) throws BlockStoreException { this(params, dbName, fullStoreDepth); try { Statement s = conn.get().createStatement(); s.executeUpdate("SET CACHE_SIZE " + cacheSize); s.close(); } catch (SQLException e) { throw new BlockStoreException(e); } } private synchronized void maybeConnect() throws BlockStoreException { try { if (conn.get() != null) return; 
conn.set(DriverManager.getConnection(connectionURL)); allConnections.add(conn.get()); log.info("Made a new connection to database " + connectionURL); } catch (SQLException ex) { throw new BlockStoreException(ex); } } public synchronized void close() { for (Connection conn : allConnections) { try { conn.rollback(); } catch (SQLException ex) { throw new RuntimeException(ex); } } allConnections.clear(); } public void resetStore() throws BlockStoreException { maybeConnect(); try { Statement s = conn.get().createStatement(); s.executeUpdate("DROP TABLE settings"); s.executeUpdate("DROP TABLE headers"); s.executeUpdate("DROP TABLE undoableBlocks"); s.executeUpdate("DROP TABLE openOutputs"); s.executeUpdate("DROP TABLE openOutputsIndex"); s.close(); createTables(); initFromDatabase(); } catch (SQLException ex) { throw new RuntimeException(ex); } } private void createTables() throws SQLException, BlockStoreException { Statement s = conn.get().createStatement(); log.debug("H2FullPrunedBlockStore : CREATE headers table"); s.executeUpdate(CREATE_HEADERS_TABLE); log.debug("H2FullPrunedBlockStore : CREATE settings table"); s.executeUpdate(CREATE_SETTINGS_TABLE); log.debug("H2FullPrunedBlockStore : CREATE undoable block table"); s.executeUpdate(CREATE_UNDOABLE_TABLE); log.debug("H2FullPrunedBlockStore : CREATE undoable block index"); s.executeUpdate(CREATE_UNDOABLE_TABLE_INDEX); log.debug("H2FullPrunedBlockStore : CREATE open output index table"); s.executeUpdate(CREATE_OPEN_OUTPUT_INDEX_TABLE); log.debug("H2FullPrunedBlockStore : CREATE open output table"); s.executeUpdate(CREATE_OPEN_OUTPUT_TABLE); s.executeUpdate("INSERT INTO settings(name, value) VALUES('" + CHAIN_HEAD_SETTING + "', NULL)"); s.executeUpdate("INSERT INTO settings(name, value) VALUES('" + VERIFIED_CHAIN_HEAD_SETTING + "', NULL)"); s.close(); createNewStore(params); } private void initFromDatabase() throws SQLException, BlockStoreException { Statement s = conn.get().createStatement(); ResultSet rs = 
s.executeQuery("SELECT value FROM settings WHERE name = '" + CHAIN_HEAD_SETTING + "'"); if (!rs.next()) { throw new BlockStoreException("corrupt H2 block store - no chain head pointer"); } Sha256Hash hash = new Sha256Hash(rs.getBytes(1)); rs.close(); this.chainHeadBlock = get(hash); this.chainHeadHash = hash; if (this.chainHeadBlock == null) { throw new BlockStoreException("corrupt H2 block store - head block not found"); } rs = s.executeQuery("SELECT value FROM settings WHERE name = '" + VERIFIED_CHAIN_HEAD_SETTING + "'"); if (!rs.next()) { throw new BlockStoreException("corrupt H2 block store - no verified chain head pointer"); } hash = new Sha256Hash(rs.getBytes(1)); rs.close(); s.close(); this.verifiedChainHeadBlock = get(hash); this.verifiedChainHeadHash = hash; if (this.verifiedChainHeadBlock == null) { throw new BlockStoreException("corrupt H2 block store - verified head block not found"); } } private void createNewStore(NetworkParameters params) throws BlockStoreException { try { // Set up the genesis block. When we start out fresh, it is by // definition the top of the chain. StoredBlock storedGenesisHeader = new StoredBlock(params.genesisBlock.cloneAsHeader(), params.genesisBlock.getWork(), 0); // The coinbase in the genesis block is not spendable. This is because of how the reference client inits // its database - the genesis transaction isn't actually in the db so its spent flags can never be updated. List<Transaction> genesisTransactions = Lists.newLinkedList(); StoredUndoableBlock storedGenesis = new StoredUndoableBlock(params.genesisBlock.getHash(), genesisTransactions); put(storedGenesisHeader, storedGenesis); setChainHead(storedGenesisHeader); setVerifiedChainHead(storedGenesisHeader); } catch (VerificationException e) { throw new RuntimeException(e); // Cannot happen. 
} } private boolean tableExists(String table) throws SQLException { Statement s = conn.get().createStatement(); try { ResultSet results = s.executeQuery("SELECT * FROM " + table + " WHERE 1 = 2"); results.close(); return true; } catch (SQLException ex) { return false; } finally { s.close(); } } /** * Dumps information about the size of actual data in the database to standard output * The only truly useless data counted is printed in the form "N in id indexes" * This does not take database indexes into account */ public void dumpSizes() throws SQLException, BlockStoreException { maybeConnect(); Statement s = conn.get().createStatement(); long size = 0; long totalSize = 0; int count = 0; ResultSet rs = s.executeQuery("SELECT name, value FROM settings"); while (rs.next()) { size += rs.getString(1).length(); size += rs.getBytes(2).length; count++; } rs.close(); System.out.printf("Settings size: %d, count: %d, average size: %f%n", size, count, (double)size/count); totalSize += size; size = 0; count = 0; rs = s.executeQuery("SELECT chainWork, header FROM headers"); while (rs.next()) { size += 28; // hash size += rs.getBytes(1).length; size += 4; // height size += rs.getBytes(2).length; count++; } rs.close(); System.out.printf("Headers size: %d, count: %d, average size: %f%n", size, count, (double)size/count); totalSize += size; size = 0; count = 0; rs = s.executeQuery("SELECT txOutChanges, transactions FROM undoableBlocks"); while (rs.next()) { size += 28; // hash size += 4; // height byte[] txOutChanges = rs.getBytes(1); byte[] transactions = rs.getBytes(2); if (txOutChanges == null) size += transactions.length; else size += txOutChanges.length; // size += the space to represent NULL count++; } rs.close(); System.out.printf("Undoable Blocks size: %d, count: %d, average size: %f%n", size, count, (double)size/count); totalSize += size; size = 0; count = 0; rs = s.executeQuery("SELECT id FROM openOutputsIndex"); while (rs.next()) { size += 32; // hash size += 4; // height 
size += 8; // id count++; } rs.close(); System.out.printf("Open Outputs Index size: %d, count: %d, size in id indexes: %d%n", size, count, count * 8); totalSize += size; size = 0; count = 0; long scriptSize = 0; rs = s.executeQuery("SELECT value, scriptBytes FROM openOutputs"); while (rs.next()) { size += 8; // id size += 4; // index size += rs.getBytes(1).length; size += rs.getBytes(2).length; scriptSize += rs.getBytes(2).length; count++; } rs.close(); System.out.printf("Open Outputs size: %d, count: %d, average size: %f, average script size: %f (%d in id indexes)%n", size, count, (double)size/count, (double)scriptSize/count, count * 8); totalSize += size; System.out.println("Total Size: " + totalSize); s.close(); } private void putUpdateStoredBlock(StoredBlock storedBlock, boolean wasUndoable) throws SQLException { try { PreparedStatement s = conn.get().prepareStatement("INSERT INTO headers(hash, chainWork, height, header, wasUndoable)" + " VALUES(?, ?, ?, ?, ?)"); // We skip the first 4 bytes because (on prodnet) the minimum target has 4 0-bytes byte[] hashBytes = new byte[28]; System.arraycopy(storedBlock.getHeader().getHash().getBytes(), 3, hashBytes, 0, 28); s.setBytes(1, hashBytes); s.setBytes(2, storedBlock.getChainWork().toByteArray()); s.setInt(3, storedBlock.getHeight()); s.setBytes(4, storedBlock.getHeader().unsafeLitecoinSerialize()); s.setBoolean(5, wasUndoable); s.executeUpdate(); s.close(); } catch (SQLException e) { // It is possible we try to add a duplicate StoredBlock if we upgraded // In that case, we just update the entry to mark it wasUndoable if (e.getErrorCode() != 23505 || !wasUndoable) throw e; PreparedStatement s = conn.get().prepareStatement("UPDATE headers SET wasUndoable=? 
WHERE hash=?"); s.setBoolean(1, true); // We skip the first 4 bytes because (on prodnet) the minimum target has 4 0-bytes byte[] hashBytes = new byte[28]; System.arraycopy(storedBlock.getHeader().getHash().getBytes(), 3, hashBytes, 0, 28); s.setBytes(2, hashBytes); s.executeUpdate(); s.close(); } } public void put(StoredBlock storedBlock) throws BlockStoreException { maybeConnect(); try { putUpdateStoredBlock(storedBlock, false); } catch (SQLException e) { throw new BlockStoreException(e); } } public void put(StoredBlock storedBlock, StoredUndoableBlock undoableBlock) throws BlockStoreException { maybeConnect(); // We skip the first 4 bytes because (on prodnet) the minimum target has 4 0-bytes byte[] hashBytes = new byte[28]; System.arraycopy(storedBlock.getHeader().getHash().getBytes(), 3, hashBytes, 0, 28); int height = storedBlock.getHeight(); byte[] transactions = null; byte[] txOutChanges = null; try { ByteArrayOutputStream bos = new ByteArrayOutputStream(); if (undoableBlock.getTxOutChanges() != null) { undoableBlock.getTxOutChanges().serializeToStream(bos); txOutChanges = bos.toByteArray(); } else { int numTxn = undoableBlock.getTransactions().size(); bos.write((int) (0xFF & (numTxn >> 0))); bos.write((int) (0xFF & (numTxn >> 8))); bos.write((int) (0xFF & (numTxn >> 16))); bos.write((int) (0xFF & (numTxn >> 24))); for (Transaction tx : undoableBlock.getTransactions()) tx.litecoinSerialize(bos); transactions = bos.toByteArray(); } bos.close(); } catch (IOException e) { throw new BlockStoreException(e); } try { try { PreparedStatement s = conn.get().prepareStatement("INSERT INTO undoableBlocks(hash, height, txOutChanges, transactions)" + " VALUES(?, ?, ?, ?)"); s.setBytes(1, hashBytes); s.setInt(2, height); if (transactions == null) { s.setBytes(3, txOutChanges); s.setNull(4, Types.BLOB); } else { s.setNull(3, Types.BLOB); s.setBytes(4, transactions); } s.executeUpdate(); s.close(); try { putUpdateStoredBlock(storedBlock, true); } catch (SQLException e) { 
throw new BlockStoreException(e); } } catch (SQLException e) { if (e.getErrorCode() != 23505) throw new BlockStoreException(e); // There is probably an update-or-insert statement, but it wasn't obvious from the docs PreparedStatement s = conn.get().prepareStatement("UPDATE undoableBlocks SET txOutChanges=?, transactions=?" + " WHERE hash = ?"); s.setBytes(3, hashBytes); if (transactions == null) { s.setBytes(1, txOutChanges); s.setNull(2, Types.BLOB); } else { s.setNull(1, Types.BLOB); s.setBytes(2, transactions); } s.executeUpdate(); s.close(); } } catch (SQLException ex) { throw new BlockStoreException(ex); } } public StoredBlock get(Sha256Hash hash, boolean wasUndoableOnly) throws BlockStoreException { // Optimize for chain head if (chainHeadHash != null && chainHeadHash.equals(hash)) return chainHeadBlock; if (verifiedChainHeadHash != null && verifiedChainHeadHash.equals(hash)) return verifiedChainHeadBlock; maybeConnect(); PreparedStatement s = null; try { s = conn.get() .prepareStatement("SELECT chainWork, height, header, wasUndoable FROM headers WHERE hash = ?"); // We skip the first 4 bytes because (on prodnet) the minimum target has 4 0-bytes byte[] hashBytes = new byte[28]; System.arraycopy(hash.getBytes(), 3, hashBytes, 0, 28); s.setBytes(1, hashBytes); ResultSet results = s.executeQuery(); if (!results.next()) { return null; } // Parse it. if (wasUndoableOnly && !results.getBoolean(4)) return null; BigInteger chainWork = new BigInteger(results.getBytes(1)); int height = results.getInt(2); Block b = new Block(params, results.getBytes(3)); b.verifyHeader(); StoredBlock stored = new StoredBlock(b, chainWork, height); return stored; } catch (SQLException ex) { throw new BlockStoreException(ex); } catch (ProtocolException e) { // Corrupted database. throw new BlockStoreException(e); } catch (VerificationException e) { // Should not be able to happen unless the database contains bad // blocks. 
throw new BlockStoreException(e); } finally { if (s != null) try { s.close(); } catch (SQLException e) { throw new BlockStoreException("Failed to close PreparedStatement"); } } } public StoredBlock get(Sha256Hash hash) throws BlockStoreException { return get(hash, false); } public StoredBlock getOnceUndoableStoredBlock(Sha256Hash hash) throws BlockStoreException { return get(hash, true); } public StoredUndoableBlock getUndoBlock(Sha256Hash hash) throws BlockStoreException { maybeConnect(); PreparedStatement s = null; try { s = conn.get() .prepareStatement("SELECT txOutChanges, transactions FROM undoableBlocks WHERE hash = ?"); // We skip the first 4 bytes because (on prodnet) the minimum target has 4 0-bytes byte[] hashBytes = new byte[28]; System.arraycopy(hash.getBytes(), 3, hashBytes, 0, 28); s.setBytes(1, hashBytes); ResultSet results = s.executeQuery(); if (!results.next()) { return null; } // Parse it. byte[] txOutChanges = results.getBytes(1); byte[] transactions = results.getBytes(2); StoredUndoableBlock block; if (txOutChanges == null) { int offset = 0; int numTxn = ((transactions[offset++] & 0xFF) << 0) | ((transactions[offset++] & 0xFF) << 8) | ((transactions[offset++] & 0xFF) << 16) | ((transactions[offset++] & 0xFF) << 24); List<Transaction> transactionList = new LinkedList<Transaction>(); for (int i = 0; i < numTxn; i++) { Transaction tx = new Transaction(params, transactions, offset); transactionList.add(tx); offset += tx.getMessageSize(); } block = new StoredUndoableBlock(hash, transactionList); } else { TransactionOutputChanges outChangesObject = new TransactionOutputChanges(new ByteArrayInputStream(txOutChanges)); block = new StoredUndoableBlock(hash, outChangesObject); } return block; } catch (SQLException ex) { throw new BlockStoreException(ex); } catch (NullPointerException e) { // Corrupted database. throw new BlockStoreException(e); } catch (ClassCastException e) { // Corrupted database. 
throw new BlockStoreException(e); } catch (ProtocolException e) { // Corrupted database. throw new BlockStoreException(e); } catch (IOException e) { // Corrupted database. throw new BlockStoreException(e); } finally { if (s != null) try { s.close(); } catch (SQLException e) { throw new BlockStoreException("Failed to close PreparedStatement"); } } } public StoredBlock getChainHead() throws BlockStoreException { return chainHeadBlock; } public void setChainHead(StoredBlock chainHead) throws BlockStoreException { Sha256Hash hash = chainHead.getHeader().getHash(); this.chainHeadHash = hash; this.chainHeadBlock = chainHead; maybeConnect(); try { PreparedStatement s = conn.get() .prepareStatement("UPDATE settings SET value = ? WHERE name = ?"); s.setString(2, CHAIN_HEAD_SETTING); s.setBytes(1, hash.getBytes()); s.executeUpdate(); s.close(); } catch (SQLException ex) { throw new BlockStoreException(ex); } } public StoredBlock getVerifiedChainHead() throws BlockStoreException { return verifiedChainHeadBlock; } public void setVerifiedChainHead(StoredBlock chainHead) throws BlockStoreException { Sha256Hash hash = chainHead.getHeader().getHash(); this.verifiedChainHeadHash = hash; this.verifiedChainHeadBlock = chainHead; maybeConnect(); try { PreparedStatement s = conn.get() .prepareStatement("UPDATE settings SET value = ? 
WHERE name = ?"); s.setString(2, VERIFIED_CHAIN_HEAD_SETTING); s.setBytes(1, hash.getBytes()); s.executeUpdate(); s.close(); } catch (SQLException ex) { throw new BlockStoreException(ex); } if (this.chainHeadBlock.getHeight() < chainHead.getHeight()) setChainHead(chainHead); removeUndoableBlocksWhereHeightIsLessThan(chainHead.getHeight() - fullStoreDepth); } private void removeUndoableBlocksWhereHeightIsLessThan(int height) throws BlockStoreException { try { PreparedStatement s = conn.get() .prepareStatement("DELETE FROM undoableBlocks WHERE height <= ?"); s.setInt(1, height); s.executeUpdate(); s.close(); } catch (SQLException ex) { throw new BlockStoreException(ex); } } public StoredTransactionOutput getTransactionOutput(Sha256Hash hash, long index) throws BlockStoreException { maybeConnect(); PreparedStatement s = null; try { s = conn.get() .prepareStatement("SELECT openOutputsIndex.height, openOutputs.value, openOutputs.scriptBytes " + "FROM openOutputsIndex NATURAL JOIN openOutputs " + "WHERE openOutputsIndex.hash = ? AND openOutputs.index = ?"); s.setBytes(1, hash.getBytes()); // index is actually an unsigned int s.setInt(2, (int)index); ResultSet results = s.executeQuery(); if (!results.next()) { return null; } // Parse it. 
int height = results.getInt(1); BigInteger value = new BigInteger(results.getBytes(2)); // Tell the StoredTransactionOutput that we are a coinbase, as that is encoded in height StoredTransactionOutput txout = new StoredTransactionOutput(hash, index, value, height, true, results.getBytes(3)); return txout; } catch (SQLException ex) { throw new BlockStoreException(ex); } finally { if (s != null) try { s.close(); } catch (SQLException e) { throw new BlockStoreException("Failed to close PreparedStatement"); } } } public void addUnspentTransactionOutput(StoredTransactionOutput out) throws BlockStoreException { maybeConnect(); PreparedStatement s = null; try { try { s = conn.get().prepareStatement("INSERT INTO openOutputsIndex(hash, height)" + " VALUES(?, ?)"); s.setBytes(1, out.getHash().getBytes()); s.setInt(2, out.getHeight()); s.executeUpdate(); } catch (SQLException e) { if (e.getErrorCode() != 23505) throw e; } finally { if (s != null) s.close(); } s = conn.get().prepareStatement("INSERT INTO openOutputs (id, index, value, scriptBytes) " + "VALUES ((SELECT id FROM openOutputsIndex WHERE hash = ?), " + "?, ?, ?)"); s.setBytes(1, out.getHash().getBytes()); // index is actually an unsigned int s.setInt(2, (int)out.getIndex()); s.setBytes(3, out.getValue().toByteArray()); s.setBytes(4, out.getScriptBytes()); s.executeUpdate(); s.close(); } catch (SQLException e) { if (e.getErrorCode() != 23505) throw new BlockStoreException(e); } finally { if (s != null) try { s.close(); } catch (SQLException e) { throw new BlockStoreException(e); } } } public void removeUnspentTransactionOutput(StoredTransactionOutput out) throws BlockStoreException { maybeConnect(); // TODO: This should only need one query (maybe a stored procedure) if (getTransactionOutput(out.getHash(), out.getIndex()) == null) throw new BlockStoreException("Tried to remove a StoredTransactionOutput from H2FullPrunedBlockStore that it didn't have!"); try { PreparedStatement s = conn.get() .prepareStatement("DELETE 
FROM openOutputs " + "WHERE id = (SELECT id FROM openOutputsIndex WHERE hash = ?) AND index = ?"); s.setBytes(1, out.getHash().getBytes()); // index is actually an unsigned int s.setInt(2, (int)out.getIndex()); s.executeUpdate(); s.close(); // This is quite an ugly query, is there no better way? s = conn.get().prepareStatement("DELETE FROM openOutputsIndex " + "WHERE hash = ? AND 1 = (CASE WHEN ((SELECT COUNT(*) FROM openOutputs WHERE id =" + "(SELECT id FROM openOutputsIndex WHERE hash = ?)) = 0) THEN 1 ELSE 0 END)"); s.setBytes(1, out.getHash().getBytes()); s.setBytes(2, out.getHash().getBytes()); s.executeUpdate(); s.close(); } catch (SQLException e) { throw new BlockStoreException(e); } } public void beginDatabaseBatchWrite() throws BlockStoreException { maybeConnect(); try { conn.get().setAutoCommit(false); } catch (SQLException e) { throw new BlockStoreException(e); } } public void commitDatabaseBatchWrite() throws BlockStoreException { maybeConnect(); try { conn.get().commit(); conn.get().setAutoCommit(true); } catch (SQLException e) { throw new BlockStoreException(e); } } public void abortDatabaseBatchWrite() throws BlockStoreException { maybeConnect(); try { conn.get().rollback(); conn.get().setAutoCommit(true); } catch (SQLException e) { throw new BlockStoreException(e); } } public boolean hasUnspentOutputs(Sha256Hash hash, int numOutputs) throws BlockStoreException { maybeConnect(); PreparedStatement s = null; try { s = conn.get() .prepareStatement("SELECT COUNT(*) FROM openOutputsIndex " + "WHERE hash = ?"); s.setBytes(1, hash.getBytes()); ResultSet results = s.executeQuery(); if (!results.next()) { throw new BlockStoreException("Got no results from a COUNT(*) query"); } int count = results.getInt(1); return count != 0; } catch (SQLException ex) { throw new BlockStoreException(ex); } finally { if (s != null) try { s.close(); } catch (SQLException e) { throw new BlockStoreException("Failed to close PreparedStatement"); } } } }
/* The MIT License (MIT) Copyright (c) 2014, Groupon, Inc. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/
package com.groupon.jenkins.dynamic.build;

import com.google.common.base.Function;
import com.google.common.base.Predicate;
import com.google.common.collect.Iterables;
import com.groupon.jenkins.SetupConfig;
import com.groupon.jenkins.branchhistory.BranchHistoryWidget;
import com.groupon.jenkins.dynamic.build.repository.DynamicBuildRepository;
import com.groupon.jenkins.dynamic.build.repository.DynamicProjectRepository;
import com.groupon.jenkins.dynamic.buildtype.BuildType;
import com.groupon.jenkins.dynamic.buildtype.BuildTypeProperty;
import com.groupon.jenkins.dynamic.organizationcontainer.OrganizationContainer;
import com.groupon.jenkins.github.GithubRepoProperty;
import com.infradna.tool.bridge_method_injector.WithBridgeMethods;
import hudson.Extension;
import hudson.PermalinkList;
import hudson.matrix.Combination;
import hudson.model.Descriptor;
import hudson.model.DescriptorVisibilityFilter;
import hudson.model.Item;
import hudson.model.ItemGroup;
import hudson.model.Queue.Task;
import hudson.model.Saveable;
import hudson.model.TopLevelItem;
import hudson.util.CaseInsensitiveComparator;
import hudson.util.CopyOnWriteMap;
import hudson.util.RunList;
import hudson.widgets.HistoryWidget;
import jenkins.model.Jenkins;
import org.kohsuke.stapler.Stapler;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;

import javax.servlet.ServletException;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Map;

/**
 * A DotCi project: a DB-backed top-level Jenkins item whose children are
 * {@link DynamicSubProject}s (one per matrix {@link Combination}).
 *
 * <p>The child map is {@code transient} and rebuilt from the
 * {@link DynamicProjectRepository} on construction and on {@link #onLoad},
 * so it is never serialized with the project itself.
 */
public class DynamicProject extends DbBackedProject<DynamicProject, DynamicBuild>
        implements TopLevelItem, Saveable, IdentifableItemGroup<DynamicSubProject> {

    @Extension
    public static final DescriptorImpl DESCRIPTOR = new DescriptorImpl();

    /** Sub-projects keyed by name (case-insensitive); rebuilt from the repository, not persisted. */
    private transient Map<String, DynamicSubProject> items;

    protected DynamicProject(ItemGroup parent, String name) {
        super(parent, name);
        init();
    }

    /** Populates {@link #items} from the persisted children of this project. */
    private void init() {
        Iterable<DynamicSubProject> projects = new DynamicProjectRepository().getChildren(this);
        items = new CopyOnWriteMap.Tree<String, DynamicSubProject>(CaseInsensitiveComparator.INSTANCE);
        for (DynamicSubProject dbBackedProject : projects) {
            items.put(dbBackedProject.getName(), dbBackedProject);
        }
    }

    @Override
    public void onLoad(ItemGroup<? extends Item> parent, String name) throws IOException {
        super.onLoad(parent, name);
        // Children are transient; rebuild them after Jenkins reloads this item.
        init();
    }

    @Override
    public DescriptorImpl getDescriptor() {
        return DESCRIPTOR;
    }

    public static final class DescriptorImpl extends AbstractProjectDescriptor {

        /**
         * We are hiding the "DotCI" project from "/newJob" page, because we'll
         * have our own flow for doing this ...
         */
        @Extension
        public static class FilterDotCIProjectTypeFromNewJobPage extends DescriptorVisibilityFilter {
            @Override
            public boolean filter(Object context, Descriptor descriptor) {
                return !(descriptor instanceof DynamicProject.DescriptorImpl);
            }
        }

        @Override
        public String getDisplayName() {
            return "DotCi Project";
        }

        @Override
        public TopLevelItem newInstance(ItemGroup parent, String name) {
            return new DynamicProject(parent, name);
        }
    }

    @Override
    public PermalinkList getPermalinks() {
        PermalinkList permalinks = super.getPermalinks();
        permalinks.add(new LastSuccessfulMasterPermalink());
        return permalinks;
    }

    /** @return all build types registered in the global DotCi setup config. */
    public Iterable<BuildType> getBuildTypes() {
        return SetupConfig.get().getBuildTypes();
    }

    /**
     * @return the configured build type, or {@code null} when no
     *         {@link BuildTypeProperty} is set on this project.
     */
    public String getBuildType() {
        // Look the property up once instead of twice (was: two getProperty calls).
        BuildTypeProperty buildTypeProperty = getProperty(BuildTypeProperty.class);
        return buildTypeProperty == null ? null : buildTypeProperty.getBuildType();
    }

    @Override
    @WithBridgeMethods(value = Jenkins.class, castRequired = true)
    public OrganizationContainer getParent() {
        return (OrganizationContainer) super.getParent();
    }

    /** Restricts the build-history widget to master-branch builds for this session. */
    public void doMasterBuilds(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException, InterruptedException {
        req.getSession().setAttribute("branchView" + this.getName(), "master");
        rsp.forwardToPreviousPage(req);
    }

    /** Restricts the build-history widget to the current user's builds for this session. */
    public void doMyBuilds(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException, InterruptedException {
        req.getSession().setAttribute("branchView" + this.getName(), "mine");
        rsp.forwardToPreviousPage(req);
    }

    /** Clears any branch filter so the history widget shows all builds again. */
    public void doAllBuilds(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException, InterruptedException {
        req.getSession().removeAttribute("branchView" + this.getName());
        rsp.forwardToPreviousPage(req);
    }

    @Override
    protected HistoryWidget createHistoryWidget() {
        return new BranchHistoryWidget(this, new RunList(), HISTORY_ADAPTER, new DynamicBuildRepository(), getCurrentBranch());
    }

    /** @return the session-scoped branch filter, or {@code null} when none is set. */
    protected String getCurrentBranch() {
        return (String) Stapler.getCurrentRequest().getSession().getAttribute("branchView" + getName());
    }

    /**
     * Stapler routing: {@code .../sha?value=<sha>} resolves a build by commit sha;
     * otherwise falls back to the superclass, then to a sub-project lookup by name.
     */
    @Override
    public Object getDynamic(String token, StaplerRequest req, StaplerResponse rsp) {
        if ("sha".equals(token)) {
            String sha = req.getParameter("value");
            return dynamicBuildRepository.getBuildBySha(this, sha);
        }
        Object permalink = super.getDynamic(token, req, rsp);
        if (permalink == null) {
            DynamicSubProject item = getItem(token);
            return item;
        }
        return permalink;
    }

    @Override
    protected Class<DynamicBuild> getBuildClass() {
        return DynamicBuild.class;
    }

    @Override
    public String getUrlChildPrefix() {
        return ".";
    }

    /** Creates, persists, and registers a new sub-project for the given combination. */
    private DynamicSubProject createNewSubProject(Combination requestedCombination) {
        DynamicSubProject project = new DynamicSubProject(this, requestedCombination);
        try {
            project.save();
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        items.put(project.getName(), project);
        return project;
    }

    /**
     * Lazily maps each requested combination to an existing sub-project,
     * creating (and persisting) one when no match exists yet.
     */
    public Iterable<DynamicSubProject> getSubProjects(Iterable<Combination> subBuildCombinations) {
        return Iterables.transform(subBuildCombinations, new Function<Combination, DynamicSubProject>() {
            @Override
            public DynamicSubProject apply(final Combination requestedCombination) {
                DynamicSubProject subProject = Iterables.find(getItems(), new Predicate<DynamicSubProject>() {
                    @Override
                    public boolean apply(DynamicSubProject subProject) {
                        return requestedCombination.equals(subProject.getCombination());
                    }
                }, null);
                return subProject == null ? DynamicProject.this.createNewSubProject(requestedCombination) : subProject;
            }
        });
    }

    // NOTE(review): always returns null; combination-based task lookup appears unimplemented.
    public Task getItem(Combination combination) {
        return null;
    }

    @Override
    public DynamicSubProject getItem(String name) {
        return dynamicProjectRepository.getChild(this, name);
    }

    private File getConfigurationsDir() {
        return new File(getRootDir(), "configurations");
    }

    @Override
    public File getRootDirFor(DynamicSubProject child) {
        File f = new File(getConfigurationsDir(), child.getName());
        f.getParentFile().mkdirs();
        return f;
    }

    /** @return the live child collection, or a fresh empty list before {@link #init} has run. */
    @Override
    public Collection<DynamicSubProject> getItems() {
        return items == null ? new ArrayList<DynamicSubProject>() : this.items.values();
    }

    /**
     * @return the GitHub repo URL, or {@code null} when no
     *         {@link GithubRepoProperty} is set on this project.
     */
    public String getGithubRepoUrl() {
        // Look the property up once instead of twice (was: two getProperty calls).
        GithubRepoProperty repoProperty = getProperty(GithubRepoProperty.class);
        return repoProperty == null ? null : repoProperty.getRepoUrl();
    }

    @Override
    public void onRenamed(DynamicSubProject item, String oldName, String newName) throws IOException {
        throw new IllegalStateException("Renaming not allowed outside .ci.yml");
    }

    @Override
    public void onDeleted(DynamicSubProject item) throws IOException {
        throw new IllegalStateException("Cannot delete Sub Project without deleting the parent");
    }

    @Override
    public DynamicBuild getLastBuild() {
        return super.getLastBuild();
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.fs.viewfs; import java.io.File; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystemTestHelper; import org.apache.hadoop.fs.FsConstants; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.hdfs.MiniDFSNNTopology; import org.apache.hadoop.test.GenericTestUtils; import org.junit.AfterClass; import org.junit.Assert; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import static org.apache.hadoop.fs.viewfs.RegexMountPoint.INTERCEPTOR_INTERNAL_SEP; import static org.junit.Assert.assertSame; /** * Test linkRegex node type for view file system. 
*/ public class TestViewFileSystemLinkRegex extends ViewFileSystemBaseTest { public static final Logger LOGGER = LoggerFactory.getLogger(TestViewFileSystemLinkRegex.class); private static FileSystem fsDefault; private static MiniDFSCluster cluster; private static Configuration clusterConfig; private static final int NAME_SPACES_COUNT = 3; private static final int DATA_NODES_COUNT = 3; private static final int FS_INDEX_DEFAULT = 0; private static final FileSystem[] FS_HDFS = new FileSystem[NAME_SPACES_COUNT]; private static final String CLUSTER_NAME = "TestViewFileSystemLinkRegexCluster"; private static final File TEST_DIR = GenericTestUtils .getTestDir(TestViewFileSystemLinkRegex.class.getSimpleName()); private static final String TEST_BASE_PATH = "/tmp/TestViewFileSystemLinkRegex"; @Override protected FileSystemTestHelper createFileSystemHelper() { return new FileSystemTestHelper(TEST_BASE_PATH); } @BeforeClass public static void clusterSetupAtBeginning() throws IOException { SupportsBlocks = true; clusterConfig = ViewFileSystemTestSetup.createConfig(); clusterConfig.setBoolean( DFSConfigKeys.DFS_NAMENODE_DELEGATION_TOKEN_ALWAYS_USE_KEY, true); cluster = new MiniDFSCluster.Builder(clusterConfig).nnTopology( MiniDFSNNTopology.simpleFederatedTopology(NAME_SPACES_COUNT)) .numDataNodes(DATA_NODES_COUNT).build(); cluster.waitClusterUp(); for (int i = 0; i < NAME_SPACES_COUNT; i++) { FS_HDFS[i] = cluster.getFileSystem(i); } fsDefault = FS_HDFS[FS_INDEX_DEFAULT]; } @AfterClass public static void clusterShutdownAtEnd() throws Exception { if (cluster != null) { cluster.shutdown(); } } @Override @Before public void setUp() throws Exception { fsTarget = fsDefault; super.setUp(); } /** * Override this so that we don't set the targetTestRoot to any path under the * root of the FS, and so that we don't try to delete the test dir, but rather * only its contents. 
*/ @Override void initializeTargetTestRoot() throws IOException { targetTestRoot = fsDefault.makeQualified(new Path("/")); for (FileStatus status : fsDefault.listStatus(targetTestRoot)) { fsDefault.delete(status.getPath(), true); } } @Override void setupMountPoints() { super.setupMountPoints(); } @Override int getExpectedDelegationTokenCount() { return 1; // all point to the same fs so 1 unique token } @Override int getExpectedDelegationTokenCountWithCredentials() { return 1; } public String buildReplaceInterceptorSettingString(String srcRegex, String replaceString) { return RegexMountPointInterceptorType.REPLACE_RESOLVED_DST_PATH.getConfigName() + INTERCEPTOR_INTERNAL_SEP + srcRegex + INTERCEPTOR_INTERNAL_SEP + replaceString; } public String linkInterceptorSettings( List<String> interceptorSettingStrList) { StringBuilder stringBuilder = new StringBuilder(); int listSize = interceptorSettingStrList.size(); for (int i = 0; i < listSize; ++i) { stringBuilder.append(interceptorSettingStrList.get(i)); if (i < listSize - 1) { stringBuilder.append(RegexMountPoint.INTERCEPTOR_SEP); } } return stringBuilder.toString(); } private void createDirWithChildren( FileSystem fileSystem, Path dir, List<Path> childrenFiles) throws IOException { Assert.assertTrue(fileSystem.mkdirs(dir)); int index = 0; for (Path childFile : childrenFiles) { createFile(fileSystem, childFile, index, true); } } private void createFile( FileSystem fileSystem, Path file, int dataLenToWrite, boolean overwrite) throws IOException { FSDataOutputStream outputStream = null; try { outputStream = fileSystem.create(file, overwrite); for (int i = 0; i < dataLenToWrite; ++i) { outputStream.writeByte(i); } outputStream.close(); } finally { if (outputStream != null) { outputStream.close(); } } } private void createDirWithChildren( FileSystem fileSystem, Path dir, int childrenFilesCnt) throws IOException { List<Path> childrenFiles = new ArrayList<>(childrenFilesCnt); for (int i = 0; i < childrenFilesCnt; ++i) { 
childrenFiles.add(new Path(dir, "file" + i)); } createDirWithChildren(fileSystem, dir, childrenFiles); } /** * The function used to test regex mountpoints. * @param config - get mountable config from this conf * @param regexStr - the src path regex expression that applies to this config * @param dstPathStr - the string of target path * @param interceptorSettings - the serialized interceptor string to be * applied while resolving the mapping * @param dirPathBeforeMountPoint - the src path user passed in to be mapped. * @param expectedResolveResult - the expected path after resolve * dirPathBeforeMountPoint via regex mountpint. * @param childrenFilesCnt - the child files under dirPathBeforeMountPoint to * be created * @throws IOException * @throws URISyntaxException */ private void testRegexMountpoint( Configuration config, String regexStr, String dstPathStr, String interceptorSettings, Path dirPathBeforeMountPoint, Path expectedResolveResult, int childrenFilesCnt) throws IOException, URISyntaxException { // Set up test env createDirWithChildren( fsTarget, expectedResolveResult, childrenFilesCnt); ConfigUtil.addLinkRegex( config, CLUSTER_NAME, regexStr, dstPathStr, interceptorSettings); // Asserts URI viewFsUri = new URI( FsConstants.VIEWFS_SCHEME, CLUSTER_NAME, "/", null, null); try (FileSystem vfs = FileSystem.get(viewFsUri, config)) { Assert.assertEquals(expectedResolveResult.toString(), vfs.resolvePath(dirPathBeforeMountPoint).toString()); Assert.assertTrue( vfs.getFileStatus(dirPathBeforeMountPoint).isDirectory()); Assert.assertEquals( childrenFilesCnt, vfs.listStatus(dirPathBeforeMountPoint).length); // Test Inner cache, the resolved result's filesystem should be the same. 
ViewFileSystem viewFileSystem = (ViewFileSystem) vfs; ChRootedFileSystem target1 = (ChRootedFileSystem) viewFileSystem.fsState .resolve(viewFileSystem.getUriPath(dirPathBeforeMountPoint), true) .targetFileSystem; ChRootedFileSystem target2 = (ChRootedFileSystem) viewFileSystem.fsState .resolve(viewFileSystem.getUriPath(dirPathBeforeMountPoint), true) .targetFileSystem; assertSame(target1.getMyFs(), target2.getMyFs()); } } /** * Test regex mount points which use capture group index for mapping. * * @throws Exception */ @Test public void testConfLinkRegexIndexMapping() throws Exception { // Config: // <property> // <name> // fs.viewfs.mounttable.TestViewFileSystemLinkRegexCluster // .linkRegex.^/(\w+)</name> // <value>/targetTestRoot/$1</value> // </property> // Dir path to test: /testConfLinkRegexIndexMapping1 // Expect path: /targetTestRoot/testConfLinkRegexIndexMapping1 String regexStr = "^/(\\w+)"; String dstPathStr = targetTestRoot + "$1"; Path srcPath = new Path("/testConfLinkRegexIndexMapping1"); Path expectedResolveResult = new Path(dstPathStr.replace( "$1", "testConfLinkRegexIndexMapping1")); testRegexMountpoint( new Configuration(conf), regexStr, dstPathStr, null, srcPath, expectedResolveResult, 3); // Config: // <property> // <name>fs.viewfs.mounttable.TestViewFileSystemLinkRegexCluster // .linkRegex.^/(\w+)</name> // <value>/targetTestRoot/${1}</value> // </property> // Dir path to test: /testConfLinkRegexIndexMapping2 // Expect path: /targetTestRoot/testConfLinkRegexIndexMapping2 dstPathStr = targetTestRoot + "${1}"; srcPath = new Path("/testConfLinkRegexIndexMapping2"); expectedResolveResult = new Path( dstPathStr.replace("${1}", "testConfLinkRegexIndexMapping2")); testRegexMountpoint( new Configuration(conf), regexStr, dstPathStr, null, srcPath, expectedResolveResult, 4); // Config: // <property> // <name>fs.viewfs.mounttable.TestViewFileSystemLinkRegexCluster // .linkRegex.^/(\w+)</name> // <value>/targetTestRoot/$1</value> // </property> // Dir path 
to test: /testConfLinkRegexIndexMapping3/dir1 // Expect path: /targetTestRoot/testConfLinkRegexIndexMapping3/dir1 dstPathStr = targetTestRoot + "$1"; srcPath = new Path("/testConfLinkRegexIndexMapping3/dir1"); expectedResolveResult = new Path( dstPathStr.replace("$1", "testConfLinkRegexIndexMapping3/dir1")); testRegexMountpoint( new Configuration(conf), regexStr, dstPathStr, null, srcPath, expectedResolveResult, 5); // Config: // <property> // <name>fs.viewfs.mounttable.TestViewFileSystemLinkRegexCluster // .linkRegex.^/(\w+)</name> // <value>/targetTestRoot/${1}/</value> // </property> // Dir path to test: /testConfLinkRegexIndexMapping4/dir1 // Expect path: /targetTestRoot/testConfLinkRegexIndexMapping4/dir1 dstPathStr = targetTestRoot + "${1}/"; srcPath = new Path("/testConfLinkRegexIndexMapping4/dir1"); expectedResolveResult = new Path( dstPathStr.replace("${1}", "testConfLinkRegexIndexMapping4/dir1")); testRegexMountpoint( new Configuration(conf), regexStr, dstPathStr, null, srcPath, expectedResolveResult, 6); } /** * Test regex mount pointes with named capture group. 
* @throws Exception */ @Test public void testConfLinkRegexNamedGroupMapping() throws Exception { // Config: // <property> // <name>fs.viewfs.mounttable.TestViewFileSystemLinkRegexCluster // .linkRegex.^/(?<firstDir>\w+)</name> // <value>/targetTestRoot/$firstDir</value> // </property> // Dir path to test: /testConfLinkRegexNamedGroupMapping1 // Expect path: /targetTestRoot/testConfLinkRegexNamedGroupMapping1 URI viewFsUri = new URI( FsConstants.VIEWFS_SCHEME, CLUSTER_NAME, "/", null, null); String regexStr = "^/(?<firstDir>\\w+)"; String dstPathStr = targetTestRoot + "$firstDir"; Path srcPath = new Path("/testConfLinkRegexNamedGroupMapping1"); Path expectedResolveResult = new Path( dstPathStr.replace("$firstDir", "testConfLinkRegexNamedGroupMapping1")); testRegexMountpoint( new Configuration(conf), regexStr, dstPathStr, null, srcPath, expectedResolveResult, 3); // Config: // <property> // <name>fs.viewfs.mounttable.TestViewFileSystemLinkRegexCluster // .linkRegex.^/(?<firstDir>\w+)</name> // <value>/targetTestRoot/${firstDir}</value> // </property> // Dir path to test: /testConfLinkRegexNamedGroupMapping2 // Expect path: /targetTestRoot/testConfLinkRegexNamedGroupMapping2 dstPathStr = targetTestRoot + "${firstDir}"; srcPath = new Path("/testConfLinkRegexNamedGroupMapping2"); expectedResolveResult = new Path( dstPathStr.replace( "${firstDir}", "testConfLinkRegexNamedGroupMapping2")); testRegexMountpoint( new Configuration(conf), regexStr, dstPathStr, null, srcPath, expectedResolveResult, 5); } /** * Test cases when the destination is fixed paths. 
* @throws Exception */ @Test public void testConfLinkRegexFixedDestMapping() throws Exception { // Config: // <property> // <name>fs.viewfs.mounttable.TestViewFileSystemLinkRegexCluster // .linkRegex.^/(?<firstDir>\w+)</name> // <value>/targetTestRoot/${firstDir}</value> // </property> // Dir path to test: /misc1 // Expect path: /targetTestRoot/testConfLinkRegexFixedDestMappingFile // Dir path to test: /misc2 // Expect path: /targetTestRoot/testConfLinkRegexFixedDestMappingFile String regexStr = "^/\\w+"; String dstPathStr = targetTestRoot + "testConfLinkRegexFixedDestMappingFile"; Path expectedResolveResult = new Path(dstPathStr); testRegexMountpoint( new Configuration(conf), regexStr, dstPathStr, null, new Path("/misc1"), expectedResolveResult, 5); testRegexMountpoint( new Configuration(conf), regexStr, dstPathStr, null, new Path("/misc2"), expectedResolveResult, 6); } /** * Test regex mount point config with a single interceptor. * */ @Test public void testConfLinkRegexWithSingleInterceptor() throws Exception { // Config: // <property> // <name>fs.viewfs.mounttable.TestViewFileSystemLinkRegexCluster // .linkRegex.replaceresolveddstpath:_:-#.^/user/(?<username>\w+)</name> // <value>/targetTestRoot/$username</value> // </property> // Dir path to test: /user/hadoop_user1/hadoop_dir1 // Expect path: /targetTestRoot/hadoop-user1/hadoop_dir1 String regexStr = "^/user/(?<username>\\w+)"; String dstPathStr = targetTestRoot + "$username"; // Replace "_" with "-" String settingString = buildReplaceInterceptorSettingString("_", "-"); Path srcPath = new Path("/user/hadoop_user1/hadoop_dir1"); Path expectedResolveResult = new Path( targetTestRoot, "hadoop-user1/hadoop_dir1"); testRegexMountpoint( new Configuration(conf), regexStr, dstPathStr, settingString, srcPath, expectedResolveResult, 2); } /** * Test regex mount point config with multiple interceptors. 
* */ @Test public void testConfLinkRegexWithInterceptors() throws Exception { // Config: // <property> // <name>fs.viewfs.mounttable.TestViewFileSystemLinkRegexCluster // .linkRegex // .replaceresolveddstpath:_:-; // replaceresolveddstpath:hadoop:hdfs#.^/user/(?<username>\w+)</name> // <value>/targetTestRoot/$username</value> // </property> // Dir path to test: /user/hadoop_user1/hadoop_dir1 // Expect path: /targetTestRoot/hdfs-user1/hadoop_dir1 String regexStr = "^/user/(?<username>\\w+)/"; String dstPathStr = targetTestRoot + "$username"; // Replace "_" with "-" String interceptor1 = buildReplaceInterceptorSettingString("_", "-"); // Replace "hadoop" with "hdfs" String interceptor2 = buildReplaceInterceptorSettingString("hadoop", "hdfs"); String interceptors = linkInterceptorSettings(Arrays.asList(interceptor1, interceptor2)); Path srcPath = new Path("/user/hadoop_user1/hadoop_dir1"); Path expectedResolveResult = new Path(targetTestRoot, "hdfs-user1/hadoop_dir1"); testRegexMountpoint( new Configuration(conf), regexStr, dstPathStr, interceptors, srcPath, expectedResolveResult, 2); } }
/* This file is part of SableCC ( http://sablecc.org ). * * See the NOTICE file distributed with this work for copyright information. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sablecc.sablecc.semantics; import java.util.*; import org.sablecc.exception.*; import org.sablecc.sablecc.syntax3.node.*; public class Grammar extends Declaration { private AGrammar declaration; private Map<Node, Object> nodeMap = new HashMap<Node, Object>(); private Map<TIdentifier, Declaration> declarationResolutionMap = new HashMap<TIdentifier, Declaration>(); private Map<Token, Expression> inlinedExpressionResolutionMap = new HashMap<Token, Expression>(); private Map<TIdentifier, Alternative> alternativeResolutionMap = new HashMap<TIdentifier, Alternative>(); private Map<PAlternativeReference, AlternativeReference> alternativeReferenceResolutionMap = new HashMap<PAlternativeReference, AlternativeReference>(); private Map<PElementReference, ElementReference> elementReferenceResolutionMap = new HashMap<PElementReference, ElementReference>(); private Map<PElementBody, Type> typeResolutionMap = new HashMap<PElementBody, Type>(); private Map<PTransformationElement, TransformationElement> transformationElementMap = new HashMap<PTransformationElement, TransformationElement>(); private NameSpace parserNameSpace = new NameSpace(); private NameSpace treeNameSpace = new NameSpace(); private int nextInternalNameIndex = 1; // Cached values private String name; private Token location; Grammar( 
AGrammar declaration) { this.declaration = declaration; if (this.nodeMap.containsKey(declaration)) { throw new InternalException("it was already added."); } this.nodeMap.put(declaration, this); this.parserNameSpace.add(this); this.treeNameSpace.add(this); } @Override public String getName() { if (this.name == null) { this.name = getLocation().getText(); } return this.name; } @Override public String getLookupName() { return getName(); } @Override public String getDisplayName() { return getLocation().getText(); } @Override public Token getLocation() { if (this.location == null) { this.location = this.declaration.getName(); } return this.location; } public Expression getExpression( Node declaration) { return (Expression) this.nodeMap.get(declaration); } public Production getProduction( PParserProduction declaration) { return (Production) this.nodeMap.get(declaration); } public Production getProduction( PTreeProduction declaration) { return (Production) this.nodeMap.get(declaration); } public Alternative getAlternative( PAlternative declaration) { return (Alternative) this.nodeMap.get(declaration); } public Element getElement( PElement declaration) { return (Element) this.nodeMap.get(declaration); } public Declaration getDeclarationResolution( TIdentifier identifier) { return this.declarationResolutionMap.get(identifier); } public Expression getExpressionResolution( TIdentifierChar node) { return this.inlinedExpressionResolutionMap.get(node); } public Expression getExpressionResolution( TChar node) { return this.inlinedExpressionResolutionMap.get(node); } public Expression getExpressionResolution( TIdentifierString node) { return this.inlinedExpressionResolutionMap.get(node); } public Expression getExpressionResolution( TString node) { return this.inlinedExpressionResolutionMap.get(node); } public Expression getExpressionResolution( TEndKeyword node) { return this.inlinedExpressionResolutionMap.get(node); } public Alternative getAlternativeResolution( TIdentifier 
identifier) { return this.alternativeResolutionMap.get(identifier); } public AlternativeReference getAlternativeReferenceResolution( PAlternativeReference alternativeReference) { return this.alternativeReferenceResolutionMap.get(alternativeReference); } public ElementReference getElementReferenceResolution( PElementReference elementReference) { return this.elementReferenceResolutionMap.get(elementReference); } public Type getTypeResolution( PElementBody elementBody) { return this.typeResolutionMap.get(elementBody); } public TransformationElement getTransformationElementResolution( PTransformationElement transformationElement) { return this.transformationElementMap.get(transformationElement); } public Production getTreeProduction( String name) { return (Production) this.treeNameSpace.get(name); } void addExpression( Node declaration) { Expression expression = new Expression(this, declaration); expression.setInternalName(expression.getName()); if (this.nodeMap.containsKey(declaration)) { throw new InternalException("it was already added."); } this.nodeMap.put(declaration, expression); this.parserNameSpace.add(expression); this.treeNameSpace.add(expression); } void addInlinedExpression( Node declaration) { Expression expression = new Expression(this, declaration); String name = expression.getName(); if (name != null) { expression.setInternalName(name); } else { expression.setInternalName("." 
+ this.nextInternalNameIndex++); } Declaration previousDeclaration = this.parserNameSpace.get(expression .getLookupName()); // only add new expression if it's a new declaration or if it redeclares // a normal expression if (previousDeclaration == null || previousDeclaration.getLocation() instanceof TIdentifier) { if (this.nodeMap.containsKey(declaration)) { throw new InternalException("it was already added."); } this.nodeMap.put(declaration, expression); this.parserNameSpace.add(expression); this.treeNameSpace.add(expression); } else { if (this.nodeMap.containsKey(declaration)) { throw new InternalException("it was already added."); } this.nodeMap.put(declaration, previousDeclaration); } } void addProduction( AParserProduction declaration) { Production production = new Production(this, declaration); production.setInternalName(production.getName()); if (this.nodeMap.containsKey(declaration)) { throw new InternalException("it was already added."); } this.nodeMap.put(declaration, production); this.parserNameSpace.add(production); } void addProduction( ATreeProduction declaration) { Production production = new Production(this, declaration); production.setInternalName(production.getName()); if (this.nodeMap.containsKey(declaration)) { throw new InternalException("it was already added."); } this.nodeMap.put(declaration, production); this.treeNameSpace.add(production); } void addAlternative( Production production, AAlternative declaration) { Alternative alternative = new Alternative(this, production, declaration); if (this.nodeMap.containsKey(declaration)) { throw new InternalException("it was already added."); } this.nodeMap.put(declaration, alternative); } void addElement( Alternative alternative, AElement declaration) { Element element = new Element(this, alternative, declaration); if (this.nodeMap.containsKey(declaration)) { throw new InternalException("it was already added."); } this.nodeMap.put(declaration, element); } void resolveExpression( ANameExpression 
nameExpression) { TIdentifier nameIdentifier = nameExpression.getIdentifier(); String name = nameIdentifier.getText(); Declaration declaration = this.parserNameSpace.get(name); if (declaration == null) { declaration = this.treeNameSpace.get(name); if (declaration == null) { throw SemanticException.semanticError("No \"" + name + "\" has been declared.", nameIdentifier); } if (!(declaration instanceof Expression)) { throw SemanticException.semanticError("\"" + name + "\" is not an expression.", nameIdentifier); } throw new InternalException( "an expression must be in both parser and tree name spaces"); } if (!(declaration instanceof Expression)) { throw SemanticException.semanticError("\"" + name + "\" is not an expression.", nameIdentifier); } if (this.declarationResolutionMap.containsKey(nameIdentifier)) { throw new InternalException("it was already resolved."); } this.declarationResolutionMap.put(nameIdentifier, declaration); } void resolveTreeNameUnit( ANameUnit nameUnit) { TIdentifier nameIdentifier = nameUnit.getIdentifier(); String name = nameIdentifier.getText(); Declaration declaration = this.treeNameSpace.get(name); if (declaration == null) { declaration = this.parserNameSpace.get(name); if (declaration == null) { throw SemanticException.semanticError("No \"" + name + "\" has been declared.", nameIdentifier); } throw SemanticException.semanticError("\"" + name + "\" is not a tree production.", nameIdentifier); } if (this.declarationResolutionMap.containsKey(nameIdentifier)) { throw new InternalException("it was already resolved."); } this.declarationResolutionMap.put(nameIdentifier, declaration); } void resolveIdentifierCharUnit( AIdentifierCharUnit identifierCharUnit) { TIdentifierChar identifierChar = identifierCharUnit.getIdentifierChar(); String text = identifierChar.getText(); String name = text.substring(1, text.length() - 1); resolveInlinedExpression(name, identifierChar); } void resolveCharUnit( ACharUnit charUnit) { TChar charToken = 
    // NOTE(review): this chunk begins mid-method — the three statements below are
    // the tail of a resolve*CharUnit-style method whose header (and the
    // charToken/charUnit locals) is declared before this view.
    charUnit.getChar();
        String name = charToken.getText();
        resolveInlinedExpression(name, charToken);
    }

    // Resolves an identifier-string unit by stripping the two enclosing quote
    // characters from the token text and resolving the remaining name as an
    // inlined expression.
    void resolveIdentifierStringUnit(
            AIdentifierStringUnit identifierStringUnit) {

        TIdentifierString identifierString = identifierStringUnit
                .getIdentifierString();
        String text = identifierString.getText();
        // substring(1, length - 1) drops the leading and trailing delimiter
        String name = text.substring(1, text.length() - 1);
        resolveInlinedExpression(name, identifierString);
    }

    // Resolves a string unit: the raw token text (delimiters included) is used
    // as the inlined-expression name.
    void resolveStringUnit(
            AStringUnit stringUnit) {

        TString stringToken = stringUnit.getString();
        String name = stringToken.getText();
        resolveInlinedExpression(name, stringToken);
    }

    // Resolves the special 'end' keyword unit against the fixed name "end".
    void resolveEndUnit(
            AEndUnit endUnit) {

        TEndKeyword endKeyword = endUnit.getEndKeyword();
        resolveInlinedExpression("end", endKeyword);
    }

    // Resolves an identifier appearing in parser context. Lookup order:
    // parser name space first; if absent there, the tree name space is probed
    // only to produce a more precise error message ("not a parser production"
    // vs "has not been declared"). On success the declaration is recorded in
    // declarationResolutionMap exactly once.
    void resolveParserIdentifier(
            TIdentifier identifier) {

        String name = identifier.getText();
        Declaration declaration = this.parserNameSpace.get(name);
        if (declaration == null) {
            declaration = this.treeNameSpace.get(name);
            if (declaration == null) {
                throw SemanticException.semanticError("No \"" + name
                        + "\" has been declared.", identifier);
            }
            // found only in the tree name space => wrong kind for this context
            throw SemanticException.semanticError("\"" + name
                    + "\" is not a parser production.", identifier);
        }
        if (!(declaration instanceof Production)
                && !(declaration instanceof Expression)) {
            throw SemanticException.semanticError("\"" + name
                    + "\" is not a production or an expression.", identifier);
        }
        if (this.declarationResolutionMap.containsKey(identifier)) {
            // resolving the same token twice indicates a compiler bug
            throw new InternalException("it was already resolved.");
        }
        this.declarationResolutionMap.put(identifier, declaration);
    }

    // Mirror image of resolveParserIdentifier for tree context: tree name
    // space first, parser name space probed only for error reporting.
    void resolveTreeIdentifier(
            TIdentifier identifier) {

        String name = identifier.getText();
        Declaration declaration = this.treeNameSpace.get(name);
        if (declaration == null) {
            declaration = this.parserNameSpace.get(name);
            if (declaration == null) {
                throw SemanticException.semanticError("No \"" + name
                        + "\" has been declared.", identifier);
            }
            throw SemanticException.semanticError("\"" + name
                    + "\" is not a tree production.", identifier);
        }
        if (!(declaration instanceof Production)
                && !(declaration instanceof Expression)) {
            throw SemanticException.semanticError("\"" + name
                    + "\" is not a production or an expression.", identifier);
        }
        if (this.declarationResolutionMap.containsKey(identifier)) {
            throw new InternalException("it was already resolved.");
        }
        this.declarationResolutionMap.put(identifier, declaration);
    }

    // Resolves every alternative name of the list against the given production.
    void resolveAlternativeIdentifiers(
            Production production,
            LinkedList<TIdentifier> identifiers) {

        for (TIdentifier identifier : identifiers) {
            resolveAlternativeIdentifier(production, identifier);
        }
    }

    // Resolves one alternative name within a production and records it in
    // alternativeResolutionMap. getAlternative() returning null while
    // hasAlternative() is true means the name is ambiguous (declared more than
    // once), which gets its own error message.
    void resolveAlternativeIdentifier(
            Production production,
            TIdentifier identifier) {

        String name = identifier.getText();
        Alternative alternative = production.getAlternative(name);
        if (alternative == null) {
            if (production.hasAlternative(name)) {
                throw SemanticException.semanticError("Production \""
                        + production.getName() + "\" has two \"" + name
                        + "\" alternatives (or more).", identifier);
            }
            throw SemanticException.semanticError(
                    "\"" + name + "\" is not an alternative of production \""
                            + production.getName() + "\".", identifier);
        }
        if (this.alternativeResolutionMap.containsKey(identifier)) {
            throw new InternalException("it was already resolved.");
        }
        this.alternativeResolutionMap.put(identifier, alternative);
    }

    // Resolves an unnamed alternative reference (production name only): the
    // production must own an anonymous ("") alternative, otherwise an explicit
    // alternative name would have been required.
    void resolveAlternativeReference(
            AUnnamedAlternativeReference alternativeReference) {

        Production production = (Production) getDeclarationResolution(alternativeReference
                .getProduction());
        Alternative alternative = production.getAlternative("");
        if (alternative == null) {
            throw SemanticException.semanticError(
                    "The alternative name is missing.",
                    alternativeReference.getProduction());
        }
        if (this.alternativeReferenceResolutionMap
                .containsKey(alternativeReference)) {
            throw new InternalException("It was already resolved.");
        }
        this.alternativeReferenceResolutionMap.put(alternativeReference,
                AlternativeReference.createDeclaredAlternativeReference(this,
                        alternative, alternativeReference.getProduction()));
    }

    // Resolves a named alternative reference (production.alternative); the
    // alternative token is expected to be already resolved via
    // resolveAlternativeIdentifier.
    void resolveAlternativeReference(
            ANamedAlternativeReference alternativeReference) {

        if (this.alternativeReferenceResolutionMap
                .containsKey(alternativeReference)) {
            throw new InternalException("It was already resolved.");
        }
        this.alternativeReferenceResolutionMap.put(alternativeReference,
                AlternativeReference.createDeclaredAlternativeReference(this,
                        getAlternativeResolution(alternativeReference
                                .getAlternative()), alternativeReference
                                .getProduction()));
    }

    // Records the resolution of a natural (untransformed) element reference.
    void resolveElementReference(
            ANaturalElementReference elementReference) {

        if (this.elementReferenceResolutionMap.containsKey(elementReference)) {
            throw new InternalException("It was already resolved.");
        }
        this.elementReferenceResolutionMap.put(elementReference,
                ElementReference.createDeclaredElementReference(this,
                        elementReference));
    }

    // Records the resolution of a transformed element reference.
    void resolveElementReference(
            ATransformedElementReference elementReference) {

        if (this.elementReferenceResolutionMap.containsKey(elementReference)) {
            throw new InternalException("It was already resolved.");
        }
        this.elementReferenceResolutionMap.put(elementReference,
                ElementReference.createDeclaredElementReference(this,
                        elementReference));
    }

    // Associates a freshly created Type with an element-body node, once.
    void resolveType(
            PElementBody node) {

        if (this.typeResolutionMap.containsKey(node)) {
            throw new InternalException("It was already resolved.");
        }
        this.typeResolutionMap.put(node, new Type(this, node));
    }

    // The seven overloads below all follow the same pattern: each concrete
    // transformation-element AST node is resolved exactly once into the
    // matching TransformationElement variant and cached in
    // transformationElementMap.
    void resolveTransformationElement(
            ANullTransformationElement node) {

        if (this.transformationElementMap.containsKey(node)) {
            throw new InternalException("It was already resolved.");
        }
        this.transformationElementMap.put(node, TransformationElement
                .createDeclaredNullTransformationElement(this, node));
    }

    void resolveTransformationElement(
            AReferenceTransformationElement node) {

        if (this.transformationElementMap.containsKey(node)) {
            throw new InternalException("It was already resolved.");
        }
        this.transformationElementMap.put(node, TransformationElement
                .createDeclaredReferenceTransformationElement(this, node));
    }

    void resolveTransformationElement(
            ADeleteTransformationElement node) {

        if (this.transformationElementMap.containsKey(node)) {
            throw new InternalException("It was already resolved.");
        }
        this.transformationElementMap.put(node, TransformationElement
                .createDeclaredDeleteTransformationElement(this, node));
    }

    void resolveTransformationElement(
            ANewTransformationElement node) {

        if (this.transformationElementMap.containsKey(node)) {
            throw new InternalException("It was already resolved.");
        }
        this.transformationElementMap.put(node, TransformationElement
                .createDeclaredNewTransformationElement(this, node));
    }

    void resolveTransformationElement(
            AListTransformationElement node) {

        if (this.transformationElementMap.containsKey(node)) {
            throw new InternalException("It was already resolved.");
        }
        this.transformationElementMap.put(node, TransformationElement
                .createDeclaredListTransformationElement(this, node));
    }

    void resolveTransformationElement(
            ALeftTransformationElement node) {

        if (this.transformationElementMap.containsKey(node)) {
            throw new InternalException("It was already resolved.");
        }
        this.transformationElementMap.put(node, TransformationElement
                .createDeclaredLeftTransformationElement(this, node));
    }

    void resolveTransformationElement(
            ARightTransformationElement node) {

        if (this.transformationElementMap.containsKey(node)) {
            throw new InternalException("It was already resolved.");
        }
        this.transformationElementMap.put(node, TransformationElement
                .createDeclaredRightTransformationElement(this, node));
    }

    // Shared helper for all *Unit resolvers: looks the name up in the parser
    // name space (falling back to the tree name space only for error
    // reporting), requires the declaration to be an Expression, and records it
    // in inlinedExpressionResolutionMap keyed by the referencing token.
    // Invariant (enforced by the InternalException below): an expression is
    // always registered in BOTH name spaces, so finding one only in the tree
    // name space is impossible in a consistent compiler state.
    private void resolveInlinedExpression(
            String name,
            Token location) {

        Declaration declaration = this.parserNameSpace.get(name);
        if (declaration == null) {
            declaration = this.treeNameSpace.get(name);
            if (declaration == null) {
                throw SemanticException.semanticError("No \"" + name
                        + "\" has been declared.", location);
            }
            if (!(declaration instanceof Expression)) {
                throw SemanticException.semanticError("\"" + name
                        + "\" is not an expression.", location);
            }
            throw new InternalException(
                    "an expression must be in both parser and tree name spaces");
        }
        if (!(declaration instanceof Expression)) {
            throw SemanticException.semanticError("\"" + name
                    + "\" is not an expression.", location);
        }
        if (this.inlinedExpressionResolutionMap.containsKey(location)) {
            throw new InternalException("it was already resolved.");
        }
        this.inlinedExpressionResolutionMap.put(location,
                (Expression) declaration);
    }
}
$output.javaTest("${configuration.rootPackage}.rest", "BookResourceUnitTest")##
$output.require("static org.hamcrest.CoreMatchers.is")##
$output.require("static org.hamcrest.CoreMatchers.nullValue")##
$output.require("static org.hamcrest.Matchers.hasSize")##
$output.require("static org.junit.Assert.fail")##
$output.require("static org.mockito.Matchers.any")##
$output.require("static org.mockito.Mockito.times")##
$output.require("static org.mockito.Mockito.verify")##
$output.require("static org.mockito.Mockito.verifyNoMoreInteractions")##
$output.require("static org.mockito.Mockito.when")##
$output.require("static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get")##
$output.require("static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.put")##
$output.require("static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print")##
$output.require("static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content")##
$output.require("static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath")##
$output.require("static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status")##
$output.require("java.util.ArrayList")##
$output.require("java.util.List")##
$output.require("javax.inject.Inject")##
$output.require("org.apache.catalina.filters.CorsFilter")##
$output.require("org.junit.Before")##
$output.require("org.junit.Ignore")##
$output.require("org.junit.Test")##
$output.require("org.mockito.InjectMocks")##
$output.require("org.mockito.Mock")##
$output.require("org.mockito.MockitoAnnotations")##
$output.require("org.slf4j.Logger")##
$output.require("org.slf4j.LoggerFactory")##
$output.require("org.springframework.data.domain.Page")##
$output.require("org.springframework.data.domain.PageImpl")##
$output.require("org.springframework.data.domain.Pageable")##
$output.require("org.springframework.data.web.PageableHandlerMethodArgumentResolver")##
$output.require("org.springframework.http.MediaType")##
$output.require("org.springframework.test.web.servlet.MockMvc")##
$output.require("org.springframework.test.web.servlet.setup.MockMvcBuilders")##
$output.require("com.jaxio.demo.domain.Book")##
$output.require("com.jaxio.demo.repository.BookRepository")##
$output.require("com.jaxio.demo.rest.BookResource")##
$output.require("com.jaxio.demo.rest.BookResourceUnitTest")##
$output.require("com.jaxio.demo.searchrepository.BookSearchRepository")##
$output.require("com.jaxio.demo.utils.JsonUtils")##
/**
 * Unit tests for class BookResource.
 * <p>
 * The resource under test is set up standalone (no Spring context): the
 * repositories are Mockito mocks injected into the resource, and requests are
 * driven through {@link MockMvc}.
 */
public class BookResourceUnitTest {
    private final Logger log = LoggerFactory.getLogger(BookResourceUnitTest.class);

    private MockMvc mockMvc;

    @Inject
    private PageableHandlerMethodArgumentResolver pageableArgumentResolver;

    @Mock
    private BookRepository bookRepository;

    @Mock
    private BookSearchRepository bookSearchRepository;

    @InjectMocks
    private BookResource bookResource;

    @Before
    public void init() {
        MockitoAnnotations.initMocks(this);
        mockMvc = MockMvcBuilders
                .standaloneSetup(bookResource)
                .addFilters(new CorsFilter())
                /* to handle Pageable automatically */
                .setCustomArgumentResolvers(pageableArgumentResolver)
                .build();
    }

    /**
     * Tests the create method on BookResource.
     */
    @Test
    @Ignore
    public void testCreate() {
        Book book = new Book();
        book.setId("1");
        try {
            when(bookRepository.save(book)).thenReturn(book);
            mockMvc.perform(put("/api/books/")
                    .contentType(MediaType.APPLICATION_JSON_UTF8_VALUE)
                    /* add a request parameter */
                    .content(JsonUtils.convertObjectToJsonBytes(book))
                )
                .andExpect(status().isOk())
                /* to print the PUT result */
                .andDo(print())
                // to validate json result content
                .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8_VALUE))
                .andExpect(jsonPath("$.price", nullValue()));
            verify(bookRepository, times(1)).save(book);
            verifyNoMoreInteractions(bookRepository);
        } catch (Exception e) {
            log.error("", e);
            fail();
        }
    }

    /**
     * Tests the update method on BookResource.
     */
    @Test
    @Ignore
    public void testUpdate() {
        Book book = new Book();
        book.setId("1");
        try {
            when(bookRepository.save(book)).thenReturn(book);
            mockMvc.perform(put("/api/books/")
                    .contentType(MediaType.APPLICATION_JSON_UTF8_VALUE)
                    .content(JsonUtils.convertObjectToJsonBytes(book))
                )
                .andExpect(status().isOk())
                /* to print the PUT result */
                .andDo(print())
                // to validate json result content
                .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8_VALUE))
                .andExpect(jsonPath("$.price", nullValue()));
            verify(bookRepository, times(1)).save(book);
            verifyNoMoreInteractions(bookRepository);
        } catch (Exception e) {
            log.error("", e);
            fail();
        }
    }

    /**
     * Tests the findAll method on BookResource.
     */
    @Test
    @Ignore
    public void testFindAll() {
        Book book = new Book();
        book.setId("1");
        List<Book> books = new ArrayList<Book>();
        books.add(book);
        book = new Book();
        book.setId("2");
        books.add(book);
        try {
            when(bookRepository.findAll()).thenReturn(books);
            mockMvc.perform(get("/api/books/"))
                .andExpect(status().isOk())
                .andDo(print())
                .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8_VALUE))
                .andExpect(jsonPath("$", hasSize(2)))
                .andExpect(jsonPath("$[0].id", is("1")))
                .andExpect(jsonPath("$[1].id", is("2")));
            verify(bookRepository, times(1)).findAll();
            verifyNoMoreInteractions(bookRepository);
        } catch (Exception e) {
            log.error("", e);
            fail();
        }
    }

    /**
     * Tests the findAllByPage method on BookResource.
     */
    @Test
    public void testFindAllByPage() {
        Book book = new Book();
        book.setId("1");
        final List<Book> books = new ArrayList<Book>();
        books.add(book);
        book = new Book();
        book.setId("2");
        books.add(book);
        try {
            Page<Book> expectedPage = new PageImpl<Book>(books);
            when(bookRepository.findAll(any(Pageable.class))).thenReturn(expectedPage);
            mockMvc.perform(get("/api/books/bypage")
                    .param("page", "0")
                    .param("size", "2")
                    .contentType(MediaType.APPLICATION_JSON_UTF8_VALUE)
                    /* add a request parameter */
                    .content(JsonUtils.convertObjectToJsonBytes(expectedPage))
                )
                .andExpect(status().isOk())
                .andDo(print());
            // BUGFIX: the stubbed call is the Pageable overload, so that is the
            // overload to verify; verifying the no-arg findAll() always failed
            // because the resource never invokes it on this endpoint.
            verify(bookRepository, times(1)).findAll(any(Pageable.class));
            verifyNoMoreInteractions(bookRepository);
        } catch (Exception e) {
            log.error("", e);
            fail();
        }
    }

    /**
     * Tests the count method on BookResource.
     */
    @Test
    @Ignore
    public void testCount() {
        final String count = "1";
        try {
            when(bookRepository.count()).thenReturn(Long.valueOf(count));
            mockMvc.perform(get("/api/books/count"))
                .andExpect(status().isOk())
                .andDo(print())
                .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8_VALUE))
                .andExpect(content().string(count));
            verify(bookRepository, times(1)).count();
            verifyNoMoreInteractions(bookRepository);
        } catch (Exception e) {
            log.error("", e);
            fail();
        }
    }
}
/**
 * Copyright 2015 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package rx.swing.sources;

import org.junit.Test;

import rx.Subscription;
import rx.functions.Action0;
import rx.observers.TestSubscriber;

import javax.swing.*;
import javax.swing.event.ListSelectionEvent;

import static junit.framework.Assert.assertEquals;

/**
 * Tests for {@code ListSelectionEventSource}: selection changes on JTable row
 * and column selection models and on a JList selection model must be emitted
 * as {@link ListSelectionEvent}s, and unsubscribing must deregister the
 * underlying listener.
 *
 * NOTE: all interaction with Swing models runs on the EDT via
 * SwingTestHelper.runInEventDispatchThread.
 */
public class ListSelectionEventSourceTest {

    @Test
    public void jtableRowSelectionObservingSelectionEvents() throws Throwable {
        SwingTestHelper.create().runInEventDispatchThread(new Action0() {

            @Override
            public void call() {
                TestSubscriber<ListSelectionEvent> testSubscriber = TestSubscriber.create();

                JTable table = createJTable();
                ListSelectionEventSource
                        .fromListSelectionEventsOf(table.getSelectionModel())
                        .subscribe(testSubscriber);

                // no event until a selection is actually made
                testSubscriber.assertNoErrors();
                testSubscriber.assertNoValues();

                table.getSelectionModel().setSelectionInterval(0, 0);

                testSubscriber.assertNoErrors();
                testSubscriber.assertValueCount(1);
                assertListSelectionEventEquals(
                        new ListSelectionEvent(
                                table.getSelectionModel(),
                                0 /* start of region with selection changes */,
                                0 /* end of region with selection changes */,
                                false),
                        testSubscriber.getOnNextEvents().get(0));

                table.getSelectionModel().setSelectionInterval(2, 2);

                // the changed region spans from the previously selected row (0)
                // to the newly selected row (2)
                testSubscriber.assertNoErrors();
                testSubscriber.assertValueCount(2);
                assertListSelectionEventEquals(
                        new ListSelectionEvent(
                                table.getSelectionModel(),
                                0 /* start of region with selection changes */,
                                2 /* end of region with selection changes */,
                                false),
                        testSubscriber.getOnNextEvents().get(1));
            }
        }).awaitTerminal();
    }

    @Test
    public void jtableColumnSelectionObservingSelectionEvents() throws Throwable {
        SwingTestHelper.create().runInEventDispatchThread(new Action0() {

            @Override
            public void call() {
                TestSubscriber<ListSelectionEvent> testSubscriber = TestSubscriber.create();

                JTable table = createJTable();
                table.getColumnModel().getSelectionModel().setSelectionMode(ListSelectionModel.SINGLE_INTERVAL_SELECTION);
                ListSelectionEventSource
                        .fromListSelectionEventsOf(table.getColumnModel().getSelectionModel())
                        .subscribe(testSubscriber);

                testSubscriber.assertNoErrors();
                testSubscriber.assertNoValues();

                table.getColumnModel().getSelectionModel().setSelectionInterval(0, 0);

                testSubscriber.assertNoErrors();
                testSubscriber.assertValueCount(1);
                assertListSelectionEventEquals(
                        new ListSelectionEvent(
                                table.getColumnModel().getSelectionModel(),
                                0 /* start of region with selection changes */,
                                0 /* end of region with selection changes */,
                                false),
                        testSubscriber.getOnNextEvents().get(0));

                table.getColumnModel().getSelectionModel().setSelectionInterval(2, 2);

                testSubscriber.assertNoErrors();
                testSubscriber.assertValueCount(2);
                assertListSelectionEventEquals(
                        new ListSelectionEvent(
                                table.getColumnModel().getSelectionModel(),
                                0 /* start of region with selection changes */,
                                2 /* end of region with selection changes */,
                                false),
                        testSubscriber.getOnNextEvents().get(1));
            }
        }).awaitTerminal();
    }

    @Test
    public void jlistSelectionObservingSelectionEvents() throws Throwable {
        SwingTestHelper.create().runInEventDispatchThread(new Action0() {

            @Override
            public void call() {
                TestSubscriber<ListSelectionEvent> testSubscriber = TestSubscriber.create();

                JList<String> jList = new JList<String>(new String[]{"a", "b", "c", "d", "e", "f"});
                jList.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
                ListSelectionEventSource
                        .fromListSelectionEventsOf(jList.getSelectionModel())
                        .subscribe(testSubscriber);

                testSubscriber.assertNoErrors();
                testSubscriber.assertNoValues();

                jList.getSelectionModel().setSelectionInterval(0, 0);

                testSubscriber.assertNoErrors();
                testSubscriber.assertValueCount(1);
                assertListSelectionEventEquals(
                        new ListSelectionEvent(
                                jList.getSelectionModel(),
                                0 /* start of region with selection changes */,
                                0 /* end of region with selection changes */,
                                false),
                        testSubscriber.getOnNextEvents().get(0));

                jList.getSelectionModel().setSelectionInterval(2, 2);

                testSubscriber.assertNoErrors();
                testSubscriber.assertValueCount(2);
                assertListSelectionEventEquals(
                        new ListSelectionEvent(
                                jList.getSelectionModel(),
                                0 /* start of region with selection changes */,
                                2 /* end of region with selection changes */,
                                false),
                        testSubscriber.getOnNextEvents().get(1));
            }
        }).awaitTerminal();
    }

    @Test
    public void jtableRowSelectionUnsubscribeRemovesRowSelectionListener() throws Throwable {
        SwingTestHelper.create().runInEventDispatchThread(new Action0() {

            @Override
            public void call() {
                TestSubscriber<ListSelectionEvent> testSubscriber = TestSubscriber.create();

                JTable table = createJTable();
                // listener count before subscribing, to prove unsubscribe
                // restores the model to its prior state
                int numberOfListenersBefore = getNumberOfRowListSelectionListeners(table);
                Subscription sub = ListSelectionEventSource
                        .fromListSelectionEventsOf(table.getSelectionModel())
                        .subscribe(testSubscriber);
                testSubscriber.assertNoErrors();
                testSubscriber.assertNoValues();

                sub.unsubscribe();

                // after unsubscribing, selection changes must no longer be observed
                testSubscriber.assertUnsubscribed();
                table.getSelectionModel().setSelectionInterval(0, 0);
                testSubscriber.assertNoErrors();
                testSubscriber.assertNoValues();
                assertEquals(numberOfListenersBefore, getNumberOfRowListSelectionListeners(table));
            }
        }).awaitTerminal();
    }

    // Counts listeners on the table's row selection model; relies on JTable's
    // default model being a DefaultListSelectionModel.
    private static int getNumberOfRowListSelectionListeners(final JTable table) {
        return ((DefaultListSelectionModel) table.getSelectionModel()).getListSelectionListeners().length;
    }

    // 3x3 fixture table used by the JTable-based tests.
    private static JTable createJTable() {
        return new JTable(new Object[][]{
                {"A1", "B1", "C1"},
                {"A2", "B2", "C2"},
                {"A3", "B3", "C3"},
        }, new String[]{
                "A", "B", "C"
        });
    }

    // Field-by-field equality check for ListSelectionEvent (which does not
    // override equals): source, first index, last index and the
    // valueIsAdjusting flag must all match.
    private static void assertListSelectionEventEquals(ListSelectionEvent expected, ListSelectionEvent actual) {
        if (expected == null) {
            throw new IllegalArgumentException("missing expected");
        }
        if (actual == null) {
            throw new AssertionError("Expected " + expected + ", but was: " + actual);
        }
        if (!expected.getSource().equals(actual.getSource())) {
            throw new AssertionError("Expected " + expected + ", but was: " + actual + ". Different source.");
        }
        if (expected.getFirstIndex() != actual.getFirstIndex()) {
            throw new AssertionError("Expected " + expected + ", but was: " + actual + ". Different first index.");
        }
        if (expected.getLastIndex() != actual.getLastIndex()) {
            throw new AssertionError("Expected " + expected + ", but was: " + actual + ". Different last index.");
        }
        if (expected.getValueIsAdjusting() != actual.getValueIsAdjusting()) {
            throw new AssertionError("Expected " + expected + ", but was: " + actual + ". Different ValueIsAdjusting.");
        }
    }
}
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.device.bluetooth;

import android.annotation.TargetApi;
import android.bluetooth.BluetoothAdapter;
import android.bluetooth.le.ScanSettings;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.os.Build;
import android.os.ParcelUuid;

import org.chromium.base.Log;
import org.chromium.base.annotations.CalledByNative;
import org.chromium.base.annotations.JNINamespace;
import org.chromium.components.location.LocationUtils;

import java.util.List;

/**
 * Exposes android.bluetooth.BluetoothAdapter as necessary for C++
 * device::BluetoothAdapterAndroid, which implements the cross platform
 * device::BluetoothAdapter.
 *
 * Lifetime is controlled by device::BluetoothAdapterAndroid.
 */
@JNINamespace("device")
@TargetApi(Build.VERSION_CODES.M)
final class ChromeBluetoothAdapter extends BroadcastReceiver {
    private static final String TAG = "Bluetooth";

    // Raw pointer to the owning C++ BluetoothAdapterAndroid; zeroed in
    // onBluetoothAdapterAndroidDestruction() so no native call happens after
    // the C++ object is gone.
    private long mNativeBluetoothAdapterAndroid;
    // mAdapter is final to ensure registerReceiver is followed by unregisterReceiver.
    private final Wrappers.BluetoothAdapterWrapper mAdapter;
    // Non-null exactly while a Low Energy scan is in progress.
    private ScanCallback mScanCallback;

    // ---------------------------------------------------------------------------------------------
    // Construction and handler for C++ object destruction.

    /**
     * Constructs a ChromeBluetoothAdapter.
     * @param nativeBluetoothAdapterAndroid Is the associated C++
     *                                      BluetoothAdapterAndroid pointer value.
     * @param adapterWrapper Wraps the default android.bluetooth.BluetoothAdapter,
     *                       but may be either null if an adapter is not available
     *                       or a fake for testing.
     */
    public ChromeBluetoothAdapter(
            long nativeBluetoothAdapterAndroid, Wrappers.BluetoothAdapterWrapper adapterWrapper) {
        mNativeBluetoothAdapterAndroid = nativeBluetoothAdapterAndroid;
        mAdapter = adapterWrapper;
        registerBroadcastReceiver();
        if (adapterWrapper == null) {
            Log.i(TAG, "ChromeBluetoothAdapter created with no adapterWrapper.");
        } else {
            Log.i(TAG, "ChromeBluetoothAdapter created with provided adapterWrapper.");
        }
    }

    /**
     * Handles C++ object being destroyed.
     */
    @CalledByNative
    private void onBluetoothAdapterAndroidDestruction() {
        stopScan();
        mNativeBluetoothAdapterAndroid = 0;
        unregisterBroadcastReceiver();
    }

    // ---------------------------------------------------------------------------------------------
    // BluetoothAdapterAndroid methods implemented in java:

    // Implements BluetoothAdapterAndroid::Create.
    // 'Object' type must be used for |adapterWrapper| because inner class
    // Wrappers.BluetoothAdapterWrapper reference is not handled by jni_generator.py JavaToJni.
    // http://crbug.com/505554
    @CalledByNative
    private static ChromeBluetoothAdapter create(
            long nativeBluetoothAdapterAndroid, Object adapterWrapper) {
        return new ChromeBluetoothAdapter(
                nativeBluetoothAdapterAndroid, (Wrappers.BluetoothAdapterWrapper) adapterWrapper);
    }

    // Implements BluetoothAdapterAndroid::GetAddress.
    @CalledByNative
    private String getAddress() {
        if (isPresent()) {
            return mAdapter.getAddress();
        } else {
            return "";
        }
    }

    // Implements BluetoothAdapterAndroid::GetName.
    @CalledByNative
    private String getName() {
        if (isPresent()) {
            return mAdapter.getName();
        } else {
            return "";
        }
    }

    // Implements BluetoothAdapterAndroid::IsPresent.
    @CalledByNative
    private boolean isPresent() {
        return mAdapter != null;
    }

    // Implements BluetoothAdapterAndroid::IsPowered.
    @CalledByNative
    private boolean isPowered() {
        return isPresent() && mAdapter.isEnabled();
    }

    // Implements BluetoothAdapterAndroid::SetPowered.
    @CalledByNative
    private boolean setPowered(boolean powered) {
        if (powered) {
            return isPresent() && mAdapter.enable();
        } else {
            return isPresent() && mAdapter.disable();
        }
    }

    // Implements BluetoothAdapterAndroid::IsDiscoverable.
    @CalledByNative
    private boolean isDiscoverable() {
        return isPresent()
                && mAdapter.getScanMode() == BluetoothAdapter.SCAN_MODE_CONNECTABLE_DISCOVERABLE;
    }

    // Implements BluetoothAdapterAndroid::IsDiscovering.
    // Discovery covers both classic discovery and an active LE scan.
    @CalledByNative
    private boolean isDiscovering() {
        return isPresent() && (mAdapter.isDiscovering() || mScanCallback != null);
    }

    /**
     * Starts a Low Energy scan.
     * @return True on success.
     */
    // NOTE(review): dereferences mAdapter without an isPresent() check —
    // presumably native only calls this when an adapter exists; confirm.
    @CalledByNative
    private boolean startScan() {
        Wrappers.BluetoothLeScannerWrapper scanner = mAdapter.getBluetoothLeScanner();

        if (scanner == null) {
            return false;
        }

        if (!canScan()) {
            return false;
        }

        // scanMode note: SCAN_FAILED_FEATURE_UNSUPPORTED is caused (at least on some devices) if
        // setReportDelay() is used or if SCAN_MODE_LOW_LATENCY isn't used.
        int scanMode = ScanSettings.SCAN_MODE_LOW_LATENCY;

        assert mScanCallback == null;
        mScanCallback = new ScanCallback();

        try {
            scanner.startScan(null /* filters */, scanMode, mScanCallback);
        } catch (IllegalArgumentException e) {
            Log.e(TAG, "Cannot start scan: " + e);
            mScanCallback = null;
            return false;
        } catch (IllegalStateException e) {
            Log.e(TAG, "Adapter is off. Cannot start scan: " + e);
            mScanCallback = null;
            return false;
        }
        return true;
    }

    /**
     * Stops the Low Energy scan.
     * @return True if a scan was in progress.
     */
    @CalledByNative
    private boolean stopScan() {
        if (mScanCallback == null) {
            return false;
        }

        try {
            Wrappers.BluetoothLeScannerWrapper scanner = mAdapter.getBluetoothLeScanner();
            if (scanner != null) {
                scanner.stopScan(mScanCallback);
            }
        } catch (IllegalArgumentException e) {
            Log.e(TAG, "Cannot stop scan: " + e);
        } catch (IllegalStateException e) {
            Log.e(TAG, "Adapter is off. Cannot stop scan: " + e);
        }
        // Cleared even when stopScan threw, so a later startScan can retry.
        mScanCallback = null;
        return true;
    }

    // ---------------------------------------------------------------------------------------------
    // Implementation details:

    /**
     * @return true if Chromium has permission to scan for Bluetooth devices and location services
     * are on.
     */
    private boolean canScan() {
        LocationUtils locationUtils = LocationUtils.getInstance();
        return locationUtils.hasAndroidLocationPermission()
                && locationUtils.isSystemLocationSettingEnabled();
    }

    // Registers this object for ACTION_STATE_CHANGED broadcasts; skipped when
    // no adapter is available (then there is no context to register on).
    private void registerBroadcastReceiver() {
        if (mAdapter != null) {
            mAdapter.getContext().registerReceiver(
                    this, new IntentFilter(BluetoothAdapter.ACTION_STATE_CHANGED));
        }
    }

    private void unregisterBroadcastReceiver() {
        if (mAdapter != null) {
            mAdapter.getContext().unregisterReceiver(this);
        }
    }

    /**
     * Implements callbacks used during a Low Energy scan by notifying upon
     * devices discovered or detecting a scan failure.
     */
    private class ScanCallback extends Wrappers.ScanCallbackWrapper {
        @Override
        public void onBatchScanResult(List<Wrappers.ScanResultWrapper> results) {
            Log.v(TAG, "onBatchScanResults");
        }

        @Override
        public void onScanResult(int callbackType, Wrappers.ScanResultWrapper result) {
            Log.v(TAG, "onScanResult %d %s %s", callbackType, result.getDevice().getAddress(),
                    result.getDevice().getName());

            // Convert the advertised service UUIDs (may be absent) into strings
            // for the JNI boundary.
            String[] uuid_strings;
            List<ParcelUuid> uuids = result.getScanRecord_getServiceUuids();

            if (uuids == null) {
                uuid_strings = new String[] {};
            } else {
                uuid_strings = new String[uuids.size()];
                for (int i = 0; i < uuids.size(); i++) {
                    uuid_strings[i] = uuids.get(i).toString();
                }
            }

            nativeCreateOrUpdateDeviceOnScan(mNativeBluetoothAdapterAndroid,
                    result.getDevice().getAddress(), result.getDevice(), result.getRssi(),
                    uuid_strings, result.getScanRecord_getTxPowerLevel());
        }

        @Override
        public void onScanFailed(int errorCode) {
            Log.w(TAG, "onScanFailed: %d", errorCode);
            nativeOnScanFailed(mNativeBluetoothAdapterAndroid);
        }
    }

    // Forwards adapter power-state broadcasts to native; only STATE_ON and
    // STATE_OFF are reported, transitional states are ignored.
    @Override
    public void onReceive(Context context, Intent intent) {
        String action = intent.getAction();
        if (isPresent() && BluetoothAdapter.ACTION_STATE_CHANGED.equals(action)) {
            int state = intent.getIntExtra(BluetoothAdapter.EXTRA_STATE, BluetoothAdapter.ERROR);
            Log.w(TAG, "onReceive: BluetoothAdapter.ACTION_STATE_CHANGED: %s",
                    getBluetoothStateString(state));
            switch (state) {
                case BluetoothAdapter.STATE_ON:
                    nativeOnAdapterStateChanged(mNativeBluetoothAdapterAndroid, true);
                    break;
                case BluetoothAdapter.STATE_OFF:
                    nativeOnAdapterStateChanged(mNativeBluetoothAdapterAndroid, false);
                    break;
                default:
                    // do nothing
            }
        }
    }

    // Debug-string helper for the adapter state constants.
    private String getBluetoothStateString(int state) {
        switch (state) {
            case BluetoothAdapter.STATE_OFF:
                return "STATE_OFF";
            case BluetoothAdapter.STATE_ON:
                return "STATE_ON";
            case BluetoothAdapter.STATE_TURNING_OFF:
                return "STATE_TURNING_OFF";
            case BluetoothAdapter.STATE_TURNING_ON:
                return "STATE_TURNING_ON";
            default:
                assert false;
                return "illegal state: " + state;
        }
    }

    // ---------------------------------------------------------------------------------------------
    // BluetoothAdapterAndroid C++ methods declared for access from java:

    // Binds to BluetoothAdapterAndroid::OnScanFailed.
    private native void nativeOnScanFailed(long nativeBluetoothAdapterAndroid);

    // Binds to BluetoothAdapterAndroid::CreateOrUpdateDeviceOnScan.
    // 'Object' type must be used for |bluetoothDeviceWrapper| because inner class
    // Wrappers.BluetoothDeviceWrapper reference is not handled by jni_generator.py JavaToJni.
    // http://crbug.com/505554
    private native void nativeCreateOrUpdateDeviceOnScan(long nativeBluetoothAdapterAndroid,
            String address, Object bluetoothDeviceWrapper, int rssi, String[] advertisedUuids,
            int txPower);

    // Binds to BluetoothAdapterAndroid::nativeOnAdapterStateChanged
    private native void nativeOnAdapterStateChanged(
            long nativeBluetoothAdapterAndroid, boolean powered);
}
/** * Copyright 2017-2019 The GreyCat Authors. All rights reserved. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package greycat.internal.heap; import greycat.Constants; import greycat.internal.CoreConstants; import greycat.struct.Buffer; import greycat.struct.IMatrix; import greycat.struct.LMatrix; import greycat.utility.Base64; import java.util.Arrays; import java.util.Random; class HeapIMatrix implements IMatrix { private static final int INDEX_ROWS = 0; private static final int INDEX_COLUMNS = 1; private static final int INDEX_MAX_COLUMN = 2; private static final int INDEX_OFFSET = 3; private final HeapContainer parent; private int[] _backend = null; private boolean aligned = true; HeapIMatrix(final HeapContainer p_parent, final HeapIMatrix origin) { parent = p_parent; if (origin != null) { aligned = false; _backend = origin._backend; } } @Override public final IMatrix init(final int rows, final int columns) { synchronized (parent) { internal_init(rows, columns); } parent.declareDirty(); return this; } private void internal_init(final int rows, final int columns) { //clean _backend for OffHeap version _backend = new int[rows * columns + INDEX_OFFSET]; _backend[INDEX_ROWS] = rows; _backend[INDEX_COLUMNS] = columns; _backend[INDEX_MAX_COLUMN] = columns;//direct allocation aligned = true; } @Override public final IMatrix appendColumn(int[] newColumn) { synchronized (parent) { internal_appendColumn(newColumn); parent.declareDirty(); } return this; } private void 
internal_appendColumn(int[] newColumn) { int nbRows; int nbColumns; int nbMaxColumn; if (_backend == null) { nbRows = newColumn.length; nbColumns = Constants.MAP_INITIAL_CAPACITY; nbMaxColumn = 0; _backend = new int[nbRows * nbColumns + INDEX_OFFSET]; _backend[INDEX_ROWS] = nbRows; _backend[INDEX_COLUMNS] = nbColumns; _backend[INDEX_MAX_COLUMN] = nbMaxColumn; } else { nbColumns = (int) _backend[INDEX_COLUMNS]; nbRows = (int) _backend[INDEX_ROWS]; nbMaxColumn = (int) _backend[INDEX_MAX_COLUMN]; } if (!aligned || nbMaxColumn == nbColumns) { if (nbMaxColumn == nbColumns) { nbColumns = nbColumns * 2; _backend[INDEX_COLUMNS] = nbColumns; final int newLength = nbColumns * nbRows + INDEX_OFFSET; int[] next_backend = new int[newLength]; System.arraycopy(_backend, 0, next_backend, 0, _backend.length); _backend = next_backend; aligned = true; } else { //direct copy int[] next_backend = new int[_backend.length]; System.arraycopy(_backend, 0, next_backend, 0, _backend.length); _backend = next_backend; aligned = true; } } //just insert System.arraycopy(newColumn, 0, _backend, (nbMaxColumn * nbRows) + INDEX_OFFSET, newColumn.length); _backend[INDEX_MAX_COLUMN] = nbMaxColumn + 1; } @Override public final IMatrix fill(int value) { synchronized (parent) { internal_fill(value); } return this; } private void internal_fill(int value) { if (_backend != null) { if (!aligned) { int[] next_backend = new int[_backend.length]; System.arraycopy(_backend, 0, next_backend, 0, _backend.length); _backend = next_backend; aligned = true; } Arrays.fill(_backend, INDEX_OFFSET, _backend.length - INDEX_OFFSET, value); _backend[INDEX_MAX_COLUMN] = _backend[INDEX_COLUMNS]; parent.declareDirty(); } } @Override public IMatrix fillWith(int[] values) { synchronized (parent) { internal_fillWith(values); } return this; } private void internal_fillWith(int[] values) { if (_backend != null) { if (!aligned) { int[] next_backend = new int[_backend.length]; System.arraycopy(_backend, 0, next_backend, 0, 
_backend.length); _backend = next_backend; aligned = true; } //reInit ? System.arraycopy(values, 0, _backend, INDEX_OFFSET, values.length); parent.declareDirty(); } } @Override public IMatrix fillWithRandom(int min, int max, int seed) { synchronized (parent) { internal_fillWithRandom(min, max, seed); } return this; } private void internal_fillWithRandom(int min, int max, int seed) { Random rand = new Random(); rand.setSeed(seed); if (_backend != null) { if (!aligned) { int[] next_backend = new int[_backend.length]; System.arraycopy(_backend, 0, next_backend, 0, _backend.length); _backend = next_backend; aligned = true; } for (int i = 0; i < _backend[INDEX_ROWS] * _backend[INDEX_COLUMNS]; i++) { _backend[i + INDEX_OFFSET] = rand.nextInt() * (max - min) + min; } parent.declareDirty(); } } @SuppressWarnings("Duplicates") @Override public final int rows() { int result = 0; synchronized (parent) { if (_backend != null) { result = (int) _backend[INDEX_ROWS]; } } return result; } @SuppressWarnings("Duplicates") @Override public final int columns() { int result = 0; synchronized (parent) { if (_backend != null) { result = (int) _backend[INDEX_MAX_COLUMN]; } } return result; } @Override public final int[] column(int index) { int[] result; synchronized (parent) { final int nbRows = (int) _backend[INDEX_ROWS]; result = new int[nbRows]; System.arraycopy(_backend, INDEX_OFFSET + (index * nbRows), result, 0, nbRows); } return result; } @Override public final int get(int rowIndex, int columnIndex) { int result = 0; synchronized (parent) { if (_backend != null) { final int nbRows = (int) _backend[INDEX_ROWS]; result = _backend[INDEX_OFFSET + rowIndex + columnIndex * nbRows]; } } return result; } @Override public final IMatrix set(int rowIndex, int columnIndex, int value) { synchronized (parent) { internal_set(rowIndex, columnIndex, value); } return this; } private void internal_set(int rowIndex, int columnIndex, int value) { if (_backend != null) { if (!aligned) { int[] 
next_backend = new int[_backend.length]; System.arraycopy(_backend, 0, next_backend, 0, _backend.length); _backend = next_backend; aligned = true; } final int nbRows = (int) _backend[INDEX_ROWS]; _backend[INDEX_OFFSET + rowIndex + columnIndex * nbRows] = value; parent.declareDirty(); } } @Override public IMatrix add(int rowIndex, int columnIndex, int value) { synchronized (parent) { internal_add(rowIndex, columnIndex, value); } return this; } private void internal_add(int rowIndex, int columnIndex, int value) { if (_backend != null) { if (!aligned) { int[] next_backend = new int[_backend.length]; System.arraycopy(_backend, 0, next_backend, 0, _backend.length); _backend = next_backend; aligned = true; } final int nbRows = (int) _backend[INDEX_ROWS]; _backend[INDEX_OFFSET + rowIndex + columnIndex * nbRows] = value + _backend[INDEX_OFFSET + rowIndex + columnIndex * nbRows]; parent.declareDirty(); } } @Override public final int[] data() { int[] copy = null; synchronized (parent) { if (_backend != null) { copy = new int[_backend.length - INDEX_OFFSET]; System.arraycopy(_backend, INDEX_OFFSET, copy, 0, _backend.length - INDEX_OFFSET); } } return copy; } @Override public int leadingDimension() { if (_backend == null) { return 0; } return (int) Math.max(_backend[INDEX_COLUMNS], _backend[INDEX_ROWS]); } @Override public int unsafeGet(int index) { int result = 0; synchronized (parent) { if (_backend != null) { result = _backend[INDEX_OFFSET + index]; } } return result; } @Override public IMatrix unsafeSet(int index, int value) { synchronized (parent) { internal_unsafeSet(index, value); } return this; } private void internal_unsafeSet(int index, int value) { if (_backend != null) { if (!aligned) { int[] next_backend = new int[_backend.length]; System.arraycopy(_backend, 0, next_backend, 0, _backend.length); _backend = next_backend; aligned = true; } _backend[INDEX_OFFSET + index] = value; parent.declareDirty(); } } int[] unsafe_data() { return _backend; } void unsafe_init(int 
size) { _backend = new int[size]; _backend[INDEX_ROWS] = 0; _backend[INDEX_COLUMNS] = 0; aligned = true; } void unsafe_set(int index, int value) { _backend[(int) index] = value; } public final void save(final Buffer buffer) { if (_backend != null) { Base64.encodeIntToBuffer(_backend.length, buffer); for (int j = 0; j < _backend.length; j++) { buffer.write(CoreConstants.CHUNK_VAL_SEP); Base64.encodeLongToBuffer(_backend[j], buffer); } } else { Base64.encodeIntToBuffer(0, buffer); } } public final long load(final Buffer buffer, final long offset, final long max) { long cursor = offset; byte current = buffer.read(cursor); boolean isFirst = true; long previous = offset; int elemIndex = 0; while (cursor < max && current != Constants.CHUNK_SEP && current != Constants.BLOCK_CLOSE) { if (current == Constants.CHUNK_VAL_SEP) { if (isFirst) { unsafe_init(Base64.decodeToIntWithBounds(buffer, previous, cursor)); isFirst = false; } else { unsafe_set(elemIndex, Base64.decodeToIntWithBounds(buffer, previous, cursor)); elemIndex++; } previous = cursor + 1; } cursor++; if (cursor < max) { current = buffer.read(cursor); } } if (previous == cursor) { unsafe_init(0); } else if (isFirst) { unsafe_init(Base64.decodeToIntWithBounds(buffer, previous, cursor)); } else { unsafe_set(elemIndex, Base64.decodeToIntWithBounds(buffer, previous, cursor)); } return cursor; } }
package randoop.instrument;

import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.lang.instrument.ClassFileTransformer;
import java.lang.instrument.Instrumentation;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import plume.Option;
import plume.Options;
import plume.SimpleLog;

/**
 * Java-agent entry point.  Installs a {@link ClassFileTransformer} so that
 * classes of the target application are transformed as they are loaded.
 * Optionally loads the transformer through {@link BCELLoader} to guarantee
 * that PAG's version of BCEL is used rather than another BCEL on the
 * classpath.
 */
public class Premain {

  @Option("print debug information")
  public static boolean debug = false;

  @Option("print progress information")
  public static boolean verbose = false;

  @Option("file containing methods calls to map to substitute methods")
  public static File map_calls = null;

  @Option("Use first BCEL on classpath rather than PAG's version")
  public static boolean default_bcel = true;

  /**
   * Entry point of the java agent.  Parses the agent arguments, builds the
   * bytecode transformer, reads the call-replacement map file if one was
   * supplied, and registers the transformer with the JVM.
   *
   * @param agentArgs options passed on the {@code -javaagent} switch
   * @param inst instrumentation services supplied by the JVM
   * @throws IOException if the map file or the BCEL jar cannot be read
   */
  public static void premain(String agentArgs, Instrumentation inst) throws IOException {

    System.out.format(
        "In premain, agentargs ='%s', " + "Instrumentation = '%s'%n", agentArgs, inst);

    // Parse our arguments; the agent accepts options only, no positional args.
    Options options = new Options(Premain.class);
    String[] target_args = options.parse_or_usage(agentArgs);
    if (target_args.length > 0) {
      System.err.printf("Unexpected agent arguments %s%n", Arrays.toString(target_args));
      System.exit(1);
    }

    // Set up the transformer.
    Object transformer;
    if (default_bcel) {
      transformer = new Instrument();
    } else {
      // Use a special classloader to ensure our (PAG's) BCEL files are used.
      ClassLoader loader = new BCELLoader();
      try {
        transformer = loader.loadClass("randoop.instrument.Instrument").newInstance();
      } catch (Exception e) {
        throw new RuntimeException("Unexpected error loading Instrument", e);
      }
    }

    // Read the map file, if supplied.
    // NOTE(review): when default_bcel is false, transformer was defined by
    // BCELLoader, so this cast crosses classloaders and may throw
    // ClassCastException -- confirm the map_calls + !default_bcel combination
    // is ever used together.
    if (map_calls != null) {
      Instrument instrument = (Instrument) transformer;
      instrument.read_map_file(map_calls);
      instrument.add_map_file_shutdown_hook();
    }

    inst.addTransformer((ClassFileTransformer) transformer);
  }

  /**
   * Reads a purity file and returns the set of method signatures it contains.
   * Each line should contain exactly one method.  Care must be taken to
   * supply the correct format.
   *
   * <p>From the Sun JDK API:
   *
   * <p>"The string is formatted as the method access modifiers, if any,
   * followed by the method return type, followed by a space, followed by the
   * class declaring the method, followed by a period, followed by the method
   * name, followed by a parenthesized, comma-separated list of the method's
   * formal parameter types.  If the method throws checked exceptions, the
   * parameter list is followed by a space, followed by the word throws
   * followed by a comma-separated list of the thrown exception types.  For
   * example:
   *
   * <p>public boolean java.lang.Object.equals(java.lang.Object)
   *
   * <p>The access modifiers are placed in canonical order as specified by
   * "The Java Language Specification".  This is public, protected or private
   * first, and then other modifiers in the following order: abstract, static,
   * final, synchronized native."
   *
   * @param purityFileName file holding one method signature per line
   * @param pathLoc directory against which {@code purityFileName} is resolved
   * @return the trimmed signatures, de-duplicated, in file order
   * @throws IOException if the purity file cannot be read
   */
  private static HashSet<String> readPurityFile(File purityFileName, File pathLoc)
      throws IOException {
    HashSet<String> pureMethods = new LinkedHashSet<String>();
    BufferedReader reader =
        new BufferedReader(new FileReader(new File(pathLoc, purityFileName.getPath())));
    System.out.printf("Reading '%s' for pure methods %n", purityFileName);
    try {
      for (String line = reader.readLine(); line != null; line = reader.readLine()) {
        pureMethods.add(line.trim());
      }
    } finally {
      // Close even when a read throws (the original leaked the reader then).
      reader.close();
    }
    return pureMethods;
  }

  /**
   * Classloader for the BCEL code.  Using this classloader guarantees that we
   * get the PAG version of the BCEL code and not a possible incompatible
   * version from elsewhere on the users classpath.  We also load
   * randoop.instrument.Instrument via this (since that class is the user of
   * all of the BCEL classes).  All references to BCEL must be within that
   * class (so that all references to BCEL will get resolved by this
   * classloader).
   *
   * <p>The PAG version of BCEL is identified by the presence of the PAG
   * marker class (org.apache.bcel.PAGMarker).  Other versions of BCEL will
   * not contain this class.  If other versions of BCEL are present, they must
   * appear before the PAG versions in the classpath (so that the users
   * application will see them first).  If only the PAG version is in the
   * classpath, then the normal loader is used for all of the classes.
   */
  public static class BCELLoader extends ClassLoader {

    /** Jar file that contains BCEL.  If null, use the normal classpath. */
    JarFile bcel_jar = null;

    /** Logger; enabled iff {@link Premain#verbose} was set when this class loaded. */
    public static final SimpleLog debug = new SimpleLog(verbose);

    /**
     * Locates PAG's BCEL on the classpath.  Exits the JVM with an error
     * message if BCEL is missing, corrupted, or ordered so that PAG's BCEL
     * would shadow the application's own BCEL.
     *
     * @throws IOException if classpath resources or the BCEL jar cannot be read
     */
    public BCELLoader() throws IOException {

      String bcel_classname = "org.apache.bcel.Constants";
      String pag_marker_classname = "org.apache.bcel.PAGMarker";

      List<URL> bcel_urls = get_resource_list(bcel_classname);
      List<URL> pag_urls = get_resource_list(pag_marker_classname);

      if (pag_urls.size() == 0) {
        System.err.printf(
            "%nBCEL must be in the classpath.  " + "Normally it is found in daikon.jar .%n");
        System.exit(1);
      }
      // Every PAG BCEL also provides the ordinary BCEL classes, so there can
      // never be more marker hits than Constants hits in a sane classpath.
      if (bcel_urls.size() < pag_urls.size()) {
        System.err.printf("%nCorrupted BCEL library, bcel %s, pag %s%n", bcel_urls, pag_urls);
        System.exit(1);
      }

      // No need to do anything if only our versions of bcel are present.
      if (bcel_urls.size() == pag_urls.size()) return;

      int bcel_index = 0;
      int pag_index = 0;
      while (bcel_index < bcel_urls.size()) {
        URL bcel = bcel_urls.get(bcel_index);
        URL pag = pag_urls.get(pag_index);
        if (!pag.getProtocol().equals("jar")) {
          System.err.printf("%nPAG BCEL must be in jar file.  " + " Found at %s%n", pag);
          System.exit(1);
        }
        if (same_location(bcel, pag)) {
          if (bcel_index == pag_index) {
            // PAG's BCEL appears before the target application's BCEL:
            // the classpath is misordered, report and exit.
            URL first_bcel = bcel;
            while ((pag != null) && same_location(bcel, pag)) {
              // NOTE(review): ++bcel_index can run past the end of bcel_urls
              // when PAG's BCEL is last on the classpath -- confirm this
              // cannot happen given the size checks above.
              bcel = bcel_urls.get(++bcel_index);
              pag_index++;
              pag = (pag_index < pag_urls.size()) ? pag_urls.get(pag_index) : null;
            }
            System.err.printf(
                "%nPAG BCEL (%s) appears before target BCEL "
                    + "(%s).%nPlease reorder classpath to put randoop.jar at "
                    + "the end.%n",
                first_bcel, bcel);
            System.exit(1);
          } else {
            // Found PAG's jar after the application's BCEL: load from it.
            bcel_jar = new JarFile(extract_jar_path(pag));
            debug.log("PAG BCEL found in jar %s%n", bcel_jar.getName());
            break;
          }
        } else {
          // Non-PAG BCEL found before ours; keep scanning.
          debug.log("Found non-pag BCEL at %s%n", bcel);
          bcel_index++;
        }
      }
    }

    /**
     * Returns whether or not the two URLs represent the same location for
     * org.apache.bcel.  Two locations match if they refer to the same jar
     * file or the same directory in the filesystem.
     */
    private boolean same_location(URL url1, URL url2) {
      if (!url1.getProtocol().equals(url2.getProtocol())) return false;

      if (url1.getProtocol().equals("jar")) {
        String jar1 = extract_jar_path(url1);
        String jar2 = extract_jar_path(url2);
        return jar1.equals(jar2);
      } else if (url1.getProtocol().equals("file")) {
        // NOTE(review): the pattern uses literal dots, but a file URL's path
        // separates packages with '/', so this replace likely never matches
        // -- confirm intent; in practice this branch is unreachable from the
        // constructor because pag URLs are required to be jars.
        String loc1 = url1.getFile().replaceFirst("org\\.apache\\.bcel\\..*$", "");
        String loc2 = url2.getFile().replaceFirst("org\\.apache\\.bcel\\..*$", "");
        return loc1.equals(loc2);
      } else {
        assert false : "unexpected protocol " + url1.getProtocol();
      }

      return false;
    }

    /**
     * Returns the pathname of a jar file specified in the URL.  The protocol
     * must be 'jar'.  Only file jars are supported.
     */
    private String extract_jar_path(URL url) {
      assert url.getProtocol().equals("jar") : url.toString();

      // Remove the preceding 'file:' and trailing '!entry' suffix.
      String path = url.getFile();
      path = path.replaceFirst("^[^:]*:", "");
      path = path.replaceFirst("![^!]*$", "");
      return path;
    }

    /**
     * Get all of the URLs that match the specified name in the classpath.
     * The name should be in normal classname format (eg,
     * org.apache.bcel.Constants).  An empty list is returned if no names
     * match.
     */
    List<URL> get_resource_list(String classname) throws IOException {
      String name = classname_to_resource_name(classname);
      Enumeration<URL> enum_urls = ClassLoader.getSystemResources(name);
      List<URL> urls = new ArrayList<URL>();
      while (enum_urls.hasMoreElements()) {
        urls.add(enum_urls.nextElement());
      }
      return urls;
    }

    /**
     * Changes a class name in the normal format (eg,
     * org.apache.bcel.Constants) to that used to lookup resources (eg,
     * org/apache/bcel/Constants.class).
     */
    private String classname_to_resource_name(String name) {
      return name.replace(".", "/") + ".class";
    }

    /**
     * Loads BCEL classes (and the instrumenter itself) from PAG's jar;
     * everything else is delegated to the normal classloading mechanism.
     */
    @Override
    protected Class<?> loadClass(String name, boolean resolve)
        throws java.lang.ClassNotFoundException {

      // If we are not loading from our jar, just use the normal mechanism.
      if (bcel_jar == null) return super.loadClass(name, resolve);

      // Load non-bcel files via the normal mechanism.
      // NOTE(review): the daikon.chicory.Instrument special case looks
      // inherited from Daikon's Chicory loader -- confirm it is still wanted
      // in randoop.
      if (!name.startsWith("org.apache.bcel") && !name.startsWith("daikon.chicory.Instrument")) {
        return super.loadClass(name, resolve);
      }

      // If we've already loaded the class, just return that one.
      Class<?> c = findLoadedClass(name);
      if (c != null) {
        if (resolve) resolveClass(c);
        return c;
      }

      // Find our version of the class and return it.
      try {
        InputStream is = null;
        try {
          if (name.startsWith("daikon.chicory.Instrument")) {
            String resource_name = classname_to_resource_name(name);
            URL url = ClassLoader.getSystemResource(resource_name);
            is = url.openStream();
          } else {
            // Read the BCEL class from the jar file.
            String entry_name = classname_to_resource_name(name);
            JarEntry entry = bcel_jar.getJarEntry(entry_name);
            if (entry == null) {
              // The original only asserted here; without -ea that became an
              // NPE.  A missing entry is a ClassNotFoundException.
              throw new ClassNotFoundException(
                  "Can't find " + entry_name + " in " + bcel_jar.getName());
            }
            is = bcel_jar.getInputStream(entry);
          }

          // Read the entire stream.  The original sized its buffer with
          // InputStream.available(), which is only a hint and may undercount,
          // and detected short reads only via asserts.
          ByteArrayOutputStream baos = new ByteArrayOutputStream();
          byte[] buf = new byte[8192];
          int len;
          while ((len = is.read(buf)) != -1) {
            baos.write(buf, 0, len);
          }
          byte[] bytes = baos.toByteArray();

          c = defineClass(name, bytes, 0, bytes.length);
          if (resolve) resolveClass(c);
          return c;
        } finally {
          if (is != null) is.close(); // the original leaked the stream
        }
      } catch (ClassNotFoundException e) {
        throw e;
      } catch (Exception e) {
        throw new RuntimeException("Unexpected exception loading class " + name, e);
      }
    }
  }
}
package org.hisp.dhis.importexport.dxf.converter; /* * Copyright (c) 2004-2015, University of Oslo * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * Neither the name of the HISP project nor the names of its contributors may * be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ import org.amplecode.quick.BatchHandler; import org.amplecode.quick.BatchHandlerFactory; import org.amplecode.staxwax.reader.XMLReader; import org.amplecode.staxwax.writer.XMLWriter; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.hisp.dhis.cache.HibernateCacheManager; import org.hisp.dhis.chart.ChartService; import org.hisp.dhis.common.ProcessState; import org.hisp.dhis.constant.Constant; import org.hisp.dhis.constant.ConstantService; import org.hisp.dhis.dataelement.DataElement; import org.hisp.dhis.dataelement.DataElementCategory; import org.hisp.dhis.dataelement.DataElementCategoryCombo; import org.hisp.dhis.dataelement.DataElementCategoryOption; import org.hisp.dhis.dataelement.DataElementCategoryService; import org.hisp.dhis.dataelement.DataElementGroup; import org.hisp.dhis.dataelement.DataElementGroupSet; import org.hisp.dhis.dataelement.DataElementService; import org.hisp.dhis.dataset.DataSet; import org.hisp.dhis.dataset.DataSetService; import org.hisp.dhis.expression.ExpressionService; import org.hisp.dhis.importexport.ExportParams; import org.hisp.dhis.importexport.GroupMemberAssociation; import org.hisp.dhis.importexport.ImportObjectService; import org.hisp.dhis.importexport.ImportParams; import org.hisp.dhis.importexport.XMLConverter; import org.hisp.dhis.importexport.analysis.DefaultImportAnalyser; import org.hisp.dhis.importexport.analysis.ImportAnalyser; import org.hisp.dhis.importexport.invoker.ConverterInvoker; import org.hisp.dhis.importexport.mapping.NameMappingUtil; import org.hisp.dhis.importexport.mapping.ObjectMappingGenerator; import org.hisp.dhis.indicator.Indicator; import org.hisp.dhis.indicator.IndicatorGroup; import org.hisp.dhis.indicator.IndicatorGroupSet; import org.hisp.dhis.indicator.IndicatorService; import org.hisp.dhis.indicator.IndicatorType; import org.hisp.dhis.jdbc.batchhandler.CategoryCategoryOptionAssociationBatchHandler; import 
org.hisp.dhis.jdbc.batchhandler.CategoryComboCategoryAssociationBatchHandler; import org.hisp.dhis.jdbc.batchhandler.ConstantBatchHandler; import org.hisp.dhis.jdbc.batchhandler.DataElementBatchHandler; import org.hisp.dhis.jdbc.batchhandler.DataElementCategoryBatchHandler; import org.hisp.dhis.jdbc.batchhandler.DataElementCategoryComboBatchHandler; import org.hisp.dhis.jdbc.batchhandler.DataElementCategoryOptionBatchHandler; import org.hisp.dhis.jdbc.batchhandler.DataElementGroupBatchHandler; import org.hisp.dhis.jdbc.batchhandler.DataElementGroupMemberBatchHandler; import org.hisp.dhis.jdbc.batchhandler.DataElementGroupSetBatchHandler; import org.hisp.dhis.jdbc.batchhandler.DataElementGroupSetMemberBatchHandler; import org.hisp.dhis.jdbc.batchhandler.DataSetBatchHandler; import org.hisp.dhis.jdbc.batchhandler.DataSetMemberBatchHandler; import org.hisp.dhis.jdbc.batchhandler.DataSetSourceAssociationBatchHandler; import org.hisp.dhis.jdbc.batchhandler.GroupSetBatchHandler; import org.hisp.dhis.jdbc.batchhandler.GroupSetMemberBatchHandler; import org.hisp.dhis.jdbc.batchhandler.IndicatorBatchHandler; import org.hisp.dhis.jdbc.batchhandler.IndicatorGroupBatchHandler; import org.hisp.dhis.jdbc.batchhandler.IndicatorGroupMemberBatchHandler; import org.hisp.dhis.jdbc.batchhandler.IndicatorGroupSetBatchHandler; import org.hisp.dhis.jdbc.batchhandler.IndicatorGroupSetMemberBatchHandler; import org.hisp.dhis.jdbc.batchhandler.IndicatorTypeBatchHandler; import org.hisp.dhis.jdbc.batchhandler.OrganisationUnitBatchHandler; import org.hisp.dhis.jdbc.batchhandler.OrganisationUnitGroupBatchHandler; import org.hisp.dhis.jdbc.batchhandler.OrganisationUnitGroupMemberBatchHandler; import org.hisp.dhis.jdbc.batchhandler.PeriodBatchHandler; import org.hisp.dhis.jdbc.batchhandler.ReportTableBatchHandler; import org.hisp.dhis.organisationunit.OrganisationUnit; import org.hisp.dhis.organisationunit.OrganisationUnitGroup; import org.hisp.dhis.organisationunit.OrganisationUnitGroupService; 
import org.hisp.dhis.organisationunit.OrganisationUnitGroupSet; import org.hisp.dhis.organisationunit.OrganisationUnitService; import org.hisp.dhis.period.Period; import org.hisp.dhis.period.PeriodService; import org.hisp.dhis.report.ReportService; import org.hisp.dhis.reporttable.ReportTable; import org.hisp.dhis.reporttable.ReportTableService; import org.hisp.dhis.validation.ValidationRuleService; /** * DXFConverter class This does NOT implement XMLConverter, because we need to * pass ProcessState in read() method. * * @author bobj * @version created 13-Feb-2010 */ public class DXFConverter { public static final String DXFROOT = "dxf"; public static final String ATTRIBUTE_MINOR_VERSION = "minorVersion"; public static final String ATTRIBUTE_EXPORTED = "exported"; public static final String NAMESPACE_10 = "http://dhis2.org/schema/dxf/1.0"; public static final String MINOR_VERSION_10 = "1.0"; public static final String MINOR_VERSION_11 = "1.1"; public static final String MINOR_VERSION_12 = "1.2"; public static final String MINOR_VERSION_13 = "1.3"; private final Log log = LogFactory.getLog( DXFConverter.class ); // ------------------------------------------------------------------------- // Dependencies // ------------------------------------------------------------------------- private ImportObjectService importObjectService; public void setImportObjectService( ImportObjectService importObjectService ) { this.importObjectService = importObjectService; } private ConstantService constantService; public void setConstantService( ConstantService constantService ) { this.constantService = constantService; } private DataElementService dataElementService; public void setDataElementService( DataElementService dataElementService ) { this.dataElementService = dataElementService; } private DataElementCategoryService categoryService; public void setCategoryService( DataElementCategoryService categoryService ) { this.categoryService = categoryService; } private IndicatorService 
indicatorService; public void setIndicatorService( IndicatorService indicatorService ) { this.indicatorService = indicatorService; } private DataSetService dataSetService; public void setDataSetService( DataSetService dataSetService ) { this.dataSetService = dataSetService; } private OrganisationUnitService organisationUnitService; public void setOrganisationUnitService( OrganisationUnitService organisationUnitService ) { this.organisationUnitService = organisationUnitService; } private OrganisationUnitGroupService organisationUnitGroupService; public void setOrganisationUnitGroupService( OrganisationUnitGroupService organisationUnitGroupService ) { this.organisationUnitGroupService = organisationUnitGroupService; } private PeriodService periodService; public void setPeriodService( PeriodService periodService ) { this.periodService = periodService; } private ExpressionService expressionService; public void setExpressionService( ExpressionService expressionService ) { this.expressionService = expressionService; } private ValidationRuleService validationRuleService; public void setValidationRuleService( ValidationRuleService validationRuleService ) { this.validationRuleService = validationRuleService; } private ReportService reportService; public void setReportService( ReportService reportService ) { this.reportService = reportService; } private ReportTableService reportTableService; public void setReportTableService( ReportTableService reportTableService ) { this.reportTableService = reportTableService; } private ChartService chartService; public void setChartService( ChartService chartService ) { this.chartService = chartService; } private BatchHandlerFactory batchHandlerFactory; public void setBatchHandlerFactory( BatchHandlerFactory batchHandlerFactory ) { this.batchHandlerFactory = batchHandlerFactory; } private ObjectMappingGenerator objectMappingGenerator; public void setObjectMappingGenerator( ObjectMappingGenerator objectMappingGenerator ) { 
this.objectMappingGenerator = objectMappingGenerator; } private HibernateCacheManager cacheManager; public void setCacheManager( HibernateCacheManager cacheManager ) { this.cacheManager = cacheManager; } private ConverterInvoker converterInvoker; public void setConverterInvoker( ConverterInvoker converterInvoker ) { this.converterInvoker = converterInvoker; } public void write( XMLWriter writer, ExportParams params, ProcessState state ) { throw new UnsupportedOperationException( "Not supported yet." ); } public void read( XMLReader reader, ImportParams params, ProcessState state ) { ImportAnalyser importAnalyser = new DefaultImportAnalyser( expressionService ); NameMappingUtil.clearMapping(); if ( params.isPreview() ) { importObjectService.deleteImportObjects(); log.info( "Deleted previewed objects" ); } if ( !reader.moveToStartElement( DXFROOT, DXFROOT ) ) { throw new RuntimeException( "Couldn't find dxf root element" ); } params.setNamespace( NAMESPACE_10 ); String version = reader.getAttributeValue( ATTRIBUTE_MINOR_VERSION ); params.setMinorVersion( version != null ? 
version : MINOR_VERSION_10 ); log.debug( "Importing dxf1 minor version " + version ); while ( reader.next() ) { if ( reader.isStartElement( ConstantConverter.COLLECTION_NAME ) ) { log.debug( "Starting Constants import" ) ; state.setMessage( "importing_constants" ); BatchHandler<Constant> batchHandler = batchHandlerFactory.createBatchHandler( ConstantBatchHandler.class ).init(); XMLConverter converter = new ConstantConverter( batchHandler, importObjectService, constantService ); converterInvoker.invokeRead( converter, reader, params ); batchHandler.flush(); log.info( "Imported Constants" ); } else if ( reader.isStartElement( DataElementCategoryOptionConverter.COLLECTION_NAME ) ) { log.debug("Starting DataElementCategoryOptions import "); state.setMessage( "importing_data_element_category_options" ); BatchHandler<DataElementCategoryOption> batchHandler = batchHandlerFactory.createBatchHandler( DataElementCategoryOptionBatchHandler.class ).init(); XMLConverter converter = new DataElementCategoryOptionConverter( batchHandler, importObjectService, categoryService ); converterInvoker.invokeRead( converter, reader, params ); batchHandler.flush(); log.info( "Imported DataElementCategoryOptions" ); } else if ( reader.isStartElement( DataElementCategoryConverter.COLLECTION_NAME ) ) { log.debug("Starting DataElementCategories import"); state.setMessage( "importing_data_element_categories" ); BatchHandler<DataElementCategory> batchHandler = batchHandlerFactory.createBatchHandler( DataElementCategoryBatchHandler.class ).init(); XMLConverter converter = new DataElementCategoryConverter( batchHandler, importObjectService, categoryService ); converterInvoker.invokeRead( converter, reader, params ); batchHandler.flush(); log.info( "Imported DataElementCategories" ); } else if ( reader.isStartElement( DataElementCategoryComboConverter.COLLECTION_NAME ) ) { log.debug("Starting DataElementCategoryCombos import"); state.setMessage( "importing_data_element_category_combos" ); 
BatchHandler<DataElementCategoryCombo> batchHandler = batchHandlerFactory.createBatchHandler( DataElementCategoryComboBatchHandler.class ).init(); XMLConverter converter = new DataElementCategoryComboConverter( batchHandler, importObjectService, categoryService ); converterInvoker.invokeRead( converter, reader, params ); batchHandler.flush(); log.info( "Imported DataElementCategoryCombos" ); } else if ( reader.isStartElement( DataElementCategoryOptionComboConverter.COLLECTION_NAME ) ) { log.debug("Starting DataElementCategoryOptionCombos import"); state.setMessage( "importing_data_element_category_option_combos" ); XMLConverter converter = new DataElementCategoryOptionComboConverter( importObjectService, objectMappingGenerator.getCategoryComboMapping( params.skipMapping() ), objectMappingGenerator .getCategoryOptionMapping( params.skipMapping() ), categoryService ); converterInvoker.invokeRead( converter, reader, params ); log.info( "Imported DataElementCategoryOptionCombos" ); } else if ( reader.isStartElement( CategoryCategoryOptionAssociationConverter.COLLECTION_NAME ) ) { log.debug("Starting CategoryCategoryOption associations import"); state.setMessage( "importing_data_element_category_members" ); BatchHandler<GroupMemberAssociation> batchHandler = batchHandlerFactory.createBatchHandler( CategoryCategoryOptionAssociationBatchHandler.class ).init(); XMLConverter converter = new CategoryCategoryOptionAssociationConverter( batchHandler, importObjectService, objectMappingGenerator.getCategoryMapping( params.skipMapping() ), objectMappingGenerator.getCategoryOptionMapping( params.skipMapping() ) ); converterInvoker.invokeRead( converter, reader, params ); batchHandler.flush(); log.info( "Imported CategoryCategoryOption associations" ); } else if ( reader.isStartElement( CategoryComboCategoryAssociationConverter.COLLECTION_NAME ) ) { log.debug("Starting CategoryComboCategory associations import"); state.setMessage( "importing_data_element_category_combo_members" ); 
BatchHandler<GroupMemberAssociation> batchHandler = batchHandlerFactory.createBatchHandler( CategoryComboCategoryAssociationBatchHandler.class ).init(); XMLConverter converter = new CategoryComboCategoryAssociationConverter( batchHandler, importObjectService, objectMappingGenerator.getCategoryComboMapping( params.skipMapping() ), objectMappingGenerator.getCategoryMapping( params.skipMapping() ) ); converterInvoker.invokeRead( converter, reader, params ); batchHandler.flush(); log.info( "Imported CategoryComboCategory associations" ); } else if ( reader.isStartElement( DataElementConverter.COLLECTION_NAME ) ) { log.debug("Starting DataElements import"); state.setMessage( "importing_data_elements" ); BatchHandler<DataElement> batchHandler = batchHandlerFactory.createBatchHandler( DataElementBatchHandler.class ).init(); XMLConverter converter = new DataElementConverter( batchHandler, importObjectService, objectMappingGenerator.getCategoryComboMapping( params.skipMapping() ), dataElementService, importAnalyser ); converterInvoker.invokeRead( converter, reader, params ); batchHandler.flush(); log.info( "Imported DataElements" ); } else if ( reader.isStartElement( DataElementGroupConverter.COLLECTION_NAME ) ) { log.debug("Starting DataElementGroups import"); state.setMessage( "importing_data_element_groups" ); BatchHandler<DataElementGroup> batchHandler = batchHandlerFactory.createBatchHandler( DataElementGroupBatchHandler.class ).init(); XMLConverter converter = new DataElementGroupConverter( batchHandler, importObjectService, dataElementService ); converterInvoker.invokeRead( converter, reader, params ); batchHandler.flush(); log.info( "Imported DataElementGroups" ); } else if ( reader.isStartElement( DataElementGroupMemberConverter.COLLECTION_NAME ) ) { log.debug("Starting DataElementGroup members import"); state.setMessage( "importing_data_element_group_members" ); BatchHandler<GroupMemberAssociation> batchHandler = batchHandlerFactory.createBatchHandler( 
DataElementGroupMemberBatchHandler.class ).init(); XMLConverter converter = new DataElementGroupMemberConverter( batchHandler, importObjectService, objectMappingGenerator.getDataElementMapping( params.skipMapping() ), objectMappingGenerator .getDataElementGroupMapping( params.skipMapping() ) ); converterInvoker.invokeRead( converter, reader, params ); batchHandler.flush(); log.info( "Imported DataElementGroup members" ); } else if ( reader.isStartElement( DataElementGroupSetConverter.COLLECTION_NAME ) ) { log.debug("Starting DataElementGroupSets import"); state.setMessage( "importing_data_element_group_sets" ); BatchHandler<DataElementGroupSet> batchHandler = batchHandlerFactory.createBatchHandler( DataElementGroupSetBatchHandler.class ).init(); XMLConverter converter = new DataElementGroupSetConverter( batchHandler, importObjectService, dataElementService ); converterInvoker.invokeRead( converter, reader, params ); batchHandler.flush(); log.info( "Imported DataElementGroupSets" ); } else if ( reader.isStartElement( DataElementGroupSetMemberConverter.COLLECTION_NAME ) ) { log.debug("Starting Imported DataElementGroupSet members import"); state.setMessage( "importing_data_element_group_set_members" ); BatchHandler<GroupMemberAssociation> batchHandler = batchHandlerFactory.createBatchHandler( DataElementGroupSetMemberBatchHandler.class ).init(); XMLConverter converter = new DataElementGroupSetMemberConverter( batchHandler, importObjectService, objectMappingGenerator.getDataElementGroupMapping( params.skipMapping() ), objectMappingGenerator .getDataElementGroupSetMapping( params.skipMapping() ) ); converterInvoker.invokeRead( converter, reader, params ); batchHandler.flush(); log.info( "Imported DataElementGroupSet members" ); } else if ( reader.isStartElement( IndicatorTypeConverter.COLLECTION_NAME ) ) { log.debug("Starting IndicatorTypes import"); state.setMessage( "importing_indicator_types" ); BatchHandler<IndicatorType> batchHandler = 
batchHandlerFactory.createBatchHandler( IndicatorTypeBatchHandler.class ).init(); XMLConverter converter = new IndicatorTypeConverter( batchHandler, importObjectService, indicatorService ); converterInvoker.invokeRead( converter, reader, params ); batchHandler.flush(); log.info( "Imported IndicatorTypes" ); } else if ( reader.isStartElement( IndicatorConverter.COLLECTION_NAME ) ) { log.debug("Starting Indicators import"); state.setMessage( "importing_indicators" ); BatchHandler<Indicator> batchHandler = batchHandlerFactory.createBatchHandler( IndicatorBatchHandler.class ).init(); XMLConverter converter = new IndicatorConverter( batchHandler, importObjectService, indicatorService, objectMappingGenerator.getIndicatorTypeMapping( params.skipMapping() ), importAnalyser ); converterInvoker.invokeRead( converter, reader, params ); batchHandler.flush(); log.info( "Imported Indicators" ); } else if ( reader.isStartElement( IndicatorGroupConverter.COLLECTION_NAME ) ) { log.debug("Starting IndicatorGroups import"); state.setMessage( "importing_indicator_groups" ); BatchHandler<IndicatorGroup> batchHandler = batchHandlerFactory.createBatchHandler( IndicatorGroupBatchHandler.class ).init(); XMLConverter converter = new IndicatorGroupConverter( batchHandler, importObjectService, indicatorService ); converterInvoker.invokeRead( converter, reader, params ); batchHandler.flush(); log.info( "Imported IndicatorGroups" ); } else if ( reader.isStartElement( IndicatorGroupMemberConverter.COLLECTION_NAME ) ) { log.debug("Starting IndicatorGroup members import"); state.setMessage( "importing_indicator_group_members" ); BatchHandler<GroupMemberAssociation> batchHandler = batchHandlerFactory.createBatchHandler( IndicatorGroupMemberBatchHandler.class ).init(); XMLConverter converter = new IndicatorGroupMemberConverter( batchHandler, importObjectService, objectMappingGenerator.getIndicatorMapping( params.skipMapping() ), objectMappingGenerator .getIndicatorGroupMapping( params.skipMapping() 
) ); converterInvoker.invokeRead( converter, reader, params ); batchHandler.flush(); log.info( "Imported IndicatorGroup members" ); } else if ( reader.isStartElement( IndicatorGroupSetConverter.COLLECTION_NAME ) ) { log.debug("Starting IndicatorGroupSets import"); state.setMessage( "importing_indicator_group_sets" ); BatchHandler<IndicatorGroupSet> batchHandler = batchHandlerFactory.createBatchHandler( IndicatorGroupSetBatchHandler.class ).init(); XMLConverter converter = new IndicatorGroupSetConverter( batchHandler, importObjectService, indicatorService ); converterInvoker.invokeRead( converter, reader, params ); batchHandler.flush(); log.info( "Imported IndicatorGroupSets" ); } else if ( reader.isStartElement( IndicatorGroupSetMemberConverter.COLLECTION_NAME ) ) { log.debug("Starting IndicatorGroupSet import"); state.setMessage( "importing_indicator_group_set_members" ); BatchHandler<GroupMemberAssociation> batchHandler = batchHandlerFactory.createBatchHandler( IndicatorGroupSetMemberBatchHandler.class ).init(); XMLConverter converter = new IndicatorGroupSetMemberConverter( batchHandler, importObjectService, objectMappingGenerator.getIndicatorGroupMapping( params.skipMapping() ), objectMappingGenerator .getIndicatorGroupSetMapping( params.skipMapping() ) ); converterInvoker.invokeRead( converter, reader, params ); batchHandler.flush(); log.info( "Imported IndicatorGroupSet members" ); } else if ( reader.isStartElement( DataSetConverter.COLLECTION_NAME ) ) { log.debug("Starting DataSets import"); state.setMessage( "importing_data_sets" ); BatchHandler<DataSet> batchHandler = batchHandlerFactory.createBatchHandler( DataSetBatchHandler.class ) .init(); XMLConverter converter = new DataSetConverter( batchHandler, importObjectService, dataSetService, objectMappingGenerator.getPeriodTypeMapping() ); converterInvoker.invokeRead( converter, reader, params ); batchHandler.flush(); log.info( "Imported DataSets" ); } else if ( reader.isStartElement( 
DataSetMemberConverter.COLLECTION_NAME ) ) { log.debug("Starting DataSet members import"); state.setMessage( "importing_data_set_members" ); BatchHandler<GroupMemberAssociation> batchHandler = batchHandlerFactory.createBatchHandler( DataSetMemberBatchHandler.class ).init(); XMLConverter converter = new DataSetMemberConverter( batchHandler, importObjectService, objectMappingGenerator.getDataElementMapping( params.skipMapping() ), objectMappingGenerator .getDataSetMapping( params.skipMapping() ) ); converterInvoker.invokeRead( converter, reader, params ); batchHandler.flush(); log.info( "Imported DataSet members" ); } else if ( reader.isStartElement( OrganisationUnitConverter.COLLECTION_NAME ) ) { log.debug("Starting OrganisationUnits import"); state.setMessage( "importing_organisation_units" ); BatchHandler<OrganisationUnit> batchHandler = batchHandlerFactory.createBatchHandler( OrganisationUnitBatchHandler.class ).init(); XMLConverter converter = new OrganisationUnitConverter( batchHandler, importObjectService, organisationUnitService, importAnalyser ); converterInvoker.invokeRead( converter, reader, params ); batchHandler.flush(); log.info( "Imported OrganisationUnits" ); } else if ( reader.isStartElement( OrganisationUnitRelationshipConverter.COLLECTION_NAME ) ) { log.debug("Starting OrganisationUnit relationships import"); state.setMessage( "importing_organisation_unit_relationships" ); BatchHandler<OrganisationUnit> batchHandler = batchHandlerFactory.createBatchHandler( OrganisationUnitBatchHandler.class ).init(); XMLConverter converter = new OrganisationUnitRelationshipConverter( batchHandler, importObjectService, organisationUnitService, objectMappingGenerator.getOrganisationUnitMapping( params.skipMapping() ) ); converterInvoker.invokeRead( converter, reader, params ); batchHandler.flush(); log.info( "Imported OrganisationUnit relationships" ); } else if ( reader.isStartElement( OrganisationUnitGroupConverter.COLLECTION_NAME ) ) { log.info("Starting 
OrganisationUnitGroups import "); state.setMessage( "importing_organisation_unit_groups" ); BatchHandler<OrganisationUnitGroup> batchHandler = batchHandlerFactory.createBatchHandler( OrganisationUnitGroupBatchHandler.class ).init(); XMLConverter converter = new OrganisationUnitGroupConverter( batchHandler, importObjectService, organisationUnitGroupService ); converterInvoker.invokeRead( converter, reader, params ); batchHandler.flush(); log.info( "Imported OrganisationUnitGroups" ); } else if ( reader.isStartElement( OrganisationUnitGroupMemberConverter.COLLECTION_NAME ) ) { log.debug("Starting OrganisationUnitGroup members import"); state.setMessage( "importing_organisation_unit_group_members" ); BatchHandler<GroupMemberAssociation> batchHandler = batchHandlerFactory.createBatchHandler( OrganisationUnitGroupMemberBatchHandler.class ).init(); XMLConverter converter = new OrganisationUnitGroupMemberConverter( batchHandler, importObjectService, objectMappingGenerator.getOrganisationUnitMapping( params.skipMapping() ), objectMappingGenerator .getOrganisationUnitGroupMapping( params.skipMapping() ) ); converterInvoker.invokeRead( converter, reader, params ); batchHandler.flush(); log.info( "Imported OrganisationUnitGroup members" ); } else if ( reader.isStartElement( GroupSetConverter.COLLECTION_NAME ) ) { log.debug("Starting OrganisationUnitGroupSets import"); state.setMessage( "importing_organisation_unit_group_sets" ); BatchHandler<OrganisationUnitGroupSet> batchHandler = batchHandlerFactory.createBatchHandler( GroupSetBatchHandler.class ).init(); XMLConverter converter = new GroupSetConverter( batchHandler, importObjectService, organisationUnitGroupService ); converterInvoker.invokeRead( converter, reader, params ); batchHandler.flush(); log.info( "Imported OrganisationUnitGroupSets" ); } else if ( reader.isStartElement( GroupSetMemberConverter.COLLECTION_NAME ) ) { log.debug("Starting OrganisationUnitGroupSet members import"); state.setMessage( 
"importing_organisation_unit_group_set_members" ); BatchHandler<GroupMemberAssociation> batchHandler = batchHandlerFactory.createBatchHandler( GroupSetMemberBatchHandler.class ).init(); XMLConverter converter = new GroupSetMemberConverter( batchHandler, importObjectService, objectMappingGenerator.getOrganisationUnitGroupMapping( params.skipMapping() ), objectMappingGenerator.getOrganisationUnitGroupSetMapping( params.skipMapping() ) ); converterInvoker.invokeRead( converter, reader, params ); batchHandler.flush(); log.info( "Imported OrganisationUnitGroupSet members" ); } else if ( reader.isStartElement( OrganisationUnitLevelConverter.COLLECTION_NAME ) ) { log.debug("Starting OrganisationUnitLevels import"); state.setMessage( "importing_organisation_unit_levels" ); XMLConverter converter = new OrganisationUnitLevelConverter( organisationUnitService, importObjectService ); converterInvoker.invokeRead( converter, reader, params ); log.info( "Imported OrganisationUnitLevels" ); } else if ( reader.isStartElement( DataSetSourceAssociationConverter.COLLECTION_NAME ) ) { log.debug("Starting DataSet Source associations import"); state.setMessage( "importing_data_set_source_associations" ); BatchHandler<GroupMemberAssociation> batchHandler = batchHandlerFactory.createBatchHandler( DataSetSourceAssociationBatchHandler.class ).init(); XMLConverter converter = new DataSetSourceAssociationConverter( batchHandler, importObjectService, objectMappingGenerator.getDataSetMapping( params.skipMapping() ), objectMappingGenerator .getOrganisationUnitMapping( params.skipMapping() ) ); converterInvoker.invokeRead( converter, reader, params ); batchHandler.flush(); log.info( "Imported DataSet Source associations" ); } else if ( reader.isStartElement( ValidationRuleConverter.COLLECTION_NAME ) ) { log.debug("Starting ValidationRules import"); state.setMessage( "importing_validation_rules" ); XMLConverter converter = new ValidationRuleConverter( importObjectService, validationRuleService, 
expressionService ); converterInvoker.invokeRead( converter, reader, params ); log.info( "Imported ValidationRules" ); } else if ( reader.isStartElement( PeriodConverter.COLLECTION_NAME ) ) { log.debug("Starting Periods import"); state.setMessage( "importing_periods" ); BatchHandler<Period> batchHandler = batchHandlerFactory.createBatchHandler( PeriodBatchHandler.class ) .init(); XMLConverter converter = new PeriodConverter( batchHandler, importObjectService, periodService, objectMappingGenerator.getPeriodTypeMapping() ); converterInvoker.invokeRead( converter, reader, params ); batchHandler.flush(); log.info( "Imported Periods" ); } else if ( reader.isStartElement( ReportConverter.COLLECTION_NAME ) ) { log.debug("Starting Reports import"); state.setMessage( "importing_reports" ); XMLConverter converter = new ReportConverter( reportService, importObjectService ); converterInvoker.invokeRead( converter, reader, params ); log.info( "Imported Reports" ); } else if ( reader.isStartElement( ReportTableConverter.COLLECTION_NAME ) ) { log.debug("Starting ReportTables import"); state.setMessage( "importing_report_tables" ); BatchHandler<ReportTable> batchHandler = batchHandlerFactory.createBatchHandler( ReportTableBatchHandler.class ).init(); XMLConverter converter = new ReportTableConverter( reportTableService, importObjectService, dataElementService, categoryService, indicatorService, dataSetService, periodService, organisationUnitService, objectMappingGenerator.getDataElementMapping( params.skipMapping() ), objectMappingGenerator.getCategoryComboMapping( params.skipMapping() ), objectMappingGenerator .getIndicatorMapping( params.skipMapping() ), objectMappingGenerator.getDataSetMapping( params .skipMapping() ), objectMappingGenerator.getPeriodMapping( params.skipMapping() ), objectMappingGenerator.getOrganisationUnitMapping( params.skipMapping() ) ); converterInvoker.invokeRead( converter, reader, params ); batchHandler.flush(); log.info( "Imported ReportTables" ); } 
else if ( reader.isStartElement( ChartConverter.COLLECTION_NAME ) ) { log.debug("Starting Charts import"); state.setMessage( "importing_charts" ); XMLConverter converter = new ChartConverter( chartService, importObjectService, indicatorService, organisationUnitService, objectMappingGenerator.getIndicatorMapping( params .skipMapping() ), objectMappingGenerator.getOrganisationUnitMapping( params.skipMapping() ) ); converterInvoker.invokeRead( converter, reader, params ); log.info( "Imported Charts" ); } } if ( params.isAnalysis() ) { state.setOutput( importAnalyser.getImportAnalysis() ); } NameMappingUtil.clearMapping(); cacheManager.clearCache(); } }
package org.apache.maven;

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;

import org.apache.maven.artifact.ArtifactUtils;
import org.apache.maven.project.MavenProject;
import org.eclipse.aether.artifact.Artifact;
import org.eclipse.aether.repository.WorkspaceReader;
import org.eclipse.aether.repository.WorkspaceRepository;
import org.eclipse.aether.util.artifact.ArtifactIdUtils;

/**
 * An implementation of a workspace reader that knows how to search the Maven reactor for artifacts,
 * either as packaged jar (if it has been built) or only classes directory (if it hasn't been packaged yet).
 *
 * @author Jason van Zyl
 */
class ReactorReader
    implements WorkspaceReader
{
    /**
     * Artifact types for which loose class files from the output directory may substitute for a
     * not-yet-packaged artifact.
     */
    private static final Collection<String> COMPILE_PHASE_TYPES = Arrays.asList( "jar", "ejb-client" );

    /** Reactor projects keyed by groupId:artifactId:version. */
    private final Map<String, MavenProject> projectsByGAV;

    /** Reactor projects keyed by groupId:artifactId; may hold several versions of the same GA. */
    private final Map<String, List<MavenProject>> projectsByGA;

    private final WorkspaceRepository repository;

    /**
     * Creates a reader over the given reactor.
     *
     * @param reactorProjects The projects of the reactor keyed by groupId:artifactId:version,
     *            must not be {@code null}.
     */
    ReactorReader( Map<String, MavenProject> reactorProjects )
    {
        projectsByGAV = reactorProjects;

        projectsByGA = new HashMap<String, List<MavenProject>>( reactorProjects.size() * 2 );
        for ( MavenProject project : reactorProjects.values() )
        {
            String key = ArtifactUtils.versionlessKey( project.getGroupId(), project.getArtifactId() );

            List<MavenProject> projects = projectsByGA.get( key );
            if ( projects == null )
            {
                projects = new ArrayList<MavenProject>( 1 );
                projectsByGA.put( key, projects );
            }
            projects.add( project );
        }

        repository = new WorkspaceRepository( "reactor", new HashSet<String>( projectsByGAV.keySet() ) );
    }

    //
    // Public API
    //

    public WorkspaceRepository getRepository()
    {
        return repository;
    }

    /**
     * Locates the file for the given artifact within the reactor, or {@code null} if the
     * artifact does not correspond to any reactor project (or has not been built yet).
     */
    public File findArtifact( Artifact artifact )
    {
        String projectKey = ArtifactUtils.key( artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion() );

        MavenProject project = projectsByGAV.get( projectKey );

        if ( project != null )
        {
            File file = find( project, artifact );
            if ( file == null && project != project.getExecutionProject() )
            {
                // forked executions (e.g. for reports) may carry the artifact file instead
                file = find( project.getExecutionProject(), artifact );
            }
            return file;
        }

        return null;
    }

    /**
     * Returns the versions of the given artifact that are resolvable from the reactor,
     * possibly empty but never {@code null}.
     */
    public List<String> findVersions( Artifact artifact )
    {
        String key = ArtifactUtils.versionlessKey( artifact.getGroupId(), artifact.getArtifactId() );

        List<MavenProject> projects = projectsByGA.get( key );
        if ( projects == null || projects.isEmpty() )
        {
            return Collections.emptyList();
        }

        List<String> versions = new ArrayList<String>();
        for ( MavenProject project : projects )
        {
            if ( find( project, artifact ) != null )
            {
                versions.add( project.getVersion() );
            }
        }

        return Collections.unmodifiableList( versions );
    }

    //
    // Implementation
    //

    /**
     * Resolves the given artifact against a single project: the POM file itself, a packaged
     * artifact file, or (for plain jars only) the loose classes directory.
     */
    private File find( MavenProject project, Artifact artifact )
    {
        if ( "pom".equals( artifact.getExtension() ) )
        {
            return project.getFile();
        }

        Artifact projectArtifact = findMatchingArtifact( project, artifact );

        if ( hasArtifactFileFromPackagePhase( projectArtifact ) )
        {
            return projectArtifact.getFile();
        }
        else if ( !hasBeenPackaged( project ) )
        {
            // fallback to loose class files only if artifacts haven't been packaged yet
            // and only for plain old jars. Not war files, not ear files, not anything else.

            if ( isTestArtifact( artifact ) )
            {
                if ( project.hasLifecyclePhase( "test-compile" ) )
                {
                    return new File( project.getBuild().getTestOutputDirectory() );
                }
            }
            else
            {
                String type = artifact.getProperty( "type", "" );
                if ( project.hasLifecyclePhase( "compile" ) && COMPILE_PHASE_TYPES.contains( type ) )
                {
                    return new File( project.getBuild().getOutputDirectory() );
                }
            }
        }

        // The fall-through indicates that the artifact cannot be found;
        // for instance if package produced nothing or classifier problems.
        return null;
    }

    private boolean hasArtifactFileFromPackagePhase( Artifact projectArtifact )
    {
        return projectArtifact != null && projectArtifact.getFile() != null && projectArtifact.getFile().exists();
    }

    private boolean hasBeenPackaged( MavenProject project )
    {
        return project.hasLifecyclePhase( "package" ) || project.hasLifecyclePhase( "install" )
            || project.hasLifecyclePhase( "deploy" );
    }

    /**
     * Tries to resolve the specified artifact from the artifacts of the given project.
     *
     * @param project The project to try to resolve the artifact from, must not be <code>null</code>.
     * @param requestedArtifact The artifact to resolve, must not be <code>null</code>.
     * @return The matching artifact from the project or <code>null</code> if not found.
     */
    private Artifact findMatchingArtifact( MavenProject project, Artifact requestedArtifact )
    {
        String requestedRepositoryConflictId = ArtifactIdUtils.toVersionlessId( requestedArtifact );

        Artifact mainArtifact = RepositoryUtils.toArtifact( project.getArtifact() );
        if ( requestedRepositoryConflictId.equals( ArtifactIdUtils.toVersionlessId( mainArtifact ) ) )
        {
            return mainArtifact;
        }

        for ( Artifact attachedArtifact : RepositoryUtils.toArtifacts( project.getAttachedArtifacts() ) )
        {
            if ( attachedArtifactComparison( requestedArtifact, attachedArtifact ) )
            {
                return attachedArtifact;
            }
        }

        return null;
    }

    private boolean attachedArtifactComparison( Artifact requested, Artifact attached )
    {
        //
        // We are taking as much as we can from the DefaultArtifact.equals(). The requested artifact has no file so
        // we want to remove that from the comparison.
        //
        return requested.getArtifactId().equals( attached.getArtifactId() )
            && requested.getGroupId().equals( attached.getGroupId() )
            && requested.getVersion().equals( attached.getVersion() )
            && requested.getExtension().equals( attached.getExtension() )
            && requested.getClassifier().equals( attached.getClassifier() );
    }

    /**
     * Determines whether the specified artifact refers to test classes.
     *
     * @param artifact The artifact to check, must not be {@code null}.
     * @return {@code true} if the artifact refers to test classes, {@code false} otherwise.
     */
    private static boolean isTestArtifact( Artifact artifact )
    {
        return ( "test-jar".equals( artifact.getProperty( "type", "" ) ) )
            || ( "jar".equals( artifact.getExtension() ) && "tests".equals( artifact.getClassifier() ) );
    }
}
// ANTLR GENERATED CODE: DO NOT EDIT
package org.elasticsearch.xpack.eql.parser;

import org.antlr.v4.runtime.tree.AbstractParseTreeVisitor;

/**
 * This class provides an empty implementation of {@link EqlBaseVisitor},
 * which can be extended to create a visitor which only needs to handle a subset
 * of the available methods.
 *
 * <p>NOTE(review): this file is generated by ANTLR from the EqlBase grammar — do not
 * hand-edit; regenerate instead. Every method below simply delegates to
 * {@link #visitChildren}, so subclasses override only the rules they care about.</p>
 *
 * @param <T> The return type of the visit operation. Use {@link Void} for
 * operations with no return type.
 */
class EqlBaseBaseVisitor<T> extends AbstractParseTreeVisitor<T> implements EqlBaseVisitor<T> {

  /** Default: visits children. */
  @Override public T visitSingleStatement(EqlBaseParser.SingleStatementContext ctx) { return visitChildren(ctx); }

  /** Default: visits children. */
  @Override public T visitSingleExpression(EqlBaseParser.SingleExpressionContext ctx) { return visitChildren(ctx); }

  /** Default: visits children. */
  @Override public T visitStatement(EqlBaseParser.StatementContext ctx) { return visitChildren(ctx); }

  /** Default: visits children. */
  @Override public T visitQuery(EqlBaseParser.QueryContext ctx) { return visitChildren(ctx); }

  /** Default: visits children. */
  @Override public T visitSequenceParams(EqlBaseParser.SequenceParamsContext ctx) { return visitChildren(ctx); }

  /** Default: visits children. */
  @Override public T visitSequence(EqlBaseParser.SequenceContext ctx) { return visitChildren(ctx); }

  /** Default: visits children. */
  @Override public T visitJoin(EqlBaseParser.JoinContext ctx) { return visitChildren(ctx); }

  /** Default: visits children. */
  @Override public T visitPipe(EqlBaseParser.PipeContext ctx) { return visitChildren(ctx); }

  /** Default: visits children. */
  @Override public T visitJoinKeys(EqlBaseParser.JoinKeysContext ctx) { return visitChildren(ctx); }

  /** Default: visits children. */
  @Override public T visitJoinTerm(EqlBaseParser.JoinTermContext ctx) { return visitChildren(ctx); }

  /** Default: visits children. */
  @Override public T visitSequenceTerm(EqlBaseParser.SequenceTermContext ctx) { return visitChildren(ctx); }

  /** Default: visits children. */
  @Override public T visitSubquery(EqlBaseParser.SubqueryContext ctx) { return visitChildren(ctx); }

  /** Default: visits children. */
  @Override public T visitEventQuery(EqlBaseParser.EventQueryContext ctx) { return visitChildren(ctx); }

  /** Default: visits children. */
  @Override public T visitEventFilter(EqlBaseParser.EventFilterContext ctx) { return visitChildren(ctx); }

  /** Default: visits children. */
  @Override public T visitExpression(EqlBaseParser.ExpressionContext ctx) { return visitChildren(ctx); }

  /** Default: visits children. */
  @Override public T visitLogicalNot(EqlBaseParser.LogicalNotContext ctx) { return visitChildren(ctx); }

  /** Default: visits children. */
  @Override public T visitBooleanDefault(EqlBaseParser.BooleanDefaultContext ctx) { return visitChildren(ctx); }

  /** Default: visits children. */
  @Override public T visitProcessCheck(EqlBaseParser.ProcessCheckContext ctx) { return visitChildren(ctx); }

  /** Default: visits children. */
  @Override public T visitLogicalBinary(EqlBaseParser.LogicalBinaryContext ctx) { return visitChildren(ctx); }

  /** Default: visits children. */
  @Override public T visitValueExpressionDefault(EqlBaseParser.ValueExpressionDefaultContext ctx) { return visitChildren(ctx); }

  /** Default: visits children. */
  @Override public T visitComparison(EqlBaseParser.ComparisonContext ctx) { return visitChildren(ctx); }

  /** Default: visits children. */
  @Override public T visitOperatorExpressionDefault(EqlBaseParser.OperatorExpressionDefaultContext ctx) { return visitChildren(ctx); }

  /** Default: visits children. */
  @Override public T visitArithmeticBinary(EqlBaseParser.ArithmeticBinaryContext ctx) { return visitChildren(ctx); }

  /** Default: visits children. */
  @Override public T visitArithmeticUnary(EqlBaseParser.ArithmeticUnaryContext ctx) { return visitChildren(ctx); }

  /** Default: visits children. */
  @Override public T visitPredicate(EqlBaseParser.PredicateContext ctx) { return visitChildren(ctx); }

  /** Default: visits children. */
  @Override public T visitConstantDefault(EqlBaseParser.ConstantDefaultContext ctx) { return visitChildren(ctx); }

  /** Default: visits children. */
  @Override public T visitFunction(EqlBaseParser.FunctionContext ctx) { return visitChildren(ctx); }

  /** Default: visits children. */
  @Override public T visitDereference(EqlBaseParser.DereferenceContext ctx) { return visitChildren(ctx); }

  /** Default: visits children. */
  @Override public T visitParenthesizedExpression(EqlBaseParser.ParenthesizedExpressionContext ctx) { return visitChildren(ctx); }

  /** Default: visits children. */
  @Override public T visitFunctionExpression(EqlBaseParser.FunctionExpressionContext ctx) { return visitChildren(ctx); }

  /** Default: visits children. */
  @Override public T visitNullLiteral(EqlBaseParser.NullLiteralContext ctx) { return visitChildren(ctx); }

  /** Default: visits children. */
  @Override public T visitNumericLiteral(EqlBaseParser.NumericLiteralContext ctx) { return visitChildren(ctx); }

  /** Default: visits children. */
  @Override public T visitBooleanLiteral(EqlBaseParser.BooleanLiteralContext ctx) { return visitChildren(ctx); }

  /** Default: visits children. */
  @Override public T visitStringLiteral(EqlBaseParser.StringLiteralContext ctx) { return visitChildren(ctx); }

  /** Default: visits children. */
  @Override public T visitComparisonOperator(EqlBaseParser.ComparisonOperatorContext ctx) { return visitChildren(ctx); }

  /** Default: visits children. */
  @Override public T visitBooleanValue(EqlBaseParser.BooleanValueContext ctx) { return visitChildren(ctx); }

  /** Default: visits children. */
  @Override public T visitQualifiedName(EqlBaseParser.QualifiedNameContext ctx) { return visitChildren(ctx); }

  /** Default: visits children. */
  @Override public T visitIdentifier(EqlBaseParser.IdentifierContext ctx) { return visitChildren(ctx); }

  /** Default: visits children. */
  @Override public T visitTimeUnit(EqlBaseParser.TimeUnitContext ctx) { return visitChildren(ctx); }

  /** Default: visits children. */
  @Override public T visitDecimalLiteral(EqlBaseParser.DecimalLiteralContext ctx) { return visitChildren(ctx); }

  /** Default: visits children. */
  @Override public T visitIntegerLiteral(EqlBaseParser.IntegerLiteralContext ctx) { return visitChildren(ctx); }

  /** Default: visits children. */
  @Override public T visitString(EqlBaseParser.StringContext ctx) { return visitChildren(ctx); }
}
/* * Copyright (C) 2014 The Dagger Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package dagger.internal.codegen.binding; import static com.google.auto.common.MoreElements.asType; import static com.google.auto.common.MoreTypes.asTypeElement; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.collect.Iterables.getOnlyElement; import static dagger.internal.codegen.base.ComponentAnnotation.subcomponentAnnotation; import static dagger.internal.codegen.base.Scopes.productionScope; import static dagger.internal.codegen.base.Scopes.scopesOf; import static dagger.internal.codegen.binding.ComponentCreatorAnnotation.creatorAnnotationsFor; import static dagger.internal.codegen.binding.ComponentDescriptor.isComponentContributionMethod; import static dagger.internal.codegen.binding.ConfigurationAnnotations.enclosedAnnotatedTypes; import static dagger.internal.codegen.binding.ConfigurationAnnotations.isSubcomponentCreator; import static dagger.internal.codegen.extension.DaggerStreams.toImmutableSet; import static javax.lang.model.type.TypeKind.DECLARED; import static javax.lang.model.type.TypeKind.VOID; import static javax.lang.model.util.ElementFilter.methodsIn; import com.google.auto.common.MoreTypes; import com.google.common.collect.ImmutableBiMap; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import dagger.internal.codegen.base.ComponentAnnotation; import 
dagger.internal.codegen.base.ModuleAnnotation;
import dagger.internal.codegen.binding.ComponentDescriptor.ComponentMethodDescriptor;
import dagger.internal.codegen.langmodel.DaggerElements;
import dagger.internal.codegen.langmodel.DaggerTypes;
import dagger.spi.model.Scope;
import java.util.Optional;
import java.util.function.Function;
import javax.inject.Inject;
import javax.lang.model.element.ExecutableElement;
import javax.lang.model.element.TypeElement;
import javax.lang.model.type.DeclaredType;
import javax.lang.model.type.ExecutableType;
import javax.lang.model.type.TypeMirror;

/** A factory for {@link ComponentDescriptor}s. */
public final class ComponentDescriptorFactory {
  // Utilities for querying the compile-time element/type model.
  private final DaggerElements elements;
  private final DaggerTypes types;
  // Builds DependencyRequests for provision/production/members-injection methods.
  private final DependencyRequestFactory dependencyRequestFactory;
  // Resolves module types (and their transitive includes) into ModuleDescriptors.
  private final ModuleDescriptor.Factory moduleDescriptorFactory;
  private final InjectionAnnotations injectionAnnotations;

  @Inject
  ComponentDescriptorFactory(
      DaggerElements elements,
      DaggerTypes types,
      DependencyRequestFactory dependencyRequestFactory,
      ModuleDescriptor.Factory moduleDescriptorFactory,
      InjectionAnnotations injectionAnnotations) {
    this.elements = elements;
    this.types = types;
    this.dependencyRequestFactory = dependencyRequestFactory;
    this.moduleDescriptorFactory = moduleDescriptorFactory;
    this.injectionAnnotations = injectionAnnotations;
  }

  /**
   * Returns a descriptor for a root component type.
   *
   * @throws IllegalArgumentException if {@code typeElement} has no root component annotation
   */
  public ComponentDescriptor rootComponentDescriptor(TypeElement typeElement) {
    return create(
        typeElement,
        checkAnnotation(
            typeElement,
            ComponentAnnotation::rootComponentAnnotation,
            "must have a component annotation"));
  }

  /**
   * Returns a descriptor for a subcomponent type.
   *
   * @throws IllegalArgumentException if {@code typeElement} has no subcomponent annotation
   */
  public ComponentDescriptor subcomponentDescriptor(TypeElement typeElement) {
    return create(
        typeElement,
        checkAnnotation(
            typeElement,
            ComponentAnnotation::subcomponentAnnotation,
            "must have a subcomponent annotation"));
  }

  /**
   * Returns a descriptor for a fictional component based on a module type in order to validate its
   * bindings.
   *
   * @throws IllegalArgumentException if {@code typeElement} has no module annotation
   */
  public ComponentDescriptor moduleComponentDescriptor(TypeElement typeElement) {
    return create(
        typeElement,
        ComponentAnnotation.fromModuleAnnotation(
            checkAnnotation(
                typeElement, ModuleAnnotation::moduleAnnotation, "must have a module annotation")));
  }

  /**
   * Applies {@code annotationFunction} to {@code typeElement} and unwraps the result, throwing
   * {@link IllegalArgumentException} with {@code message} if the annotation is absent.
   */
  private static <A> A checkAnnotation(
      TypeElement typeElement,
      Function<TypeElement, Optional<A>> annotationFunction,
      String message) {
    return annotationFunction
        .apply(typeElement)
        .orElseThrow(() -> new IllegalArgumentException(typeElement + " " + message));
  }

  /**
   * Builds the full {@link ComponentDescriptor} for {@code typeElement}: collects component
   * dependencies and their contribution methods, transitive modules, subcomponents declared via
   * modules, the component's own (unimplemented) methods, the enclosed creator type (if any), and
   * the component's scopes.
   */
  private ComponentDescriptor create(
      TypeElement typeElement, ComponentAnnotation componentAnnotation) {
    ImmutableSet<ComponentRequirement> componentDependencies =
        componentAnnotation.dependencyTypes().stream()
            .map(ComponentRequirement::forDependency)
            .collect(toImmutableSet());

    // Index every contribution method of every component dependency by its declaring method so
    // binding resolution can map a request back to the dependency that satisfies it.
    ImmutableMap.Builder<ExecutableElement, ComponentRequirement> dependenciesByDependencyMethod =
        ImmutableMap.builder();
    for (ComponentRequirement componentDependency : componentDependencies) {
      for (ExecutableElement dependencyMethod :
          methodsIn(elements.getAllMembers(componentDependency.typeElement()))) {
        if (isComponentContributionMethod(elements, dependencyMethod)) {
          dependenciesByDependencyMethod.put(dependencyMethod, componentDependency);
        }
      }
    }

    // Start with the component's modules. For fictional components built from a module, start with
    // that module.
    ImmutableSet<TypeElement> modules =
        componentAnnotation.isRealComponent()
            ? componentAnnotation.modules()
            : ImmutableSet.of(typeElement);

    ImmutableSet<ModuleDescriptor> transitiveModules =
        moduleDescriptorFactory.transitiveModules(modules);

    // Subcomponents may also be declared on modules (Module.subcomponents); collect those too.
    ImmutableSet.Builder<ComponentDescriptor> subcomponentsFromModules = ImmutableSet.builder();
    for (ModuleDescriptor module : transitiveModules) {
      for (SubcomponentDeclaration subcomponentDeclaration : module.subcomponentDeclarations()) {
        TypeElement subcomponent = subcomponentDeclaration.subcomponentType();
        subcomponentsFromModules.add(subcomponentDescriptor(subcomponent));
      }
    }

    ImmutableSet.Builder<ComponentMethodDescriptor> componentMethodsBuilder =
        ImmutableSet.builder();
    ImmutableBiMap.Builder<ComponentMethodDescriptor, ComponentDescriptor>
        subcomponentsByFactoryMethod = ImmutableBiMap.builder();
    ImmutableBiMap.Builder<ComponentMethodDescriptor, ComponentDescriptor>
        subcomponentsByBuilderMethod = ImmutableBiMap.builder();
    // A fictional (module-based) component has no component interface, so no methods to inspect.
    if (componentAnnotation.isRealComponent()) {
      ImmutableSet<ExecutableElement> unimplementedMethods =
          elements.getUnimplementedMethods(typeElement);
      for (ExecutableElement componentMethod : unimplementedMethods) {
        ComponentMethodDescriptor componentMethodDescriptor =
            getDescriptorForComponentMethod(typeElement, componentAnnotation, componentMethod);
        componentMethodsBuilder.add(componentMethodDescriptor);
        componentMethodDescriptor
            .subcomponent()
            .ifPresent(
                subcomponent -> {
                  // If the dependency request is present, that means the method returns the
                  // subcomponent factory.
                  if (componentMethodDescriptor.dependencyRequest().isPresent()) {
                    subcomponentsByBuilderMethod.put(componentMethodDescriptor, subcomponent);
                  } else {
                    subcomponentsByFactoryMethod.put(componentMethodDescriptor, subcomponent);
                  }
                });
      }
    }

    // Validation should have ensured that this set will have at most one element.
    ImmutableSet<DeclaredType> enclosedCreators =
        creatorAnnotationsFor(componentAnnotation).stream()
            .flatMap(
                creatorAnnotation ->
                    enclosedAnnotatedTypes(typeElement, creatorAnnotation).stream())
            .collect(toImmutableSet());
    Optional<ComponentCreatorDescriptor> creatorDescriptor =
        enclosedCreators.isEmpty()
            ? Optional.empty()
            : Optional.of(
                ComponentCreatorDescriptor.create(
                    getOnlyElement(enclosedCreators), elements, types, dependencyRequestFactory));

    ImmutableSet<Scope> scopes = scopesOf(typeElement);
    // Production components implicitly carry the production scope in addition to declared scopes.
    if (componentAnnotation.isProduction()) {
      scopes = ImmutableSet.<Scope>builder().addAll(scopes).add(productionScope(elements)).build();
    }

    return new AutoValue_ComponentDescriptor(
        componentAnnotation,
        typeElement,
        componentDependencies,
        transitiveModules,
        dependenciesByDependencyMethod.build(),
        scopes,
        subcomponentsFromModules.build(),
        subcomponentsByFactoryMethod.build(),
        subcomponentsByBuilderMethod.build(),
        componentMethodsBuilder.build(),
        creatorDescriptor);
  }

  /**
   * Classifies a single component interface method: a subcomponent factory method, a subcomponent
   * creator method, an entry point (provision/production, zero args), or a members-injection
   * method (one arg).
   *
   * @throws IllegalArgumentException if the method is void with no args or has more than one arg
   */
  private ComponentMethodDescriptor getDescriptorForComponentMethod(
      TypeElement componentElement,
      ComponentAnnotation componentAnnotation,
      ExecutableElement componentMethod) {
    ComponentMethodDescriptor.Builder descriptor =
        ComponentMethodDescriptor.builder(componentMethod);

    // Resolve the method as a member of the component so type variables are substituted.
    ExecutableType resolvedComponentMethod =
        MoreTypes.asExecutable(
            types.asMemberOf(MoreTypes.asDeclared(componentElement.asType()), componentMethod));
    TypeMirror returnType = resolvedComponentMethod.getReturnType();
    // A qualified return type is always a plain dependency request, never a subcomponent.
    if (returnType.getKind().equals(DECLARED)
        && !injectionAnnotations.getQualifier(componentMethod).isPresent()) {
      TypeElement returnTypeElement = asTypeElement(returnType);
      if (subcomponentAnnotation(returnTypeElement).isPresent()) {
        // It's a subcomponent factory method. There is no dependency request, and there could be
        // any number of parameters. Just return the descriptor.
        return descriptor.subcomponent(subcomponentDescriptor(returnTypeElement)).build();
      }
      if (isSubcomponentCreator(returnTypeElement)) {
        // Returns a subcomponent's Builder/Factory; the enclosing element is the subcomponent.
        descriptor.subcomponent(
            subcomponentDescriptor(asType(returnTypeElement.getEnclosingElement())));
      }
    }

    switch (componentMethod.getParameters().size()) {
      case 0:
        // Zero-arg method: an entry point returning a provided/produced type.
        checkArgument(
            !returnType.getKind().equals(VOID),
            "component method cannot be void: %s",
            componentMethod);
        descriptor.dependencyRequest(
            componentAnnotation.isProduction()
                ? dependencyRequestFactory.forComponentProductionMethod(
                    componentMethod, resolvedComponentMethod)
                : dependencyRequestFactory.forComponentProvisionMethod(
                    componentMethod, resolvedComponentMethod));
        break;

      case 1:
        // One-arg method: members injection; must return void or echo the parameter type.
        checkArgument(
            returnType.getKind().equals(VOID)
                || MoreTypes.equivalence()
                    .equivalent(returnType, resolvedComponentMethod.getParameterTypes().get(0)),
            "members injection method must return void or parameter type: %s",
            componentMethod);
        descriptor.dependencyRequest(
            dependencyRequestFactory.forComponentMembersInjectionMethod(
                componentMethod, resolvedComponentMethod));
        break;

      default:
        throw new IllegalArgumentException(
            "component method has too many parameters: " + componentMethod);
    }

    return descriptor.build();
  }
}
/* * Copyright 2016 Red Hat, Inc. and/or its affiliates * and other contributors as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.keycloak.services.managers; import org.jboss.logging.Logger; import org.keycloak.TokenIdGenerator; import org.keycloak.common.util.KeycloakUriBuilder; import org.keycloak.common.util.MultivaluedHashMap; import org.keycloak.common.util.StringPropertyReplacer; import org.keycloak.common.util.Time; import org.keycloak.connections.httpclient.HttpClientProvider; import org.keycloak.constants.AdapterConstants; import org.keycloak.models.AuthenticatedClientSessionModel; import org.keycloak.models.ClientModel; import org.keycloak.models.KeycloakSession; import org.keycloak.models.RealmModel; import org.keycloak.models.UserModel; import org.keycloak.models.UserSessionModel; import org.keycloak.protocol.oidc.TokenManager; import org.keycloak.representations.adapters.action.GlobalRequestResult; import org.keycloak.representations.adapters.action.LogoutAction; import org.keycloak.representations.adapters.action.PushNotBeforeAction; import org.keycloak.representations.adapters.action.TestAvailabilityAction; import org.keycloak.services.ServicesLogger; import org.keycloak.services.util.ResolveRelative; import javax.ws.rs.core.UriBuilder; import java.io.IOException; import java.net.URI; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.LinkedList; import java.util.List; 
import java.util.Map;
import java.util.Set;

/**
 * Sends adapter admin actions (logout, not-before push, availability test) from the server to the
 * management URLs of client applications, fanning out to all registered cluster nodes when needed.
 *
 * @author <a href="mailto:bill@burkecentral.com">Bill Burke</a>
 * @version $Revision: 1 $
 */
public class ResourceAdminManager {
    private static final Logger logger = Logger.getLogger(ResourceAdminManager.class);
    // Placeholder in a management URL replaced per-node with the host that owns the HTTP session.
    private static final String CLIENT_SESSION_HOST_PROPERTY = "${application.session.host}";

    private KeycloakSession session;

    public ResourceAdminManager(KeycloakSession session) {
        this.session = session;
    }

    /** Resolves {@code uri} against {@code rootUrl}/{@code requestUri} and expands ${...} system properties. */
    public static String resolveUri(URI requestUri, String rootUrl, String uri) {
        String absoluteURI = ResolveRelative.resolveRelativeUri(requestUri, rootUrl, uri);
        return StringPropertyReplacer.replaceProperties(absoluteURI);
    }

    /**
     * Returns the client's absolute management URL with system properties expanded,
     * or null when the client has no management URL configured.
     */
    public static String getManagementUrl(URI requestUri, ClientModel client) {
        String mgmtUrl = client.getManagementUrl();
        if (mgmtUrl == null || mgmtUrl.equals("")) {
            return null;
        }

        String absoluteURI = ResolveRelative.resolveRelativeUri(requestUri, client.getRootUrl(), mgmtUrl);

        // this is for resolving URI like "http://${jboss.host.name}:8080/..." in order to send
        // request to same machine and avoid request to LB in cluster environment
        return StringPropertyReplacer.replaceProperties(absoluteURI);
    }

    // For non-cluster setup, return just single configured managementUrls
    // For cluster setup, return the management Urls corresponding to all registered cluster nodes
    private List<String> getAllManagementUrls(URI requestUri, ClientModel client) {
        String baseMgmtUrl = getManagementUrl(requestUri, client);
        if (baseMgmtUrl == null) {
            return Collections.emptyList();
        }

        Set<String> registeredNodesHosts = new ClientManager().validateRegisteredNodes(client);

        // No-cluster setup
        if (registeredNodesHosts.isEmpty()) {
            return Arrays.asList(baseMgmtUrl);
        }

        // Rewrite the host part of the base URL once per registered node.
        List<String> result = new LinkedList<String>();
        KeycloakUriBuilder uriBuilder = KeycloakUriBuilder.fromUri(baseMgmtUrl);
        for (String nodeHost : registeredNodesHosts) {
            String currentNodeUri = uriBuilder.clone().host(nodeHost).build().toString();
            result.add(currentNodeUri);
        }

        return result;
    }

    /** Logs the user out of every application across all of the user's sessions. */
    public void logoutUser(URI requestUri, RealmModel realm, UserModel user, KeycloakSession keycloakSession) {
        List<UserSessionModel> userSessions = keycloakSession.sessions().getUserSessions(realm, user);
        logoutUserSessions(requestUri, realm, userSessions);
    }

    /** Groups the client sessions of {@code userSessions} by client and sends one logout per client. */
    protected void logoutUserSessions(URI requestUri, RealmModel realm, List<UserSessionModel> userSessions) {
        // Map from "app" to clientSessions for this app
        MultivaluedHashMap<String, AuthenticatedClientSessionModel> clientSessions = new MultivaluedHashMap<>();
        for (UserSessionModel userSession : userSessions) {
            putClientSessions(clientSessions, userSession);
        }

        logger.debugv("logging out {0} resources ", clientSessions.size());
        //logger.infov("logging out resources: {0}", clientSessions);

        for (Map.Entry<String, List<AuthenticatedClientSessionModel>> entry : clientSessions.entrySet()) {
            if (entry.getValue().size() == 0) {
                continue;
            }
            // All sessions in a bucket share the same client; take it from the first entry.
            logoutClientSessions(requestUri, realm, entry.getValue().get(0).getClient(), entry.getValue());
        }
    }

    /** Adds every authenticated client session of {@code userSession} to the per-client multimap. */
    private void putClientSessions(MultivaluedHashMap<String, AuthenticatedClientSessionModel> clientSessions, UserSessionModel userSession) {
        for (Map.Entry<String, AuthenticatedClientSessionModel> entry : userSession.getAuthenticatedClientSessions().entrySet()) {
            clientSessions.add(entry.getKey(), entry.getValue());
        }
    }

    /** Logs the user out of a single client application across all of the user's sessions. */
    public void logoutUserFromClient(URI requestUri, RealmModel realm, ClientModel resource, UserModel user) {
        List<UserSessionModel> userSessions = session.sessions().getUserSessions(realm, user);
        List<AuthenticatedClientSessionModel> ourAppClientSessions = new LinkedList<>();
        if (userSessions != null) {
            for (UserSessionModel userSession : userSessions) {
                AuthenticatedClientSessionModel clientSession = userSession.getAuthenticatedClientSessions().get(resource.getId());
                if (clientSession != null) {
                    ourAppClientSessions.add(clientSession);
                }
            }
        }

        logoutClientSessions(requestUri, realm, resource, ourAppClientSessions);
    }

    /** Convenience overload of {@link #logoutClientSessions} for a single client session. */
    public boolean logoutClientSession(URI requestUri, RealmModel realm, ClientModel resource, AuthenticatedClientSessionModel clientSession) {
        return logoutClientSessions(requestUri, realm, resource, Arrays.asList(clientSession));
    }

    /**
     * Sends adapter logout requests for the given client sessions.
     *
     * @return true only if the client has a management URL, at least one session has a recorded
     *         adapter session id, and every logout request succeeded
     */
    protected boolean logoutClientSessions(URI requestUri, RealmModel realm, ClientModel resource, List<AuthenticatedClientSessionModel> clientSessions) {
        String managementUrl = getManagementUrl(requestUri, resource);
        if (managementUrl != null) {

            // Key is host, value is list of http sessions for this host
            MultivaluedHashMap<String, String> adapterSessionIds = null;
            List<String> userSessions = new LinkedList<>();
            if (clientSessions != null && clientSessions.size() > 0) {
                adapterSessionIds = new MultivaluedHashMap<String, String>();
                for (AuthenticatedClientSessionModel clientSession : clientSessions) {
                    String adapterSessionId = clientSession.getNote(AdapterConstants.CLIENT_SESSION_STATE);
                    if (adapterSessionId != null) {
                        String host = clientSession.getNote(AdapterConstants.CLIENT_SESSION_HOST);
                        adapterSessionIds.add(host, adapterSessionId);
                    }
                    if (clientSession.getUserSession() != null) userSessions.add(clientSession.getUserSession().getId());
                }
            }

            if (adapterSessionIds == null || adapterSessionIds.isEmpty()) {
                logger.debugv("Can't logout {0}: no logged adapter sessions", resource.getClientId());
                return false;
            }

            if (managementUrl.contains(CLIENT_SESSION_HOST_PROPERTY)) {
                boolean allPassed = true;
                // Send logout separately to each host (needed for single-sign-out in cluster for non-distributable apps - KEYCLOAK-748)
                for (Map.Entry<String, List<String>> entry : adapterSessionIds.entrySet()) {
                    String host = entry.getKey();
                    List<String> sessionIds = entry.getValue();
                    String currentHostMgmtUrl = managementUrl.replace(CLIENT_SESSION_HOST_PROPERTY, host);
                    allPassed = sendLogoutRequest(realm, resource, sessionIds, userSessions, 0, currentHostMgmtUrl) && allPassed;
                }

                return allPassed;
            } else {
                // Send single logout request
                List<String> allSessionIds = new ArrayList<String>();
                for (List<String> currentIds : adapterSessionIds.values()) {
                    allSessionIds.addAll(currentIds);
                }
                return sendLogoutRequest(realm, resource, allSessionIds, userSessions, 0, managementUrl);
            }
        } else {
            logger.debugv("Can't logout {0}: no management url", resource.getClientId());
            return false;
        }
    }

    // Methods for logout all

    /** Bumps the realm's not-before and sends a logout to every client in the realm. */
    public GlobalRequestResult logoutAll(URI requestUri, RealmModel realm) {
        realm.setNotBefore(Time.currentTime());
        List<ClientModel> resources = realm.getClients();
        logger.debugv("logging out {0} resources ", resources.size());

        GlobalRequestResult finalResult = new GlobalRequestResult();
        for (ClientModel resource : resources) {
            GlobalRequestResult currentResult = logoutClient(requestUri, realm, resource, realm.getNotBefore());
            finalResult.addAll(currentResult);
        }
        return finalResult;
    }

    /** Bumps the client's not-before and sends a logout to all of its cluster nodes. */
    public GlobalRequestResult logoutClient(URI requestUri, RealmModel realm, ClientModel resource) {
        resource.setNotBefore(Time.currentTime());
        return logoutClient(requestUri, realm, resource, resource.getNotBefore());
    }

    /** Sends a logout (with the given not-before) to every management URL of the client. */
    protected GlobalRequestResult logoutClient(URI requestUri, RealmModel realm, ClientModel resource, int notBefore) {
        List<String> mgmtUrls = getAllManagementUrls(requestUri, resource);
        if (mgmtUrls.isEmpty()) {
            logger.debug("No management URL or no registered cluster nodes for the client " + resource.getClientId());
            return new GlobalRequestResult();
        }

        if (logger.isDebugEnabled()) logger.debug("Send logoutClient for URLs: " + mgmtUrls);

        // Propagate this to all hosts
        GlobalRequestResult result = new GlobalRequestResult();
        for (String mgmtUrl : mgmtUrls) {
            if (sendLogoutRequest(realm, resource, null, null, notBefore, mgmtUrl)) {
                result.addSuccessRequest(mgmtUrl);
            } else {
                result.addFailedRequest(mgmtUrl);
            }
        }
        return result;
    }

    /**
     * POSTs a signed LogoutAction token to the adapter's k_logout endpoint.
     * The action expires 30 seconds after creation. Returns true on HTTP 200/204.
     */
    protected boolean sendLogoutRequest(RealmModel realm, ClientModel resource, List<String> adapterSessionIds, List<String> userSessions, int notBefore, String managementUrl) {
        LogoutAction adminAction = new LogoutAction(TokenIdGenerator.generateId(), Time.currentTime() + 30, resource.getClientId(), adapterSessionIds, notBefore, userSessions);
        String token = new TokenManager().encodeToken(session, realm, adminAction);
        if (logger.isDebugEnabled()) logger.debugv("logout resource {0} url: {1} sessionIds: " + adapterSessionIds, resource.getClientId(), managementUrl);
        URI target = UriBuilder.fromUri(managementUrl).path(AdapterConstants.K_LOGOUT).build();
        try {
            int status = session.getProvider(HttpClientProvider.class).postText(target.toString(), token);
            boolean success = status == 204 || status == 200;
            logger.debugf("logout success for %s: %s", managementUrl, success);
            return success;
        } catch (IOException e) {
            ServicesLogger.LOGGER.logoutFailed(e, resource.getClientId());
            return false;
        }
    }

    /** Pushes the realm's current not-before policy to every client in the realm. */
    public GlobalRequestResult pushRealmRevocationPolicy(URI requestUri, RealmModel realm) {
        GlobalRequestResult finalResult = new GlobalRequestResult();
        for (ClientModel client : realm.getClients()) {
            GlobalRequestResult currentResult = pushRevocationPolicy(requestUri, realm, client, realm.getNotBefore());
            finalResult.addAll(currentResult);
        }
        return finalResult;
    }

    /** Pushes a single client's not-before policy to its adapter. */
    public GlobalRequestResult pushClientRevocationPolicy(URI requestUri, RealmModel realm, ClientModel client) {
        return pushRevocationPolicy(requestUri, realm, client, client.getNotBefore());
    }

    /** Sends the not-before push to every management URL of the client. */
    protected GlobalRequestResult pushRevocationPolicy(URI requestUri, RealmModel realm, ClientModel resource, int notBefore) {
        List<String> mgmtUrls = getAllManagementUrls(requestUri, resource);
        if (mgmtUrls.isEmpty()) {
            logger.debugf("No management URL or no registered cluster nodes for the client %s", resource.getClientId());
            return new GlobalRequestResult();
        }

        if (logger.isDebugEnabled()) logger.debug("Sending push revocation to URLS: " + mgmtUrls);

        // Propagate this to all hosts
        GlobalRequestResult result = new GlobalRequestResult();
        for (String mgmtUrl : mgmtUrls) {
            if (sendPushRevocationPolicyRequest(realm, resource, notBefore, mgmtUrl)) {
                result.addSuccessRequest(mgmtUrl);
            } else {
                result.addFailedRequest(mgmtUrl);
            }
        }
        return result;
    }

    /**
     * POSTs a signed PushNotBeforeAction token to the adapter's k_push_not_before endpoint.
     * The action expires 30 seconds after creation. Returns true on HTTP 200/204.
     */
    protected boolean sendPushRevocationPolicyRequest(RealmModel realm, ClientModel resource, int notBefore, String managementUrl) {
        PushNotBeforeAction adminAction = new PushNotBeforeAction(TokenIdGenerator.generateId(), Time.currentTime() + 30, resource.getClientId(), notBefore);
        String token = new TokenManager().encodeToken(session, realm, adminAction);
        logger.debugv("pushRevocation resource: {0} url: {1}", resource.getClientId(), managementUrl);
        URI target = UriBuilder.fromUri(managementUrl).path(AdapterConstants.K_PUSH_NOT_BEFORE).build();
        try {
            int status = session.getProvider(HttpClientProvider.class).postText(target.toString(), token);
            boolean success = status == 204 || status == 200;
            logger.debugf("pushRevocation success for %s: %s", managementUrl, success);
            return success;
        } catch (IOException e) {
            ServicesLogger.LOGGER.failedToSendRevocation(e);
            return false;
        }
    }

    /** Tests reachability of every registered cluster node of the client. */
    public GlobalRequestResult testNodesAvailability(URI requestUri, RealmModel realm, ClientModel client) {
        List<String> mgmtUrls = getAllManagementUrls(requestUri, client);
        if (mgmtUrls.isEmpty()) {
            logger.debug("No management URL or no registered cluster nodes for the application " + client.getClientId());
            return new GlobalRequestResult();
        }

        if (logger.isDebugEnabled()) logger.debug("Sending test nodes availability: " + mgmtUrls);

        // Propagate this to all hosts
        GlobalRequestResult result = new GlobalRequestResult();
        for (String mgmtUrl : mgmtUrls) {
            if (sendTestNodeAvailabilityRequest(realm, client, mgmtUrl)) {
                result.addSuccessRequest(mgmtUrl);
            } else {
                result.addFailedRequest(mgmtUrl);
            }
        }
        return result;
    }

    /**
     * POSTs a signed TestAvailabilityAction token to the adapter's k_test_available endpoint.
     * The action expires 30 seconds after creation. Returns true on HTTP 200/204.
     */
    protected boolean sendTestNodeAvailabilityRequest(RealmModel realm, ClientModel client, String managementUrl) {
        TestAvailabilityAction adminAction = new TestAvailabilityAction(TokenIdGenerator.generateId(), Time.currentTime() + 30, client.getClientId());
        String token = new TokenManager().encodeToken(session, realm, adminAction);
        logger.debugv("testNodes availability resource: {0} url: {1}", client.getClientId(), managementUrl);
        URI target = UriBuilder.fromUri(managementUrl).path(AdapterConstants.K_TEST_AVAILABLE).build();
        try {
            int status = session.getProvider(HttpClientProvider.class).postText(target.toString(), token);
            boolean success = status == 204 || status == 200;
            logger.debugf("testAvailability success for %s: %s", managementUrl, success);
            return success;
        } catch (IOException e) {
            ServicesLogger.LOGGER.availabilityTestFailed(managementUrl);
            return false;
        }
    }
}
package architecture; import global.GlobalConstants; import java.util.ArrayList; import java.util.Collection; import java.util.Vector; import TRouter.RouteNodeData; public class RouteNode implements Comparable<RouteNode> { public final String name; public final short x; public final short y; public final short n; public final RouteNodeType type; public final short capacity; public final double baseCost; private Collection<RouteNode> children; public RouteNodeData routeNodeData; // private Set<RouteNode> parents; public boolean target; public RouteNode(String name, int capacity, int x, int y, int n, RouteNodeType t) { super(); if(GlobalConstants.routeNodesWithName) this.name = name; else this.name = null; this.x = (short)x; this.y = (short)y; this.n = (short)n; this.type = t; this.capacity = (short)capacity; this.baseCost = calculateBaseCost(); this.target = false; this.routeNodeData = null; children = new ArrayList<RouteNode>(); // parents = new HashSet<RouteNode>(); } public RouteNode(String name) { this(name, -1, -1, -1, -1, RouteNodeType.SOURCE); } public void resetDataInNode(){ this.target = false; } private double calculateBaseCost() { switch (type) { case SOURCE: case OPIN: case HCHAN: case VCHAN: return 1; case SINK: return 0; case IPIN: return 0.95; default: throw new RuntimeException(); } } @Override public String toString() { if(name == null) return type+"_"+x+"_"+y+"_"+n; return name; } private void addChild(RouteNode node) { if(!children.contains(node)) { children.add(node); } } public Collection<RouteNode> getChildren() { return children; } private void addParent(RouteNode node) { // parents.add(node); } // public Collection<RouteNode> getParents() { // return parents; // } public static void connect(Vector<RouteNode> vector, Vector<RouteNode> vector2) { if (vector!=null && vector2!=null) for (int i=0; i<vector.size();i++) { connect(vector.get(i),vector2.get(i)); } } //(a*x+b) mod ChannelWidth public static void connect(Vector<RouteNode> vector1, 
Vector<RouteNode> vector2, int a, int b) { if (vector1!=null && vector2!=null){ int channelWidth = vector1.size(); for (int i=0; i<channelWidth;i++) { connect(vector1.get(i),vector2.get(((a*i+b+2*channelWidth)%channelWidth)) ); } } } public static void connect(Vector<RouteNode> vector, Vector<RouteNode> vector2, int parity, int offset, int a, int b) { int j; if (vector != null && vector2 != null) for (int i = 0; i < vector.size(); i++) { if (i % 2 == parity) { j = ((((a * (i / 2) + b) % (vector.size() / 2) + (vector.size() / 2)) % (vector.size() / 2)) * 2)+ (i % 2)+ offset; //System.out.println("a is "+a+",b is "+b+", i is "+i+" en j is "+j); connect(vector.get(i), vector2.get(j)); } } } public static void connect(RouteNode node, Vector<RouteNode> channel) { for (RouteNode wire:channel) { connect(node, wire); } } static int ic2 = 0; static int nc2 = 0; public static void connect(RouteNode node, Vector<RouteNode> channel, double fcout) { for (RouteNode wire:channel) { if(ic2/(double)nc2 <= fcout) { connect(node, wire); ic2++; } nc2++; } } public static void connect(Vector<RouteNode> channel, RouteNode node) { for (RouteNode wire:channel) { connect(wire, node); } } static int ic1 = 0; static int nc1 = 0; public static void connect(Vector<RouteNode> channel, RouteNode node, double fcin) { for (RouteNode wire:channel) { if(ic1/(double)nc1 <= fcin) { connect(wire, node); ic1++; } nc1++; } } public static void connect(RouteNode parent, RouteNode child) { parent.addChild(child); child.addParent(parent); } public boolean isWire() { return type == RouteNodeType.HCHAN || type == RouteNodeType.VCHAN; } public void reduceMemoryUsage() { ((ArrayList<RouteNode>)children).trimToSize(); } @Override public int compareTo(RouteNode o) { int r = type.compareTo(o.type); if (this == o) return 0; else if (r < 0) return -1; else if (r > 0) return 1; else if(x < o.x) return -1; else if (x > o.x) return 1; else if (y < o.y) return -1; else if (y > o.y) return 1; else if (n < o.n) return -1; 
else if (n > o.n) return 1; else return Long.valueOf(this.hashCode()).compareTo(Long.valueOf(o.hashCode())); //throw new RuntimeException(); } }
/* ******************************************************************************* * Copyright (C) 2011-2012, International Business Machines * Corporation and others. All Rights Reserved. ******************************************************************************* * created on: 2011jan06 * created by: Markus W. Scherer * ported from ICU4C ucharstrie.h/.cpp */ package com.ibm.icu.util; import java.io.IOException; import java.util.ArrayList; import java.util.NoSuchElementException; import com.ibm.icu.text.UTF16; import com.ibm.icu.util.BytesTrie.Result; /** * Light-weight, non-const reader class for a CharsTrie. * Traverses a char-serialized data structure with minimal state, * for mapping strings (16-bit-unit sequences) to non-negative integer values. * * <p>This class is not intended for public subclassing. * * @stable ICU 4.8 * @author Markus W. Scherer */ public final class CharsTrie implements Cloneable, Iterable<CharsTrie.Entry> { /** * Constructs a CharsTrie reader instance. * * <p>The CharSequence must contain a copy of a char sequence from the CharsTrieBuilder, * with the offset indicating the first char of that sequence. * The CharsTrie object will not read more chars than * the CharsTrieBuilder generated in the corresponding build() call. * * <p>The CharSequence is not copied/cloned and must not be modified while * the CharsTrie object is in use. * * @param trieChars CharSequence that contains the serialized trie. * @param offset Root offset of the trie in the CharSequence. * @stable ICU 4.8 */ public CharsTrie(CharSequence trieChars, int offset) { chars_=trieChars; pos_=root_=offset; remainingMatchLength_=-1; } /** * Clones this trie reader object and its state, * but not the char array which will be shared. * @return A shallow clone of this trie. * @stable ICU 4.8 */ @Override public Object clone() throws CloneNotSupportedException { return super.clone(); // A shallow copy is just what we need. } /** * Resets this trie to its initial state. 
 * @return this
 * @stable ICU 4.8
 */
public CharsTrie reset() {
    // Back to the root node; a negative remainingMatchLength_ means
    // "not inside a linear-match node".
    pos_=root_;
    remainingMatchLength_=-1;
    return this;
}

/**
 * CharsTrie state object, for saving a trie's current state
 * and resetting the trie back to this state later.
 * @stable ICU 4.8
 */
public static final class State {
    /**
     * Constructs an empty State.
     * @stable ICU 4.8
     */
    public State() {}
    // Snapshot of the owning trie's fields; written by saveState(),
    // read back by resetToState().
    private CharSequence chars;
    private int root;
    private int pos;
    private int remainingMatchLength;
}

/**
 * Saves the state of this trie.
 * @param state The State object to hold the trie's state.
 * @return this
 * @see #resetToState
 * @stable ICU 4.8
 */
public CharsTrie saveState(State state) /*const*/ {
    state.chars=chars_;
    state.root=root_;
    state.pos=pos_;
    state.remainingMatchLength=remainingMatchLength_;
    return this;
}

/**
 * Resets this trie to the saved state.
 * @param state The State object which holds a saved trie state.
 * @return this
 * @throws IllegalArgumentException if the state object contains no state,
 *         or the state of a different trie
 * @see #saveState
 * @see #reset
 * @stable ICU 4.8
 */
public CharsTrie resetToState(State state) {
    // A state is only compatible if it was saved from the same
    // char sequence and root offset.
    if(chars_==state.chars && chars_!=null && root_==state.root) {
        pos_=state.pos;
        remainingMatchLength_=state.remainingMatchLength;
    } else {
        throw new IllegalArgumentException("incompatible trie state");
    }
    return this;
}

/**
 * Determines whether the string so far matches, whether it has a value,
 * and whether another input char can continue a matching string.
 * @return The match/value Result.
 * @stable ICU 4.8
 */
public Result current() /*const*/ {
    int pos=pos_;
    if(pos<0) {
        return Result.NO_MATCH;
    } else {
        int node;
        // Only report a value when not in the middle of a linear-match node;
        // bit 15 of the lead unit selects FINAL_VALUE vs. INTERMEDIATE_VALUE.
        return (remainingMatchLength_<0 && (node=chars_.charAt(pos))>=kMinValueLead) ?
                valueResults_[node>>15] : Result.NO_VALUE;
    }
}

/**
 * Traverses the trie from the initial state for this input char.
 * Equivalent to reset().next(inUnit).
 * @param inUnit Input char value. Values below 0 and above 0xffff will never match.
 * @return The match/value Result.
 * @stable ICU 4.8
 */
public Result first(int inUnit) {
    remainingMatchLength_=-1;
    return nextImpl(root_, inUnit);
}

/**
 * Traverses the trie from the initial state for the
 * one or two UTF-16 code units for this input code point.
 * Equivalent to reset().nextForCodePoint(cp).
 * @param cp A Unicode code point 0..0x10ffff.
 * @return The match/value Result.
 * @stable ICU 4.8
 */
public Result firstForCodePoint(int cp) {
    // A supplementary code point is matched as its surrogate pair.
    return cp<=0xffff ?
        first(cp) :
        (first(UTF16.getLeadSurrogate(cp)).hasNext() ?
            next(UTF16.getTrailSurrogate(cp)) :
            Result.NO_MATCH);
}

/**
 * Traverses the trie from the current state for this input char.
 * @param inUnit Input char value. Values below 0 and above 0xffff will never match.
 * @return The match/value Result.
 * @stable ICU 4.8
 */
public Result next(int inUnit) {
    int pos=pos_;
    if(pos<0) {
        return Result.NO_MATCH;
    }
    int length=remainingMatchLength_;  // Actual remaining match length minus 1.
    if(length>=0) {
        // Remaining part of a linear-match node.
        if(inUnit==chars_.charAt(pos++)) {
            remainingMatchLength_=--length;
            pos_=pos;
            int node;
            return (length<0 && (node=chars_.charAt(pos))>=kMinValueLead) ?
                    valueResults_[node>>15] : Result.NO_VALUE;
        } else {
            stop();
            return Result.NO_MATCH;
        }
    }
    return nextImpl(pos, inUnit);
}

/**
 * Traverses the trie from the current state for the
 * one or two UTF-16 code units for this input code point.
 * @param cp A Unicode code point 0..0x10ffff.
 * @return The match/value Result.
 * @stable ICU 4.8
 */
public Result nextForCodePoint(int cp) {
    return cp<=0xffff ?
        next(cp) :
        (next(UTF16.getLeadSurrogate(cp)).hasNext() ?
            next(UTF16.getTrailSurrogate(cp)) :
            Result.NO_MATCH);
}

/**
 * Traverses the trie from the current state for this string.
 * Equivalent to
 * <pre>
 * Result result=current();
 * for(each c in s)
 *   if(!result.hasNext()) return Result.NO_MATCH;
 *   result=next(c);
 * return result;
 * </pre>
 * @param s Contains a string.
 * @param sIndex The start index of the string in s.
 * @param sLimit The (exclusive) end index of the string in s.
 * @return The match/value Result.
 * @stable ICU 4.8
 */
public Result next(CharSequence s, int sIndex, int sLimit) {
    if(sIndex>=sLimit) {
        // Empty input.
        return current();
    }
    int pos=pos_;
    if(pos<0) {
        return Result.NO_MATCH;
    }
    int length=remainingMatchLength_;  // Actual remaining match length minus 1.
    for(;;) {
        // Fetch the next input unit, if there is one.
        // Continue a linear-match node.
        char inUnit;
        for(;;) {
            if(sIndex==sLimit) {
                // Input exhausted: publish the traversal state and report
                // whether the string so far has a value.
                remainingMatchLength_=length;
                pos_=pos;
                int node;
                return (length<0 && (node=chars_.charAt(pos))>=kMinValueLead) ?
                        valueResults_[node>>15] : Result.NO_VALUE;
            }
            inUnit=s.charAt(sIndex++);
            if(length<0) {
                remainingMatchLength_=length;
                break;
            }
            if(inUnit!=chars_.charAt(pos)) {
                stop();
                return Result.NO_MATCH;
            }
            ++pos;
            --length;
        }
        int node=chars_.charAt(pos++);
        for(;;) {
            if(node<kMinLinearMatch) {
                Result result=branchNext(pos, node, inUnit);
                if(result==Result.NO_MATCH) {
                    return Result.NO_MATCH;
                }
                // Fetch the next input unit, if there is one.
                if(sIndex==sLimit) {
                    return result;
                }
                if(result==Result.FINAL_VALUE) {
                    // No further matching units.
                    stop();
                    return Result.NO_MATCH;
                }
                inUnit=s.charAt(sIndex++);
                pos=pos_;  // branchNext() advanced pos and wrote it to pos_ .
                node=chars_.charAt(pos++);
            } else if(node<kMinValueLead) {
                // Match length+1 units.
                length=node-kMinLinearMatch;  // Actual match length minus 1.
                if(inUnit!=chars_.charAt(pos)) {
                    stop();
                    return Result.NO_MATCH;
                }
                ++pos;
                --length;
                break;
            } else if((node&kValueIsFinal)!=0) {
                // No further matching units.
                stop();
                return Result.NO_MATCH;
            } else {
                // Skip intermediate value.
                pos=skipNodeValue(pos, node);
                node&=kNodeTypeMask;
            }
        }
    }
}

/**
 * Returns a matching string's value if called immediately after
 * current()/first()/next() returned Result.INTERMEDIATE_VALUE or Result.FINAL_VALUE.
 * getValue() can be called multiple times.
 *
 * Do not call getValue() after Result.NO_MATCH or Result.NO_VALUE!
 * @return The value for the string so far.
 * @stable ICU 4.8
 */
public int getValue() /*const*/ {
    int pos=pos_;
    int leadUnit=chars_.charAt(pos++);
    assert(leadUnit>=kMinValueLead);
    // Final values and intermediate (node) values use different compact encodings.
    return (leadUnit&kValueIsFinal)!=0 ?
        readValue(chars_, pos, leadUnit&0x7fff) : readNodeValue(chars_, pos, leadUnit);
}

/**
 * Determines whether all strings reachable from the current state
 * map to the same value, and if so, returns that value.
 * @return The unique value in bits 32..1 with bit 0 set,
 *         if all strings reachable from the current state
 *         map to the same value; otherwise returns 0.
 * @stable ICU 4.8
 */
public long getUniqueValue() /*const*/ {
    int pos=pos_;
    if(pos<0) {
        return 0;
    }
    // Skip the rest of a pending linear-match node.
    long uniqueValue=findUniqueValue(chars_, pos+remainingMatchLength_+1, 0);
    // Ignore internally used bits 63..33; extend the actual value's sign bit from bit 32.
    return (uniqueValue<<31)>>31;
}

/**
 * Finds each char which continues the string from the current state.
 * That is, each char c for which it would be next(c)!=Result.NO_MATCH now.
 * @param out Each next char is appended to this object.
 *        (Only uses the out.append(c) method.)
 * @return The number of chars which continue the string from here.
 * @stable ICU 4.8
 */
public int getNextChars(Appendable out) /*const*/ {
    int pos=pos_;
    if(pos<0) {
        return 0;
    }
    if(remainingMatchLength_>=0) {
        append(out, chars_.charAt(pos));  // Next unit of a pending linear-match node.
        return 1;
    }
    int node=chars_.charAt(pos++);
    if(node>=kMinValueLead) {
        if((node&kValueIsFinal)!=0) {
            return 0;
        } else {
            pos=skipNodeValue(pos, node);
            node&=kNodeTypeMask;
        }
    }
    if(node<kMinLinearMatch) {
        if(node==0) {
            node=chars_.charAt(pos++);
        }
        getNextBranchChars(chars_, pos, ++node, out);
        return node;
    } else {
        // First unit of the linear-match node.
        append(out, chars_.charAt(pos));
        return 1;
    }
}

/**
 * Iterates from the current state of this trie.
 * @return A new CharsTrie.Iterator.
 * @stable ICU 4.8
 */
public Iterator iterator() {
    return new Iterator(chars_, pos_, remainingMatchLength_, 0);
}

/**
 * Iterates from the current state of this trie.
 * @param maxStringLength If 0, the iterator returns full strings.
 *        Otherwise, the iterator returns strings with this maximum length.
 * @return A new CharsTrie.Iterator.
 * @stable ICU 4.8
 */
public Iterator iterator(int maxStringLength) {
    return new Iterator(chars_, pos_, remainingMatchLength_, maxStringLength);
}

/**
 * Iterates from the root of a char-serialized BytesTrie.
 * @param trieChars CharSequence that contains the serialized trie.
 * @param offset Root offset of the trie in the CharSequence.
 * @param maxStringLength If 0, the iterator returns full strings.
 *        Otherwise, the iterator returns strings with this maximum length.
 * @return A new CharsTrie.Iterator.
 * @stable ICU 4.8
 */
public static Iterator iterator(CharSequence trieChars, int offset, int maxStringLength) {
    return new Iterator(trieChars, offset, -1, maxStringLength);
}

/**
 * Return value type for the Iterator.
 * @stable ICU 4.8
 */
public static final class Entry {
    /**
     * The string.
     * @stable ICU 4.8
     */
    public CharSequence chars;
    /**
     * The value associated with the string.
     * @stable ICU 4.8
     */
    public int value;

    private Entry() {
    }
}

/**
 * Iterator for all of the (string, value) pairs in a CharsTrie.
 * @stable ICU 4.8
 */
public static final class Iterator implements java.util.Iterator<Entry> {
    private Iterator(CharSequence trieChars, int offset, int remainingMatchLength, int maxStringLength) {
        chars_=trieChars;
        pos_=initialPos_=offset;
        remainingMatchLength_=initialRemainingMatchLength_=remainingMatchLength;
        maxLength_=maxStringLength;
        int length=remainingMatchLength_;  // Actual remaining match length minus 1.
        if(length>=0) {
            // Pending linear-match node, append remaining bytes to str_.
            ++length;
            if(maxLength_>0 && length>maxLength_) {
                length=maxLength_;  // This will leave remainingMatchLength>=0 as a signal.
            }
            str_.append(chars_, pos_, pos_+length);
            pos_+=length;
            remainingMatchLength_-=length;
        }
    }

    /**
     * Resets this iterator to its initial state.
     * @return this
     * @stable ICU 4.8
     */
    public Iterator reset() {
        pos_=initialPos_;
        remainingMatchLength_=initialRemainingMatchLength_;
        skipValue_=false;
        int length=remainingMatchLength_+1;  // Remaining match length.
        if(maxLength_>0 && length>maxLength_) {
            length=maxLength_;
        }
        str_.setLength(length);
        pos_+=length;
        remainingMatchLength_-=length;
        stack_.clear();
        return this;
    }

    /**
     * @return true if there are more elements.
     * @stable ICU 4.8
     */
    public boolean hasNext() /*const*/ {
        return pos_>=0 || !stack_.isEmpty();
    }

    /**
     * Finds the next (string, value) pair if there is one.
     *
     * If the string is truncated to the maximum length and does not
     * have a real value, then the value is set to -1.
     * In this case, this "not a real value" is indistinguishable from
     * a real value of -1.
     * @return An Entry with the string and value of the next element.
     * @throws NoSuchElementException - iteration has no more elements.
     * @stable ICU 4.8
     */
    public Entry next() {
        int pos=pos_;
        if(pos<0) {
            if(stack_.isEmpty()) {
                throw new NoSuchElementException();
            }
            // Pop the state off the stack and continue with the next outbound edge of
            // the branch node.
            long top=stack_.remove(stack_.size()-1);
            int length=(int)top;
            pos=(int)(top>>32);
            // Low 16 bits: string length before the branch node;
            // bits 31..16: remaining branch length.
            str_.setLength(length&0xffff);
            length>>>=16;
            if(length>1) {
                pos=branchNext(pos, length);
                if(pos<0) {
                    return entry_;  // Reached a final value.
                }
            } else {
                str_.append(chars_.charAt(pos++));
            }
        }
        if(remainingMatchLength_>=0) {
            // We only get here if we started in a pending linear-match node
            // with more than maxLength remaining units.
            return truncateAndStop();
        }
        for(;;) {
            int node=chars_.charAt(pos++);
            if(node>=kMinValueLead) {
                if(skipValue_) {
                    pos=skipNodeValue(pos, node);
                    node&=kNodeTypeMask;
                    skipValue_=false;
                } else {
                    // Deliver value for the string so far.
                    boolean isFinal=(node&kValueIsFinal)!=0;
                    if(isFinal) {
                        entry_.value=readValue(chars_, pos, node&0x7fff);
                    } else {
                        entry_.value=readNodeValue(chars_, pos, node);
                    }
                    if(isFinal || (maxLength_>0 && str_.length()==maxLength_)) {
                        pos_=-1;
                    } else {
                        // We cannot skip the value right here because it shares its
                        // lead unit with a match node which we have to evaluate
                        // next time.
                        // Instead, keep pos_ on the node lead unit itself.
                        pos_=pos-1;
                        skipValue_=true;
                    }
                    entry_.chars=str_;
                    return entry_;
                }
            }
            if(maxLength_>0 && str_.length()==maxLength_) {
                return truncateAndStop();
            }
            if(node<kMinLinearMatch) {
                if(node==0) {
                    node=chars_.charAt(pos++);
                }
                pos=branchNext(pos, node+1);
                if(pos<0) {
                    return entry_;  // Reached a final value.
                }
            } else {
                // Linear-match node, append length units to str_.
                int length=node-kMinLinearMatch+1;
                if(maxLength_>0 && str_.length()+length>maxLength_) {
                    str_.append(chars_, pos, pos+maxLength_-str_.length());
                    return truncateAndStop();
                }
                str_.append(chars_, pos, pos+length);
                pos+=length;
            }
        }
    }

    /**
     * Iterator.remove() is not supported.
     * @throws UnsupportedOperationException (always)
     * @stable ICU 4.8
     */
    public void remove() {
        throw new UnsupportedOperationException();
    }

    // Ends the iteration after a maxLength_ truncation and returns the
    // truncated entry with a sentinel value of -1.
    private Entry truncateAndStop() {
        pos_=-1;
        // We reset entry_.chars every time we return entry_
        // just because the caller might have modified the Entry.
        entry_.chars=str_;
        entry_.value=-1;  // no real value for str
        return entry_;
    }

    // Walks down a branch node, pushing the not-taken edges onto stack_
    // for later backtracking. Returns the next node position, or -1 when
    // a final value was reached (entry_ is then fully populated).
    private int branchNext(int pos, int length) {
        while(length>kMaxBranchLinearSubNodeLength) {
            ++pos;  // ignore the comparison unit
            // Push state for the greater-or-equal edge.
            stack_.add(((long)skipDelta(chars_, pos)<<32)|((length-(length>>1))<<16)|str_.length());
            // Follow the less-than edge.
            length>>=1;
            pos=jumpByDelta(chars_, pos);
        }
        // List of key-value pairs where values are either final values or jump deltas.
        // Read the first (key, value) pair.
        char trieUnit=chars_.charAt(pos++);
        int node=chars_.charAt(pos++);
        boolean isFinal=(node&kValueIsFinal)!=0;
        int value=readValue(chars_, pos, node&=0x7fff);
        pos=skipValue(pos, node);
        stack_.add(((long)pos<<32)|((length-1)<<16)|str_.length());
        str_.append(trieUnit);
        if(isFinal) {
            pos_=-1;
            entry_.chars=str_;
            entry_.value=value;
            return -1;
        } else {
            return pos+value;
        }
    }

    private CharSequence chars_;
    private int pos_;
    private int initialPos_;
    private int remainingMatchLength_;
    private int initialRemainingMatchLength_;
    private boolean skipValue_;  // Skip intermediate value which was already delivered.
    private StringBuilder str_=new StringBuilder();
    private int maxLength_;
    private Entry entry_=new Entry();

    // The stack stores longs for backtracking to another
    // outbound edge of a branch node.
    // Each long has the offset in chars_ in bits 62..32,
    // the str_.length() from before the node in bits 15..0,
    // and the remaining branch length in bits 31..16.
    // (We could store the remaining branch length minus 1 in bits 30..16 and not use bit 31,
    // but the code looks more confusing that way.)
    private ArrayList<Long> stack_=new ArrayList<Long>();
}

// Marks the traversal as failed/finished; hasNext()-style checks see pos_<0.
private void stop() {
    pos_=-1;
}

// Reads a compact 32-bit integer.
// pos is already after the leadUnit, and the lead unit has bit 15 reset.
private static int readValue(CharSequence chars, int pos, int leadUnit) {
    int value;
    if(leadUnit<kMinTwoUnitValueLead) {
        value=leadUnit;
    } else if(leadUnit<kThreeUnitValueLead) {
        value=((leadUnit-kMinTwoUnitValueLead)<<16)|chars.charAt(pos);
    } else {
        value=(chars.charAt(pos)<<16)|chars.charAt(pos+1);
    }
    return value;
}
// Advances past the trailing units of a compact value whose lead unit is known.
private static int skipValue(int pos, int leadUnit) {
    if(leadUnit>=kMinTwoUnitValueLead) {
        if(leadUnit<kThreeUnitValueLead) {
            ++pos;
        } else {
            pos+=2;
        }
    }
    return pos;
}
// Advances past a whole compact value starting at its lead unit.
private static int skipValue(CharSequence chars, int pos) {
    int leadUnit=chars.charAt(pos++);
    return skipValue(pos, leadUnit&0x7fff);
}

// Reads a compact intermediate (node) value whose lead unit is shared
// with a branch or linear-match node.
private static int readNodeValue(CharSequence chars, int pos, int leadUnit) {
    assert(kMinValueLead<=leadUnit && leadUnit<kValueIsFinal);
    int value;
    if(leadUnit<kMinTwoUnitNodeValueLead) {
        value=(leadUnit>>6)-1;
    } else if(leadUnit<kThreeUnitNodeValueLead) {
        value=(((leadUnit&0x7fc0)-kMinTwoUnitNodeValueLead)<<10)|chars.charAt(pos);
    } else {
        value=(chars.charAt(pos)<<16)|chars.charAt(pos+1);
    }
    return value;
}
// Advances past the trailing units of a compact node value.
private static int skipNodeValue(int pos, int leadUnit) {
    assert(kMinValueLead<=leadUnit && leadUnit<kValueIsFinal);
    if(leadUnit>=kMinTwoUnitNodeValueLead) {
        if(leadUnit<kThreeUnitNodeValueLead) {
            ++pos;
        } else {
            pos+=2;
        }
    }
    return pos;
}

// Reads a variable-length delta at pos and returns the jump target position.
private static int jumpByDelta(CharSequence chars, int pos) {
    int delta=chars.charAt(pos++);
    if(delta>=kMinTwoUnitDeltaLead) {
        if(delta==kThreeUnitDeltaLead) {
            delta=(chars.charAt(pos)<<16)|chars.charAt(pos+1);
            pos+=2;
        } else {
            delta=((delta-kMinTwoUnitDeltaLead)<<16)|chars.charAt(pos++);
        }
    }
    return pos+delta;
}
// Advances past a variable-length delta without computing the jump target.
private static int skipDelta(CharSequence chars, int pos) {
    int delta=chars.charAt(pos++);
    if(delta>=kMinTwoUnitDeltaLead) {
        if(delta==kThreeUnitDeltaLead) {
            pos+=2;
        } else {
            ++pos;
        }
    }
    return pos;
}

// Indexed by the lead unit's bit 15: 0 -> intermediate value, 1 -> final value.
private static Result[] valueResults_={ Result.INTERMEDIATE_VALUE, Result.FINAL_VALUE };

// Handles a branch node for both next(unit) and next(string).
private Result branchNext(int pos, int length, int inUnit) {
    // Branch according to the current unit.
    if(length==0) {
        length=chars_.charAt(pos++);
    }
    ++length;
    // The length of the branch is the number of units to select from.
    // The data structure encodes a binary search.
    while(length>kMaxBranchLinearSubNodeLength) {
        if(inUnit<chars_.charAt(pos++)) {
            length>>=1;
            pos=jumpByDelta(chars_, pos);
        } else {
            length=length-(length>>1);
            pos=skipDelta(chars_, pos);
        }
    }
    // Drop down to linear search for the last few units.
    // length>=2 because the loop body above sees length>kMaxBranchLinearSubNodeLength>=3
    // and divides length by 2.
    do {
        if(inUnit==chars_.charAt(pos++)) {
            Result result;
            int node=chars_.charAt(pos);
            if((node&kValueIsFinal)!=0) {
                // Leave the final value for getValue() to read.
                result=Result.FINAL_VALUE;
            } else {
                // Use the non-final value as the jump delta.
                ++pos;
                // int delta=readValue(pos, node);  -- inlined below:
                int delta;
                if(node<kMinTwoUnitValueLead) {
                    delta=node;
                } else if(node<kThreeUnitValueLead) {
                    delta=((node-kMinTwoUnitValueLead)<<16)|chars_.charAt(pos++);
                } else {
                    delta=(chars_.charAt(pos)<<16)|chars_.charAt(pos+1);
                    pos+=2;
                }
                // end readValue()
                pos+=delta;
                node=chars_.charAt(pos);
                result= node>=kMinValueLead ? valueResults_[node>>15] : Result.NO_VALUE;
            }
            pos_=pos;
            return result;
        }
        --length;
        pos=skipValue(chars_, pos);
    } while(length>1);
    if(inUnit==chars_.charAt(pos++)) {
        pos_=pos;
        int node=chars_.charAt(pos);
        return node>=kMinValueLead ? valueResults_[node>>15] : Result.NO_VALUE;
    } else {
        stop();
        return Result.NO_MATCH;
    }
}

// Requires remainingLength_<0.
private Result nextImpl(int pos, int inUnit) {
    int node=chars_.charAt(pos++);
    for(;;) {
        if(node<kMinLinearMatch) {
            return branchNext(pos, node, inUnit);
        } else if(node<kMinValueLead) {
            // Match the first of length+1 units.
            int length=node-kMinLinearMatch;  // Actual match length minus 1.
            if(inUnit==chars_.charAt(pos++)) {
                remainingMatchLength_=--length;
                pos_=pos;
                return (length<0 && (node=chars_.charAt(pos))>=kMinValueLead) ?
                        valueResults_[node>>15] : Result.NO_VALUE;
            } else {
                // No match.
                break;
            }
        } else if((node&kValueIsFinal)!=0) {
            // No further matching units.
            break;
        } else {
            // Skip intermediate value.
            pos=skipNodeValue(pos, node);
            node&=kNodeTypeMask;
        }
    }
    stop();
    return Result.NO_MATCH;
}

// Helper functions for getUniqueValue().
// Recursively finds a unique value (or whether there is not a unique one)
// from a branch.
// uniqueValue: On input, same as for getUniqueValue()/findUniqueValue().
// On return, if not 0, then bits 63..33 contain the updated non-negative pos.
private static long findUniqueValueFromBranch(CharSequence chars, int pos, int length,
                                              long uniqueValue) {
    while(length>kMaxBranchLinearSubNodeLength) {
        ++pos;  // ignore the comparison unit
        uniqueValue=findUniqueValueFromBranch(chars, jumpByDelta(chars, pos), length>>1, uniqueValue);
        if(uniqueValue==0) {
            return 0;
        }
        length=length-(length>>1);
        pos=skipDelta(chars, pos);
    }
    do {
        ++pos;  // ignore a comparison unit
        // handle its value
        int node=chars.charAt(pos++);
        boolean isFinal=(node&kValueIsFinal)!=0;
        node&=0x7fff;
        int value=readValue(chars, pos, node);
        pos=skipValue(pos, node);
        if(isFinal) {
            if(uniqueValue!=0) {
                if(value!=(int)(uniqueValue>>1)) {
                    return 0;
                }
            } else {
                uniqueValue=((long)value<<1)|1;
            }
        } else {
            uniqueValue=findUniqueValue(chars, pos+value, uniqueValue);
            if(uniqueValue==0) {
                return 0;
            }
        }
    } while(--length>1);
    // ignore the last comparison byte
    return ((long)(pos+1)<<33)|(uniqueValue&0x1ffffffffL);
}

// Recursively finds a unique value (or whether there is not a unique one)
// starting from a position on a node lead unit.
// uniqueValue: If there is one, then bits 32..1 contain the value and bit 0 is set.
// Otherwise, uniqueValue is 0. Bits 63..33 are ignored.
private static long findUniqueValue(CharSequence chars, int pos, long uniqueValue) {
    int node=chars.charAt(pos++);
    for(;;) {
        if(node<kMinLinearMatch) {
            if(node==0) {
                node=chars.charAt(pos++);
            }
            uniqueValue=findUniqueValueFromBranch(chars, pos, node+1, uniqueValue);
            if(uniqueValue==0) {
                return 0;
            }
            pos=(int)(uniqueValue>>>33);
            node=chars.charAt(pos++);
        } else if(node<kMinValueLead) {
            // linear-match node
            pos+=node-kMinLinearMatch+1;  // Ignore the match units.
            node=chars.charAt(pos++);
        } else {
            boolean isFinal=(node&kValueIsFinal)!=0;
            int value;
            if(isFinal) {
                value=readValue(chars, pos, node&0x7fff);
            } else {
                value=readNodeValue(chars, pos, node);
            }
            if(uniqueValue!=0) {
                if(value!=(int)(uniqueValue>>1)) {
                    return 0;
                }
            } else {
                uniqueValue=((long)value<<1)|1;
            }
            if(isFinal) {
                return uniqueValue;
            }
            pos=skipNodeValue(pos, node);
            node&=kNodeTypeMask;
        }
    }
}

// Helper functions for getNextChars().
// getNextChars() when pos is on a branch node.
private static void getNextBranchChars(CharSequence chars, int pos, int length, Appendable out) {
    while(length>kMaxBranchLinearSubNodeLength) {
        ++pos;  // ignore the comparison unit
        getNextBranchChars(chars, jumpByDelta(chars, pos), length>>1, out);
        length=length-(length>>1);
        pos=skipDelta(chars, pos);
    }
    do {
        append(out, chars.charAt(pos++));
        pos=skipValue(chars, pos);
    } while(--length>1);
    append(out, chars.charAt(pos));
}

// Wraps Appendable's checked IOException in an unchecked RuntimeException.
private static void append(Appendable out, int c) {
    try {
        out.append((char)c);
    } catch(IOException e) {
        throw new RuntimeException(e);
    }
}

// CharsTrie data structure
//
// The trie consists of a series of char-serialized nodes for incremental
// Unicode string/char sequence matching. (char=16-bit unsigned integer)
// The root node is at the beginning of the trie data.
//
// Types of nodes are distinguished by their node lead unit ranges.
// After each node, except a final-value node, another node follows to
// encode match values or continue matching further units.
//
// Node types:
//  - Final-value node: Stores a 32-bit integer in a compact, variable-length format.
//    The value is for the string/char sequence so far.
//  - Match node, optionally with an intermediate value in a different compact format.
//    The value, if present, is for the string/char sequence so far.
//
//  Aside from the value, which uses the node lead unit's high bits:
//
//  - Linear-match node: Matches a number of units.
//  - Branch node: Branches to other nodes according to the current input unit.
//    The node unit is the length of the branch (number of units to select from)
//    minus 1. It is followed by a sub-node:
//    - If the length is at most kMaxBranchLinearSubNodeLength, then
//      there are length-1 (key, value) pairs and then one more comparison unit.
//      If one of the key units matches, then the value is either a final value for
//      the string so far, or a "jump" delta to the next node.
//      If the last unit matches, then matching continues with the next node.
//      (Values have the same encoding as final-value nodes.)
//    - If the length is greater than kMaxBranchLinearSubNodeLength, then
//      there is one unit and one "jump" delta.
//      If the input unit is less than the sub-node unit, then "jump" by delta to
//      the next sub-node which will have a length of length/2.
//      (The delta has its own compact encoding.)
//      Otherwise, skip the "jump" delta to the next sub-node
//      which will have a length of length-length/2.

// Match-node lead unit values, after masking off intermediate-value bits:

// 0000..002f: Branch node. If node!=0 then the length is node+1, otherwise
// the length is one more than the next unit.

// For a branch sub-node with at most this many entries, we drop down
// to a linear search.
/*package*/ static final int kMaxBranchLinearSubNodeLength=5;

// 0030..003f: Linear-match node, match 1..16 units and continue reading the next node.
/*package*/ static final int kMinLinearMatch=0x30;
/*package*/ static final int kMaxLinearMatchLength=0x10;

// Match-node lead unit bits 14..6 for the optional intermediate value.
// If these bits are 0, then there is no intermediate value.
// Otherwise, see the *NodeValue* constants below.
/*package*/ static final int kMinValueLead=kMinLinearMatch+kMaxLinearMatchLength;  // 0x0040
/*package*/ static final int kNodeTypeMask=kMinValueLead-1;  // 0x003f

// A final-value node has bit 15 set.
/*package*/ static final int kValueIsFinal=0x8000;

// Compact value: After testing and masking off bit 15, use the following thresholds.
/*package*/ static final int kMaxOneUnitValue=0x3fff;
/*package*/ static final int kMinTwoUnitValueLead=kMaxOneUnitValue+1;  // 0x4000
/*package*/ static final int kThreeUnitValueLead=0x7fff;
/*package*/ static final int kMaxTwoUnitValue=((kThreeUnitValueLead-kMinTwoUnitValueLead)<<16)-1;  // 0x3ffeffff

// Compact intermediate-value integer, lead unit shared with a branch or linear-match node.
/*package*/ static final int kMaxOneUnitNodeValue=0xff;
/*package*/ static final int kMinTwoUnitNodeValueLead=kMinValueLead+((kMaxOneUnitNodeValue+1)<<6);  // 0x4040
/*package*/ static final int kThreeUnitNodeValueLead=0x7fc0;
/*package*/ static final int kMaxTwoUnitNodeValue=
    ((kThreeUnitNodeValueLead-kMinTwoUnitNodeValueLead)<<10)-1;  // 0xfdffff

// Compact delta integers.
/*package*/ static final int kMaxOneUnitDelta=0xfbff;
/*package*/ static final int kMinTwoUnitDeltaLead=kMaxOneUnitDelta+1;  // 0xfc00
/*package*/ static final int kThreeUnitDeltaLead=0xffff;
/*package*/ static final int kMaxTwoUnitDelta=((kThreeUnitDeltaLead-kMinTwoUnitDeltaLead)<<16)-1;  // 0x03feffff

// Fixed value referencing the CharsTrie words.
private CharSequence chars_;
private int root_;

// Iterator variables.

// Pointer to next trie unit to read. NULL if no more matches.
private int pos_;
// Remaining length of a linear-match node, minus 1. Negative if not in such a node.
private int remainingMatchLength_;
}
/* * Copyright 2013 MovingBlocks * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.terasology.rendering.assets.mesh; import com.google.common.base.Charsets; import com.google.common.collect.Lists; import gnu.trove.list.TFloatList; import gnu.trove.list.TIntList; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.terasology.asset.AssetLoader; import org.terasology.math.geom.Vector2f; import org.terasology.math.geom.Vector3f; import org.terasology.math.geom.Vector3i; import org.terasology.module.Module; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.net.URL; import java.util.List; /** * Importer for Wavefront obj files. 
Supports core obj mesh data * * @author Immortius */ public class ObjMeshLoader implements AssetLoader<MeshData> { private static final Logger logger = LoggerFactory.getLogger(ObjMeshLoader.class); @Override public MeshData load(Module module, InputStream stream, List<URL> urls, List<URL> deltas) throws IOException { BufferedReader reader = new BufferedReader(new InputStreamReader(stream, Charsets.UTF_8)); List<Vector3f> rawVertices = Lists.newArrayList(); List<Vector3f> rawNormals = Lists.newArrayList(); List<Vector2f> rawTexCoords = Lists.newArrayList(); List<Vector3i[]> rawIndices = Lists.newArrayList(); // Gather data readMeshData(reader, rawVertices, rawNormals, rawTexCoords, rawIndices); // Determine face format; if (rawIndices.size() == 0) { throw new IOException("No index data"); } MeshData data = processData(rawVertices, rawNormals, rawTexCoords, rawIndices); if (data.getVertices() == null) { throw new IOException("No vertices define"); } //if (data.getNormals() == null || data.getNormals().size() != data.getVertices().size()) { // throw new IOException("The number of normals does not match the number of vertices."); //} if (data.getTexCoord0() == null || data.getTexCoord0().size() / 2 != data.getVertices().size() / 3) { throw new IOException("The number of tex coords does not match the number of vertices."); } return data; } private MeshData processData(List<Vector3f> rawVertices, List<Vector3f> rawNormals, List<Vector2f> rawTexCoords, List<Vector3i[]> rawIndices) throws IOException { MeshData result = new MeshData(); TFloatList vertices = result.getVertices(); TFloatList texCoord0 = result.getTexCoord0(); TFloatList normals = result.getNormals(); TIntList indices = result.getIndices(); int vertCount = 0; for (Vector3i[] face : rawIndices) { for (Vector3i indexSet : face) { if (indexSet.x > rawVertices.size()) { throw new IOException("Vertex index out of range: " + indexSet.x); } Vector3f vertex = rawVertices.get(indexSet.x - 1); vertices.add(vertex.x); 
vertices.add(vertex.y); vertices.add(vertex.z); if (indexSet.y != -1) { if (indexSet.y > rawTexCoords.size()) { throw new IOException("TexCoord index out of range: " + indexSet.y); } Vector2f texCoord = rawTexCoords.get(indexSet.y - 1); texCoord0.add(texCoord.x); texCoord0.add(1 - texCoord.y); } if (indexSet.z != -1) { if (indexSet.z > rawNormals.size()) { throw new IOException("Normal index out of range: " + indexSet.z); } Vector3f normal = rawNormals.get(indexSet.z - 1); normals.add(normal.x); normals.add(normal.y); normals.add(normal.z); } } for (int i = 0; i < face.length - 2; ++i) { indices.add(vertCount); indices.add(vertCount + i + 1); indices.add(vertCount + i + 2); } vertCount += face.length; } return result; } private void readMeshData(BufferedReader reader, List<Vector3f> rawVertices, List<Vector3f> rawNormals, List<Vector2f> rawTexCoords, List<Vector3i[]> rawIndices) throws IOException { String line = null; int lineNum = 0; try { while ((line = reader.readLine()) != null) { line = line.trim(); lineNum++; if (line.isEmpty()) { continue; } String[] prefixSplit = line.trim().split("\\s+", 2); String prefix = prefixSplit[0]; // Comment if ("#".equals(prefix)) { continue; } if (prefixSplit.length < 2) { throw new IOException(String.format("Incomplete statement")); } switch (prefix) { // Object name case "o": // Just skip the name break; // Vertex position case "v": { String[] floats = prefixSplit[1].trim().split("\\s+", 4); if (floats.length != 3) { throw new IOException("Bad statement"); } rawVertices.add(new Vector3f(Float.parseFloat(floats[0]), Float.parseFloat(floats[1]), Float.parseFloat(floats[2]))); break; } // Vertex texture coords case "vt": { String[] floats = prefixSplit[1].trim().split("\\s+", 4); if (floats.length < 2 || floats.length > 3) { throw new IOException("Bad statement"); } // Need to flip v coord, apparently rawTexCoords.add(new Vector2f(Float.parseFloat(floats[0]), Float.parseFloat(floats[1]))); break; } // Vertex normal case "vn": { 
String[] floats = prefixSplit[1].trim().split("\\s+", 4); if (floats.length != 3) { throw new IOException("Bad statement"); } rawNormals.add(new Vector3f(Float.parseFloat(floats[0]), Float.parseFloat(floats[1]), Float.parseFloat(floats[2]))); break; } // Material name (ignored) case "usemtl": break; // Smoothing group (not supported) case "s": { if (!"off".equals(prefixSplit[1]) && !"0".equals(prefixSplit[1])) { logger.warn("Smoothing groups not supported in obj import yet"); } break; } // Face (polygon) case "f": { String[] elements = prefixSplit[1].trim().split("\\s+"); Vector3i[] result = new Vector3i[elements.length]; for (int i = 0; i < elements.length; ++i) { String[] parts = elements[i].split("/", 4); if (parts.length > 3) { throw new IOException("Bad Statement"); } result[i] = new Vector3i(Integer.parseInt(parts[0]), -1, -1); if (parts.length > 1 && !parts[1].isEmpty()) { result[i].y = Integer.parseInt(parts[1]); } if (parts.length > 2 && !parts[2].isEmpty()) { result[i].z = Integer.parseInt(parts[2]); } } rawIndices.add(result); break; } default: logger.warn("Skipping unsupported obj statement on line {}:\"{}\"", lineNum, line); } } } catch (RuntimeException e) { throw new IOException(String.format("Failed to process line %d:\"%s\"", lineNum, line), e); } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.myfaces.application.jsp;

import org.apache.myfaces.shared.view.ViewResponseWrapper;

import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpServletResponseWrapper;
import java.io.ByteArrayOutputStream;
import java.io.CharArrayWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.Writer;
import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.Charset;
import java.nio.charset.CharsetDecoder;

/**
 * Response wrapper that buffers everything written through it (either via
 * the writer or via the output stream, but never both) so the buffered
 * content can later be flushed to the wrapped response or to another writer.
 *
 * @author Bruno Aranda (latest modification by $Author: struberg $)
 * @version $Revision: 1188643 $ $Date: 2011-10-25 08:13:09 -0500 (Tue, 25 Oct 2011) $
 */
public class ServletViewResponseWrapper extends HttpServletResponseWrapper implements ViewResponseWrapper
{
    private PrintWriter _writer;
    private CharArrayWriter _charArrayWriter;
    private int _status = HttpServletResponse.SC_OK;
    private WrappedServletOutputStream _byteArrayWriter;

    public ServletViewResponseWrapper(HttpServletResponse httpServletResponse)
    {
        super(httpServletResponse);
    }

    @Override
    public void sendError(int status) throws IOException
    {
        super.sendError(status);
        _status = status;
    }

    @Override
    public void sendError(int status, String errorMessage) throws IOException
    {
        super.sendError(status, errorMessage);
        _status = status;
    }

    @Override
    public void setStatus(int status)
    {
        super.setStatus(status);
        _status = status;
    }

    @Override
    public void setStatus(int status, String errorMessage)
    {
        super.setStatus(status, errorMessage);
        _status = status;
    }

    /**
     * @return the last status code set or sent on this response
     */
    public int getStatus()
    {
        return _status;
    }

    /**
     * Flushes the buffered content to the wrapped response and resets
     * the buffer.
     */
    public void flushToWrappedResponse() throws IOException
    {
        if (_charArrayWriter != null)
        {
            _charArrayWriter.writeTo(getResponse().getWriter());
            _charArrayWriter.reset();
            _writer.flush();
        }
        else if (_byteArrayWriter != null)
        {
            // MYFACES-1955 cannot call getWriter() after getOutputStream()
            // _byteArrayWriter is not null only if getOutputStream() was called
            // before. This method is called from f:view to flush data before tag
            // start, or if an error page is flushed after dispatch.
            // A resource inside /faces/* (see MYFACES-1815) is handled on flushToWriter.
            // If response.getOuputStream() was called before, an IllegalStateException
            // is raised on response.getWriter(), so we should try through stream.
            try
            {
                _byteArrayWriter.writeTo(getResponse().getWriter(), getResponse().getCharacterEncoding());
            }
            catch (IllegalStateException e)
            {
                getResponse().getOutputStream().write(_byteArrayWriter.toByteArray());
            }
            _byteArrayWriter.reset();
            _byteArrayWriter.flush();
        }
    }

    /**
     * Flushes the buffered content to the given writer and resets the buffer.
     *
     * @param writer   the target writer
     * @param encoding charset used to decode buffered bytes; may be null for
     *                 the platform default (only relevant for the stream buffer)
     */
    public void flushToWriter(Writer writer, String encoding) throws IOException
    {
        if (_charArrayWriter != null)
        {
            _charArrayWriter.writeTo(writer);
            _charArrayWriter.reset();
            _writer.flush();
        }
        else if (_byteArrayWriter != null)
        {
            _byteArrayWriter.writeTo(writer, encoding);
            _byteArrayWriter.reset();
            _byteArrayWriter.flush();
        }
        writer.flush();
    }

    @Override
    public ServletOutputStream getOutputStream() throws IOException
    {
        // Per the Servlet spec, getOutputStream() and getWriter() are
        // mutually exclusive on the same response.
        if (_charArrayWriter != null)
        {
            throw new IllegalStateException();
        }
        if (_byteArrayWriter == null)
        {
            _byteArrayWriter = new WrappedServletOutputStream();
        }
        return _byteArrayWriter;
    }

    @Override
    public PrintWriter getWriter() throws IOException
    {
        // Per the Servlet spec, getOutputStream() and getWriter() are
        // mutually exclusive on the same response.
        if (_byteArrayWriter != null)
        {
            throw new IllegalStateException();
        }
        if (_writer == null)
        {
            _charArrayWriter = new CharArrayWriter(4096);
            _writer = new PrintWriter(_charArrayWriter);
        }
        return _writer;
    }

    @Override
    public void reset()
    {
        if (_charArrayWriter != null)
        {
            _charArrayWriter.reset();
        }
    }

    /**
     * @return the buffered character content, or null if the writer
     *         buffer was never used (callers rely on the null contract)
     */
    @Override
    public String toString()
    {
        if (_charArrayWriter != null)
        {
            return _charArrayWriter.toString();
        }
        return null;
    }

    /**
     * ServletOutputStream that buffers all bytes in memory.
     */
    static class WrappedServletOutputStream extends ServletOutputStream
    {
        private WrappedByteArrayOutputStream _byteArrayOutputStream;

        public WrappedServletOutputStream()
        {
            _byteArrayOutputStream = new WrappedByteArrayOutputStream(1024);
        }

        @Override
        public void write(int i) throws IOException
        {
            _byteArrayOutputStream.write(i);
        }

        public byte[] toByteArray()
        {
            return _byteArrayOutputStream.toByteArray();
        }

        /**
         * Write the data of this stream to the writer, using
         * the charset encoding supplied or if null the default charset.
         *
         * @param out the target writer
         * @param encoding charset name used to decode the buffered bytes
         * @throws IOException if decoding or writing fails
         */
        private void writeTo(Writer out, String encoding) throws IOException
        {
            // Get the charset based on the encoding or return the default if
            // encoding == null
            Charset charset = (encoding == null) ? Charset.defaultCharset() : Charset.forName(encoding);
            CharsetDecoder decoder = charset.newDecoder();
            CharBuffer decodedBuffer = decoder.decode(
                    ByteBuffer.wrap(_byteArrayOutputStream.getInnerArray(),
                            0, _byteArrayOutputStream.getInnerCount()));
            if (decodedBuffer.hasArray())
            {
                // Only write the chars actually decoded: the buffer's backing
                // array can be larger than its limit, so writing the whole
                // array would emit trailing garbage characters.
                out.write(decodedBuffer.array(),
                        decodedBuffer.arrayOffset() + decodedBuffer.position(),
                        decodedBuffer.remaining());
            }
            else
            {
                // No accessible backing array; copy through a String instead
                // of silently dropping the content.
                out.write(decodedBuffer.toString());
            }
        }

        public void reset()
        {
            _byteArrayOutputStream.reset();
        }

        /**
         * This Wrapper is used to provide additional methods to
         * get the buf and count variables, to use it to decode
         * in WrappedServletOutputStream.writeTo and avoid buffer
         * duplication.
         */
        static class WrappedByteArrayOutputStream extends ByteArrayOutputStream
        {
            public WrappedByteArrayOutputStream()
            {
                super();
            }

            public WrappedByteArrayOutputStream(int size)
            {
                super(size);
            }

            private byte[] getInnerArray()
            {
                return buf;
            }

            private int getInnerCount()
            {
                return count;
            }
        }
    }
}
/**
 * Copyright 2014 Confluent Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.confluent.kafka.schemaregistry.storage;

import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.errors.RecordTooLargeException;
import org.apache.kafka.common.errors.WakeupException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.List;
import java.util.Properties;
import java.util.Arrays;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantLock;

import io.confluent.kafka.schemaregistry.storage.exceptions.SerializationException;
import io.confluent.kafka.schemaregistry.storage.exceptions.StoreException;
import io.confluent.kafka.schemaregistry.storage.exceptions.StoreTimeoutException;
import io.confluent.kafka.schemaregistry.storage.serialization.Serializer;
import kafka.utils.ShutdownableThread;

/**
 * Thread that reads schema registry state from the Kafka compacted topic and modifies
 * the local store to be consistent.
 *
 * On startup, this thread will always read from the beginning of the topic. We assume
 * the topic will always be small, hence the startup time to read the topic won't take
 * too long. Because the topic is always read from the beginning, the consumer never
 * commits offsets.
 */
public class KafkaStoreReaderThread<K, V> extends ShutdownableThread {

  private static final Logger log = LoggerFactory.getLogger(KafkaStoreReaderThread.class);

  private final String topic;
  private final TopicPartition topicPartition;
  private final String groupId;
  private final StoreUpdateHandler<K, V> storeUpdateHandler;
  private final Serializer<K, V> serializer;
  private final Store<K, V> localStore;
  // Guards offsetInSchemasTopic; waiters in waitUntilOffset block on offsetReachedThreshold
  private final ReentrantLock offsetUpdateLock;
  private final Condition offsetReachedThreshold;
  private Consumer<byte[], byte[]> consumer;
  private long offsetInSchemasTopic = -1L;
  // Noop key is only used to help reliably determine last offset; reader thread ignores
  // messages with this key
  private final K noopKey;

  /**
   * Creates the reader thread and its dedicated consumer, verifies the backing topic
   * exists with exactly one partition, and positions the consumer at the beginning.
   *
   * @param bootstrapBrokers   Kafka bootstrap servers list
   * @param topic              single-partition compacted topic backing the store
   * @param groupId            consumer group id (offsets are never committed)
   * @param storeUpdateHandler callback invoked after each local-store mutation
   * @param serializer         key/value (de)serializer for topic records
   * @param localStore         store kept consistent with the topic contents
   * @param noopKey            sentinel key whose records only advance the offset
   * @throws IllegalArgumentException if the topic does not exist
   * @throws IllegalStateException    if the topic has more than one partition
   */
  public KafkaStoreReaderThread(String bootstrapBrokers,
                                String topic,
                                String groupId,
                                StoreUpdateHandler<K, V> storeUpdateHandler,
                                Serializer<K, V> serializer,
                                Store<K, V> localStore,
                                K noopKey) {
    super("kafka-store-reader-thread-" + topic, false);  // this thread is not interruptible
    offsetUpdateLock = new ReentrantLock();
    offsetReachedThreshold = offsetUpdateLock.newCondition();
    this.topic = topic;
    this.groupId = groupId;
    this.storeUpdateHandler = storeUpdateHandler;
    this.serializer = serializer;
    this.localStore = localStore;
    this.noopKey = noopKey;

    Properties consumerProps = new Properties();
    consumerProps.put(ConsumerConfig.GROUP_ID_CONFIG, this.groupId);
    consumerProps.put(ConsumerConfig.CLIENT_ID_CONFIG, "KafkaStore-reader-" + this.topic);
    consumerProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapBrokers);
    consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    // Never commit: the topic is re-read from the beginning on every startup
    consumerProps.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
    consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
                      org.apache.kafka.common.serialization.ByteArrayDeserializer.class);
    consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
                      org.apache.kafka.common.serialization.ByteArrayDeserializer.class);
    this.consumer = new KafkaConsumer<>(consumerProps);

    List<PartitionInfo> partitions = this.consumer.partitionsFor(this.topic);
    if (partitions == null || partitions.size() < 1) {
      throw new IllegalArgumentException("Unable to subscribe to the Kafka topic " + topic
                                         + " backing this data store. Topic may not exist.");
    } else if (partitions.size() > 1) {
      throw new IllegalStateException("Unexpected number of partitions in the " + topic
                                      + " topic. Expected 1 and instead got " + partitions.size());
    }

    this.topicPartition = new TopicPartition(topic, 0);
    this.consumer.assign(Arrays.asList(this.topicPartition));
    this.consumer.seekToBeginning(Arrays.asList(this.topicPartition));

    log.info("Initialized last consumed offset to {}", offsetInSchemasTopic);
    log.debug("Kafka store reader thread started with consumer properties {}", consumerProps);
  }

  /**
   * Polls one batch from the topic and applies each record to the local store.
   * Records with the noop key only advance the offset counter. Deserialization
   * failures skip the record; store failures are logged and the loop continues.
   */
  @Override
  public void doWork() {
    try {
      ConsumerRecords<byte[], byte[]> records = consumer.poll(Long.MAX_VALUE);
      for (ConsumerRecord<byte[], byte[]> record : records) {
        K messageKey = null;
        try {
          messageKey = this.serializer.deserializeKey(record.key());
        } catch (SerializationException e) {
          log.error("Failed to deserialize the schema or config key", e);
          continue;
        }

        if (messageKey.equals(noopKey)) {
          // If it's a noop, update local offset counter and do nothing else
          updateOffset(record.offset());
        } else {
          V message = null;
          try {
            message = record.value() == null
                      ? null
                      : serializer.deserializeValue(messageKey, record.value());
          } catch (SerializationException e) {
            log.error("Failed to deserialize a schema or config update", e);
            continue;
          }
          try {
            log.trace("Applying update ({},{}) to the local store", messageKey, message);
            // A null value is a tombstone: delete the key from the local store
            if (message == null) {
              localStore.delete(messageKey);
            } else {
              localStore.put(messageKey, message);
            }
            this.storeUpdateHandler.handleUpdate(messageKey, message);
            updateOffset(record.offset());
          } catch (StoreException se) {
            // FIX: include the cause and the missing spaces in the message
            log.error("Failed to add record from the Kafka topic " + topic
                      + " to the local store", se);
          }
        }
      }
    } catch (WakeupException we) {
      // do nothing because the thread is closing -- see shutdown()
    } catch (RecordTooLargeException rtle) {
      throw new IllegalStateException(
          "Consumer threw RecordTooLargeException. A schema has been written that "
          + "exceeds the default maximum fetch size.", rtle);
    } catch (RuntimeException e) {
      // FIX: attach the exception so the stack trace is not lost in the log
      log.error("KafkaStoreReader thread has died for an unknown reason.", e);
      throw new RuntimeException(e);
    }
  }

  /**
   * Records the latest consumed offset and wakes any threads blocked in
   * {@link #waitUntilOffset(long, long, TimeUnit)}.
   */
  private void updateOffset(long offset) {
    // FIX: acquire the lock before the try block so that a failed lock()
    // cannot reach the finally-unlock without holding the lock
    offsetUpdateLock.lock();
    try {
      offsetInSchemasTopic = offset;
      offsetReachedThreshold.signalAll();
    } finally {
      offsetUpdateLock.unlock();
    }
  }

  /**
   * Stops the reader: wakes the consumer out of poll(), closes the local store,
   * waits for the thread to exit, then closes the consumer.
   */
  @Override
  public void shutdown() {
    log.debug("Starting shutdown of KafkaStoreReaderThread.");
    super.initiateShutdown();
    if (consumer != null) {
      consumer.wakeup();
    }
    if (localStore != null) {
      localStore.close();
    }
    super.awaitShutdown();
    // FIX: guard against a null consumer, consistent with wakeup() above
    if (consumer != null) {
      consumer.close();
    }
    log.info("KafkaStoreReaderThread shutdown complete.");
  }

  /**
   * Blocks until the reader has consumed at least the given offset, or the
   * timeout elapses.
   *
   * @param offset   target offset in the schemas topic; must be non-negative
   * @param timeout  maximum time to wait
   * @param timeUnit unit of {@code timeout}
   * @throws StoreException        if {@code offset} is negative
   * @throws StoreTimeoutException if the offset is not reached within the timeout
   */
  public void waitUntilOffset(long offset, long timeout, TimeUnit timeUnit) throws StoreException {
    if (offset < 0) {
      throw new StoreException("KafkaStoreReaderThread can't wait for a negative offset.");
    }

    log.trace("Waiting to read offset {}. Currently at offset {}", offset, offsetInSchemasTopic);

    long offsetReached;
    offsetUpdateLock.lock();
    try {
      long timeoutNs = TimeUnit.NANOSECONDS.convert(timeout, timeUnit);
      while ((offsetInSchemasTopic < offset) && (timeoutNs > 0)) {
        try {
          timeoutNs = offsetReachedThreshold.awaitNanos(timeoutNs);
        } catch (InterruptedException e) {
          // Deliberately keep waiting: this offset barrier must not be broken
          // by a stray interrupt (original behavior preserved)
          log.debug("Interrupted while waiting for the background store reader thread to reach"
                    + " the specified offset: " + offset, e);
        }
      }
      // FIX: read the shared field while still holding the lock so the final
      // check and the wait loop see a consistent value
      offsetReached = offsetInSchemasTopic;
    } finally {
      offsetUpdateLock.unlock();
    }

    if (offsetReached < offset) {
      throw new StoreTimeoutException(
          "KafkaStoreReaderThread failed to reach target offset within the timeout interval. "
          + "targetOffset: " + offset + ", offsetReached: " + offsetReached
          + ", timeout(ms): " + TimeUnit.MILLISECONDS.convert(timeout, timeUnit));
    }
  }
}
/*
Author: Miguel Calejo
Contact: info@interprolog.com, www.interprolog.com
Copyright InterProlog Consulting / Renting Point Lda, Portugal 2014
Use and distribution, without any warranties, under the terms of the
Apache License, as per http://www.apache.org/licenses/LICENSE-2.0.html
*/
package com.declarativa.interprolog.gui;

import java.awt.BorderLayout;
import java.awt.Component;
import java.awt.Container;
import java.awt.Cursor;
import java.awt.Event;
import java.awt.FileDialog;
import java.awt.Font;
import java.awt.Rectangle;
import java.awt.Toolkit;
import java.awt.datatransfer.DataFlavor;
import java.awt.datatransfer.Transferable;
import java.awt.dnd.DropTarget;
import java.awt.dnd.DropTargetDragEvent;
import java.awt.dnd.DropTargetDropEvent;
import java.awt.dnd.DropTargetEvent;
import java.awt.dnd.DropTargetListener;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.event.WindowEvent;
import java.awt.event.WindowListener;
import java.io.File;
import java.util.HashMap;
import java.util.Map;
import java.util.Vector;
import javax.swing.AbstractAction;
import javax.swing.Action;
import javax.swing.JCheckBoxMenuItem;
import javax.swing.JFrame;
import javax.swing.JMenu;
import javax.swing.JMenuBar;
import javax.swing.JMenuItem;
import javax.swing.JOptionPane;
import javax.swing.JPopupMenu;
import javax.swing.JScrollPane;
import javax.swing.JSplitPane;
import javax.swing.JTextArea;
import javax.swing.KeyStroke;
import javax.swing.SwingUtilities;
import javax.swing.event.UndoableEditEvent;
import javax.swing.text.AttributeSet;
import javax.swing.text.BadLocationException;
import javax.swing.text.DefaultEditorKit;
import javax.swing.text.JTextComponent;
import javax.swing.undo.UndoManager;
import com.declarativa.interprolog.AbstractPrologEngine;
import com.declarativa.interprolog.ObjectExamplePair;
import com.declarativa.interprolog.PrologEngine;
import com.declarativa.interprolog.XSBPeer;
import com.declarativa.interprolog.util.IPException;

/** A simple Prolog listener, with a consult menu and an history mechanism.
This should be sub-classed, in order to define sendToProlog() */
@SuppressWarnings("serial")
public abstract class ListenerWindow extends JFrame implements WindowListener{
    // Bottom pane where the user types goals; Enter submits via sendToProlog()
    public JTextArea prologInput;
    // Top pane showing Prolog console output
    public StyledOutputPane prologOutput;
    protected JMenu historyMenu, fileMenu;
    protected CommandHistory commandHistory; // may encompass the previous someday
    // Files already consulted/loaded, used to build "reload" menu items
    Vector<LoadedFile> loadedFiles;
    // Number of open listener windows; the app exits when it reaches 0
    protected static int topLevelCount = 0;
    public AbstractPrologEngine engine = null;
    // If true, closing the window may terminate the whole application
    protected boolean mayExitApp;
    protected JSplitPane splitPane;
    // Remembers each component's cursor so restoreCursor can undo setWaitCursor
    private static Map<Component,Cursor> previousCursors = new HashMap<Component,Cursor>();
    protected InterruptAction interruptAction;

    /** Hook for subclasses to define styles on prologOutput; default does nothing. */
    protected void initializeOutputStyles() {}

    /** Hook for subclasses to render output attributes as a tooltip; default none. */
    public String attributesToTooltip(AttributeSet attributes) {
        return null;
    }

    public ListenerWindow(AbstractPrologEngine e){
        this(e,true,true);
    }

    public ListenerWindow(AbstractPrologEngine e, boolean autoDisplay, boolean mayExitApp){
        this(e,autoDisplay,mayExitApp,null);
    }

    /**
     * Builds the listener window around an available Prolog engine.
     * Registers this window with the engine (ipListenerWindow fact), builds the
     * UI and menus, and optionally shows the window.
     *
     * @param e           the Prolog engine; must be non-null
     * @param autoDisplay if true, make the window visible and focus the input
     * @param mayExitApp  if true, closing the window may exit the application
     * @param R           optional initial bounds; null keeps the defaults
     */
    public ListenerWindow(AbstractPrologEngine e, boolean autoDisplay, boolean mayExitApp, Rectangle R){
        super("PrologEngine listener (Swing)");
        if (e==null) throw new IPException("missing Prolog engine");
        if (R!=null) setBounds(R);
        // Block until the engine is ready before talking to it
        e.waitUntilAvailable();
        this.mayExitApp=mayExitApp;
        engine=e;
        interruptAction = new InterruptAction();
        initializeVisualizationObjects(e);
        // Assert a fresh ipListenerWindow/1 fact pointing at this Java object
        if (!e.deterministicGoal("retractall(ipListenerWindow(_)), asserta(ipListenerWindow("+e.registerJavaObject(this)+"))"))
            throw new IPException("could not assert ipListenerWindow");
        if (engine==null) dispose(); // no interface object permitted!
        else topLevelCount++;
        debug=engine.isDebug();
        loadedFiles = new Vector<LoadedFile>();
        constructWindowContents();
        constructMenu();
        addWindowListener(this);
        listenerGreeting(e);
        if (autoDisplay) {
            setVisible(true);
            focusInput();
        }
    }

    /**
     * @return the mayExitApp
     */
    public boolean isMayExitApp() {
        return mayExitApp;
    }

    /**
     * @param mayExitApp if true, attempt to exist the application when the user clicks the close box
     */
    public void setMayExitApp(boolean mayExitApp) {
        this.mayExitApp = mayExitApp;
    }

    /**
     * Consults the engine's visualization support file (from the jar or a
     * relative path) and teaches the engine the GUI example objects.
     */
    public static void initializeVisualizationObjects(AbstractPrologEngine engine){
        String VF = engine.getImplementationPeer().visualizationFilename();
        if (engine.getLoadFromJar()) engine.consultFromPackage(VF,ListenerWindow.class,true);
        else engine.consultRelative(VF,ListenerWindow.class);
        engine.teachMoreObjects(guiExamples());
    }

    /** Prints the welcome banner into the output pane. */
    protected void listenerGreeting(PrologEngine e){
        prologOutput.append("Welcome to an InterProlog top level\n"+e.getPrologVersion() + "\n",null);
    }

    // WindowListener methods
    public void windowOpened(WindowEvent e){}
    public void windowClosed(WindowEvent e){}
    public void windowIconified(WindowEvent e){}

    /** Disposes the window; if allowed, shuts the engine down and exits when no windows remain. */
    public void windowClosing(WindowEvent e){
        dispose();
        if (mayExitApp){
            engine.shutdown();
            topLevelCount--;
            if (topLevelCount <= 0) System.exit(0); // should check whether any relevant windows are changed...
        }
    }

    public void windowActivated(WindowEvent e){
        prologInput.requestFocus();
    }
    public void windowDeactivated(WindowEvent e){}
    public void windowDeiconified(WindowEvent e){}

    /** @return example object pairs taught to the engine for GUI term visualization */
    public static ObjectExamplePair[] guiExamples() {
        ObjectExamplePair[] examples = {
            PredicateTableModel.example(),
            TermListModel.example(),
            TermTreeModel.example(),
            new ObjectExamplePair("ArrayOfTermTreeModel",new TermTreeModel[0]),
            XSBTableModel.example(),
        };
        return examples;
    }

    /** Factory hook so subclasses can provide a specialized output pane. */
    protected StyledOutputPane makeOutputPane(){
        return new StyledOutputPane();
    }

    /** Installs edit (cut/copy/paste/undo) popup menus on both panes. */
    protected void addPopupEditMenus(){
        popupEditMenuFor(prologOutput);
        popupEditMenuFor(prologInput);
    }

    /**
     * Builds the window layout: output pane over input pane in a split pane,
     * Enter-key handling on the input, and drag-and-drop targets for consulting
     * dropped Prolog files.
     */
    protected void constructWindowContents(){
        Font prologFont = new Font("Courier",Font.PLAIN,12);
        Container c = getContentPane();
        c.setLayout(new BorderLayout());
        prologOutput = makeOutputPane();
        prologOutput.setFont(prologFont);
        prologOutput.setEditable(false);
        prologOutput.setToolTipText("Here's Prolog console output");
        //prologOutput.setLineWrap(true); // Swing used to crash with large amounts of text...
        prologOutput.setDoubleBuffered(true); // Use Swing double screen buffer
        prologOutput.getAccessibleContext().setAccessibleName("Prolog Console Output");
        initializeOutputStyles();
        JScrollPane piscroller = new JScrollPane();
        prologInput = new JTextArea(5,80);
        prologInput.setFont(prologFont);
        prologInput.setLineWrap(true);
        prologInput.setToolTipText("Prolog input: your-query <Enter>. Drop .P files here to reconsult them");
        prologInput.getAccessibleContext().setAccessibleName("Prolog Input");
        piscroller.getViewport().add(prologInput);
        addPopupEditMenus();
        setupCommandHistory();
        // Enter submits the goal; the event is consumed so no newline is inserted
        prologInput.addKeyListener(new KeyAdapter(){
            public void keyPressed(KeyEvent e){
                if (e.getKeyCode()==KeyEvent.VK_ENTER) {
                    e.consume();
                    sendToProlog();
                    scrollToBottom();
                    // erase after consuming to not annoy user
                    prologInput.setText("");
                }
                /* else if ((e.getKeyCode() == KeyEvent.VK_D && ! AbstractPrologEngine.isWindowsOS()) && e.isControlDown()){
                    //Unix end of file
                    e.consume();
                    sendToProlog("\004");
                    //System.out.println("Ctrl-D being sent as end of file");
                } else if ((e.getKeyCode() == KeyEvent.VK_Z && AbstractPrologEngine.isWindowsOS()) && e.isControlDown()) {
                    //...and Windows
                    e.consume();
                    sendToProlog("\0x1A");
                    //System.out.println("Ctrl-Z being sent as end of file");
                } */
            }
        });
        JScrollPane scroller = new JScrollPane();
        scroller.getViewport().add(prologOutput);
        new SmartScroller(scroller);
        splitPane = new JSplitPane (JSplitPane.VERTICAL_SPLIT, scroller, prologInput);
        c.add(BorderLayout.CENTER,splitPane);
        setSize(600,600);
        splitPane.setDividerLocation(450);
        //j.resetToPreferredSizes();
        validate();
        // One shared handler consults Prolog files dropped on either pane or the frame
        DropTargetListener dropHandler = new DropTargetListener(){
            public void dragOver(DropTargetDragEvent dtde){}
            public void dropActionChanged(DropTargetDragEvent dtde){}
            public void dragExit(DropTargetEvent dte){}
            public void drop(DropTargetDropEvent dtde){
                handlePrologInputDnD(dtde);
            }
            public void dragEnter(DropTargetDragEvent dtde){
                // System.out.println("dragEnter:"+dtde);
            }
        };
        new DropTarget(prologInput,dropHandler);
        new DropTarget(prologOutput,dropHandler);
        new DropTarget(this,dropHandler);
    }

    /** Creates the command history and binds it to the input field. */
    protected void setupCommandHistory() {
        commandHistory = new CommandHistory();
        commandHistory.addField(prologInput);
    }

    /** UndoManager exposing undo/redo Actions whose enabled state tracks canUndo/canRedo. */
    static class MyUndoManager extends UndoManager{
        Action undoAction, redoAction;
        MyUndoManager(){
            undoAction = new AbstractAction("Undo"){
                @Override
                public void actionPerformed(ActionEvent e) {
                    undo(); updateActions();
                }
            };
            redoAction = new AbstractAction("Redo"){
                @Override
                public void actionPerformed(ActionEvent e) {
                    redo(); updateActions();
                }
            };
            undoAction.setEnabled(false); redoAction.setEnabled(false);
        }
        // Keep the actions' enabled state in sync with the undo history
        protected void updateActions() {
            redoAction.setEnabled(canRedo());
            undoAction.setEnabled(canUndo());
        }
        public void undoableEditHappened(UndoableEditEvent e){
            super.undoableEditHappened(e);
            updateActions();
        }
    }

    /**
     * Builds a context menu with undo/redo/cut/copy/paste for the given text
     * component (editing entries only when it is editable), installs matching
     * keyboard shortcuts, and wires a mouse listener that shows the menu on the
     * platform popup trigger. Middle-button click deletes the current selection
     * (an X11-paste-like convenience).
     *
     * @param text the component to decorate
     * @return the popup menu created
     */
    public static JPopupMenu popupEditMenuFor(final JTextComponent text){
        final JPopupMenu menu = new JPopupMenu();
        if (text.isEditable()){
            MyUndoManager undoManager = new MyUndoManager();
            text.getDocument().addUndoableEditListener(undoManager);
            KeyStroke undoKey = KeyStroke.getKeyStroke(KeyEvent.VK_Z,Toolkit.getDefaultToolkit().getMenuShortcutKeyMask());
            KeyStroke redoKey = KeyStroke.getKeyStroke(KeyEvent.VK_Z,Toolkit.getDefaultToolkit().getMenuShortcutKeyMask()+Event.SHIFT_MASK);
            text.getActionMap().put("UNDO!", undoManager.undoAction);
            text.getInputMap().put(undoKey, "UNDO!");
            text.getActionMap().put("REDO!", undoManager.redoAction);
            text.getInputMap().put(redoKey, "REDO!");
            menu.add(undoManager.undoAction);
            menu.add(undoManager.redoAction);
            menu.addSeparator();
            Action cut = new DefaultEditorKit.CutAction(); // ALWAYS create action, so we can change its name without breaking parent maps
            cut.putValue(Action.NAME, "Cut");
            menu.add(cut);
            KeyStroke cutKey = KeyStroke.getKeyStroke(KeyEvent.VK_X,Toolkit.getDefaultToolkit().getMenuShortcutKeyMask());
            text.getInputMap().put(cutKey, "CUT!");
            text.getActionMap().put("CUT!", cut);
        }
        Action copy = new DefaultEditorKit.CopyAction();
        copy.putValue(Action.NAME, "Copy");
        menu.add(copy);
        KeyStroke copyKey = KeyStroke.getKeyStroke(KeyEvent.VK_C,Toolkit.getDefaultToolkit().getMenuShortcutKeyMask());
        text.getInputMap().put(copyKey, "COPY!");
        text.getActionMap().put("COPY!", copy);
        Action paste_ = null;
        if (text.isEditable()) paste_ = new DefaultEditorKit.PasteAction();
        final Action paste = paste_;
        if (paste!=null){
            paste.putValue(Action.NAME, "Paste");
            menu.add(paste);
            KeyStroke pasteKey = KeyStroke.getKeyStroke(KeyEvent.VK_V,Toolkit.getDefaultToolkit().getMenuShortcutKeyMask());
            text.getInputMap().put(pasteKey, "PASTE!");
            text.getActionMap().put("PASTE!", paste);
        }
        text.addMouseListener(new MouseAdapter(){
            public void mousePressed(MouseEvent e) {
                if (e.isPopupTrigger()){
                    menu.show(text, e.getX(), e.getY());
                } else if (e.getButton()==MouseEvent.BUTTON2 ){
                    if (paste != null){
                        //e.consume();
                        //paste.actionPerformed(new ActionEvent(text, 0, "Mid-button paste"));
                        // NOTE(review): this only removes the selection; the commented-out
                        // paste above suggests a paste was intended too -- confirm
                        if (text.getSelectionStart()<text.getSelectionEnd())
                            try {
                                text.getDocument().remove(text.getSelectionStart(), text.getSelectionEnd()-text.getSelectionStart());
                            } catch (BadLocationException e1) {
                                System.err.println("Could not remove selection");
                                e1.printStackTrace();
                            }
                    }
                }
            }
            public void mouseReleased(MouseEvent e) {
                // Check again on release: popup trigger is platform-dependent
                if (e.isPopupTrigger()){
                    menu.show(text, e.getX(), e.getY());
                }
            }
        });
        return menu;
    }

    /**
     * Handles a file drop: if the engine is idle and every dropped file is a
     * Prolog source file, consults them asynchronously on the EDT; otherwise
     * rejects the drop with an error message.
     */
    void handlePrologInputDnD(DropTargetDropEvent dtde){
        //System.out.println("drop:"+dtde);
        try{
            Transferable transferable = dtde.getTransferable();
            /* DataFlavor[] flavors = transferable.getTransferDataFlavors();
            for (int f=0;f<flavors.length;f++) System.out.println("Flavor:"+flavors[f]);*/
            int action = dtde.getDropAction();
            if (transferable.isDataFlavorSupported(DataFlavor.javaFileListFlavor)){
                if (engine.isIdle()){
                    dtde.acceptDrop(action);
                    final java.util.List<?> files = (java.util.List<?>)transferable.getTransferData(DataFlavor.javaFileListFlavor);
                    dtde.getDropTargetContext().dropComplete(true);
                    // Validate all files before consulting any of them
                    boolean allPs = true;
                    for (int f=0;f<files.size();f++){
                        if (!droppableFile((File)files.get(f))) {
                            allPs=false; break;
                        }
                    }
                    if(!allPs) errorMessage(badFilesDroppedMessage());
                    else {
                        prologOutput.append("\nHandling "+((files.size()>1 ? files.size()+" files...\n" : files.size()+" file...\n")),null);
                        // Consult later on the EDT so the drop callback returns promptly
                        Runnable r = new Runnable(){
                            public void run(){
                                boolean crashed = false;
                                Toolkit.getDefaultToolkit().sync();
                                for (int f=0;f<files.size() && !crashed;f++){
                                    File file = (File)files.get(f);
                                    if (!processDraggedFile(file)) crashed = true;
                                }
                                if (crashed) prologOutput.append("...terminated with errors.\n",null);
                                else prologOutput.append("...done.\n",null);
                                scrollToBottom();
                            }
                        };
                        SwingUtilities.invokeLater(r);
                    }
                } else {
                    dtde.rejectDrop();
                    errorMessage("You can not consult files while Prolog is working");
                }
            } else dtde.rejectDrop();
        } catch (Exception e){
            throw new IPException("Problem dropping:"+e);
        }
    }

    /** @return true for files this window accepts on drop (.p / .pl extensions) */
    protected boolean droppableFile(File f){
        String filename = f.getName().toLowerCase();
        return filename.endsWith(".p") || filename.endsWith(".pl");
    }

    /** Message shown when a drop contains non-Prolog files; subclasses may override. */
    protected String badFilesDroppedMessage(){
        return "All dragged files must be Prolog source files";
    }

    /**
     * Consults a dropped file and registers it for reloading.
     * @return true on success, false if the engine was busy or consulting failed
     */
    public boolean processDraggedFile(File f){
        if (!checkEngineAvailable()) return false;
        if (engine.consultAbsolute(f)) {
            addToReloaders(f,"consult");
            return true;
        } else {
            errorMessage("Problems reconsulting "+f.getName());
            return false;
        }
    }

    /** Beeps and shows a modal error dialog with the given message. */
    public void errorMessage(String m){
        beep();
        JOptionPane.showMessageDialog(this,m,"Error",JOptionPane.ERROR_MESSAGE);
    }

    /** @return true if the engine is available; otherwise warns the user and returns false */
    protected boolean checkEngineAvailable(){
        if (engine.isAvailable()) return true;
        JOptionPane.showMessageDialog(this,"Please end or pause the current top goal first","Warning",JOptionPane.WARNING_MESSAGE);
        return false;
    }

    /** Shows a generic error dialog when failed is true; returns the flag unchanged. */
    protected boolean checkFailed(boolean failed){
        if (failed) {
            beep();
            JOptionPane.showMessageDialog(this,"Unexpected failure. Please try again.","Error",JOptionPane.ERROR_MESSAGE);
        }
        return failed;
    }

    /** Builds the File menu (consult, and for XSB also load_dyn) and adds it to the menu bar. */
    protected JMenu constructFileMenu(JMenuBar mb){
        JMenu fileMenu;
        fileMenu = new JMenu("File"); fileMenu.setMnemonic('F');
        mb.add(fileMenu);
        addItemToMenu(fileMenu,"Consult...", new ActionListener(){
            public void actionPerformed(ActionEvent e){
                reconsultFile();
            }
        });
        // load_dyn is an XSB-specific loading mechanism
        if (engine.getImplementationPeer() instanceof XSBPeer)
            addItemToMenu(fileMenu,"Load dynamically...",new ActionListener(){
                public void actionPerformed(ActionEvent e){
                    load_dynFile();
                }
            });
        fileMenu.addSeparator();
        return fileMenu;
    }

    /** Assembles the full menu bar: File, Tools, optional Debug, and History. */
    void constructMenu(){
        JMenuBar mb; mb = new JMenuBar();
        fileMenu = constructFileMenu(mb);
        //mb.add(fileMenu);
        mb.add(constructToolsMenu());
        JMenu debugMenu = constructDebugMenu();
        if (debugMenu!=null) mb.add(debugMenu);
        historyMenu = new JMenu("History",true); historyMenu.setMnemonic('H');
        mb.add(historyMenu);
        addItemToMenu(historyMenu,"Clear Listener's Output",new ActionListener(){
            public void actionPerformed(ActionEvent e){
                prologOutput.setText("");
            }
        });
        addItemToMenu(historyMenu,"Clear Command History",new ActionListener(){
            public void actionPerformed(ActionEvent e){
                commandHistory.clear();
            }
        });
        historyMenu.addSeparator();
        // to avoid Swing bug handling key events
        commandHistory.addMenuAndField(historyMenu, historyMenu.getItemCount(), prologInput);
        setJMenuBar(mb);
    }

    /** Subclasses may return a Debug menu; null (default) means none is shown. */
    protected JMenu constructDebugMenu(){
        return null;
    }

    /** Builds the Tools menu with the standard InterProlog items. */
    protected JMenu constructToolsMenu(){
        JMenu toolMenu = new JMenu("Tools"); toolMenu.setMnemonic('T');
        addInterPrologItems(toolMenu);
        return toolMenu;
    }

    /** Adds engine-debugging toggle and object-spec viewer items to the given menu. */
    protected void addInterPrologItems(JMenu toolMenu){
        final JCheckBoxMenuItem debugging = new JCheckBoxMenuItem("Engine debugging");
        toolMenu.add(debugging);
        debugging.addActionListener(new ActionListener(){
            public void actionPerformed(ActionEvent e){
                engine.setDebug(debugging.isSelected());
            }
        });
        addItemToMenu(toolMenu,"Java Object Specifications",new ActionListener(){
            public void actionPerformed(ActionEvent e){
                if (engine.isAvailable()) engine.command("showObjectVariables");
                else beep();
            }
        });
        // toolMenu.add(interruptAction);
    }

    /** Action that brutally interrupts the engine after a user confirmation dialog. */
    protected class InterruptAction extends AbstractAction{
        InterruptAction(){
            super("Interrupt Engine");
            putValue(SHORT_DESCRIPTION,"Brutal interrupt of engine, may lead to inconsistent state of object streams or even hang the system");
        }
        @Override
        public void actionPerformed(ActionEvent e) {
            if (JOptionPane.showConfirmDialog(ListenerWindow.this, getValue(SHORT_DESCRIPTION), getValue(NAME)+" ?", JOptionPane.OK_CANCEL_OPTION) == JOptionPane.CANCEL_OPTION)
                return;
            engine.interrupt();
        }
    }

    /** Inserts a remembered history string into a text component when triggered. */
    class HistoryListener implements ActionListener{
        JTextComponent targetText;
        String memory;
        HistoryListener(JTextComponent t,String s){
            targetText=t; memory=s;
        }
        public void actionPerformed(ActionEvent e){
            targetText.replaceSelection(memory);
        }
    }

    /** Adds a plain item with the given label and handler to a menu. */
    public static JMenuItem addItemToMenu(JMenu menu,String item,ActionListener handler) {
        JMenuItem menuItem = new JMenuItem(item);
        menu.add(menuItem);
        menuItem.addActionListener(handler);
        return menuItem;
    }

    /** accelerator requires the command (Mac) or ctrl (other systems) modifier */
    public static JMenuItem addItemToMenu(JMenu menu, String item, int accelerator, ActionListener handler) {
        JMenuItem menuItem = new JMenuItem(item);
        menuItem.setAccelerator( KeyStroke.getKeyStroke(accelerator, Toolkit.getDefaultToolkit().getMenuShortcutKeyMask()));
        menu.add(menuItem);
        menuItem.addActionListener(handler);
        return menuItem;
    }

    /** accelerator requires the command (Mac) or ctrl (other systems) modifier */
    public static JMenuItem addItemToMenu(JPopupMenu menu, String item, int accelerator, ActionListener handler) {
        JMenuItem menuItem = new JMenuItem(item);
        menuItem.setAccelerator( KeyStroke.getKeyStroke(accelerator, Toolkit.getDefaultToolkit().getMenuShortcutKeyMask()));
        menu.add(menuItem);
        menuItem.addActionListener(handler);
        return menuItem;
    }

    /** Adds a plain item with the given label and handler to a popup menu. */
    public static JMenuItem addItemToMenu(JPopupMenu menu, String item, ActionListener handler) {
        JMenuItem menuItem = new JMenuItem(item);
        menu.add(menuItem);
        menuItem.addActionListener(handler);
        return menuItem;
    }

    public void sendToProlog(){
        sendToProlog(null);
    }

    /** Subclasses implement the actual goal submission to the engine. */
    public abstract void sendToProlog(String invisiblePostfix);

    static final int HISTORY_WIDTH = 90;

    /** Stores the current input in the command history, skipping trivial/duplicate goals. */
    protected void addToHistory(){
        String goal = prologInput.getText();
        if (goal.equals(";") || goal.equals("") || commandHistory.last().equals(goal)) return; // not worthy remembering
        commandHistory.addToHistory(goal);
    }

    /** Returns the object reacting to up/down arrows */
    public CommandHistory getCommandHistory(){
        return commandHistory;
    }

    /** A file already loaded plus the Prolog predicate ("consult"/"load_dyn") used to load it. */
    static class LoadedFile{
        File file; String method;
        LoadedFile(File file,String method){
            this.file=file; this.method=method;
        }
        // NOTE(review): this overloads equals rather than overriding equals(Object),
        // so Vector.contains in addToReloaders uses identity -- confirm intended
        public boolean equals(LoadedFile o){
            return file.equals(o.file) && method.equals(o.method);
        }
    }

    /** Registers a loaded file and adds a File-menu item that reloads it with the same method. */
    protected void addToReloaders(File file,String method){
        final LoadedFile lf = new LoadedFile(file,method);
        if (!loadedFiles.contains(lf)){
            loadedFiles.addElement(lf);
            addItemToMenu(fileMenu,file.getName(),new ActionListener(){
                public void actionPerformed(ActionEvent e){
                    if (!checkEngineAvailable()) return;
                    engine.command(lf.method+"('"+engine.unescapedFilePath(lf.file.getAbsolutePath())+ "')");
                }
            });
        }
    }

    /** Runs a Prolog command, returning false (and logging) on any exception. */
    public boolean successfulCommand(String s){
        try {
            return engine.command(s);
        } catch(Exception e){
            System.err.println("Trouble in successfulCommand for "+s+":\n"+e);
        }
        return false;
    }

    /** Lets the user pick a file with a FileDialog and consults it. */
    protected void reconsultFile(){
        if (!checkEngineAvailable()) return;
        String nome,directorio;
        File filetoreconsult=null;
        FileDialog d = new FileDialog(this,"Consult file...");
        d.setVisible(true);
        nome = d.getFile(); directorio = d.getDirectory();
        if (nome!=null) {
            filetoreconsult = new File(directorio,nome);
            if (engine.consultAbsolute(filetoreconsult))
                addToReloaders(filetoreconsult,"consult");
        }
    }

    /** For XSB only */
    protected void load_dynFile(){
        if (!checkEngineAvailable()) return;
        String nome,directorio;
        File filetoreconsult=null;
        FileDialog d = new FileDialog(this,"load_dyn file...");
        d.setVisible(true);
        nome = d.getFile(); directorio = d.getDirectory();
        if (nome!=null) {
            filetoreconsult = new File(directorio,nome);
            if (successfulCommand("load_dyn('"+engine.unescapedFilePath(filetoreconsult.getAbsolutePath())+ "')"))
                addToReloaders(filetoreconsult,"load_dyn");
        }
    }

    /** Selects all input text and gives it keyboard focus. */
    public void focusInput(){
        prologInput.selectAll();
        prologInput.requestFocus();
    }

    public void scrollToBottom(){
        prologOutput.scrollToBottom();
    }

    public static boolean debug = false;
    // Extra commands passed on the command line to start the Prolog engine
    protected static String[] prologStartCommands=null;
    public static boolean loadFromJar = true;
    /** By default, apps will redirect System.out. and System.err to some file */
    protected static boolean quietLog = true;
    public static String workingDir = null;
    /** If true, ignores Flora/Ergo, thus executing as Prolog Studio*/
    protected static boolean forcePrologStudio = false;

    /**
     * Parses common command-line arguments (-d, -printlog, -nojar, -prologstudio,
     * -initfile F, -basedir D) into the static flags above; the remaining args
     * become prologStartCommands.
     *
     * @param args the command-line arguments
     * @return the value given to -initfile, or null if absent
     */
    public static String commonMain(String args[]) {
        String initialFile = null;
        commonGreeting();
        if (args.length>=1){
            int i=0;
            while(i<args.length){
                if (args[i].toLowerCase().startsWith("-d")) {
                    debug=true; i++;
                } else if (args[i].toLowerCase().startsWith("-printlog")){
                    quietLog=false; i++;
                } else if (args[i].toLowerCase().startsWith("-nojar")){
                    loadFromJar=false; i++;
                } else if (args[i].toLowerCase().startsWith("-prologstudio")){
                    forcePrologStudio =true; i++;
                } else if (args[i].equals("-initfile")) {
                    initialFile = args[i + 1]; i = i + 2;
                } else if (args[i].equals("-basedir")) {
                    workingDir = args[i+1]; i = i + 2;
                } else {
                    // First unrecognized arg: the rest is for the Prolog engine
                    prologStartCommands = remainingArgs(args,i);
                    break;
                }
            }
        }
        // else throw new IPException("Missing arguments in command line");
        return initialFile;
    }

    /** Prints a welcome banner with user, InterProlog, Java and OS versions. */
    public static void commonGreeting(){
        System.out.println("Welcome "+System.getProperty("user.name")+" to InterProlog "+AbstractPrologEngine.version+" on Java "+
            System.getProperty("java.version") + " ("+ System.getProperty("java.vendor") + "), "+
            System.getProperty("os.name") + " "+ System.getProperty("os.version"));
    }

    public static String[] commandArgs(String[] args){
        return remainingArgs(args,0);
    }

    /** This handles args in a peculiar way to please the Windows batch file interpreter:
    the main executable/dir arg is the last one... its args are before it */
    public static String[] remainingArgs(String[] args,int first){
        if (args.length<first+1) throw new IPException("Missing arguments in command line");
        String[] cmds = new String[args.length-first];
        if (cmds.length==1) {
            cmds[0] = args[first];
            return cmds;
        }
        for (int i=first;i<args.length;i++){
            if (i==args.length-1) // last one is the Prolog executable/dir:
                cmds[0] = args[i];
            else cmds[i-first+1]=args[i];
        }
        return cmds;
    }

    public static void beep(){
        Toolkit.getDefaultToolkit().beep();
    }

    public JTextComponent getOutputPane(){
        return prologOutput;
    }

    public JTextComponent getInputPane(){
        return prologInput;
    }

    public AbstractPrologEngine getEngine(){
        return engine;
    }

    /** Shows a wait cursor on c, remembering its current cursor for restoreCursor. */
    public static void setWaitCursor(Component c) {
        if (c==null) return;
        Cursor wait = Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR);
        // Already waiting: do not overwrite the remembered original cursor
        if (c.getCursor().equals(wait)) return;
        previousCursors.put(c,c.getCursor());
        c.setCursor(wait);
    }

    /** Restores the cursor previously remembered by setWaitCursor, if any. */
    public static void restoreCursor(Component C) {
        if (C==null) return;
        Cursor previousCursor = previousCursors.remove(C);
        if(previousCursor != null) {
            C.setCursor(previousCursor);
        }
    }

    /** This method does nothing. Subclasses may have a Windows menu which they should add to W...*/
    public void addWindowsMenuTo(Container W){}
}
/*
 ForestFire -- a class within the Cellular Automaton Explorer.
 Copyright (C) 2007 David B. Bahr (http://academic.regis.edu/dbahr/)

 This program is free software; you can redistribute it and/or modify it
 under the terms of the GNU General Public License as published by the Free
 Software Foundation; either version 2 of the License, or (at your option)
 any later version.

 This program is distributed in the hope that it will be useful, but WITHOUT
 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 more details.

 You should have received a copy of the GNU General Public License along
 with this program; if not, write to the Free Software Foundation, Inc., 51
 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 */

package cellularAutomata.rules;

import java.awt.Color;
import java.awt.Shape;
import java.util.Random;

import cellularAutomata.cellState.model.CellState;
import cellularAutomata.cellState.view.CellStateView;
import cellularAutomata.rules.templates.FiniteObjectRuleTemplate;
import cellularAutomata.rules.util.RuleFolderNames;
import cellularAutomata.util.Coordinate;
import cellularAutomata.util.math.RandomSingleton;

/**
 * Rule that mimics a forest fire. Trees are born and mature at a specified
 * rate. Fires start with a certain probability (a lightning strike). Mature
 * trees will burn if a neighboring tree is burning; burning trees progress
 * through smoldering and ashes back to bare ground.
 * 
 * @author David Bahr
 */
public class ForestFire extends FiniteObjectRuleTemplate
{
    // a display name for this class
    private static final String RULE_NAME = "Forest Fire";

    // a description of property choices that give the best results for this
    // rule (e.g., which lattice, how many states, etc.)
    // (fixed: the original ran "repeat." and "Be patient." together)
    private static final String BEST_RESULTS = "<html> <body><b>"
        + RULE_NAME
        + ".</b>"
        + "<p> "
        + "<b>For best results</b>, start with a blank (empty) initial "
        + "state of all bare ground. Use a large two-dimensional "
        + "nearest-neighbor simulation. For example, try a 150 by 150 or larger "
        + "square (8 neighbor) lattice. From the empty bare ground, trees will "
        + "sprout and mature. Eventually lightning will start a fire. The trees "
        + "will regrow and the cycle of fire and regrowth will repeat. "
        + "Be patient. It takes time for the trees to mature and then burn."
        + leftClickInstructions + rightClickInstructions + "</body></html>";

    // a tooltip description for this class; constant for all instances, so
    // declared static final (was a mutable instance field)
    private static final String TOOLTIP = "<html> <body><b>" + RULE_NAME
        + ".</b> Forests grow, mature, and then eventually burn.</body></html>";

    // single shared random source for all probabilistic decisions
    private static Random random = RandomSingleton.getInstance();

    // probability that a tree grows from bare ground
    // (fields below renamed from the misspelled "Probabililty")
    private double newTreeProbability = 0.05;

    // probability that a tree matures to the next stage
    private double matureProbability = 0.01;

    // probability that a mature tree will die
    private double deathProbability = 0.0;

    // probability that a mature tree catches fire (lightning strike)
    private double lightningStrikeProbability = 0.00001;

    /**
     * Create a forest fire rule.
     * <p>
     * When the application loads rules by reflection it only needs display
     * metadata (name, tooltip, etc.), so the boolean lets the constructor
     * skip any memory-intensive initialization. This class keeps no heavy
     * state, so the flag is simply forwarded to the superclass.
     * 
     * @param minimalOrLazyInitialization
     *            When true, the constructor instantiates an object with as
     *            small a footprint as possible. When false, the rule is fully
     *            constructed. This variable should be passed to the super
     *            constructor <code>super(minimalOrLazyInitialization);</code>.
     */
    public ForestFire(boolean minimalOrLazyInitialization)
    {
        super(minimalOrLazyInitialization);
    }

    /**
     * Returns a list of permissible states. Each one has a unique string
     * representation of "Mature tree", "Seedling", "Burning", etc.
     * 
     * @return An array of allowed states for the cells.
     */
    protected Object[] getObjectArray()
    {
        Tree matureTree = new Tree(Tree.MATURE_TREE_STATE);
        Tree youngTree = new Tree(Tree.YOUNG_TREE_STATE);
        Tree saplingTree = new Tree(Tree.SAPLING_TREE_STATE);
        Tree seedlingTree = new Tree(Tree.SEEDLING_TREE_STATE);
        Tree bareGround = new Tree(Tree.BARE_GROUND_STATE);
        Tree burning = new Tree(Tree.BURNING_STATE);
        Tree smoldering = new Tree(Tree.SMOLDERING_STATE);
        Tree ashes = new Tree(Tree.ASHES_STATE);

        // the list of possible states; the array position is the state's
        // integer value used by the view to pick a display color
        Object[] listOfObjects = {bareGround, seedlingTree, saplingTree,
            youngTree, matureTree, ashes, smoldering, burning};

        return listOfObjects;
    }

    /**
     * Rules for forest fires. Trees mature in a progression from seedling to
     * mature. At any time, a mature tree may be struck by lightning and start
     * a fire. Neighboring mature trees will then catch fire.
     * 
     * @param cell
     *            The value of the cell being updated.
     * @param neighbors
     *            The value of the neighbors.
     * @param generation
     *            The current generation of the CA.
     * @return A new state for the cell.
     */
    protected Object objectRule(Object cell, Object[] neighbors, int generation)
    {
        Tree theTree = (Tree) cell;

        // The new value that we will return. We are careful never to mutate
        // theTree or the neighbors: other cells' updates in this generation
        // still read those values.
        Tree newValue = null;

        if(theTree.isMature())
        {
            // check to see if lightning strikes
            if(random.nextDouble() < this.lightningStrikeProbability)
            {
                newValue = new Tree(Tree.BURNING_STATE);
            }
            else
            {
                // check to see if any neighbor is burning; if so, this tree
                // catches fire too
                for(int i = 0; i < neighbors.length && newValue == null; i++)
                {
                    if(((Tree) neighbors[i]).isBurning())
                    {
                        newValue = new Tree(Tree.BURNING_STATE);
                    }
                }
            }

            // otherwise the mature tree stays mature or dies (doesn't burn
            // and doesn't grow any further)
            if(newValue == null)
            {
                if(random.nextDouble() < this.deathProbability)
                {
                    // dies
                    newValue = new Tree(Tree.BARE_GROUND_STATE);
                }
                else
                {
                    // lives
                    newValue = new Tree(Tree.MATURE_TREE_STATE);
                }
            }
        }
        else if(theTree.isBareGround())
        {
            // see if a tree should sprout
            if(random.nextDouble() < this.newTreeProbability)
            {
                newValue = new Tree(Tree.SEEDLING_TREE_STATE);
            }
            else
            {
                // stay the same
                newValue = new Tree(theTree);
            }
        }
        else if(theTree.isAlive())
        {
            // a live but immature tree (mature was handled above); see if it
            // should grow to the next stage
            if(random.nextDouble() < this.matureProbability)
            {
                newValue = new Tree(theTree.getNextState());
            }
            else
            {
                // stay the same
                newValue = new Tree(theTree);
            }
        }
        else
        {
            // burning -> smoldering -> ashes -> bare ground progression
            newValue = new Tree(theTree.getNextState());
        }

        // be safe
        if(newValue == null)
        {
            // stay the same
            newValue = new Tree(theTree);
        }

        return newValue;
    }

    /**
     * A brief description (written in HTML) that describes what parameters
     * will give best results for this rule (which lattice, how many states,
     * etc). The description will be displayed on the properties panel.
     * 
     * @return An HTML string describing how to get best results from this
     *         rule. May be null.
     */
    public String getBestResultsDescription()
    {
        return BEST_RESULTS;
    }

    /**
     * Gets an instance of the CellStateView class that will be used to display
     * cells being updated by this rule. The view must be able to display cell
     * states of the type returned by getCompatibleCellState().
     * 
     * @return An instance of the CellStateView.
     */
    public CellStateView getCompatibleCellStateView()
    {
        return new ForestFireView();
    }

    /**
     * When displayed for selection, the rule will be listed under the specific
     * folders specified here (in addition to the automatic "All rules" and
     * "User rules" folders).
     * 
     * @return A list of the folders in which this rule will be displayed for
     *         selection. May be null.
     */
    public String[] getDisplayFolderNames()
    {
        String[] folders = {RuleFolderNames.CYCLIC_RULES_FOLDER,
            RuleFolderNames.PROBABILISTIC_FOLDER,
            RuleFolderNames.PHYSICS_FOLDER, RuleFolderNames.SOCIAL_FOLDER};

        return folders;
    }

    /**
     * A brief one or two-word string describing the rule, appropriate for
     * display in a drop-down list.
     * 
     * @return A string no longer than 15 characters.
     */
    public String getDisplayName()
    {
        return RULE_NAME;
    }

    /**
     * A brief description (written in HTML) that describes this rule. The
     * description will be displayed as a tooltip.
     * 
     * @return An HTML string describing this rule.
     */
    public String getToolTipDescription()
    {
        return TOOLTIP;
    }

    /**
     * An object that represents a tree. The tree may be a seedling, sapling,
     * young, or mature. It may also be burning, smoldering, or ashes. It may
     * also be bare ground.
     * <p>
     * Declared static: it never touches the enclosing ForestFire instance, so
     * there is no reason to carry the hidden outer-instance reference.
     * 
     * @author David Bahr
     */
    private static class Tree
    {
        /**
         * The state of a mature tree.
         */
        public static final String MATURE_TREE_STATE = "Mature tree";

        /**
         * The state of a young tree.
         */
        public static final String YOUNG_TREE_STATE = "Young tree";

        /**
         * The state of a sapling tree.
         */
        public static final String SAPLING_TREE_STATE = "Sapling";

        /**
         * The state of a seedling tree.
         */
        public static final String SEEDLING_TREE_STATE = "Seedling";

        /**
         * The state of bare ground.
         */
        public static final String BARE_GROUND_STATE = "Bare ground";

        /**
         * The state of a burning tree.
         */
        public static final String BURNING_STATE = "Burning";

        /**
         * The state of a smoldering tree.
         */
        public static final String SMOLDERING_STATE = "Smoldering";

        /**
         * The state of ashes.
         */
        public static final String ASHES_STATE = "Ashes";

        // the current state of this object
        private String currentState = BARE_GROUND_STATE;

        // An array of all the states, listed in life-cycle order; the
        // position in this array drives getNextState().
        private String[] allStates = {BARE_GROUND_STATE, SEEDLING_TREE_STATE,
            SAPLING_TREE_STATE, YOUNG_TREE_STATE, MATURE_TREE_STATE,
            BURNING_STATE, SMOLDERING_STATE, ASHES_STATE};

        // An array of non-burning states, listed in order (used to pick a
        // random non-burning state)
        private String[] onlyNonBurningStates = {BARE_GROUND_STATE,
            SEEDLING_TREE_STATE, SAPLING_TREE_STATE, YOUNG_TREE_STATE,
            MATURE_TREE_STATE};

        // An array of tree states (no burning or bare ground states), listed
        // in order (used by isAlive())
        private String[] onlyTreeStates = {SEEDLING_TREE_STATE,
            SAPLING_TREE_STATE, YOUNG_TREE_STATE, MATURE_TREE_STATE};

        /**
         * Create a tree state corresponding to the specified string. Valid
         * strings are MATURE_TREE_STATE, YOUNG_TREE_STATE, BURNING_STATE,
         * etc. Any unrecognized string falls back to BARE_GROUND_STATE.
         */
        public Tree(String state)
        {
            // default to bare ground if the supplied state is unrecognized
            currentState = BARE_GROUND_STATE;
            for(int i = 0; i < allStates.length; i++)
            {
                if(allStates[i].equals(state))
                {
                    currentState = state;
                    break;
                }
            }
        }

        /**
         * Create a random tree (but not burning tree) state.
         */
        public Tree()
        {
            int choice = random.nextInt(onlyNonBurningStates.length);

            currentState = onlyNonBurningStates[choice];
        }

        /**
         * Create a Tree with the same state as the parameter.
         * 
         * @param treeState
         *            This new object will be assigned the same state as
         *            treeState.
         */
        public Tree(Tree treeState)
        {
            this.currentState = treeState.getState();
        }

        /**
         * Gets the string representing the next state that follows the
         * current state. For example, a seedling matures to a sapling, and a
         * sapling matures to a young tree, etc. The next state for a mature
         * tree is a burning tree. The next state for ashes is bare ground.
         * The next state for bare ground is a seedling.
         * 
         * @return The string representing the next state that follows the
         *         current state.
         */
        public String getNextState()
        {
            // find the current state's position in the "allStates" array
            int position = 0;
            while(!allStates[position].equals(currentState))
            {
                position++;
            }

            // the next state is the next position, wrapping to the beginning
            // if necessary
            position = (position + 1) % allStates.length;

            return allStates[position];
        }

        /**
         * Gets the string representing the current state.
         * 
         * @return The string representing the current state.
         */
        public String getState()
        {
            return currentState;
        }

        /**
         * True if the "tree" is actually just bare ground.
         * 
         * @return true if bare ground.
         */
        public boolean isBareGround()
        {
            return currentState.equals(BARE_GROUND_STATE);
        }

        /**
         * True if the tree is burning or smoldering.
         * 
         * @return true if burning or smoldering.
         */
        public boolean isBurning()
        {
            return currentState.equals(BURNING_STATE)
                || currentState.equals(SMOLDERING_STATE);
        }

        /**
         * True if the tree is mature.
         * 
         * @return true if mature.
         */
        public boolean isMature()
        {
            return currentState.equals(MATURE_TREE_STATE);
        }

        /**
         * True if the tree is alive.
         * 
         * @return true if the tree is alive (not burning and not bare
         *         ground).
         */
        public boolean isAlive()
        {
            for(int i = 0; i < onlyTreeStates.length; i++)
            {
                if(currentState.equals(onlyTreeStates[i]))
                {
                    return true;
                }
            }

            return false;
        }

        /**
         * Sets the state to the value of the specified Tree object.
         * 
         * @param treeState
         *            The value of this object will be set to the same state
         *            as treeState.
         */
        public void setStateToSameValue(Tree treeState)
        {
            currentState = treeState.toString();
        }

        /**
         * Gets the string representing the current state.
         * 
         * @return The string representing the current state.
         */
        public String toString()
        {
            return currentState;
        }
    }

    /**
     * A view that is specific to the Objects being displayed for this forest
     * fire rule. Tells the graphics how to display the Object stored by a
     * cell. Static nested class: it does not use the enclosing instance.
     * 
     * @author David Bahr
     */
    private static class ForestFireView extends CellStateView
    {
        // colors associated with each state, indexed by the state's position
        // in the array returned by getObjectArray(): bare ground (brown),
        // seedling through mature (darkening greens), ashes (black),
        // smoldering (red), burning (bright yellow)
        private Color[] treeColors = {new Color(162, 128, 94),
            new Color(0, 240, 0), new Color(0, 200, 0), new Color(0, 160, 0),
            new Color(0, 120, 0), Color.BLACK, Color.RED,
            Color.YELLOW.brighter()};

        /**
         * Create a view for the forest fire.
         */
        public ForestFireView()
        {
        }

        /**
         * The colors of each cell are usually based on the selected color
         * scheme, but this view uses fixed colors ("tree states" must stay
         * green no matter what), so color schemes are disabled.
         * 
         * @return false so the color schemes are disabled in the menu.
         */
        public boolean enableColorSchemes()
        {
            return false;
        }

        /**
         * Returns null so that the default shape is used (a square).
         * 
         * @see cellularAutomata.cellState.view.CellStateView#getAverageDisplayShape(
         *      cellularAutomata.cellState.model.CellState[], int, int,
         *      Coordinate)
         */
        public Shape getAverageDisplayShape(CellState[] states, int width,
            int height, Coordinate rowAndCol)
        {
            return null;
        }

        /**
         * Creates a display color based on the maturity of the tree and the
         * state of the fire.
         * 
         * @param state
         *            The cell state that will be displayed.
         * @param numStates
         *            If relevant, the number of possible states -- may be
         *            null which indicates that the currently active number of
         *            states should be used.
         * @param rowAndCol
         *            The row and col of the cell being displayed. May be
         *            ignored.
         * @return The color to be displayed.
         */
        public Color getColor(CellState state, Integer numStates,
            Coordinate rowAndCol)
        {
            int stateNumber = state.toInt();

            // the pink color is irrelevant -- it's in case something weird
            // happens, and the stateNumber is outside its correct range of 0
            // to treeColors.length
            Color color = Color.PINK;
            if(stateNumber >= 0 && stateNumber < treeColors.length)
            {
                color = treeColors[stateNumber];
            }

            return color;
        }

        /**
         * Returns null so that the default shape is used (a square).
         * 
         * @see cellularAutomata.cellState.view.CellStateView#getDisplayShape(CellState,
         *      int, int, Coordinate)
         */
        public Shape getDisplayShape(CellState state, int width, int height,
            Coordinate rowAndCol)
        {
            return null;
        }

        /**
         * When a CellState is "tagged" for extra visibility, this method is
         * called and creates an appropriate color that stands out.
         * 
         * @param originalColor
         *            The original color that will be modified with the tagged
         *            color.
         * @param taggingColor
         *            The tagging color used to modify the original color.
         * @return The original color, but modified with the tagged color for
         *         high visibility.
         * @see cellularAutomata.cellState.view.CellStateView#modifyColorWithTaggedColor(
         *      Color, Color)
         */
        public Color modifyColorWithTaggedColor(Color originalColor,
            Color taggingColor)
        {
            // boost blue (keep red and green) so the tag color is different
            // from the red and green used by this rule
            int newRed = taggingColor.getRed();
            int newGreen = taggingColor.getGreen();
            int newBlue = Math.max(180, taggingColor.getBlue());

            return new Color(newRed, newGreen, newBlue);
        }
    }
}
/*
 * Copyright 1999-2010 Luca Garulli (l.garulli--at--orientechnologies.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.orientechnologies.common.parser;

import java.util.ArrayList;

/**
 * String parser utility class: word extraction, quote-aware tokenization and
 * character skipping over raw character sequences.
 * 
 * @author Luca Garulli
 * 
 */
public class OStringParser {

  public static final String WHITE_SPACE = " ";
  public static final String COMMON_JUMP = " \r\n";

  /**
   * Trims the text, then extracts the first word starting at the given index.
   * Note the index is applied to the trimmed text.
   */
  public static String getWordFromString(String iText, final int iBeginIndex, final String ioSeparatorChars) {
    return getWord(iText.trim(), iBeginIndex, ioSeparatorChars);
  }

  /**
   * Extracts the first word starting at iBeginIndex, stopping at the first
   * separator character found outside a quoted ('...' or "...") region.
   * Quote characters themselves are kept in the returned word; a separator
   * never terminates an empty word.
   */
  public static String getWord(final CharSequence iText, final int iBeginIndex, final String ioSeparatorChars) {
    final StringBuilder word = new StringBuilder();

    // ' ' means "not inside a quoted string"; otherwise holds the opening quote
    char quoteChar = ' ';

    for (int cursor = iBeginIndex; cursor < iText.length(); ++cursor) {
      final char ch = iText.charAt(cursor);

      if (ch == '\'' || ch == '"') {
        if (quoteChar == ch)
          // same char as the begin of the string: close it
          quoteChar = ' ';
        else if (quoteChar == ' ')
          // start of a quoted string
          quoteChar = ch;
        // a different quote char inside a string is ordinary content
      } else if (quoteChar == ' ' && word.length() > 0 && ioSeparatorChars.indexOf(ch) > -1)
        // separator outside a string: the word is complete
        return word.toString();

      word.append(ch);
    }

    return word.toString();
  }

  /**
   * Tokenizes using the given separators, skipping whitespace/newlines and
   * excluding the quote characters from the resulting words.
   */
  public static String[] getWords(String iRecord, final String iSeparatorChars) {
    return getWords(iRecord, iSeparatorChars, false);
  }

  /**
   * Tokenizes using the given separators and the default jump characters
   * (space, newline, carriage return).
   */
  public static String[] getWords(String iRecord, final String iSeparatorChars, final boolean iIncludeStringSep) {
    return getWords(iRecord, iSeparatorChars, " \n\r", iIncludeStringSep);
  }

  /**
   * Splits the trimmed text into words. Separators are honored only outside
   * quoted strings and outside [...] / {...} groups; iJumpChars are skipped
   * when they would begin a word; a backslash escapes the following quote
   * character (the backslash itself is dropped from the output).
   * 
   * @param iText
   *          Text to tokenize
   * @param iSeparatorChars
   *          Characters that terminate a word
   * @param iJumpChars
   *          Characters ignored at the start of a word
   * @param iIncludeStringSep
   *          When true the surrounding quote characters are kept in the word
   * @return The words found, in order
   */
  public static String[] getWords(String iText, final String iSeparatorChars, final String iJumpChars,
      final boolean iIncludeStringSep) {
    iText = iText.trim();

    final ArrayList<String> words = new ArrayList<String>();
    final StringBuilder word = new StringBuilder();

    // ' ' means "not inside a quoted string"; otherwise holds the opening quote
    char quoteChar = ' ';
    int bracketDepth = 0; // nesting level of [...]
    int braceDepth = 0;   // nesting level of {...}
    boolean escaping = false;

    for (int cursor = 0; cursor < iText.length(); ++cursor) {
      final char ch = iText.charAt(cursor);

      if (!escaping && ch == '\\') {
        // swallow the backslash and treat the next char as escaped
        escaping = true;
        continue;
      }

      if (bracketDepth == 0 && braceDepth == 0 && !escaping && (ch == '\'' || ch == '"')) {
        if (quoteChar != ' ') {
          if (quoteChar == ch) {
            // same char as the begin of the string: close it and push the word
            quoteChar = ' ';
            if (iIncludeStringSep)
              word.append(ch);
            words.add(word.toString());
            word.setLength(0);
            continue;
          }
        } else {
          // start of a quoted string
          quoteChar = ch;
          if (iIncludeStringSep)
            word.append(ch);
          continue;
        }
      } else if (quoteChar == ' ') {
        if (ch == '[')
          bracketDepth++;
        else if (ch == ']')
          bracketDepth--;

        if (ch == '{')
          braceDepth++;
        else if (ch == '}')
          braceDepth--;
        else if (bracketDepth == 0 && braceDepth == 0 && iSeparatorChars.indexOf(ch) > -1) {
          // separator outside a string and outside any group: push the word
          if (word.length() > 0) {
            words.add(word.toString());
            word.setLength(0);
          }
          continue;
        }
      }

      // skip jump characters only when they would begin a word
      if (word.length() == 0 && iJumpChars.indexOf(ch) > -1)
        continue;

      word.append(ch);

      if (escaping)
        escaping = false;
    }

    // add the last word if any
    if (word.length() > 0)
      words.add(word.toString());

    return words.toArray(new String[words.size()]);
  }

  /**
   * Jump white spaces.
   * 
   * @param iText
   *          String to analyze
   * @param iCurrentPosition
   *          Current position in text
   * @return The new offset inside the string analyzed
   */
  public static int jumpWhiteSpaces(final CharSequence iText, final int iCurrentPosition) {
    return jump(iText, iCurrentPosition, WHITE_SPACE);
  }

  /**
   * Jump some characters reading from an offset of a String.
   * 
   * @param iText
   *          String to analyze
   * @param iCurrentPosition
   *          Current position in text
   * @param iJumpChars
   *          String as char array of chars to jump
   * @return The new offset inside the string analyzed
   */
  public static int jump(final CharSequence iText, int iCurrentPosition, final String iJumpChars) {
    final int size = iText.length();

    // advance while the current char is one of the chars to jump
    while (iCurrentPosition < size && iJumpChars.indexOf(iText.charAt(iCurrentPosition)) > -1)
      ++iCurrentPosition;

    return iCurrentPosition;
  }
}
package us.kbase.meme;

import java.util.HashMap;
import java.util.Map;
import javax.annotation.Generated;
import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;

// NOTE(review): this class is generated by jsonschema2pojo (see @Generated
// below) from the KBase spec file -- regenerate from the spec rather than
// hand-editing, or changes will be lost.

/**
 * <p>Original spec-file type: TomtomHit</p>
 * <pre>
 * Represents a particular TOMTOM hit
 * string query_pspm_id - id of query MemePSPM
 * string target_pspm_id - id of target MemePSPM
 * int optimal_offset - Optimal offset: the offset between the query and the target motif
 * float pvalue - p-value
 * float evalue - E-value
 * float qvalue - q-value
 * int overlap - Overlap: the number of positions of overlap between the two motifs.
 * string query_consensus - Query consensus sequence.
 * string target_consensus - Target consensus sequence.
 * string strand - Orientation: Orientation of target motif with respect to query motif.
 * @optional optimal_offset pvalue evalue qvalue overlap query_consensus target_consensus strand
 * </pre>
 * 
 */
@JsonInclude(JsonInclude.Include.NON_NULL)
@Generated("com.googlecode.jsonschema2pojo")
@JsonPropertyOrder({
    "query_pspm_id",
    "target_pspm_id",
    "optimal_offset",
    "pvalue",
    "evalue",
    "qvalue",
    "overlap",
    "query_consensus",
    "target_consensus",
    "strand"
})
public class TomtomHit {

    @JsonProperty("query_pspm_id")
    private String queryPspmId;
    @JsonProperty("target_pspm_id")
    private String targetPspmId;
    // Boxed Long/Double (not primitives) so the @optional fields can be
    // absent (null) and get skipped by the NON_NULL serialization include.
    @JsonProperty("optimal_offset")
    private Long optimalOffset;
    @JsonProperty("pvalue")
    private Double pvalue;
    @JsonProperty("evalue")
    private Double evalue;
    @JsonProperty("qvalue")
    private Double qvalue;
    @JsonProperty("overlap")
    private Long overlap;
    @JsonProperty("query_consensus")
    private String queryConsensus;
    @JsonProperty("target_consensus")
    private String targetConsensus;
    @JsonProperty("strand")
    private String strand;
    // catch-all for JSON properties not declared in the spec (see
    // @JsonAnyGetter/@JsonAnySetter below)
    private Map<String, Object> additionalProperties = new HashMap<String, Object>();

    @JsonProperty("query_pspm_id")
    public String getQueryPspmId() {
        return queryPspmId;
    }

    @JsonProperty("query_pspm_id")
    public void setQueryPspmId(String queryPspmId) {
        this.queryPspmId = queryPspmId;
    }

    // fluent setter variant, returns this for chaining
    public TomtomHit withQueryPspmId(String queryPspmId) {
        this.queryPspmId = queryPspmId;
        return this;
    }

    @JsonProperty("target_pspm_id")
    public String getTargetPspmId() {
        return targetPspmId;
    }

    @JsonProperty("target_pspm_id")
    public void setTargetPspmId(String targetPspmId) {
        this.targetPspmId = targetPspmId;
    }

    public TomtomHit withTargetPspmId(String targetPspmId) {
        this.targetPspmId = targetPspmId;
        return this;
    }

    @JsonProperty("optimal_offset")
    public Long getOptimalOffset() {
        return optimalOffset;
    }

    @JsonProperty("optimal_offset")
    public void setOptimalOffset(Long optimalOffset) {
        this.optimalOffset = optimalOffset;
    }

    public TomtomHit withOptimalOffset(Long optimalOffset) {
        this.optimalOffset = optimalOffset;
        return this;
    }

    @JsonProperty("pvalue")
    public Double getPvalue() {
        return pvalue;
    }

    @JsonProperty("pvalue")
    public void setPvalue(Double pvalue) {
        this.pvalue = pvalue;
    }

    public TomtomHit withPvalue(Double pvalue) {
        this.pvalue = pvalue;
        return this;
    }

    @JsonProperty("evalue")
    public Double getEvalue() {
        return evalue;
    }

    @JsonProperty("evalue")
    public void setEvalue(Double evalue) {
        this.evalue = evalue;
    }

    public TomtomHit withEvalue(Double evalue) {
        this.evalue = evalue;
        return this;
    }

    @JsonProperty("qvalue")
    public Double getQvalue() {
        return qvalue;
    }

    @JsonProperty("qvalue")
    public void setQvalue(Double qvalue) {
        this.qvalue = qvalue;
    }

    public TomtomHit withQvalue(Double qvalue) {
        this.qvalue = qvalue;
        return this;
    }

    @JsonProperty("overlap")
    public Long getOverlap() {
        return overlap;
    }

    @JsonProperty("overlap")
    public void setOverlap(Long overlap) {
        this.overlap = overlap;
    }

    public TomtomHit withOverlap(Long overlap) {
        this.overlap = overlap;
        return this;
    }

    @JsonProperty("query_consensus")
    public String getQueryConsensus() {
        return queryConsensus;
    }

    @JsonProperty("query_consensus")
    public void setQueryConsensus(String queryConsensus) {
        this.queryConsensus = queryConsensus;
    }

    public TomtomHit withQueryConsensus(String queryConsensus) {
        this.queryConsensus = queryConsensus;
        return this;
    }

    @JsonProperty("target_consensus")
    public String getTargetConsensus() {
        return targetConsensus;
    }

    @JsonProperty("target_consensus")
    public void setTargetConsensus(String targetConsensus) {
        this.targetConsensus = targetConsensus;
    }

    public TomtomHit withTargetConsensus(String targetConsensus) {
        this.targetConsensus = targetConsensus;
        return this;
    }

    @JsonProperty("strand")
    public String getStrand() {
        return strand;
    }

    @JsonProperty("strand")
    public void setStrand(String strand) {
        this.strand = strand;
    }

    public TomtomHit withStrand(String strand) {
        this.strand = strand;
        return this;
    }

    @JsonAnyGetter
    public Map<String, Object> getAdditionalProperties() {
        return this.additionalProperties;
    }

    @JsonAnySetter
    public void setAdditionalProperties(String name, Object value) {
        this.additionalProperties.put(name, value);
    }

    @Override
    public String toString() {
        // generated debug representation; not used for serialization
        return ((((((((((((((((((((((("TomtomHit"+" [queryPspmId=")+ queryPspmId)+", targetPspmId=")+ targetPspmId)+", optimalOffset=")+ optimalOffset)+", pvalue=")+ pvalue)+", evalue=")+ evalue)+", qvalue=")+ qvalue)+", overlap=")+ overlap)+", queryConsensus=")+ queryConsensus)+", targetConsensus=")+ targetConsensus)+", strand=")+ strand)+", additionalProperties=")+ additionalProperties)+"]");
    }

}
/*
 * Copyright 2017 David Karnok
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package hu.akarnokd.reactive4javaflow;

import hu.akarnokd.reactive4javaflow.errors.CompositeThrowable;
import hu.akarnokd.reactive4javaflow.functionals.AutoDisposable;
import hu.akarnokd.reactive4javaflow.fused.*;
import hu.akarnokd.reactive4javaflow.impl.*;
import hu.akarnokd.reactive4javaflow.impl.util.VolatileSizeArrayList;

import java.lang.invoke.*;
import java.util.*;
import java.util.concurrent.*;
import java.util.function.*;

/**
 * A {@link FolyamSubscriber} for unit tests that records every received item,
 * error and completion, supports awaiting the terminal signal via a latch,
 * optionally negotiates queue fusion with the upstream, and offers a fluent
 * set of {@code assertXxx} methods over the recorded state.
 *
 * <p>Lifecycle state (upstream subscription, deferred request amount) is
 * managed via {@link VarHandle} CAS operations so that protocol violations
 * (double onSubscribe, signals before onSubscribe) are detected and recorded
 * as errors rather than thrown.
 *
 * @param <T> the type of the items received
 */
public class TestConsumer<T> implements FolyamSubscriber<T>, AutoDisposable {

    // Items received via onNext, or drained from the fused queue in SYNC/ASYNC mode.
    final List<T> items;
    // Errors received via onError plus any protocol violations detected locally.
    final List<Throwable> errors;
    // Counts down exactly once when a terminal event (onError/onComplete or a
    // completed SYNC drain) arrives; awaitDone() blocks on this.
    final CountDownLatch cdl;
    // Number of onComplete calls observed; more than one is a protocol violation.
    volatile int completions;

    // The upstream subscription; set at most once via the UPSTREAM VarHandle CAS.
    Flow.Subscription upstream;
    static final VarHandle UPSTREAM = VH.find(MethodHandles.lookup(), TestConsumer.class, "upstream", Flow.Subscription.class);

    // Request amount accumulated before onSubscribe arrives; handed over to the
    // upstream (and zeroed) atomically once the subscription is set.
    long requested;
    static final VarHandle REQUESTED = VH.find(MethodHandles.lookup(), TestConsumer.class, "requested", Long.TYPE);

    // The upstream's queue when ASYNC fusion was established; drained in onNext.
    FusedQueue<T> qs;

    // The fusion mode to request in onSubscribe (0 = don't attempt fusion).
    int requestedFusionMode;
    // The fusion mode granted by the upstream; -1 when the upstream is not fuseable.
    int actualFusionMode;

    // Optional tag included in assertion-failure messages to identify this consumer.
    String tag;
    // Set when awaitDone() timed out; reported by fail() and assertNoTimeout().
    boolean timeout;

    /** Creates a TestConsumer with an unbounded initial request. */
    public TestConsumer() {
        this(Long.MAX_VALUE);
    }

    /**
     * Creates a TestConsumer with the given initial request amount, which is
     * issued to the upstream once onSubscribe arrives.
     * @param initialRequest the number of items to request upfront
     */
    public TestConsumer(long initialRequest) {
        this.items = new VolatileSizeArrayList<>();
        this.errors = new VolatileSizeArrayList<>();
        this.cdl = new CountDownLatch(1);
        this.requested = initialRequest;
    }

    @Override
    public final void onSubscribe(Flow.Subscription subscription) {
        if (subscription == null) {
            errors.add(new NullPointerException("subscription == null in TestConsumer"));
            return;
        }
        // CAS guards against a second onSubscribe; only the first one wins.
        if (UPSTREAM.compareAndSet(this, null, subscription)) {
            if (subscription instanceof FusedSubscription) {
                @SuppressWarnings("unchecked")
                FusedSubscription<T> qs = (FusedSubscription<T>)subscription;

                int f = requestedFusionMode;
                if (f != 0) {
                    int m = qs.requestFusion(f);
                    actualFusionMode = m;
                    if (m == FusedSubscription.SYNC) {
                        // SYNC fusion: drain the whole queue eagerly; a null poll
                        // means the source is done, which counts as completion.
                        T v;
                        for (;;) {
                            try {
                                v = qs.poll();
                            } catch (Throwable ex) {
                                close();
                                errors.add(ex);
                                cdl.countDown();
                                return;
                            }
                            if (v == null) {
                                completions++;
                                cdl.countDown();
                                return;
                            }
                            items.add(v);
                        }
                    }
                    else if (m == FusedSubscription.ASYNC) {
                        // ASYNC fusion: keep the queue; onNext acts as a drain signal.
                        this.qs = qs;
                    }
                }
            } else {
                // Upstream is not fuseable at all.
                actualFusionMode = -1;
            }
            // Hand over any request amount accumulated before onSubscribe,
            // atomically zeroing it so requestMore() afterwards goes direct.
            long r = (long)REQUESTED.getAndSet(this, 0L);
            if (r != 0L) {
                subscription.request(r);
            }
        } else {
            // Second onSubscribe: cancel the newcomer; only record a violation
            // if this consumer wasn't already cancelled.
            subscription.cancel();
            if (!SubscriptionHelper.isCancelled(this, UPSTREAM)) {
                errors.add(new IllegalStateException("onSubscribe called again in TestConsumer"));
            }
        }
    }

    @Override
    public void onNext(T item) {
        if (upstream == null) {
            // Record the protocol violation but keep consuming via a no-op subscription.
            UPSTREAM.compareAndSet(this, null, MissingSubscription.MISSING);
            errors.add(new IllegalStateException("onSubscribe was not called before onNext in TestConsumer"));
        }
        if (actualFusionMode > 0) {
            if (actualFusionMode == FusedSubscription.SYNC) {
                // SYNC mode was fully drained in onSubscribe; onNext must not happen.
                close();
                errors.add(new IllegalStateException("Should not call onNext in SYNC mode."));
            } else {
                // ASYNC mode: the item is in the fused queue; drain it here.
                T v;
                for (; ; ) {
                    try {
                        v = qs.poll();
                    } catch (Throwable ex) {
                        close();
                        qs.clear();
                        errors.add(ex);
                        cdl.countDown();
                        return;
                    }
                    if (v == null) {
                        break;
                    }
                    items.add(v);
                }
            }
        } else {
            if (item == null) {
                errors.add(new NullPointerException("item == null in TestConsumer"));
            } else {
                items.add(item);
            }
        }
    }

    @Override
    public void onError(Throwable throwable) {
        if (upstream == null) {
            UPSTREAM.compareAndSet(this, null, MissingSubscription.MISSING);
            errors.add(new IllegalStateException("onSubscribe was not called before onError in TestConsumer"));
        }
        if (throwable == null) {
            throwable = new NullPointerException("throwable == null in TestConsumer");
        }
        errors.add(throwable);
        cdl.countDown();
    }

    @Override
    public void onComplete() {
        if (upstream == null) {
            UPSTREAM.compareAndSet(this, null, MissingSubscription.MISSING);
            errors.add(new IllegalStateException("onSubscribe was not called before onComplete in TestConsumer"));
        }
        if (++completions > 1) {
            errors.add(new IllegalStateException("onComplete called again: " + completions));
        }
        cdl.countDown();
    }

    /** Cancels the upstream subscription (idempotent). */
    @Override
    public final void close() {
        SubscriptionHelper.cancel(this, UPSTREAM);
    }

    /**
     * Builds an AssertionError with the given message plus a summary of the
     * current state (item/error/completion counts, latch, timeout/cancel flags
     * and the optional tag); all recorded errors are attached as suppressed.
     */
    AssertionError fail(String message) {
        StringBuilder b = new StringBuilder();
        b.append(message);
        b.append(" (")
                .append("items: ").append(items.size())
                .append(", errors: ").append(errors.size())
                .append(", completions: ").append(completions)
                .append(", latch: ").append(cdl.getCount())
        ;
        if (timeout) {
            b.append(", timeout!");
        }
        if (SubscriptionHelper.isCancelled(this, UPSTREAM)) {
            b.append(", cancelled!");
        }
        if (tag != null) {
            b.append(", tag: ").append(tag);
        }
        b.append(")");
        AssertionError ex = new AssertionError(b.toString());
        int c = errors.size();
        for (int i = 0; i < c; i++) {
            ex.addSuppressed(errors.get(i));
        }
        return ex;
    }

    /** Sets the fusion mode to request in onSubscribe; call before subscribing. */
    public final TestConsumer<T> requestFusionMode(int mode) {
        this.requestedFusionMode = mode;
        return this;
    }

    /** Asserts that the upstream granted exactly the given fusion mode. */
    public final TestConsumer<T> assertFusionMode(int mode) {
        if (this.actualFusionMode != mode) {
            throw fail("Wrong fusion mode. Expected: " + fusionMode(mode) + ", Actual: " + fusionMode(actualFusionMode));
        }
        return this;
    }

    /** Renders a fusion-mode constant as a human-readable string for messages. */
    static String fusionMode(int mode) {
        if (mode == FusedSubscription.NONE) {
            return "NONE";
        }
        if (mode == FusedSubscription.SYNC) {
            return "SYNC";
        }
        if (mode == FusedSubscription.ASYNC) {
            return "ASYNC";
        }
        if (mode == -1) {
            return "Not supported";
        }
        return "??? " + mode;
    }

    /**
     * Waits up to the given time for the terminal event; on timeout the
     * consumer is cancelled and the timeout flag is set for later assertions.
     */
    public final TestConsumer<T> awaitDone(long timeout, TimeUnit unit) {
        try {
            if (!cdl.await(timeout, unit)) {
                this.timeout = true;
                close();
            }
        } catch (InterruptedException ex) {
            close();
            throw fail("Wait interrupted");
        }
        return this;
    }

    /** Asserts the received items equal the expected ones, in order. */
    @SafeVarargs
    public final TestConsumer<T> assertValues(T... expected) {
        int c = items.size();
        if (c != expected.length) {
            throw fail("Number of items differ. Expected: " + expected.length + ", Actual: " + c);
        }
        for (int i = 0; i < c; i++) {
            Object exp = expected[i];
            Object act = items.get(i);
            if (!Objects.equals(exp, act)) {
                throw fail("Item #" + i + " differs. Expected: " + valueAndClass(exp) + ", Actual: " + valueAndClass(act));
            }
        }
        return this;
    }

    /** Asserts no errors were recorded. */
    public final TestConsumer<T> assertNoErrors() {
        if (!errors.isEmpty()) {
            throw fail("Error(s) present.");
        }
        return this;
    }

    /** Asserts onComplete was never called. */
    public final TestConsumer<T> assertNotComplete() {
        if (completions != 0) {
            throw fail("Completed.");
        }
        return this;
    }

    /** Asserts onComplete was called exactly once. */
    public final TestConsumer<T> assertComplete() {
        int c = completions;
        if (c == 0) {
            throw fail("Not completed.");
        }
        if (c > 1) {
            throw fail("Multiple completions.");
        }
        return this;
    }

    /** Asserts onSubscribe has been called. */
    public final TestConsumer<T> assertOnSubscribe() {
        if (upstream == null) {
            throw fail("onSubscribe not called.");
        }
        return this;
    }

    /** Asserts exactly one error of the given type was recorded. */
    public final TestConsumer<T> assertError(Class<? extends Throwable> errorClass) {
        int c = errors.size();
        if (c == 0) {
            throw fail("No errors.");
        }
        for (int i = 0; i < c; i++) {
            if (errorClass.isInstance(errors.get(i))) {
                if (c == 1) {
                    return this;
                }
                throw fail("Error present but not alone.");
            }
        }
        throw fail("Error not present.");
    }

    /** Renders a value together with its class name for assertion messages. */
    static String valueAndClass(Object o) {
        if (o == null) {
            return "null";
        }
        return o.toString() + " (" + o.getClass().getSimpleName() + ")";
    }

    /** Asserts the sole recorded error carries exactly the given message. */
    public final TestConsumer<T> assertErrorMessage(String message) {
        int c = errors.size();
        if (c == 0) {
            throw fail("No errors.");
        }
        String msg = errors.get(0).getMessage();
        if (Objects.equals(message, msg)) {
            if (c != 1) {
                throw fail("Message present but other errors as well.");
            }
        } else {
            throw fail("Messages differ. Expected: " + message + ", Actual: " + msg);
        }
        return this;
    }

    /** Asserts a normal completion with exactly the expected items and no errors. */
    @SafeVarargs
    public final TestConsumer<T> assertResult(T... expected) {
        assertOnSubscribe();
        assertValues(expected);
        assertNoErrors();
        assertComplete();
        return this;
    }

    /** Asserts a failure of the given type after exactly the expected items. */
    @SafeVarargs
    public final TestConsumer<T> assertFailure(Class<? extends Throwable> errorClass, T... expected) {
        assertOnSubscribe();
        assertValues(expected);
        assertError(errorClass);
        assertNotComplete();
        return this;
    }

    /** Asserts a failure of the given type and message after the expected items. */
    @SafeVarargs
    public final TestConsumer<T> assertFailureAndMessage(Class<? extends Throwable> errorClass, String message, T... expected) {
        assertOnSubscribe();
        assertValues(expected);
        assertError(errorClass);
        assertErrorMessage(message);
        assertNotComplete();
        return this;
    }

    /** Asserts that nothing was received at all: no items, errors or completion. */
    public final TestConsumer<T> assertEmpty() {
        assertOnSubscribe();
        assertValues();
        assertNoErrors();
        assertNotComplete();
        return this;
    }

    /**
     * Busy-waits (sleeping delayStep millis per round, up to delayTotal millis)
     * until at least the expected number of items arrived or a terminal event
     * happened; interruption cancels the consumer and stops waiting.
     */
    public final TestConsumer<T> awaitCount(int expected, long delayStep, long delayTotal) {
        long start = System.currentTimeMillis();
        while (items.size() < expected && cdl.getCount() != 0 && start + delayTotal > System.currentTimeMillis()) {
            try {
                Thread.sleep(delayStep);
            } catch (InterruptedException ex) {
                close();
                break;
            }
        }
        return this;
    }

    /** Attaches a tag string that is included in assertion-failure messages. */
    public final TestConsumer<T> withTag(String tag) {
        this.tag = tag;
        return this;
    }

    /** Returns the tag set via withTag, or null. */
    public final String getTag() {
        return tag;
    }

    /**
     * Requests more items from the upstream (deferred if onSubscribe has not
     * arrived yet); forbidden once SYNC fusion has been established.
     */
    public final TestConsumer<T> requestMore(long n) {
        if (actualFusionMode == FusedSubscription.SYNC) {
            throw fail("Requesting in SYNC fused mode is forbidden.");
        }
        SubscriptionHelper.deferredRequest(this, UPSTREAM, REQUESTED, n);
        return this;
    }

    /** Cancels the upstream subscription; fluent alias for close(). */
    public final TestConsumer<T> cancel() {
        close();
        return this;
    }

    /**
     * Runs the given consumer over the recorded errors with every
     * CompositeThrowable flattened into its suppressed components.
     */
    public final TestConsumer<T> assertInnerErrors(Consumer<List<Throwable>> consumer) {
        if (errors.size() == 0) {
            throw fail("No errors.");
        }
        List<Throwable> errorsList = new ArrayList<>();
        errors.forEach(e -> {
            if (e instanceof CompositeThrowable) {
                errorsList.addAll(Arrays.asList(e.getSuppressed()));
            } else {
                errorsList.add(e);
            }
        });
        consumer.accept(errorsList);
        return this;
    }

    /** Asserts the item at the given index equals the expected item. */
    public final TestConsumer<T> assertValueAt(int index, T item) {
        int s = items.size();
        if (s <= index) {
            throw fail("Not enough elements: " + index);
        }
        T v = items.get(index);
        if (!Objects.equals(item, v)) {
            throw fail("Item @ " + index + " differs. Expected: " + valueAndClass(item) + ", Actual: " + valueAndClass(v));
        }
        return this;
    }

    /** Asserts that awaitDone did not time out. */
    public final TestConsumer<T> assertNoTimeout() {
        if (timeout) {
            throw fail("Timeout.");
        }
        return this;
    }

    /** Asserts exactly the given number of items was received. */
    public final TestConsumer<T> assertValueCount(int expected) {
        int s = items.size();
        if (s != expected) {
            throw fail("Number of items differ. Expected: " + expected + ", Actual: " + s);
        }
        return this;
    }

    /** Clears the recorded items (errors and completions are kept). */
    public final TestConsumer<T> clear() {
        items.clear();
        return this;
    }

    /**
     * Asserts the received items are all contained in the expected collection
     * and the counts match; order is not checked.
     */
    public final TestConsumer<T> assertValueSet(Collection<T> expected) {
        int s = items.size();
        if (s != expected.size()) {
            throw fail("Number of items differ. Expected: " + expected.size() + ", Actual: " + s);
        }
        for (int i = 0; i < items.size(); i++) {
            T v = items.get(i);
            if (!expected.contains(v)) {
                throw fail("Item @ " + i + " not expected: " + valueAndClass(v));
            }
        }
        return this;
    }

    /** Returns the live list of received items (not a copy). */
    public final List<T> values() {
        return items;
    }

    /** Invokes the given callback with (index, item) for each received item. */
    public final TestConsumer<T> forEach(BiConsumer<Integer, T> onItem) {
        int s = items.size();
        for (int i = 0; i < s; i++) {
            onItem.accept(i, items.get(i));
        }
        return this;
    }

    /** Returns the live list of recorded errors (not a copy). */
    public final List<Throwable> errors() {
        return errors;
    }

    /**
     * Placeholder subscription installed when a signal arrives before
     * onSubscribe, so subsequent request/cancel calls are safely ignored.
     */
    enum MissingSubscription implements Flow.Subscription {
        MISSING;

        @Override
        public void request(long n) {
            // deliberately no-op
        }

        @Override
        public void cancel() {
            // deliberately no-op
        }
    }
}
/*================================================================================
Copyright (c) 2008 VMware, Inc. All Rights Reserved.

Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:

* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.

* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.

* Neither the name of VMware, Inc. nor the names of its contributors may be used
to endorse or promote products derived from this software without specific prior
written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL VMWARE, INC. OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
SUCH DAMAGE.
================================================================================*/

package com.vmware.vim25.mo;

import java.net.MalformedURLException;
import java.net.URL;
import java.rmi.RemoteException;
import java.util.Calendar;

import com.vmware.vim25.*;
import com.vmware.vim25.mo.util.*;
import com.vmware.vim25.ws.WSClient;

/**
 * The managed object class corresponding to the one defined in VI SDK API reference.
 *
 * <p>A ServiceInstance is the entry point to a vSphere server: its constructors
 * establish the web-service connection (by username/password login or by an
 * existing session cookie) and the accessor methods expose the various manager
 * objects published through the server's {@link ServiceContent}.
 *
 * @author Steve JIN (sjin@vmware.com)
 */
public class ServiceInstance extends ManagedObject {
    // Cached ServiceContent; lazily (re)fetched by getServiceContent() when null.
    private ServiceContent serviceContent = null;

    // The well-known singleton MOR every vSphere server exposes.
    final static ManagedObjectReference SERVICE_INSTANCE_MOR;

    // SOAP namespace fragments selecting the API version spoken on the wire.
    public final static String VIM25_NAMESPACE = " xmlns=\"urn:vim25\">";
    public final static String VIM20_NAMESPACE = " xmlns=\"urn:vim2\">";

    static {
        SERVICE_INSTANCE_MOR = new ManagedObjectReference();
        SERVICE_INSTANCE_MOR.set_value("ServiceInstance");
        SERVICE_INSTANCE_MOR.setType("ServiceInstance");
    }

    /**
     * Connects and logs in with username/password, trusting the server certificate.
     * @throws RemoteException if the web-service call fails
     * @throws MalformedURLException if the URL is invalid
     */
    public ServiceInstance(URL url, String username, String password)
            throws RemoteException, MalformedURLException {
        this(url, username, password, false);
    }

    /** Connects and logs in, optionally ignoring SSL certificate validation. */
    public ServiceInstance(URL url, String username, String password, boolean ignoreCert)
            throws RemoteException, MalformedURLException {
        this(url, username, password, ignoreCert, VIM25_NAMESPACE);
    }

    /** Connects and logs in with an explicit SOAP namespace (API version). */
    public ServiceInstance(URL url, String username, String password, boolean ignoreCert, String namespace)
            throws RemoteException, MalformedURLException {
        this(url, username, password, ignoreCert, namespace, null, null);
    }

    /**
     * Full login constructor.
     * @param url the server's SDK endpoint URL
     * @param username the login name
     * @param password the password; XML special characters are escaped before login
     * @param ignoreCert true to skip SSL certificate validation
     * @param namespace SOAP namespace string, e.g. {@link #VIM25_NAMESPACE}
     * @param connectTimeoutMillis optional socket connect timeout; null = default
     * @param readTimeoutMillis optional socket read timeout; null = default
     * @throws RemoteException if the web-service call fails
     * @throws MalformedURLException if the URL is invalid
     * @throws NullPointerException if url or username is null
     */
    public ServiceInstance(URL url, String username, String password, boolean ignoreCert, String namespace,
            Integer connectTimeoutMillis, Integer readTimeoutMillis)
            throws RemoteException, MalformedURLException {
        if(url == null || username==null) {
            throw new NullPointerException("None of url, username can be null.");
        }
        setMOR(SERVICE_INSTANCE_MOR);

        VimPortType vimService = new VimPortType(url.toString(), ignoreCert);
        vimService.getWsc().setVimNameSpace(namespace);
        if (connectTimeoutMillis != null) {
            vimService.getWsc().setConnectTimeout(connectTimeoutMillis);
        }
        if (readTimeoutMillis != null) {
            vimService.getWsc().setReadTimeout(readTimeoutMillis);
        }

        serviceContent = vimService.retrieveServiceContent(SERVICE_INSTANCE_MOR);
        // The SOAPAction header depends on the server's API version.
        vimService.getWsc().setSoapActionOnApiVersion(serviceContent.getAbout().getApiVersion());
        setServerConnection(new ServerConnection(url, vimService, this));
        // escape 5 special chars
        // http://en.wikipedia.org/wiki/List_of_XML_and_HTML_character_entity_references
        password = password.replace("&", "&amp;")
                .replace("<", "&lt;")
                .replace(">", "&gt;")
                .replace("\"", "&quot;")
                .replace("'", "&apos;");
        UserSession userSession = getSessionManager().login(username, password, null);
        getServerConnection().setUserSession(userSession);
    }

    /** Connects by reusing an existing session cookie string. */
    public ServiceInstance(URL url, String sessionStr, boolean ignoreCert)
            throws RemoteException, MalformedURLException {
        this(url, sessionStr, ignoreCert, VIM25_NAMESPACE);
    }

    /** Connects by session cookie with an explicit SOAP namespace. */
    public ServiceInstance(URL url, String sessionStr, boolean ignoreCert, String namespace)
            throws RemoteException, MalformedURLException {
        this(url, sessionStr, ignoreCert, namespace, null, null);
    }

    // sessionStr format: "vmware_soap_session=\"B3240D15-34DF-4BB8-B902-A844FDF42E85\""
    /**
     * Full session-cookie constructor; see the login variant for parameter details.
     * @throws NullPointerException if url or sessionStr is null
     */
    public ServiceInstance(URL url, String sessionStr, boolean ignoreCert, String namespace,
            Integer connectTimeoutMillis, Integer readTimeoutMillis)
            throws RemoteException, MalformedURLException {
        if(url == null || sessionStr ==null) {
            throw new NullPointerException("None of url, session string can be null.");
        }
        setMOR(SERVICE_INSTANCE_MOR);

        VimPortType vimService = new VimPortType(url.toString(), ignoreCert);
        WSClient wsc = vimService.getWsc();
        wsc.setCookie(sessionStr);
        wsc.setVimNameSpace(namespace);
        if (connectTimeoutMillis != null) {
            vimService.getWsc().setConnectTimeout(connectTimeoutMillis);
        }
        if (readTimeoutMillis != null) {
            vimService.getWsc().setReadTimeout(readTimeoutMillis);
        }

        serviceContent = vimService.retrieveServiceContent(SERVICE_INSTANCE_MOR);
        wsc.setSoapActionOnApiVersion(serviceContent.getAbout().getApiVersion());
        setServerConnection(new ServerConnection(url, vimService, this));
        UserSession userSession = (UserSession) getSessionManager().getCurrentProperty("currentSession");
        getServerConnection().setUserSession(userSession);
    }

    /** Wraps an already-established server connection. */
    public ServiceInstance(ServerConnection sc) {
        super(sc, SERVICE_INSTANCE_MOR);
    }

    /** Returns the server's current clock value (the "serverClock" property). */
    public Calendar getServerClock() {
        return (Calendar) getCurrentProperty("serverClock");
    }

    /** Returns the server's capability description. */
    public Capability getCapability() {
        return (Capability) getCurrentProperty("capability");
    }

    public ClusterProfileManager getClusterProfileManager() {
        return (ClusterProfileManager) createMO(getServiceContent().getClusterProfileManager());
    }

    /** Queries the server for its current time via a live round-trip. */
    public Calendar currentTime() throws RuntimeFault, RemoteException {
        return getVimService().currentTime(getMOR());
    }

    /** Returns the inventory root folder. */
    public Folder getRootFolder() {
        return new Folder(this.getServerConnection(), this.getServiceContent().getRootFolder());
    }

    /**
     * Checks vMotion compatibility of a VM against a set of hosts.
     * @throws IllegalArgumentException if vm or hosts is null
     */
    public HostVMotionCompatibility[] queryVMotionCompatibility(VirtualMachine vm, HostSystem[] hosts,
            String[] compatibility) throws RuntimeFault, RemoteException {
        if(vm==null || hosts==null) {
            throw new IllegalArgumentException("Neither vm or hosts can be null.");
        }
        return getVimService().queryVMotionCompatibility(getMOR(), vm.getMOR(), MorUtil.createMORs(hosts), compatibility);
    }

    public ProductComponentInfo[] retrieveProductComponents() throws RuntimeFault, RemoteException {
        return getVimService().retrieveProductComponents(getMOR());
    }

    // Live round-trip to the server; used by getServiceContent() to refresh the cache.
    private ServiceContent retrieveServiceContent() throws RuntimeFault, RemoteException {
        return getVimService().retrieveServiceContent(getMOR());
    }

    /**
     * Validates whether the given VMs can migrate to the target pool/host.
     * @throws IllegalArgumentException if vms is null
     */
    public Event[] validateMigration(VirtualMachine[] vms, VirtualMachinePowerState state, String[] testType,
            ResourcePool pool, HostSystem host) throws InvalidState, RuntimeFault, RemoteException {
        if(vms==null) {
            throw new IllegalArgumentException("vms must not be null.");
        }
        return getVimService().validateMigration(getMOR(), MorUtil.createMORs(vms), state, testType,
                pool==null? null: pool.getMOR(), host==null? null : host.getMOR());
    }

    /**
     * Returns the cached ServiceContent, fetching it from the server when absent.
     *
     * <p>Best effort: on failure the exception is logged and null is returned,
     * preserving the long-standing contract existing callers depend on.
     */
    public ServiceContent getServiceContent() {
        if(serviceContent == null) {
            try {
                serviceContent = retrieveServiceContent();
            } catch(Exception e) {
                // Fixed: the original printed a typo'd message ("Exceptoin") to
                // stdout with no context. Log to stderr with a descriptive
                // message instead; the best-effort null return is kept.
                System.err.println("Failed to retrieve ServiceContent: " + e);
            }
        }
        return serviceContent;
    }

    /** Returns the server's AboutInfo (product, version, API version, ...). */
    public AboutInfo getAboutInfo() {
        return getServiceContent().getAbout();
    }

    // ---- Manager accessors: each wraps the corresponding MOR published in
    // ---- ServiceContent into its typed managed-object counterpart.

    public AlarmManager getAlarmManager() {
        return (AlarmManager) createMO(getServiceContent().getAlarmManager());
    }

    public AuthorizationManager getAuthorizationManager() {
        return (AuthorizationManager) createMO(getServiceContent().getAuthorizationManager());
    }

    public CustomFieldsManager getCustomFieldsManager() {
        return (CustomFieldsManager) createMO(getServiceContent().getCustomFieldsManager());
    }

    public CustomizationSpecManager getCustomizationSpecManager() {
        return (CustomizationSpecManager) createMO(getServiceContent().getCustomizationSpecManager());
    }

    public EventManager getEventManager() {
        return (EventManager) createMO(getServiceContent().getEventManager());
    }

    public DiagnosticManager getDiagnosticManager() {
        return (DiagnosticManager) createMO(getServiceContent().getDiagnosticManager());
    }

    public DistributedVirtualSwitchManager getDistributedVirtualSwitchManager() {
        return (DistributedVirtualSwitchManager) createMO(getServiceContent().getDvSwitchManager());
    }

    public ExtensionManager getExtensionManager() {
        return (ExtensionManager) createMO(getServiceContent().getExtensionManager());
    }

    public FileManager getFileManager() {
        return (FileManager) createMO(getServiceContent().getFileManager());
    }

    public HostLocalAccountManager getAccountManager() {
        return (HostLocalAccountManager) createMO(getServiceContent().getAccountManager());
    }

    public LicenseManager getLicenseManager() {
        return (LicenseManager) createMO(getServiceContent().getLicenseManager());
    }

    public LocalizationManager getLocalizationManager() {
        return (LocalizationManager) createMO(getServiceContent().getLocalizationManager());
    }

    public PerformanceManager getPerformanceManager() {
        return (PerformanceManager) createMO(getServiceContent().getPerfManager());
    }

    public ProfileComplianceManager getProfileComplianceManager() {
        return (ProfileComplianceManager) createMO(getServiceContent().getComplianceManager());
    }

    public PropertyCollector getPropertyCollector() {
        return (PropertyCollector) createMO(getServiceContent().getPropertyCollector());
    }

    public ScheduledTaskManager getScheduledTaskManager() {
        return (ScheduledTaskManager) createMO(getServiceContent().getScheduledTaskManager());
    }

    public SearchIndex getSearchIndex() {
        return (SearchIndex) createMO(getServiceContent().getSearchIndex());
    }

    public SessionManager getSessionManager() {
        return (SessionManager) createMO(getServiceContent().getSessionManager());
    }

    public HostSnmpSystem getHostSnmpSystem() {
        return (HostSnmpSystem) createMO(getServiceContent().getSnmpSystem());
    }

    public HostProfileManager getHostProfileManager() {
        return (HostProfileManager) createMO(getServiceContent().getHostProfileManager());
    }

    public IpPoolManager getIpPoolManager() {
        return (IpPoolManager) createMO(getServiceContent().getIpPoolManager());
    }

    public VirtualMachineProvisioningChecker getVirtualMachineProvisioningChecker() {
        return (VirtualMachineProvisioningChecker) createMO(getServiceContent().getVmProvisioningChecker());
    }

    public VirtualMachineCompatibilityChecker getVirtualMachineCompatibilityChecker() {
        return (VirtualMachineCompatibilityChecker) createMO(getServiceContent().getVmCompatibilityChecker());
    }

    public TaskManager getTaskManager() {
        return (TaskManager) createMO(getServiceContent().getTaskManager());
    }

    public UserDirectory getUserDirectory() {
        return (UserDirectory) createMO(getServiceContent().getUserDirectory());
    }

    public ViewManager getViewManager() {
        return (ViewManager) createMO(getServiceContent().getViewManager());
    }

    public VirtualDiskManager getVirtualDiskManager() {
        return (VirtualDiskManager) createMO(getServiceContent().getVirtualDiskManager());
    }

    public OptionManager getOptionManager() {
        return (OptionManager) createMO(getServiceContent().getSetting());
    }

    public OvfManager getOvfManager() {
        return (OvfManager) createMO(getServiceContent().getOvfManager());
    }

    // Wraps a raw MOR into the exact typed managed object for this connection.
    private ManagedObject createMO(ManagedObjectReference mor) {
        return MorUtil.createExactManagedObject(getServerConnection(), mor);
    }

    // TODO vim.VirtualizationManager is defined in servicecontent but no documentation there. Filed a bug already
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.operators.coordination; import org.apache.flink.core.testutils.OneShotLatch; import org.apache.flink.runtime.concurrent.ComponentMainThreadExecutor; import org.apache.flink.runtime.concurrent.ComponentMainThreadExecutorServiceAdapter; import org.apache.flink.runtime.concurrent.ManuallyTriggeredScheduledExecutorService; import org.apache.flink.runtime.jobgraph.OperatorID; import org.apache.flink.runtime.messages.Acknowledge; import org.apache.flink.runtime.operators.coordination.EventReceivingTasks.EventWithSubtask; import org.apache.flink.util.ExceptionUtils; import org.apache.flink.util.TestLogger; import org.junit.After; import org.junit.Test; import javax.annotation.Nonnull; import javax.annotation.Nullable; import javax.annotation.concurrent.GuardedBy; import java.nio.ByteBuffer; import java.util.ArrayDeque; import java.util.Queue; import java.util.Random; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.locks.Condition; import java.util.concurrent.locks.ReentrantLock; import java.util.function.Consumer; import java.util.function.Function; import static 
org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.contains; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; /** * A test that ensures the before/after conditions around event sending and checkpoint are met. * concurrency */ @SuppressWarnings("serial") public class OperatorCoordinatorHolderTest extends TestLogger { private final Consumer<Throwable> globalFailureHandler = (t) -> globalFailure = t; private Throwable globalFailure; @After public void checkNoGlobalFailure() throws Exception { if (globalFailure != null) { ExceptionUtils.rethrowException(globalFailure); } } // ------------------------------------------------------------------------ @Test public void checkpointFutureInitiallyNotDone() throws Exception { final EventReceivingTasks tasks = EventReceivingTasks.createForRunningTasks(); final OperatorCoordinatorHolder holder = createCoordinatorHolder(tasks, TestingOperatorCoordinator::new); final CompletableFuture<byte[]> checkpointFuture = new CompletableFuture<>(); holder.checkpointCoordinator(1L, checkpointFuture); assertFalse(checkpointFuture.isDone()); } @Test public void completedCheckpointFuture() throws Exception { final EventReceivingTasks tasks = EventReceivingTasks.createForRunningTasks(); final OperatorCoordinatorHolder holder = createCoordinatorHolder(tasks, TestingOperatorCoordinator::new); final byte[] testData = new byte[] {11, 22, 33, 44}; final CompletableFuture<byte[]> checkpointFuture = new CompletableFuture<>(); holder.checkpointCoordinator(9L, checkpointFuture); getCoordinator(holder).getLastTriggeredCheckpoint().complete(testData); assertTrue(checkpointFuture.isDone()); assertArrayEquals(testData, checkpointFuture.get()); } @Test public void 
eventsBeforeCheckpointFutureCompletionPassThrough() throws Exception { final EventReceivingTasks tasks = EventReceivingTasks.createForRunningTasks(); final OperatorCoordinatorHolder holder = createCoordinatorHolder(tasks, TestingOperatorCoordinator::new); holder.checkpointCoordinator(1L, new CompletableFuture<>()); getCoordinator(holder).getSubtaskGateway(1).sendEvent(new TestOperatorEvent(1)); assertThat(tasks.getSentEventsForSubtask(1), contains(new TestOperatorEvent(1))); } @Test public void eventsAreBlockedAfterCheckpointFutureCompletes() throws Exception { final EventReceivingTasks tasks = EventReceivingTasks.createForRunningTasks(); final OperatorCoordinatorHolder holder = createCoordinatorHolder(tasks, TestingOperatorCoordinator::new); triggerAndCompleteCheckpoint(holder, 10L); getCoordinator(holder).getSubtaskGateway(0).sendEvent(new TestOperatorEvent(1337)); assertEquals(0, tasks.getNumberOfSentEvents()); } @Test public void abortedCheckpointReleasesBlockedEvents() throws Exception { final EventReceivingTasks tasks = EventReceivingTasks.createForRunningTasks(); final OperatorCoordinatorHolder holder = createCoordinatorHolder(tasks, TestingOperatorCoordinator::new); triggerAndCompleteCheckpoint(holder, 123L); getCoordinator(holder).getSubtaskGateway(0).sendEvent(new TestOperatorEvent(1337)); holder.abortCurrentTriggering(); assertThat(tasks.getSentEventsForSubtask(0), contains(new TestOperatorEvent(1337))); } @Test public void sourceBarrierInjectionReleasesBlockedEvents() throws Exception { final EventReceivingTasks tasks = EventReceivingTasks.createForRunningTasks(); final OperatorCoordinatorHolder holder = createCoordinatorHolder(tasks, TestingOperatorCoordinator::new); triggerAndCompleteCheckpoint(holder, 1111L); getCoordinator(holder).getSubtaskGateway(0).sendEvent(new TestOperatorEvent(1337)); holder.afterSourceBarrierInjection(1111L); assertThat(tasks.getSentEventsForSubtask(0), contains(new TestOperatorEvent(1337))); } @Test public void 
// NOTE(review): the header of this first test (its modifiers and @Test annotation) is
// truncated by the chunk boundary — only the tail of the signature is visible here.
// Presumably verifies that restoring from a checkpoint reopens the event valve; confirm
// against the full file.
restoreOpensValveEvents() throws Exception {
    final EventReceivingTasks tasks = EventReceivingTasks.createForRunningTasks();
    final OperatorCoordinatorHolder holder =
            createCoordinatorHolder(tasks, TestingOperatorCoordinator::new);

    triggerAndCompleteCheckpoint(holder, 1000L);
    holder.resetToCheckpoint(1L, new byte[0]);

    // an event sent after the reset must reach the subtask again
    getCoordinator(holder).getSubtaskGateway(1).sendEvent(new TestOperatorEvent(999));

    assertThat(tasks.getSentEventsForSubtask(1), contains(new TestOperatorEvent(999)));
}

// An aborted triggering leaves a stale checkpoint future behind; completing that stale
// future later must not block subsequent event sending.
@Test
public void lateCompleteCheckpointFutureDoesNotBlockEvents() throws Exception {
    final EventReceivingTasks tasks = EventReceivingTasks.createForRunningTasks();
    final OperatorCoordinatorHolder holder =
            createCoordinatorHolder(tasks, TestingOperatorCoordinator::new);

    // first triggering is aborted before the coordinator completes its future
    final CompletableFuture<byte[]> holderFuture = new CompletableFuture<>();
    holder.checkpointCoordinator(1000L, holderFuture);
    final CompletableFuture<byte[]> future1 =
            getCoordinator(holder).getLastTriggeredCheckpoint();
    holder.abortCurrentTriggering();

    // a second checkpoint completes normally
    triggerAndCompleteCheckpoint(holder, 1010L);
    holder.afterSourceBarrierInjection(1010L);

    // late completion of the aborted checkpoint's future
    future1.complete(new byte[0]);

    getCoordinator(holder).getSubtaskGateway(0).sendEvent(new TestOperatorEvent(123));

    assertThat(tasks.events, contains(new EventWithSubtask(new TestOperatorEvent(123), 0)));
}

// Only one checkpoint triggering may be in flight; a concurrent request must fail its
// future exceptionally and raise a global failure.
@Test
public void triggeringFailsIfOtherTriggeringInProgress() throws Exception {
    final EventReceivingTasks tasks = EventReceivingTasks.createForRunningTasks();
    final OperatorCoordinatorHolder holder =
            createCoordinatorHolder(tasks, TestingOperatorCoordinator::new);

    holder.checkpointCoordinator(11L, new CompletableFuture<>());

    final CompletableFuture<byte[]> future = new CompletableFuture<>();
    holder.checkpointCoordinator(12L, future);

    assertTrue(future.isCompletedExceptionally());
    assertNotNull(globalFailure);
    // reset so the test teardown does not report the expected failure
    globalFailure = null;
}

// Events sent before/after a successful checkpoint must arrive in send order, across
// two consecutive checkpoint cycles.
@Test
public void takeCheckpointAfterSuccessfulCheckpoint() throws Exception {
    final EventReceivingTasks tasks = EventReceivingTasks.createForRunningTasks();
    final OperatorCoordinatorHolder holder =
            createCoordinatorHolder(tasks, TestingOperatorCoordinator::new);

    getCoordinator(holder).getSubtaskGateway(0).sendEvent(new TestOperatorEvent(0));
    triggerAndCompleteCheckpoint(holder, 22L);
    getCoordinator(holder).getSubtaskGateway(0).sendEvent(new TestOperatorEvent(1));
    holder.afterSourceBarrierInjection(22L);

    getCoordinator(holder).getSubtaskGateway(0).sendEvent(new TestOperatorEvent(2));
    triggerAndCompleteCheckpoint(holder, 23L);
    getCoordinator(holder).getSubtaskGateway(0).sendEvent(new TestOperatorEvent(3));
    holder.afterSourceBarrierInjection(23L);

    assertThat(
            tasks.getSentEventsForSubtask(0),
            contains(
                    new TestOperatorEvent(0),
                    new TestOperatorEvent(1),
                    new TestOperatorEvent(2),
                    new TestOperatorEvent(3)));
}

// Same as above, but the first checkpoint is aborted instead of confirmed; event order
// must still be preserved.
@Test
public void takeCheckpointAfterAbortedCheckpoint() throws Exception {
    final EventReceivingTasks tasks = EventReceivingTasks.createForRunningTasks();
    final OperatorCoordinatorHolder holder =
            createCoordinatorHolder(tasks, TestingOperatorCoordinator::new);

    getCoordinator(holder).getSubtaskGateway(0).sendEvent(new TestOperatorEvent(0));
    triggerAndCompleteCheckpoint(holder, 22L);
    getCoordinator(holder).getSubtaskGateway(0).sendEvent(new TestOperatorEvent(1));
    holder.abortCurrentTriggering();

    getCoordinator(holder).getSubtaskGateway(0).sendEvent(new TestOperatorEvent(2));
    triggerAndCompleteCheckpoint(holder, 23L);
    getCoordinator(holder).getSubtaskGateway(0).sendEvent(new TestOperatorEvent(3));
    holder.afterSourceBarrierInjection(23L);

    assertThat(
            tasks.getSentEventsForSubtask(0),
            contains(
                    new TestOperatorEvent(0),
                    new TestOperatorEvent(1),
                    new TestOperatorEvent(2),
                    new TestOperatorEvent(3)));
}

// The context must de-duplicate failJob() calls until the coordinator is reset; after a
// reset, new failures propagate again.
@Test
public void testFailingJobMultipleTimesNotCauseCascadingJobFailure() throws Exception {
    // coordinator that fails the job on every operator event
    Function<OperatorCoordinator.Context, OperatorCoordinator> coordinatorProvider =
            context ->
                    new TestingOperatorCoordinator(context) {
                        @Override
                        public void handleEventFromOperator(int subtask, OperatorEvent event) {
                            context.failJob(new RuntimeException("Artificial Exception"));
                        }
                    };
    final EventReceivingTasks tasks = EventReceivingTasks.createForRunningTasks();
    final OperatorCoordinatorHolder holder =
            createCoordinatorHolder(tasks, coordinatorProvider);

    holder.handleEventFromOperator(0, new TestOperatorEvent());
    assertNotNull(globalFailure);
    final Throwable firstGlobalFailure = globalFailure;

    holder.handleEventFromOperator(1, new TestOperatorEvent());
    assertEquals(
            "The global failure should be the same instance because the context"
                    + "should only take the first request from the coordinator to fail the job.",
            firstGlobalFailure,
            globalFailure);

    holder.resetToCheckpoint(0L, new byte[0]);
    holder.handleEventFromOperator(1, new TestOperatorEvent());
    assertNotEquals(
            "The new failures should be propagated after the coordinator " + "is reset.",
            firstGlobalFailure,
            globalFailure);

    // Reset global failure to null to make the after method check happy.
    globalFailure = null;
}

// The checkpoint future must not complete until the RPC acks of previously sent events
// have arrived.
@Test
public void checkpointCompletionWaitsForEventFutures() throws Exception {
    final CompletableFuture<Acknowledge> ackFuture = new CompletableFuture<>();
    final EventReceivingTasks tasks =
            EventReceivingTasks.createForRunningTasksWithRpcResult(ackFuture);
    final OperatorCoordinatorHolder holder =
            createCoordinatorHolder(tasks, TestingOperatorCoordinator::new);

    getCoordinator(holder).getSubtaskGateway(0).sendEvent(new TestOperatorEvent(0));

    final CompletableFuture<?> checkpointFuture = triggerAndCompleteCheckpoint(holder, 22L);
    assertFalse(checkpointFuture.isDone());

    ackFuture.complete(Acknowledge.get());
    assertTrue(checkpointFuture.isDone());
}

/**
 * This test verifies that the order of Checkpoint Completion and Event Sending observed from
 * the outside matches that from within the OperatorCoordinator.
 *
 * <p>Extreme case 1: The coordinator immediately completes the checkpoint future and sends an
 * event directly after that.
 */
@Test
public void verifyCheckpointEventOrderWhenCheckpointFutureCompletedImmediately()
        throws Exception {
    checkpointEventValueAtomicity(FutureCompletedInstantlyTestCoordinator::new);
}

/**
 * This test verifies that the order of Checkpoint Completion and Event Sending observed from
 * the outside matches that from within the OperatorCoordinator.
 *
 * <p>Extreme case 2: After the checkpoint triggering, the coordinator flushes a bunch of events
 * before completing the checkpoint future.
 */
@Test
public void verifyCheckpointEventOrderWhenCheckpointFutureCompletesLate() throws Exception {
    checkpointEventValueAtomicity(FutureCompletedAfterSendingEventsCoordinator::new);
}

// Shared driver for the two event-order tests above: runs the given coordinator against a
// manually-triggered executor, takes one checkpoint, and asserts that the checkpointed
// event count matches exactly the events observed by the receiver, in order.
private void checkpointEventValueAtomicity(
        final Function<OperatorCoordinator.Context, OperatorCoordinator> coordinatorCtor)
        throws Exception {

    final ManuallyTriggeredScheduledExecutorService executor =
            new ManuallyTriggeredScheduledExecutorService();
    final ComponentMainThreadExecutor mainThreadExecutor =
            new ComponentMainThreadExecutorServiceAdapter(
                    (ScheduledExecutorService) executor, Thread.currentThread());

    final EventReceivingTasks sender = EventReceivingTasks.createForRunningTasks();
    final OperatorCoordinatorHolder holder =
            createCoordinatorHolder(sender, coordinatorCtor, mainThreadExecutor);

    // give the coordinator some time to emit some events. This isn't strictly necessary,
    // but it randomly alters the timings between the coordinator's thread (event sender) and
    // the main thread (holder). This should produce a flaky test if we missed some corner
    // cases.
    Thread.sleep(new Random().nextInt(10));
    executor.triggerAll();

    // trigger the checkpoint - this should also shut the valve as soon as the future is
    // completed
    final CompletableFuture<byte[]> checkpointFuture = new CompletableFuture<>();
    holder.checkpointCoordinator(0L, checkpointFuture);
    executor.triggerAll();

    // give the coordinator some time to emit some events. Same as above, this adds some
    // randomization
    Thread.sleep(new Random().nextInt(10));

    holder.close();
    executor.triggerAll();

    assertTrue(checkpointFuture.isDone());
    final int checkpointedNumber = bytesToInt(checkpointFuture.get());

    assertEquals(checkpointedNumber, sender.getNumberOfSentEvents());
    for (int i = 0; i < checkpointedNumber; i++) {
        assertEquals(i, ((TestOperatorEvent) sender.getAllSentEvents().get(i).event).getValue());
    }
}

// A pending event whose RPC fails after the checkpoint was triggered must fail the
// checkpoint.
@Test
public void testCheckpointFailsIfSendingEventFailedAfterTrigger() throws Exception {
    CompletableFuture<Acknowledge> eventSendingResult = new CompletableFuture<>();
    final EventReceivingTasks tasks =
            EventReceivingTasks.createForRunningTasksWithRpcResult(eventSendingResult);
    final OperatorCoordinatorHolder holder =
            createCoordinatorHolder(tasks, TestingOperatorCoordinator::new);

    // Send one event without finishing it.
    getCoordinator(holder).getSubtaskGateway(0).sendEvent(new TestOperatorEvent(0));

    // Trigger one checkpoint.
    CompletableFuture<byte[]> checkpointResult = new CompletableFuture<>();
    holder.checkpointCoordinator(1, checkpointResult);
    getCoordinator(holder).getLastTriggeredCheckpoint().complete(new byte[0]);

    // Fail the event sending.
    eventSendingResult.completeExceptionally(new RuntimeException("Artificial"));

    assertTrue(checkpointResult.isCompletedExceptionally());
}

// Same as above, but the RPC failure happens before the checkpoint is triggered and its
// processing is artificially delayed past the triggering, via the reorderable executor.
@Test
public void testCheckpointFailsIfSendingEventFailedBeforeTrigger() throws Exception {
    final ReorderableManualExecutorService executor = new ReorderableManualExecutorService();
    final ComponentMainThreadExecutor mainThreadExecutor =
            new ComponentMainThreadExecutorServiceAdapter(
                    (ScheduledExecutorService) executor, Thread.currentThread());

    CompletableFuture<Acknowledge> eventSendingResult = new CompletableFuture<>();
    final EventReceivingTasks tasks =
            EventReceivingTasks.createForRunningTasksWithRpcResult(eventSendingResult);
    final OperatorCoordinatorHolder holder =
            createCoordinatorHolder(tasks, TestingOperatorCoordinator::new, mainThreadExecutor);

    // Send one event without finishing it.
    getCoordinator(holder).getSubtaskGateway(0).sendEvent(new TestOperatorEvent(0));
    executor.triggerAll();

    // Finish the event sending. This will insert one runnable that handles
    // failed events to the executor. And we delay this runnable to
    // simulates checkpoints triggered before the failure get processed.
    executor.setDelayNewRunnables(true);
    eventSendingResult.completeExceptionally(new RuntimeException("Artificial"));
    executor.setDelayNewRunnables(false);

    // Trigger one checkpoint, the checkpoint should not be confirmed
    // before the failure get triggered.
    CompletableFuture<byte[]> checkpointResult = new CompletableFuture<>();
    holder.checkpointCoordinator(1, checkpointResult);
    executor.triggerAll();
    getCoordinator(holder).getLastTriggeredCheckpoint().complete(new byte[0]);
    executor.triggerAll();
    assertFalse(checkpointResult.isDone());

    // Then the failure finally get processed by fail the corresponding tasks.
    executor.executeAllDelayedRunnables();
    executor.triggerAll();

    // The checkpoint would be finally confirmed.
    assertTrue(checkpointResult.isCompletedExceptionally());
}

// ------------------------------------------------------------------------
//  test actions
// ------------------------------------------------------------------------

// Triggers a checkpoint on the holder and immediately completes the coordinator's side of
// it with an empty state; the returned future is the holder-level checkpoint future.
private CompletableFuture<byte[]> triggerAndCompleteCheckpoint(
        OperatorCoordinatorHolder holder, long checkpointId) throws Exception {

    final CompletableFuture<byte[]> future = new CompletableFuture<>();
    holder.checkpointCoordinator(checkpointId, future);
    getCoordinator(holder).getLastTriggeredCheckpoint().complete(new byte[0]);
    return future;
}

// ------------------------------------------------------------------------
//  miscellaneous helpers
// ------------------------------------------------------------------------

// big-endian int -> 4-byte array (inverse of bytesToInt)
static byte[] intToBytes(int value) {
    return ByteBuffer.allocate(4).putInt(value).array();
}

// 4-byte array -> big-endian int (inverse of intToBytes)
static int bytesToInt(byte[] bytes) {
    return ByteBuffer.wrap(bytes).getInt();
}

// unwraps the TestingOperatorCoordinator from the holder
private static TestingOperatorCoordinator getCoordinator(OperatorCoordinatorHolder holder) {
    return (TestingOperatorCoordinator) holder.coordinator();
}

// Convenience overload: builds a holder that runs on the current (main) thread.
private OperatorCoordinatorHolder createCoordinatorHolder(
        final SubtaskAccess.SubtaskAccessFactory eventTarget,
        final Function<OperatorCoordinator.Context, OperatorCoordinator> coordinatorCtor)
        throws Exception {

    return createCoordinatorHolder(
            eventTarget,
            coordinatorCtor,
            ComponentMainThreadExecutorServiceAdapter.forMainThread());
}

// Builds, initializes, and starts an OperatorCoordinatorHolder wired to the given event
// target and main-thread executor. Parallelism is fixed at 3 (max 1775).
private OperatorCoordinatorHolder createCoordinatorHolder(
        final SubtaskAccess.SubtaskAccessFactory eventTarget,
        final Function<OperatorCoordinator.Context, OperatorCoordinator> coordinatorCtor,
        final ComponentMainThreadExecutor mainThreadExecutor)
        throws Exception {

    final OperatorID opId = new OperatorID();
    final OperatorCoordinator.Provider provider =
            new OperatorCoordinator.Provider() {
                @Override
                public OperatorID getOperatorId() {
                    return opId;
                }

                @Override
                public OperatorCoordinator create(OperatorCoordinator.Context context) {
                    return coordinatorCtor.apply(context);
                }
            };
    final OperatorCoordinatorHolder holder =
            OperatorCoordinatorHolder.create(
                    opId,
                    provider,
                    new CoordinatorStoreImpl(),
                    "test-coordinator-name",
                    getClass().getClassLoader(),
                    3,
                    1775,
                    eventTarget);
    holder.lazyInitialize(globalFailureHandler, mainThreadExecutor);
    holder.start();
    return holder;
}

// Manually-triggered executor that can additionally hold back newly submitted runnables
// and release them later, to force specific interleavings in tests.
private static class ReorderableManualExecutorService
        extends ManuallyTriggeredScheduledExecutorService {

    // when true, execute() parks runnables instead of scheduling them
    private boolean delayNewRunnables;

    private final Queue<Runnable> delayedRunnables = new ArrayDeque<>();

    public void setDelayNewRunnables(boolean delayNewRunnables) {
        this.delayNewRunnables = delayNewRunnables;
    }

    @Override
    public void execute(@Nonnull Runnable command) {
        if (delayNewRunnables) {
            delayedRunnables.add(command);
        } else {
            super.execute(command);
        }
    }

    // releases all parked runnables into the underlying executor, in FIFO order
    public void executeAllDelayedRunnables() {
        while (!delayedRunnables.isEmpty()) {
            super.execute(delayedRunnables.poll());
        }
    }
}

// ------------------------------------------------------------------------
//  test implementations
// ------------------------------------------------------------------------

// Coordinator for "extreme case 1": completes the checkpoint future and sends the next
// event while the triggering thread is stalled inside checkpointCoordinator().
private static final class FutureCompletedInstantlyTestCoordinator
        extends CheckpointEventOrderTestBaseCoordinator {

    private final ReentrantLock lock = new ReentrantLock(true);
    private final Condition condition = lock.newCondition();

    @Nullable
    @GuardedBy("lock")
    private CompletableFuture<byte[]> checkpoint;

    // monotonically increasing event payload; checkpointed as the event count
    private int num;

    FutureCompletedInstantlyTestCoordinator(Context context) {
        super(context);
    }

    @Override
    public void checkpointCoordinator(long checkpointId, CompletableFuture<byte[]> result)
            throws Exception {
        // before returning from this method, we wait on a condition.
        // that way, we simulate a "context switch" just at the time when the
        // future would be returned and make the other thread complete the future and send an
        // event before this method returns
        lock.lock();
        try {
            checkpoint = result;
            condition.await();
        } finally {
            lock.unlock();
        }
    }

    @Override
    protected void step() throws Exception {
        lock.lock();
        try {
            // if there is a checkpoint to complete, we complete it and immediately
            // try to send another event, without releasing the lock. that way we
            // force the situation as if the checkpoint get completed and an event gets
            // sent while the triggering thread is stalled
            if (checkpoint != null) {
                checkpoint.complete(intToBytes(num));
                checkpoint = null;
            }
            subtaskGateways[0].sendEvent(new TestOperatorEvent(num++));
            condition.signalAll();
        } finally {
            lock.unlock();
        }

        Thread.sleep(2);
    }
}

// Coordinator for "extreme case 2": keeps flushing events and only completes the pending
// checkpoint future afterwards, from its own thread.
private static final class FutureCompletedAfterSendingEventsCoordinator
        extends CheckpointEventOrderTestBaseCoordinator {

    private final OneShotLatch checkpointCompleted = new OneShotLatch();

    @Nullable private volatile CompletableFuture<byte[]> checkpoint;

    private int num;

    FutureCompletedAfterSendingEventsCoordinator(Context context) {
        super(context);
    }

    @Override
    public void checkpointCoordinator(long checkpointId, CompletableFuture<byte[]> result)
            throws Exception {
        // just record the future; the coordinator thread completes it in step()
        checkpoint = result;
    }

    @Override
    protected void step() throws Exception {
        Thread.sleep(2);

        subtaskGateways[0].sendEvent(new TestOperatorEvent(num++));
        subtaskGateways[1].sendEvent(new TestOperatorEvent(num++));
        subtaskGateways[2].sendEvent(new TestOperatorEvent(num++));

        final CompletableFuture<byte[]> chkpnt = this.checkpoint;
        if (chkpnt != null) {
            chkpnt.complete(intToBytes(num));
            checkpointCompleted.trigger();
            this.checkpoint = null;
        }
    }

    @Override
    public void close() throws Exception {
        // we need to ensure that we don't close this before we have actually completed the
        // triggered checkpoint, to ensure the test conditions are robust.
        checkpointCompleted.await();
        super.close();
    }
}

// Base for the event-order coordinators: runs step() in a loop on a dedicated thread that
// starts once all subtask gateways are ready and stops when close() is called.
private abstract static class CheckpointEventOrderTestBaseCoordinator
        implements OperatorCoordinator, Runnable {

    private final Thread coordinatorThread;

    protected final Context context;
    protected final SubtaskGateway[] subtaskGateways;

    private volatile boolean closed;

    CheckpointEventOrderTestBaseCoordinator(Context context) {
        this.context = context;
        this.subtaskGateways = new SubtaskGateway[context.currentParallelism()];
        this.coordinatorThread = new Thread(this);
    }

    @Override
    public void start() throws Exception {}

    @Override
    public void close() throws Exception {
        closed = true;
        coordinatorThread.interrupt();
        coordinatorThread.join();
    }

    @Override
    public void handleEventFromOperator(int subtask, OperatorEvent event) {}

    @Override
    public void subtaskFailed(int subtask, @Nullable Throwable reason) {}

    @Override
    public void subtaskReset(int subtask, long checkpointId) {}

    @Override
    public void subtaskReady(int subtask, SubtaskGateway gateway) {
        subtaskGateways[subtask] = gateway;

        for (SubtaskGateway subtaskGateway : subtaskGateways) {
            if (subtaskGateway == null) {
                return;
            }
        }

        // start only once all tasks are ready
        coordinatorThread.start();
    }

    @Override
    public abstract void checkpointCoordinator(
            long checkpointId, CompletableFuture<byte[]> result) throws Exception;

    @Override
    public void notifyCheckpointComplete(long checkpointId) {}

    @Override
    public void resetToCheckpoint(long checkpointId, byte[] checkpointData) throws Exception {}

    @Override
    public void run() {
        try {
            while (!closed) {
                step();
            }
        } catch (Throwable t) {
            if (closed) {
                return;
            }

            // this should never happen, but just in case, print and crash the test
            //noinspection CallToPrintStackTrace
            t.printStackTrace();
            System.exit(-1);
        }
    }

    protected abstract void step() throws Exception;
}
}
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.codeInsight.daemon.impl.analysis; import com.intellij.codeInsight.daemon.JavaErrorMessages; import com.intellij.codeInsight.daemon.QuickFixBundle; import com.intellij.codeInsight.daemon.impl.HighlightInfo; import com.intellij.codeInsight.daemon.impl.HighlightInfoType; import com.intellij.codeInsight.daemon.impl.quickfix.*; import com.intellij.codeInsight.intention.QuickFixFactory; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.module.Module; import com.intellij.openapi.project.Project; import com.intellij.openapi.roots.ModuleRootManager; import com.intellij.openapi.roots.ProjectFileIndex; import com.intellij.openapi.util.TextRange; import com.intellij.openapi.util.Trinity; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.JarFileSystem; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.*; import com.intellij.psi.PsiPackageAccessibilityStatement.Role; import com.intellij.psi.impl.light.LightJavaModule; import com.intellij.psi.search.FilenameIndex; import com.intellij.psi.util.ClassUtil; import com.intellij.psi.util.InheritanceUtil; import com.intellij.psi.util.PsiTreeUtil; import com.intellij.psi.util.PsiUtil; import com.intellij.util.ObjectUtils; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.containers.JBIterable; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.annotations.PropertyKey; import org.jetbrains.jps.model.java.JavaSourceRootType; import java.io.IOException; import java.io.InputStream; import java.util.Collection; import java.util.List; import java.util.Objects; import java.util.Set; import java.util.function.Function; import java.util.jar.Attributes; import java.util.jar.JarFile; import java.util.jar.Manifest; import 
java.util.stream.Collectors; import java.util.stream.Stream; import static com.intellij.openapi.module.ModuleUtilCore.findModuleForFile; import static com.intellij.psi.SyntaxTraverser.psiTraverser; public class ModuleHighlightUtil { private static final Attributes.Name MULTI_RELEASE = new Attributes.Name("Multi-Release"); @Nullable static PsiJavaModule getModuleDescriptor(@Nullable VirtualFile file, @NotNull Project project) { if (file == null) return null; ProjectFileIndex index = ProjectFileIndex.SERVICE.getInstance(project); if (index.isInLibrary(file)) { VirtualFile root; if ((root = index.getClassRootForFile(file)) != null) { VirtualFile descriptorFile = root.findChild(PsiJavaModule.MODULE_INFO_CLS_FILE); if (descriptorFile == null) { VirtualFile alt = root.findFileByRelativePath("META-INF/versions/9/" + PsiJavaModule.MODULE_INFO_CLS_FILE); if (alt != null && isMultiReleaseJar(root)) { descriptorFile = alt; } } if (descriptorFile != null) { PsiFile psiFile = PsiManager.getInstance(project).findFile(descriptorFile); if (psiFile instanceof PsiJavaFile) { return ((PsiJavaFile)psiFile).getModuleDeclaration(); } } else if (root.getFileSystem() instanceof JarFileSystem && "jar".equalsIgnoreCase(root.getExtension())) { return LightJavaModule.getModule(PsiManager.getInstance(project), root); } } else if ((root = index.getSourceRootForFile(file)) != null) { VirtualFile descriptorFile = root.findChild(PsiJavaModule.MODULE_INFO_FILE); if (descriptorFile != null) { PsiFile psiFile = PsiManager.getInstance(project).findFile(descriptorFile); if (psiFile instanceof PsiJavaFile) { return ((PsiJavaFile)psiFile).getModuleDeclaration(); } } } } else { Module module = index.getModuleForFile(file); if (module != null) { JavaSourceRootType rootType = index.isInTestSourceContent(file) ? 
JavaSourceRootType.TEST_SOURCE : JavaSourceRootType.SOURCE; List<VirtualFile> files = ModuleRootManager.getInstance(module).getSourceRoots(rootType).stream() .map(root -> root.findChild(PsiJavaModule.MODULE_INFO_FILE)) .filter(Objects::nonNull) .collect(Collectors.toList()); if (files.size() == 1) { PsiFile psiFile = PsiManager.getInstance(project).findFile(files.get(0)); if (psiFile instanceof PsiJavaFile) { return ((PsiJavaFile)psiFile).getModuleDeclaration(); } } } } return null; } private static boolean isMultiReleaseJar(VirtualFile root) { if (root.getFileSystem() instanceof JarFileSystem) { VirtualFile manifest = root.findFileByRelativePath(JarFile.MANIFEST_NAME); if (manifest != null) { try (InputStream stream = manifest.getInputStream()) { return Boolean.valueOf(new Manifest(stream).getMainAttributes().getValue(MULTI_RELEASE)); } catch (IOException ignored) { } } } return false; } static HighlightInfo checkPackageStatement(@NotNull PsiPackageStatement statement, @NotNull PsiFile file, @Nullable PsiJavaModule module) { if (PsiUtil.isModuleFile(file)) { String message = JavaErrorMessages.message("module.no.package"); HighlightInfo info = HighlightInfo.newHighlightInfo(HighlightInfoType.ERROR).range(statement).descriptionAndTooltip(message).create(); QuickFixAction.registerQuickFixAction(info, factory().createDeleteFix(statement)); return info; } if (module != null) { String packageName = statement.getPackageName(); if (packageName != null) { PsiJavaModule origin = JavaModuleGraphUtil.findOrigin(module, packageName); if (origin != null) { String message = JavaErrorMessages.message("module.conflicting.packages", packageName, origin.getName()); return HighlightInfo.newHighlightInfo(HighlightInfoType.ERROR).range(statement).descriptionAndTooltip(message).create(); } } } return null; } @Nullable static HighlightInfo checkFileName(@NotNull PsiJavaModule element, @NotNull PsiFile file) { if (!PsiJavaModule.MODULE_INFO_FILE.equals(file.getName())) { String message = 
JavaErrorMessages.message("module.file.wrong.name"); HighlightInfo info = HighlightInfo.newHighlightInfo(HighlightInfoType.ERROR).range(range(element)).descriptionAndTooltip(message).create(); QuickFixAction.registerQuickFixAction(info, factory().createRenameFileFix(PsiJavaModule.MODULE_INFO_FILE)); return info; } return null; } @Nullable static HighlightInfo checkFileDuplicates(@NotNull PsiJavaModule element, @NotNull PsiFile file) { Module module = findModuleForFile(file); if (module != null) { Project project = file.getProject(); Collection<VirtualFile> others = FilenameIndex.getVirtualFilesByName(project, PsiJavaModule.MODULE_INFO_FILE, module.getModuleScope()); if (others.size() > 1) { String message = JavaErrorMessages.message("module.file.duplicate"); HighlightInfo info = HighlightInfo.newHighlightInfo(HighlightInfoType.ERROR).range(range(element)).descriptionAndTooltip(message).create(); others.stream().map(f -> PsiManager.getInstance(project).findFile(f)).filter(f -> f != file).findFirst().ifPresent( duplicate -> QuickFixAction.registerQuickFixAction(info, new GoToSymbolFix(duplicate, JavaErrorMessages.message("module.open.duplicate.text"))) ); return info; } } return null; } @NotNull static List<HighlightInfo> checkDuplicateStatements(@NotNull PsiJavaModule module) { List<HighlightInfo> results = ContainerUtil.newSmartList(); checkDuplicateRefs(module.getRequires(), st -> st.getModuleName(), "module.duplicate.requires", results); checkDuplicateRefs(module.getExports(), st -> st.getPackageName(), "module.duplicate.exports", results); checkDuplicateRefs(module.getOpens(), st -> st.getPackageName(), "module.duplicate.opens", results); checkDuplicateRefs(module.getUses(), st -> qName(st.getClassReference()), "module.duplicate.uses", results); checkDuplicateRefs(module.getProvides(), st -> qName(st.getInterfaceReference()), "module.duplicate.provides", results); return results; } private static <T extends PsiStatement> void checkDuplicateRefs(Iterable<T> 
statements, Function<T, String> ref, @PropertyKey(resourceBundle = JavaErrorMessages.BUNDLE) String key, List<HighlightInfo> results) { Set<String> filter = ContainerUtil.newTroveSet(); for (T statement : statements) { String refText = ref.apply(statement); if (refText != null && !filter.add(refText)) { String message = JavaErrorMessages.message(key, refText); HighlightInfo info = HighlightInfo.newHighlightInfo(HighlightInfoType.ERROR).range(statement).descriptionAndTooltip(message).create(); QuickFixAction.registerQuickFixAction(info, factory().createDeleteFix(statement)); QuickFixAction.registerQuickFixAction(info, MergeModuleStatementsFix.createFix(statement)); results.add(info); } } } @NotNull static List<HighlightInfo> checkUnusedServices(@NotNull PsiJavaModule module, @NotNull PsiFile file) { List<HighlightInfo> results = ContainerUtil.newSmartList(); Module host = findModuleForFile(file); if (host != null) { List<PsiProvidesStatement> provides = JBIterable.from(module.getProvides()).toList(); if (!provides.isEmpty()) { Set<String> exports = JBIterable.from(module.getExports()).map(PsiPackageAccessibilityStatement::getPackageName).filter(Objects::nonNull).toSet(); Set<String> uses = JBIterable.from(module.getUses()).map(st -> qName(st.getClassReference())).filter(Objects::nonNull).toSet(); for (PsiProvidesStatement statement : provides) { PsiJavaCodeReferenceElement ref = statement.getInterfaceReference(); if (ref != null) { PsiElement target = ref.resolve(); if (target instanceof PsiClass && findModuleForFile(target.getContainingFile()) == host) { String className = qName(ref), packageName = StringUtil.getPackageName(className); if (!exports.contains(packageName) && !uses.contains(className)) { String message = JavaErrorMessages.message("module.service.unused"); HighlightInfo info = HighlightInfo.newHighlightInfo(HighlightInfoType.WARNING).range(range(ref)).descriptionAndTooltip(message).create(); QuickFixAction.registerQuickFixAction(info, new 
AddExportsDirectiveFix(module, packageName, "")); QuickFixAction.registerQuickFixAction(info, new AddUsesDirectiveFix(module, className)); results.add(info); } } } } } } return results; } private static String qName(PsiJavaCodeReferenceElement ref) { return ref != null ? ref.getQualifiedName() : null; } @Nullable static HighlightInfo checkFileLocation(@NotNull PsiJavaModule element, @NotNull PsiFile file) { VirtualFile vFile = file.getVirtualFile(); if (vFile != null) { VirtualFile root = ProjectFileIndex.SERVICE.getInstance(file.getProject()).getSourceRootForFile(vFile); if (root != null && !root.equals(vFile.getParent())) { String message = JavaErrorMessages.message("module.file.wrong.location"); HighlightInfo info = HighlightInfo.newHighlightInfo(HighlightInfoType.ERROR).range(range(element)).descriptionAndTooltip(message).create(); QuickFixAction.registerQuickFixAction(info, new MoveFileFix(vFile, root, QuickFixBundle.message("move.file.to.source.root.text"))); return info; } } return null; } @Nullable static HighlightInfo checkModuleReference(@Nullable PsiJavaModuleReferenceElement refElement, @NotNull PsiJavaModule container) { if (refElement != null) { PsiPolyVariantReference ref = refElement.getReference(); assert ref != null : refElement.getParent(); PsiElement target = ref.resolve(); if (!(target instanceof PsiJavaModule)) { return moduleResolveError(refElement, ref); } else if (target == container) { String message = JavaErrorMessages.message("module.cyclic.dependence", container.getName()); return HighlightInfo.newHighlightInfo(HighlightInfoType.ERROR).range(refElement).descriptionAndTooltip(message).create(); } else { Collection<PsiJavaModule> cycle = JavaModuleGraphUtil.findCycle((PsiJavaModule)target); if (cycle != null && cycle.contains(container)) { Stream<String> stream = cycle.stream().map(PsiJavaModule::getName); if (ApplicationManager.getApplication().isUnitTestMode()) stream = stream.sorted(); String message = 
JavaErrorMessages.message("module.cyclic.dependence", stream.collect(Collectors.joining(", "))); return HighlightInfo.newHighlightInfo(HighlightInfoType.ERROR).range(refElement).descriptionAndTooltip(message).create(); } } } return null; } @Nullable static HighlightInfo checkHostModuleStrength(@NotNull PsiPackageAccessibilityStatement statement) { PsiElement parent; if (statement.getRole() == Role.OPENS && (parent = statement.getParent()) instanceof PsiJavaModule && ((PsiJavaModule)parent).hasModifierProperty(PsiModifier.OPEN)) { String message = JavaErrorMessages.message("module.opens.in.weak.module"); HighlightInfo info = HighlightInfo.newHighlightInfo(HighlightInfoType.ERROR).range(statement).descriptionAndTooltip(message).create(); QuickFixAction.registerQuickFixAction(info, factory().createModifierListFix((PsiModifierListOwner)parent, PsiModifier.OPEN, false, false)); QuickFixAction.registerQuickFixAction(info, factory().createDeleteFix(statement)); return info; } return null; } @Nullable static HighlightInfo checkPackageReference(@NotNull PsiPackageAccessibilityStatement statement, @NotNull PsiFile file) { PsiJavaCodeReferenceElement refElement = statement.getPackageReference(); if (refElement != null) { Module module = findModuleForFile(file); if (module != null) { PsiElement target = refElement.resolve(); PsiDirectory[] directories = target instanceof PsiPackage ? ((PsiPackage)target).getDirectories(module.getModuleScope(false)) : null; String packageName = statement.getPackageName(); HighlightInfoType type = statement.getRole() == Role.OPENS ? 
HighlightInfoType.WARNING : HighlightInfoType.ERROR; if (directories == null || directories.length == 0) { String message = JavaErrorMessages.message("package.not.found", packageName); HighlightInfo info = HighlightInfo.newHighlightInfo(type).range(refElement).descriptionAndTooltip(message).create(); QuickFixAction.registerQuickFixAction(info, factory().createCreateClassInPackageInModuleFix(module, packageName)); return info; } if (packageName != null && PsiUtil.isPackageEmpty(directories, packageName)) { String message = JavaErrorMessages.message("package.is.empty", packageName); HighlightInfo info = HighlightInfo.newHighlightInfo(type).range(refElement).descriptionAndTooltip(message).create(); QuickFixAction.registerQuickFixAction(info, factory().createCreateClassInPackageInModuleFix(module, packageName)); return info; } } } return null; } @NotNull static List<HighlightInfo> checkPackageAccessTargets(@NotNull PsiPackageAccessibilityStatement statement) { List<HighlightInfo> results = ContainerUtil.newSmartList(); Set<String> targets = ContainerUtil.newTroveSet(); for (PsiJavaModuleReferenceElement refElement : statement.getModuleReferences()) { String refText = refElement.getReferenceText(); PsiPolyVariantReference ref = refElement.getReference(); assert ref != null : statement; if (!targets.add(refText)) { boolean exports = statement.getRole() == Role.EXPORTS; String message = JavaErrorMessages.message(exports ? 
"module.duplicate.exports.target" : "module.duplicate.opens.target", refText); HighlightInfo info = HighlightInfo.newHighlightInfo(HighlightInfoType.ERROR).range(refElement).descriptionAndTooltip(message).create(); QuickFixAction.registerQuickFixAction(info, factory().createDeleteFix(refElement, QuickFixBundle.message("delete.reference.fix.text"))); results.add(info); } else if (ref.multiResolve(true).length == 0) { String message = JavaErrorMessages.message("module.not.found", refElement.getReferenceText()); results.add(HighlightInfo.newHighlightInfo(HighlightInfoType.WARNING).range(refElement).descriptionAndTooltip(message).create()); } } return results; } @Nullable static HighlightInfo checkServiceReference(@Nullable PsiJavaCodeReferenceElement refElement) { if (refElement != null) { PsiElement target = refElement.resolve(); if (!(target instanceof PsiClass)) { String message = JavaErrorMessages.message("cannot.resolve.symbol", refElement.getReferenceName()); return HighlightInfo.newHighlightInfo(HighlightInfoType.ERROR).range(range(refElement)).descriptionAndTooltip(message).create(); } else if (((PsiClass)target).isEnum()) { String message = JavaErrorMessages.message("module.service.enum", ((PsiClass)target).getName()); return HighlightInfo.newHighlightInfo(HighlightInfoType.ERROR).range(range(refElement)).descriptionAndTooltip(message).create(); } } return null; } @Nullable static List<HighlightInfo> checkServiceImplementations(@NotNull PsiProvidesStatement statement, @NotNull PsiFile file) { PsiReferenceList implRefList = statement.getImplementationList(); if (implRefList == null) return null; List<HighlightInfo> results = ContainerUtil.newSmartList(); PsiJavaCodeReferenceElement intRef = statement.getInterfaceReference(); PsiElement intTarget = intRef != null ? 
intRef.resolve() : null; Set<String> filter = ContainerUtil.newTroveSet(); for (PsiJavaCodeReferenceElement implRef : implRefList.getReferenceElements()) { String refText = implRef.getQualifiedName(); if (!filter.add(refText)) { String message = JavaErrorMessages.message("module.duplicate.impl", refText); HighlightInfo info = HighlightInfo.newHighlightInfo(HighlightInfoType.ERROR).range(implRef).descriptionAndTooltip(message).create(); QuickFixAction.registerQuickFixAction(info, factory().createDeleteFix(implRef, QuickFixBundle.message("delete.reference.fix.text"))); results.add(info); continue; } if (!(intTarget instanceof PsiClass)) continue; PsiElement implTarget = implRef.resolve(); if (implTarget instanceof PsiClass) { PsiClass implClass = (PsiClass)implTarget; if (findModuleForFile(file) != findModuleForFile(implClass.getContainingFile())) { String message = JavaErrorMessages.message("module.service.alien"); results.add(HighlightInfo.newHighlightInfo(HighlightInfoType.ERROR).range(range(implRef)).descriptionAndTooltip(message).create()); } PsiMethod provider = ContainerUtil.find( implClass.findMethodsByName("provider", false), m -> m.hasModifierProperty(PsiModifier.PUBLIC) && m.hasModifierProperty(PsiModifier.STATIC) && m.getParameterList().isEmpty()); if (provider != null) { PsiType type = provider.getReturnType(); PsiClass typeClass = type instanceof PsiClassType ? 
((PsiClassType)type).resolve() : null; if (!InheritanceUtil.isInheritorOrSelf(typeClass, (PsiClass)intTarget, true)) { String message = JavaErrorMessages.message("module.service.provider.type", implClass.getName()); results.add(HighlightInfo.newHighlightInfo(HighlightInfoType.ERROR).range(range(implRef)).descriptionAndTooltip(message).create()); } } else if (InheritanceUtil.isInheritorOrSelf(implClass, (PsiClass)intTarget, true)) { if (implClass.hasModifierProperty(PsiModifier.ABSTRACT)) { String message = JavaErrorMessages.message("module.service.abstract", implClass.getName()); results.add(HighlightInfo.newHighlightInfo(HighlightInfoType.ERROR).range(range(implRef)).descriptionAndTooltip(message).create()); } else if (!(ClassUtil.isTopLevelClass(implClass) || implClass.hasModifierProperty(PsiModifier.STATIC))) { String message = JavaErrorMessages.message("module.service.inner", implClass.getName()); results.add(HighlightInfo.newHighlightInfo(HighlightInfoType.ERROR).range(range(implRef)).descriptionAndTooltip(message).create()); } else if (!PsiUtil.hasDefaultConstructor(implClass)) { String message = JavaErrorMessages.message("module.service.no.ctor", implClass.getName()); results.add(HighlightInfo.newHighlightInfo(HighlightInfoType.ERROR).range(range(implRef)).descriptionAndTooltip(message).create()); } } else { String message = JavaErrorMessages.message("module.service.impl"); results.add(HighlightInfo.newHighlightInfo(HighlightInfoType.ERROR).range(range(implRef)).descriptionAndTooltip(message).create()); } } } return results; } @Nullable static HighlightInfo checkClashingReads(@NotNull PsiJavaModule module) { Trinity<String, PsiJavaModule, PsiJavaModule> conflict = JavaModuleGraphUtil.findConflict(module); if (conflict != null) { String message = JavaErrorMessages.message( "module.conflicting.reads", module.getName(), conflict.first, conflict.second.getName(), conflict.third.getName()); return 
HighlightInfo.newHighlightInfo(HighlightInfoType.ERROR).range(range(module)).descriptionAndTooltip(message).create(); } return null; } @Nullable static List<HighlightInfo> checkModifiers(@NotNull PsiRequiresStatement statement) { PsiModifierList modList = statement.getModifierList(); if (modList != null && PsiJavaModule.JAVA_BASE.equals(statement.getModuleName())) { return psiTraverser().children(modList) .filter(PsiKeyword.class) .map(keyword -> { @PsiModifier.ModifierConstant String modifier = keyword.getText(); String message = JavaErrorMessages.message("modifier.not.allowed", modifier); HighlightInfo info = HighlightInfo.newHighlightInfo(HighlightInfoType.ERROR).range(keyword).descriptionAndTooltip(message).create(); QuickFixAction.registerQuickFixAction(info, factory().createModifierListFix(modList, modifier, false, false)); return info; }).toList(); } return null; } private static HighlightInfo moduleResolveError(PsiJavaModuleReferenceElement refElement, PsiPolyVariantReference ref) { if (ref.multiResolve(true).length == 0) { String message = JavaErrorMessages.message("module.not.found", refElement.getReferenceText()); return HighlightInfo.newHighlightInfo(HighlightInfoType.WRONG_REF).range(refElement).descriptionAndTooltip(message).create(); } else if (ref.multiResolve(false).length > 1) { String message = JavaErrorMessages.message("module.ambiguous", refElement.getReferenceText()); return HighlightInfo.newHighlightInfo(HighlightInfoType.WARNING).range(refElement).descriptionAndTooltip(message).create(); } else { String message = JavaErrorMessages.message("module.not.on.path", refElement.getReferenceText()); HighlightInfo info = HighlightInfo.newHighlightInfo(HighlightInfoType.WRONG_REF).range(refElement).descriptionAndTooltip(message).create(); factory().registerOrderEntryFixes(new QuickFixActionRegistrarImpl(info), ref); return info; } } private static QuickFixFactory factory() { return QuickFixFactory.getInstance(); } private static TextRange 
range(PsiJavaModule module) { PsiKeyword kw = PsiTreeUtil.getChildOfType(module, PsiKeyword.class); return new TextRange(kw != null ? kw.getTextOffset() : module.getTextOffset(), module.getNameIdentifier().getTextRange().getEndOffset()); } private static PsiElement range(PsiJavaCodeReferenceElement refElement) { return ObjectUtils.notNull(refElement.getReferenceNameElement(), refElement); } }
/** * */ package com.crm.subscriber.bean; import java.io.Serializable; import java.util.Date; import com.crm.kernel.message.Constants; /** * @author ThangPV * */ public class SubscriberOrder implements Serializable { // PK fields --> /** * */ private static final long serialVersionUID = -4947692391004700878L; private long orderId = Constants.DEFAULT_ID; // Audit fields --> private long userId = Constants.DEFAULT_ID; private String userName = ""; private Date createDate = null; private Date modifiedDate = null; // Other fields --> private long merchantId = Constants.DEFAULT_ID; private String orderType = ""; private Date orderDate = null; private String orderNo = ""; private Date cycleDate = null; private long subscriberId = Constants.DEFAULT_ID; private long subProductId = Constants.DEFAULT_ID; private int subscriberType = Constants.PREPAID_SUB_TYPE; private String isdn = ""; private String shipTo = ""; private long productId = Constants.DEFAULT_ID; private double offerPrice = 0; private double price = 0; private int quantity = 1; private double discount = 0; private double amount = 0; private double score = 0; private String currency = ""; private int status = Constants.ORDER_STATUS_PENDING; private String cause = ""; private String description = ""; public long getOrderId() { return orderId; } public void setOrderId(long orderId) { this.orderId = orderId; } public long getUserId() { return userId; } public void setUserId(long userId) { this.userId = userId; } public String getUserName() { return userName; } public void setUserName(String userName) { this.userName = userName; } public Date getCreateDate() { return createDate; } public void setCreateDate(Date createDate) { this.createDate = createDate; } public Date getModifiedDate() { return modifiedDate; } public void setModifiedDate(Date modifiedDate) { this.modifiedDate = modifiedDate; } public long getMerchantId() { return merchantId; } public void setMerchantId(long merchantId) { this.merchantId = merchantId; 
} public String getOrderType() { return orderType; } public void setOrderType(String orderType) { this.orderType = orderType; } public Date getOrderDate() { return orderDate; } public void setOrderDate(Date orderDate) { this.orderDate = orderDate; } public String getOrderNo() { return orderNo; } public void setOrderNo(String orderNo) { this.orderNo = orderNo; } public Date getCycleDate() { return cycleDate; } public void setCycleDate(Date cycleDate) { this.cycleDate = cycleDate; } public long getSubscriberId() { return subscriberId; } public void setSubscriberId(long subscriberId) { this.subscriberId = subscriberId; } public long getSubProductId() { return subProductId; } public void setSubProductId(long subProductId) { this.subProductId = subProductId; } public int getSubscriberType() { return subscriberType; } public void setSubscriberType(int subscriberType) { this.subscriberType = subscriberType; } public String getIsdn() { return isdn; } public void setIsdn(String isdn) { this.isdn = isdn; } public String getShipTo() { return shipTo; } public void setShipTo(String destAddress) { this.shipTo = destAddress; } public long getProductId() { return productId; } public void setProductId(long productId) { this.productId = productId; } public double getOfferPrice() { return offerPrice; } public void setOfferPrice(double offerPrice) { this.offerPrice = offerPrice; } public double getPrice() { return price; } public void setPrice(double price) { this.price = price; } public int getQuantity() { return quantity; } public void setQuantity(int quantity) { this.quantity = quantity; } public double getDiscount() { return discount; } public void setDiscount(double discount) { this.discount = discount; } public double getAmount() { return amount; } public void setAmount(double amount) { this.amount = amount; } public double getScore() { return score; } public void setScore(double score) { this.score = score; } public String getCurrency() { return currency; } public void 
setCurrency(String currency) { this.currency = currency; } public String getCause() { return cause; } public void setCause(String cause) { this.cause = cause; } public String getDescription() { return description; } public void setDescription(String description) { this.description = description; } public int getStatus() { return status; } public void setStatus(int status) { this.status = status; } }
/* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.camunda.bpm.engine.impl.test; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.Timer; import java.util.TimerTask; import java.util.concurrent.Callable; import org.apache.ibatis.logging.LogFactory; import org.camunda.bpm.engine.AuthorizationService; import org.camunda.bpm.engine.CaseService; import org.camunda.bpm.engine.DecisionService; import org.camunda.bpm.engine.ExternalTaskService; import org.camunda.bpm.engine.FilterService; import org.camunda.bpm.engine.FormService; import org.camunda.bpm.engine.HistoryService; import org.camunda.bpm.engine.IdentityService; import org.camunda.bpm.engine.ManagementService; import org.camunda.bpm.engine.ProcessEngine; import org.camunda.bpm.engine.ProcessEngineException; import org.camunda.bpm.engine.RepositoryService; import org.camunda.bpm.engine.RuntimeService; import org.camunda.bpm.engine.TaskService; import org.camunda.bpm.engine.impl.ProcessEngineImpl; import org.camunda.bpm.engine.impl.cfg.ProcessEngineConfigurationImpl; import org.camunda.bpm.engine.impl.interceptor.Command; import org.camunda.bpm.engine.impl.interceptor.CommandContext; import org.camunda.bpm.engine.impl.jobexecutor.JobExecutor; import org.camunda.bpm.engine.impl.persistence.entity.JobEntity; import org.camunda.bpm.engine.impl.util.ClockUtil; import org.camunda.bpm.engine.repository.DeploymentBuilder; import 
org.camunda.bpm.engine.runtime.ActivityInstance;
import org.camunda.bpm.engine.runtime.CaseInstance;
import org.camunda.bpm.engine.runtime.Job;
import org.camunda.bpm.engine.runtime.ProcessInstance;
import org.camunda.bpm.model.bpmn.BpmnModelInstance;
import org.slf4j.Logger;

import junit.framework.AssertionFailedError;

/**
 * Base class for process-engine tests: wires engine service fields, performs annotation-driven
 * deployment around each test, and guarantees DB/cache cleanup and clock reset in teardown.
 *
 * @author Tom Baeyens
 */
public abstract class AbstractProcessEngineTestCase extends PvmTestCase {

  private final static Logger LOG = TestLogger.TEST_LOGGER.getLogger();

  static {
    // this ensures that mybatis uses slf4j logging
    LogFactory.useSlf4jLogging();
  }

  protected ProcessEngine processEngine;
  protected String deploymentId;                                  // deployment made by the current test, if any
  protected Set<String> deploymentIds = new HashSet<String>();    // all deployments to tear down
  protected Throwable exception;                                  // failure recorded during runBare, if any

  protected ProcessEngineConfigurationImpl processEngineConfiguration;
  protected RepositoryService repositoryService;
  protected RuntimeService runtimeService;
  protected TaskService taskService;
  protected FormService formService;
  protected HistoryService historyService;
  protected IdentityService identityService;
  protected ManagementService managementService;
  protected AuthorizationService authorizationService;
  protected CaseService caseService;
  protected FilterService filterService;
  protected ExternalTaskService externalTaskService;
  protected DecisionService decisionService;

  // subclasses supply/initialize the engine before each test
  protected abstract void initializeProcessEngine();

  // Default: do nothing
  protected void closeDownProcessEngine() {
  }

  /**
   * Wraps the test run: initializes the engine and services, honors history-level and deployment
   * annotations, records any thrown failure, and always cleans up (auth, deployments, history
   * cleanup job, DB/cache check, id generator, clock, engine shutdown).
   */
  @Override
  public void runBare() throws Throwable {
    initializeProcessEngine();
    if (repositoryService==null) {
      initializeServices();
    }

    try {
      boolean hasRequiredHistoryLevel = TestHelper.annotationRequiredHistoryLevelCheck(processEngine, getClass(), getName());
      // ignore test case when current history level is too low
      if (hasRequiredHistoryLevel) {
        deploymentId = TestHelper.annotationDeploymentSetUp(processEngine, getClass(), getName());
        super.runBare();
      }
    } catch (AssertionFailedError e) {
      LOG.error("ASSERTION FAILED: " + e, e);
      exception = e;
      throw e;
    } catch (Throwable e) {
      LOG.error("EXCEPTION: " + e, e);
      exception = e;
      throw e;
    } finally {
      identityService.clearAuthentication();
      processEngineConfiguration.setTenantCheckEnabled(true);
      deleteDeployments();
      deleteHistoryCleanupJob();
      // only fail if no test failure was recorded
      TestHelper.assertAndEnsureCleanDbAndCache(processEngine, exception == null);
      TestHelper.resetIdGenerator(processEngineConfiguration);
      ClockUtil.reset();
      // Can't do this in the teardown, as the teardown will be called as part
      // of the super.runBare
      closeDownProcessEngine();
      clearServiceReferences();
    }
  }

  // removes the engine's history cleanup job so the clean-DB assertion doesn't trip over it
  private void deleteHistoryCleanupJob() {
    final Job job = historyService.findHistoryCleanupJob();
    if (job != null) {
      processEngineConfiguration.getCommandExecutorTxRequired().execute(new Command<Void>() {
        public Void execute(CommandContext commandContext) {
          commandContext.getJobManager().deleteJob((JobEntity) job);
          return null;
        }
      });
    }
  }

  // tears down every deployment registered during the test run
  protected void deleteDeployments() {
    if(deploymentId != null) {
      deploymentIds.add(deploymentId);
    }
    for(String deploymentId : deploymentIds) {
      TestHelper.annotationDeploymentTearDown(processEngine, deploymentId, getClass(), getName());
    }
    deploymentId = null;
    deploymentIds.clear();
  }

  // caches all engine service facades in fields for convenient access from tests
  protected void initializeServices() {
    processEngineConfiguration = ((ProcessEngineImpl) processEngine).getProcessEngineConfiguration();
    repositoryService = processEngine.getRepositoryService();
    runtimeService = processEngine.getRuntimeService();
    taskService = processEngine.getTaskService();
    formService = processEngine.getFormService();
    historyService = processEngine.getHistoryService();
    identityService = processEngine.getIdentityService();
    managementService = processEngine.getManagementService();
    authorizationService = processEngine.getAuthorizationService();
    caseService = processEngine.getCaseService();
    filterService = processEngine.getFilterService();
    externalTaskService = processEngine.getExternalTaskService();
    decisionService = processEngine.getDecisionService();
  }

  // drops all service references so a fresh engine can be wired for the next test
  protected void clearServiceReferences() {
    processEngineConfiguration = null;
    repositoryService = null;
    runtimeService = null;
    taskService = null;
    formService = null;
    historyService = null;
    identityService = null;
    managementService = null;
    authorizationService = null;
    caseService = null;
    filterService = null;
    externalTaskService = null;
    decisionService = null;
  }

  /** Fails unless the process instance has disappeared from the runtime tables. */
  public void assertProcessEnded(final String processInstanceId) {
    ProcessInstance processInstance = processEngine
      .getRuntimeService()
      .createProcessInstanceQuery()
      .processInstanceId(processInstanceId)
      .singleResult();

    if (processInstance!=null) {
      throw new AssertionFailedError("Expected finished process instance '"+processInstanceId+"' but it was still in the db");
    }
  }

  /** Fails unless the process instance is still present in the runtime tables. */
  public void assertProcessNotEnded(final String processInstanceId) {
    ProcessInstance processInstance = processEngine
      .getRuntimeService()
      .createProcessInstanceQuery()
      .processInstanceId(processInstanceId)
      .singleResult();

    if (processInstance==null) {
      throw new AssertionFailedError("Expected process instance '"+processInstanceId+"' to be still active but it was not in the db");
    }
  }

  /** Fails unless the case instance has disappeared from the runtime tables. */
  public void assertCaseEnded(final String caseInstanceId) {
    CaseInstance caseInstance = processEngine
      .getCaseService()
      .createCaseInstanceQuery()
      .caseInstanceId(caseInstanceId)
      .singleResult();

    if (caseInstance!=null) {
      throw new AssertionFailedError("Expected finished case instance '"+caseInstanceId+"' but it was still in the db");
    }
  }

  /** @deprecated intervalMillis is ignored; use {@link #waitForJobExecutorToProcessAllJobs(long)} */
  @Deprecated
  public void waitForJobExecutorToProcessAllJobs(long maxMillisToWait, long intervalMillis) {
    waitForJobExecutorToProcessAllJobs(maxMillisToWait);
  }

  /**
   * Starts the job executor and polls (1s interval) until no executable jobs remain or the time
   * limit (at least twice the executor's wait time) is exceeded; always shuts the executor down.
   *
   * @throws ProcessEngineException when jobs are still available after the time limit
   */
  public void waitForJobExecutorToProcessAllJobs(long maxMillisToWait) {
    JobExecutor jobExecutor = processEngineConfiguration.getJobExecutor();
    jobExecutor.start();
    long intervalMillis = 1000;

    int jobExecutorWaitTime = jobExecutor.getWaitTimeInMillis() * 2;
    if(maxMillisToWait < jobExecutorWaitTime) {
      maxMillisToWait = jobExecutorWaitTime;
    }

    try {
      Timer timer = new Timer();
      InterruptTask task = new InterruptTask(Thread.currentThread());
      timer.schedule(task, maxMillisToWait);
      boolean areJobsAvailable = true;
      try {
        while (areJobsAvailable && !task.isTimeLimitExceeded()) {
          Thread.sleep(intervalMillis);
          try {
            areJobsAvailable = areJobsAvailable();
          } catch(Throwable t) {
            // Ignore, possible that exception occurs due to locking/updating of table on MSSQL when
            // isolation level doesn't allow READ of the table
          }
        }
      } catch (InterruptedException e) {
      } finally {
        timer.cancel();
      }
      if (areJobsAvailable) {
        throw new ProcessEngineException("time limit of " + maxMillisToWait + " was exceeded");
      }
    } finally {
      jobExecutor.shutdown();
    }
  }

  /** @deprecated intervalMillis is ignored; use {@link #waitForJobExecutorOnCondition(long, Callable)} */
  @Deprecated
  public void waitForJobExecutorOnCondition(long maxMillisToWait, long intervalMillis, Callable<Boolean> condition) {
    waitForJobExecutorOnCondition(maxMillisToWait, condition);
  }

  /**
   * Starts the job executor and polls (500ms interval) until the given condition holds or the time
   * limit is exceeded; always shuts the executor down.
   *
   * @throws ProcessEngineException when the condition throws, or is still violated at the limit
   */
  public void waitForJobExecutorOnCondition(long maxMillisToWait, Callable<Boolean> condition) {
    JobExecutor jobExecutor = processEngineConfiguration.getJobExecutor();
    jobExecutor.start();
    long intervalMillis = 500;

    if(maxMillisToWait < (jobExecutor.getWaitTimeInMillis()*2)) {
      maxMillisToWait = (jobExecutor.getWaitTimeInMillis()*2);
    }

    try {
      Timer timer = new Timer();
      InterruptTask task = new InterruptTask(Thread.currentThread());
      timer.schedule(task, maxMillisToWait);
      boolean conditionIsViolated = true;
      try {
        while (conditionIsViolated && !task.isTimeLimitExceeded()) {
          Thread.sleep(intervalMillis);
          conditionIsViolated = !condition.call();
        }
      } catch (InterruptedException e) {
      } catch (Exception e) {
        throw new ProcessEngineException("Exception while waiting on condition: "+e.getMessage(), e);
      } finally {
        timer.cancel();
      }
      if (conditionIsViolated) {
        throw new ProcessEngineException("time limit of " + maxMillisToWait + " was exceeded");
      }
    } finally {
      jobExecutor.shutdown();
    }
  }

  /**
   * Execute all available jobs recursively till no more jobs found.
   */
  public void executeAvailableJobs() {
    executeAvailableJobs(0, Integer.MAX_VALUE, true);
  }

  /**
   * Execute all available jobs recursively till no more jobs found or the number of executions is higher than expected.
   *
   * @param expectedExecutions number of expected job executions
   *
   * @throws AssertionFailedError when execute less or more jobs than expected
   *
   * @see #executeAvailableJobs()
   */
  public void executeAvailableJobs(int expectedExecutions){
    executeAvailableJobs(0, expectedExecutions, false);
  }

  // recursive worker: executes one round of jobs-with-retries, checks the execution count, recurses
  private void executeAvailableJobs(int jobsExecuted, int expectedExecutions, boolean ignoreLessExecutions) {
    List<Job> jobs = managementService.createJobQuery().withRetriesLeft().list();

    if (jobs.isEmpty()) {
      assertTrue("executed less jobs than expected. expected <" + expectedExecutions + "> actual <" + jobsExecuted + ">",
          jobsExecuted == expectedExecutions || ignoreLessExecutions);
      return;
    }

    for (Job job : jobs) {
      try {
        managementService.executeJob(job.getId());
        jobsExecuted += 1;
      } catch (Exception e) {}  // failed jobs are retried on the next round (retries decremented)
    }

    assertTrue("executed more jobs than expected. expected <" + expectedExecutions + "> actual <" + jobsExecuted + ">",
        jobsExecuted <= expectedExecutions);

    executeAvailableJobs(jobsExecuted, expectedExecutions, ignoreLessExecutions);
  }

  /** @return true when at least one non-suspended job with retries left is due for execution */
  public boolean areJobsAvailable() {
    List<Job> list = managementService.createJobQuery().list();
    for (Job job : list) {
      if (!job.isSuspended() && job.getRetries() > 0 && (job.getDuedate() == null || ClockUtil.getCurrentTime().after(job.getDuedate()))) {
        return true;
      }
    }
    return false;
  }

  // interrupts the waiting test thread when the wait time limit elapses
  private static class InterruptTask extends TimerTask {

    protected boolean timeLimitExceeded = false;
    protected Thread thread;

    public InterruptTask(Thread thread) {
      this.thread = thread;
    }
    public boolean isTimeLimitExceeded() {
      return timeLimitExceeded;
    }
    @Override
    public void run() {
      timeLimitExceeded = true;
      thread.interrupt();
    }
  }

  /** @deprecated misspelled; use {@link #getInstancesForActivityId(ActivityInstance, String)} */
  @Deprecated
  protected List<ActivityInstance> getInstancesForActivitiyId(ActivityInstance activityInstance, String activityId) {
    return getInstancesForActivityId(activityInstance, activityId);
  }

  // depth-first collection of all activity instances (including the root) with the given activity id
  protected List<ActivityInstance> getInstancesForActivityId(ActivityInstance activityInstance, String activityId) {
    List<ActivityInstance> result = new ArrayList<ActivityInstance>();
    if(activityInstance.getActivityId().equals(activityId)) {
      result.add(activityInstance);
    }
    for (ActivityInstance childInstance : activityInstance.getChildActivityInstances()) {
      result.addAll(getInstancesForActivityId(childInstance, activityId));
    }
    return result;
  }

  // runs r authenticated as userId with authorization checks enabled; always restores the defaults.
  // NOTE(review): groupIds is accepted but never used here — confirm whether callers expect group
  // authentication to be applied as well.
  protected void runAsUser(String userId, List<String> groupIds, Runnable r) {
    try {
      identityService.setAuthenticatedUserId(userId);
      processEngineConfiguration.setAuthorizationEnabled(true);
      r.run();
    } finally {
      identityService.setAuthenticatedUserId(null);
      processEngineConfiguration.setAuthorizationEnabled(false);
    }
  }

  /** Deploys the given model instances and returns the deployment id (registered for teardown). */
  protected String deployment(BpmnModelInstance... bpmnModelInstances) {
    DeploymentBuilder deploymentBuilder = repositoryService.createDeployment();
    return deployment(deploymentBuilder, bpmnModelInstances);
  }

  /** Deploys the given classpath resources and returns the deployment id. */
  protected String deployment(String... resources) {
    DeploymentBuilder deploymentBuilder = repositoryService.createDeployment();
    return deployment(deploymentBuilder, resources);
  }

  /** Deploys the given model instances for a specific tenant. */
  protected String deploymentForTenant(String tenantId, BpmnModelInstance... bpmnModelInstances) {
    DeploymentBuilder deploymentBuilder = repositoryService.createDeployment().tenantId(tenantId);
    return deployment(deploymentBuilder, bpmnModelInstances);
  }

  /** Deploys the given classpath resources for a specific tenant. */
  protected String deploymentForTenant(String tenantId, String... resources) {
    DeploymentBuilder deploymentBuilder = repositoryService.createDeployment().tenantId(tenantId);
    return deployment(deploymentBuilder, resources);
  }

  /** Deploys one classpath resource plus one model instance for a specific tenant. */
  protected String deploymentForTenant(String tenantId, String classpathResource, BpmnModelInstance modelInstance) {
    return deployment(repositoryService.createDeployment()
        .tenantId(tenantId)
        .addClasspathResource(classpathResource), modelInstance);
  }

  // adds each model instance under a generated resource name, then deploys
  protected String deployment(DeploymentBuilder deploymentBuilder, BpmnModelInstance... bpmnModelInstances) {
    for (int i = 0; i < bpmnModelInstances.length; i++) {
      BpmnModelInstance bpmnModelInstance = bpmnModelInstances[i];
      deploymentBuilder.addModelInstance("testProcess-"+i+".bpmn", bpmnModelInstance);
    }

    return deploymentWithBuilder(deploymentBuilder);
  }

  // adds each classpath resource, then deploys
  protected String deployment(DeploymentBuilder deploymentBuilder, String... resources) {
    for (int i = 0; i < resources.length; i++) {
      deploymentBuilder.addClasspathResource(resources[i]);
    }

    return deploymentWithBuilder(deploymentBuilder);
  }

  // performs the deploy and records the id for teardown
  protected String deploymentWithBuilder(DeploymentBuilder builder) {
    deploymentId = builder.deploy().getId();
    deploymentIds.add(deploymentId);

    return deploymentId;
  }
}
/* * Copyright 2012 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.drools.workbench.screens.guided.dtable.client.widget.table.model.synchronizers.impl; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import org.drools.workbench.models.datamodel.rule.Attribute; import org.drools.workbench.models.guided.dtable.shared.model.AttributeCol52; import org.drools.workbench.models.guided.dtable.shared.model.BaseColumnFieldDiff; import org.drools.workbench.screens.guided.dtable.client.widget.table.columns.BaseSingletonDOMElementUiColumn; import org.drools.workbench.screens.guided.dtable.client.widget.table.columns.BooleanUiColumn; import org.drools.workbench.screens.guided.dtable.client.widget.table.columns.IntegerUiColumn; import org.drools.workbench.screens.guided.dtable.client.widget.table.columns.SalienceUiColumn; import org.drools.workbench.screens.guided.dtable.client.widget.table.columns.StringUiColumn; import org.drools.workbench.screens.guided.dtable.client.widget.table.model.synchronizers.ModelSynchronizer.VetoException; import org.drools.workbench.screens.guided.dtable.client.widget.table.model.synchronizers.impl.BaseSynchronizer.MoveColumnToMetaData; import org.junit.Test; import org.uberfire.ext.wires.core.grids.client.model.GridColumn; import org.uberfire.ext.wires.core.grids.client.model.impl.BaseGridCellValue; import static org.junit.Assert.assertEquals; import 
static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; public class AttributeColumnSynchronizerTest extends BaseSynchronizerTest { @Test public void testAppend() throws VetoException { final AttributeCol52 column = new AttributeCol52(); column.setAttribute(Attribute.SALIENCE.getAttributeName()); modelSynchronizer.appendColumn(column); assertEquals(1, model.getAttributeCols().size()); assertEquals(Attribute.SALIENCE.getAttributeName(), model.getAttributeCols().get(0).getAttribute()); assertEquals(3, uiModel.getColumns().size()); assertTrue(uiModel.getColumns().get(2) instanceof IntegerUiColumn); assertEquals(Attribute.SALIENCE.getAttributeName(), uiModel.getColumns().get(2).getHeaderMetaData().get(0).getTitle()); assertEquals(true, ((BaseSingletonDOMElementUiColumn) uiModel.getColumns().get(2)).isEditable()); } @Test public void testUpdate1() throws VetoException { final AttributeCol52 column = spy(new AttributeCol52()); column.setAttribute(Attribute.SALIENCE.getAttributeName()); modelSynchronizer.appendColumn(column); final AttributeCol52 edited = new AttributeCol52(); edited.setWidth(column.getWidth()); edited.setAttribute(Attribute.ENABLED.getAttributeName()); List<BaseColumnFieldDiff> diffs = modelSynchronizer.updateColumn(column, edited); assertEquals(1, diffs.size()); verify(column).diff(edited); assertEquals(1, model.getAttributeCols().size()); assertEquals(Attribute.ENABLED.getAttributeName(), model.getAttributeCols().get(0).getAttribute()); assertEquals(3, uiModel.getColumns().size()); assertTrue(uiModel.getColumns().get(2) instanceof BooleanUiColumn); assertEquals(Attribute.ENABLED.getAttributeName(), uiModel.getColumns().get(2).getHeaderMetaData().get(0).getTitle()); } @Test public void testUpdate2() throws VetoException { final AttributeCol52 column = spy(new 
AttributeCol52()); column.setAttribute(Attribute.SALIENCE.getAttributeName()); modelSynchronizer.appendColumn(column); final AttributeCol52 edited = new AttributeCol52(); edited.setWidth(column.getWidth()); edited.setAttribute(Attribute.SALIENCE.getAttributeName()); edited.setHideColumn(true); List<BaseColumnFieldDiff> diffs = modelSynchronizer.updateColumn(column, edited); assertEquals(1, diffs.size()); verify(column).diff(edited); assertEquals(1, model.getAttributeCols().size()); assertEquals(Attribute.SALIENCE.getAttributeName(), model.getAttributeCols().get(0).getAttribute()); assertEquals(3, uiModel.getColumns().size()); assertTrue(uiModel.getColumns().get(2) instanceof IntegerUiColumn); assertEquals(Attribute.SALIENCE.getAttributeName(), uiModel.getColumns().get(2).getHeaderMetaData().get(0).getTitle()); assertEquals(false, uiModel.getColumns().get(2).isVisible()); } @Test public void testUpdateSalienceRowNumber() throws VetoException { modelSynchronizer.appendRow(); modelSynchronizer.appendRow(); final AttributeCol52 column = new AttributeCol52(); column.setAttribute(Attribute.SALIENCE.getAttributeName()); modelSynchronizer.appendColumn(column); final AttributeCol52 edited1 = new AttributeCol52(); edited1.setAttribute(Attribute.SALIENCE.getAttributeName()); edited1.setUseRowNumber(true); modelSynchronizer.updateColumn(column, edited1); assertEquals(1, model.getAttributeCols().size()); assertEquals(Attribute.SALIENCE.getAttributeName(), model.getAttributeCols().get(0).getAttribute()); assertEquals(1, model.getData().get(0).get(2).getNumericValue()); assertEquals(2, model.getData().get(1).get(2).getNumericValue()); assertEquals(3, uiModel.getColumns().size()); assertTrue(uiModel.getColumns().get(2) instanceof SalienceUiColumn); assertEquals(Attribute.SALIENCE.getAttributeName(), uiModel.getColumns().get(2).getHeaderMetaData().get(0).getTitle()); assertEquals(true, ((SalienceUiColumn) uiModel.getColumns().get(2)).isUseRowNumber()); assertEquals(1, 
uiModel.getRow(0).getCells().get(2).getValue().getValue()); assertEquals(2, uiModel.getRow(1).getCells().get(2).getValue().getValue()); final AttributeCol52 edited2 = new AttributeCol52(); edited2.setAttribute(Attribute.SALIENCE.getAttributeName()); edited2.setUseRowNumber(true); edited2.setReverseOrder(true); modelSynchronizer.updateColumn(column, edited2); assertEquals(1, model.getAttributeCols().size()); assertEquals(Attribute.SALIENCE.getAttributeName(), model.getAttributeCols().get(0).getAttribute()); assertEquals(2, model.getData().get(0).get(2).getNumericValue()); assertEquals(1, model.getData().get(1).get(2).getNumericValue()); assertEquals(3, uiModel.getColumns().size()); assertTrue(uiModel.getColumns().get(2) instanceof SalienceUiColumn); assertEquals(Attribute.SALIENCE.getAttributeName(), uiModel.getColumns().get(2).getHeaderMetaData().get(0).getTitle()); assertEquals(true, ((SalienceUiColumn) uiModel.getColumns().get(2)).isUseRowNumber()); assertEquals(2, uiModel.getRow(0).getCells().get(2).getValue().getValue()); assertEquals(1, uiModel.getRow(1).getCells().get(2).getValue().getValue()); } @Test public void testDelete() throws VetoException { final AttributeCol52 column = new AttributeCol52(); column.setAttribute(Attribute.SALIENCE.getAttributeName()); modelSynchronizer.appendColumn(column); assertEquals(1, model.getAttributeCols().size()); assertEquals(3, uiModel.getColumns().size()); modelSynchronizer.deleteColumn(column); assertEquals(0, model.getAttributeCols().size()); assertEquals(2, uiModel.getColumns().size()); } @Test public void testMoveColumnTo_MoveLeft() throws VetoException { final AttributeCol52 column1 = new AttributeCol52(); column1.setAttribute(Attribute.SALIENCE.getAttributeName()); final AttributeCol52 column2 = new AttributeCol52(); column2.setAttribute(Attribute.AGENDA_GROUP.getAttributeName()); modelSynchronizer.appendColumn(column1); modelSynchronizer.appendColumn(column2); modelSynchronizer.appendRow(); uiModel.setCellValue(0, 
2, new BaseGridCellValue<Integer>(1)); uiModel.setCellValue(0, 3, new BaseGridCellValue<String>("smurf")); assertEquals(2, model.getAttributeCols().size()); assertEquals(column1, model.getAttributeCols().get(0)); assertEquals(column2, model.getAttributeCols().get(1)); assertEquals(1, model.getData().get(0).get(2).getNumericValue()); assertEquals("smurf", model.getData().get(0).get(3).getStringValue()); assertEquals(4, uiModel.getColumns().size()); final GridColumn<?> uiModelColumn1_1 = uiModel.getColumns().get(2); final GridColumn<?> uiModelColumn2_1 = uiModel.getColumns().get(3); assertEquals(Attribute.SALIENCE.getAttributeName(), uiModelColumn1_1.getHeaderMetaData().get(0).getTitle()); assertEquals(Attribute.AGENDA_GROUP.getAttributeName(), uiModelColumn2_1.getHeaderMetaData().get(0).getTitle()); assertTrue(uiModelColumn1_1 instanceof IntegerUiColumn); assertTrue(uiModelColumn2_1 instanceof StringUiColumn); assertEquals(2, uiModelColumn1_1.getIndex()); assertEquals(3, uiModelColumn2_1.getIndex()); assertEquals(1, uiModel.getRow(0).getCells().get(uiModelColumn1_1.getIndex()).getValue().getValue()); assertEquals("smurf", uiModel.getRow(0).getCells().get(uiModelColumn2_1.getIndex()).getValue().getValue()); uiModel.moveColumnTo(2, uiModelColumn2_1); assertEquals(2, model.getAttributeCols().size()); assertEquals(column2, model.getAttributeCols().get(0)); assertEquals(column1, model.getAttributeCols().get(1)); assertEquals("smurf", model.getData().get(0).get(2).getStringValue()); assertEquals(1, model.getData().get(0).get(3).getNumericValue()); assertEquals(4, uiModel.getColumns().size()); final GridColumn<?> uiModelColumn1_2 = uiModel.getColumns().get(2); final GridColumn<?> uiModelColumn2_2 = uiModel.getColumns().get(3); assertEquals(Attribute.AGENDA_GROUP.getAttributeName(), uiModelColumn1_2.getHeaderMetaData().get(0).getTitle()); assertEquals(Attribute.SALIENCE.getAttributeName(), uiModelColumn2_2.getHeaderMetaData().get(0).getTitle()); assertTrue(uiModelColumn1_2 
instanceof StringUiColumn); assertTrue(uiModelColumn2_2 instanceof IntegerUiColumn); assertEquals(3, uiModelColumn1_2.getIndex()); assertEquals(2, uiModelColumn2_2.getIndex()); assertEquals("smurf", uiModel.getRow(0).getCells().get(uiModelColumn1_2.getIndex()).getValue().getValue()); assertEquals(1, uiModel.getRow(0).getCells().get(uiModelColumn2_2.getIndex()).getValue().getValue()); } @Test public void testMoveColumnTo_MoveRight() throws VetoException { final AttributeCol52 column1 = new AttributeCol52(); column1.setAttribute(Attribute.SALIENCE.getAttributeName()); final AttributeCol52 column2 = new AttributeCol52(); column2.setAttribute(Attribute.AGENDA_GROUP.getAttributeName()); modelSynchronizer.appendColumn(column1); modelSynchronizer.appendColumn(column2); modelSynchronizer.appendRow(); uiModel.setCellValue(0, 2, new BaseGridCellValue<Integer>(1)); uiModel.setCellValue(0, 3, new BaseGridCellValue<String>("smurf")); assertEquals(2, model.getAttributeCols().size()); assertEquals(column1, model.getAttributeCols().get(0)); assertEquals(column2, model.getAttributeCols().get(1)); assertEquals(1, model.getData().get(0).get(2).getNumericValue()); assertEquals("smurf", model.getData().get(0).get(3).getStringValue()); assertEquals(4, uiModel.getColumns().size()); final GridColumn<?> uiModelColumn1_1 = uiModel.getColumns().get(2); final GridColumn<?> uiModelColumn2_1 = uiModel.getColumns().get(3); assertEquals(Attribute.SALIENCE.getAttributeName(), uiModelColumn1_1.getHeaderMetaData().get(0).getTitle()); assertEquals(Attribute.AGENDA_GROUP.getAttributeName(), uiModelColumn2_1.getHeaderMetaData().get(0).getTitle()); assertTrue(uiModelColumn1_1 instanceof IntegerUiColumn); assertTrue(uiModelColumn2_1 instanceof StringUiColumn); assertEquals(2, uiModelColumn1_1.getIndex()); assertEquals(3, uiModelColumn2_1.getIndex()); assertEquals(1, uiModel.getRow(0).getCells().get(uiModelColumn1_1.getIndex()).getValue().getValue()); assertEquals("smurf", 
uiModel.getRow(0).getCells().get(uiModelColumn2_1.getIndex()).getValue().getValue()); uiModel.moveColumnTo(3, uiModelColumn1_1); assertEquals(2, model.getAttributeCols().size()); assertEquals(column2, model.getAttributeCols().get(0)); assertEquals(column1, model.getAttributeCols().get(1)); assertEquals("smurf", model.getData().get(0).get(2).getStringValue()); assertEquals(1, model.getData().get(0).get(3).getNumericValue()); assertEquals(4, uiModel.getColumns().size()); final GridColumn<?> uiModelColumn1_2 = uiModel.getColumns().get(2); final GridColumn<?> uiModelColumn2_2 = uiModel.getColumns().get(3); assertEquals(Attribute.AGENDA_GROUP.getAttributeName(), uiModelColumn1_2.getHeaderMetaData().get(0).getTitle()); assertEquals(Attribute.SALIENCE.getAttributeName(), uiModelColumn2_2.getHeaderMetaData().get(0).getTitle()); assertTrue(uiModelColumn1_2 instanceof StringUiColumn); assertTrue(uiModelColumn2_2 instanceof IntegerUiColumn); assertEquals(3, uiModelColumn1_2.getIndex()); assertEquals(2, uiModelColumn2_2.getIndex()); assertEquals("smurf", uiModel.getRow(0).getCells().get(uiModelColumn1_2.getIndex()).getValue().getValue()); assertEquals(1, uiModel.getRow(0).getCells().get(uiModelColumn2_2.getIndex()).getValue().getValue()); } @Test public void testMoveColumnTo_OutOfBounds() throws VetoException { final AttributeCol52 column1 = new AttributeCol52(); column1.setAttribute(Attribute.SALIENCE.getAttributeName()); final AttributeCol52 column2 = new AttributeCol52(); column2.setAttribute(Attribute.AGENDA_GROUP.getAttributeName()); modelSynchronizer.appendColumn(column1); modelSynchronizer.appendColumn(column2); modelSynchronizer.appendRow(); uiModel.setCellValue(0, 2, new BaseGridCellValue<Integer>(1)); uiModel.setCellValue(0, 3, new BaseGridCellValue<String>("smurf")); assertEquals(2, model.getAttributeCols().size()); assertEquals(column1, model.getAttributeCols().get(0)); assertEquals(column2, model.getAttributeCols().get(1)); assertEquals(1, 
model.getData().get(0).get(2).getNumericValue()); assertEquals("smurf", model.getData().get(0).get(3).getStringValue()); assertEquals(4, uiModel.getColumns().size()); final GridColumn<?> uiModelColumn1_1 = uiModel.getColumns().get(2); final GridColumn<?> uiModelColumn2_1 = uiModel.getColumns().get(3); assertEquals(Attribute.SALIENCE.getAttributeName(), uiModelColumn1_1.getHeaderMetaData().get(0).getTitle()); assertEquals(Attribute.AGENDA_GROUP.getAttributeName(), uiModelColumn2_1.getHeaderMetaData().get(0).getTitle()); assertTrue(uiModelColumn1_1 instanceof IntegerUiColumn); assertTrue(uiModelColumn2_1 instanceof StringUiColumn); assertEquals(2, uiModelColumn1_1.getIndex()); assertEquals(3, uiModelColumn2_1.getIndex()); assertEquals(1, uiModel.getRow(0).getCells().get(uiModelColumn1_1.getIndex()).getValue().getValue()); assertEquals("smurf", uiModel.getRow(0).getCells().get(uiModelColumn2_1.getIndex()).getValue().getValue()); uiModel.moveColumnTo(0, uiModelColumn1_1); assertEquals(2, model.getAttributeCols().size()); assertEquals(column1, model.getAttributeCols().get(0)); assertEquals(column2, model.getAttributeCols().get(1)); assertEquals(1, model.getData().get(0).get(2).getNumericValue()); assertEquals("smurf", model.getData().get(0).get(3).getStringValue()); assertEquals(4, uiModel.getColumns().size()); final GridColumn<?> uiModelColumn1_2 = uiModel.getColumns().get(2); final GridColumn<?> uiModelColumn2_2 = uiModel.getColumns().get(3); assertEquals(Attribute.SALIENCE.getAttributeName(), uiModelColumn1_2.getHeaderMetaData().get(0).getTitle()); assertEquals(Attribute.AGENDA_GROUP.getAttributeName(), uiModelColumn2_2.getHeaderMetaData().get(0).getTitle()); assertTrue(uiModelColumn1_2 instanceof IntegerUiColumn); assertTrue(uiModelColumn2_2 instanceof StringUiColumn); assertEquals(2, uiModelColumn1_2.getIndex()); assertEquals(3, uiModelColumn2_2.getIndex()); assertEquals(1, uiModel.getRow(0).getCells().get(uiModelColumn1_2.getIndex()).getValue().getValue()); 
assertEquals("smurf", uiModel.getRow(0).getCells().get(uiModelColumn2_2.getIndex()).getValue().getValue()); } @Test public void testMoveColumnsTo_MoveLeft() throws VetoException { final AttributeCol52 column1 = new AttributeCol52(); column1.setAttribute(Attribute.SALIENCE.getAttributeName()); final AttributeCol52 column2 = new AttributeCol52(); column2.setAttribute(Attribute.AGENDA_GROUP.getAttributeName()); final AttributeCol52 column3 = new AttributeCol52(); column3.setAttribute(Attribute.AUTO_FOCUS.getAttributeName()); modelSynchronizer.appendColumn(column1); modelSynchronizer.appendColumn(column2); modelSynchronizer.appendColumn(column3); modelSynchronizer.appendRow(); uiModel.setCellValue(0, 2, new BaseGridCellValue<Integer>(1)); uiModel.setCellValue(0, 3, new BaseGridCellValue<String>("smurf")); uiModel.setCellValue(0, 4, new BaseGridCellValue<Boolean>(true)); final GridColumn<?> uiModelColumn1_1 = uiModel.getColumns().get(2); final GridColumn<?> uiModelColumn2_1 = uiModel.getColumns().get(3); final GridColumn<?> uiModelColumn3_1 = uiModel.getColumns().get(4); assertTestMoveColumnsTo(column1, column2, column3, uiModelColumn1_1, uiModelColumn2_1, uiModelColumn3_1); //Moving multiple Attribute columns as an unsupported operation as it's impossible via the UI uiModel.moveColumnsTo(2, new ArrayList<GridColumn<?>>() {{ add(uiModelColumn2_1); add(uiModelColumn3_1); }}); final GridColumn<?> uiModelColumn1_2 = uiModel.getColumns().get(2); final GridColumn<?> uiModelColumn2_2 = uiModel.getColumns().get(3); final GridColumn<?> uiModelColumn3_2 = uiModel.getColumns().get(4); assertTestMoveColumnsTo(column1, column2, column3, uiModelColumn1_2, uiModelColumn2_2, uiModelColumn3_2); } @Test public void testMoveColumnsTo_MoveRight() throws VetoException { final AttributeCol52 column1 = new AttributeCol52(); column1.setAttribute(Attribute.SALIENCE.getAttributeName()); final AttributeCol52 column2 = new AttributeCol52(); 
column2.setAttribute(Attribute.AGENDA_GROUP.getAttributeName()); final AttributeCol52 column3 = new AttributeCol52(); column3.setAttribute(Attribute.AUTO_FOCUS.getAttributeName()); modelSynchronizer.appendColumn(column1); modelSynchronizer.appendColumn(column2); modelSynchronizer.appendColumn(column3); modelSynchronizer.appendRow(); uiModel.setCellValue(0, 2, new BaseGridCellValue<Integer>(1)); uiModel.setCellValue(0, 3, new BaseGridCellValue<String>("smurf")); uiModel.setCellValue(0, 4, new BaseGridCellValue<Boolean>(true)); final GridColumn<?> uiModelColumn1_1 = uiModel.getColumns().get(2); final GridColumn<?> uiModelColumn2_1 = uiModel.getColumns().get(3); final GridColumn<?> uiModelColumn3_1 = uiModel.getColumns().get(4); assertTestMoveColumnsTo(column1, column2, column3, uiModelColumn1_1, uiModelColumn2_1, uiModelColumn3_1); uiModel.moveColumnsTo(4, new ArrayList<GridColumn<?>>() {{ add(uiModelColumn1_1); add(uiModelColumn2_1); }}); final GridColumn<?> uiModelColumn1_2 = uiModel.getColumns().get(2); final GridColumn<?> uiModelColumn2_2 = uiModel.getColumns().get(3); final GridColumn<?> uiModelColumn3_2 = uiModel.getColumns().get(4); assertTestMoveColumnsTo(column1, column2, column3, uiModelColumn1_2, uiModelColumn2_2, uiModelColumn3_2); } @Test public void checkHandlesMoveColumnsToWithEmptyMetadata() throws VetoException { final AttributeColumnSynchronizer synchronizer = new AttributeColumnSynchronizer(); assertFalse(synchronizer.handlesMoveColumnsTo(Collections.emptyList())); } @Test public void checkHandlesMoveColumnsToWithMultipleMetadata() throws VetoException { final MoveColumnToMetaData md0 = mock(MoveColumnToMetaData.class); final MoveColumnToMetaData md1 = mock(MoveColumnToMetaData.class); final AttributeColumnSynchronizer synchronizer = new AttributeColumnSynchronizer(); when(md0.getColumn()).thenReturn(mock(AttributeCol52.class)); when(md1.getColumn()).thenReturn(mock(AttributeCol52.class)); 
assertFalse(synchronizer.handlesMoveColumnsTo(Arrays.asList(md0, md1))); } @Test public void checkHandlesMoveColumnsToWithSingleMetadata() throws VetoException { final MoveColumnToMetaData md0 = mock(MoveColumnToMetaData.class); final AttributeColumnSynchronizer synchronizer = new AttributeColumnSynchronizer(); when(md0.getColumn()).thenReturn(mock(AttributeCol52.class)); assertTrue(synchronizer.handlesMoveColumnsTo(Collections.singletonList(md0))); } private void assertTestMoveColumnsTo(final AttributeCol52 column1, final AttributeCol52 column2, final AttributeCol52 column3, final GridColumn<?> uiModelColumn1, final GridColumn<?> uiModelColumn2, final GridColumn<?> uiModelColumn3) { assertEquals(3, model.getAttributeCols().size()); assertEquals(column1, model.getAttributeCols().get(0)); assertEquals(column2, model.getAttributeCols().get(1)); assertEquals(column3, model.getAttributeCols().get(2)); assertEquals(1, model.getData().get(0).get(2).getNumericValue()); assertEquals("smurf", model.getData().get(0).get(3).getStringValue()); assertEquals(true, model.getData().get(0).get(4).getBooleanValue()); assertEquals(5, uiModel.getColumns().size()); assertEquals(Attribute.SALIENCE.getAttributeName(), uiModelColumn1.getHeaderMetaData().get(0).getTitle()); assertEquals(Attribute.AGENDA_GROUP.getAttributeName(), uiModelColumn2.getHeaderMetaData().get(0).getTitle()); assertEquals(Attribute.AUTO_FOCUS.getAttributeName(), uiModelColumn3.getHeaderMetaData().get(0).getTitle()); assertTrue(uiModelColumn1 instanceof IntegerUiColumn); assertTrue(uiModelColumn2 instanceof StringUiColumn); assertTrue(uiModelColumn3 instanceof BooleanUiColumn); assertEquals(2, uiModelColumn1.getIndex()); assertEquals(3, uiModelColumn2.getIndex()); assertEquals(4, uiModelColumn3.getIndex()); assertEquals(1, uiModel.getRow(0).getCells().get(uiModelColumn1.getIndex()).getValue().getValue()); assertEquals("smurf", uiModel.getRow(0).getCells().get(uiModelColumn2.getIndex()).getValue().getValue()); 
assertEquals(true, uiModel.getRow(0).getCells().get(uiModelColumn3.getIndex()).getValue().getValue()); } }
package de.dfki.lt.mdparser.sentencesplitter;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.StringTokenizer;

import com.schmeier.posTagger.focus.Focus;
import com.schmeier.posTagger.tagger.Tagger;

import de.bwaldvogel.liblinear.Linear;
import de.bwaldvogel.liblinear.Model;
import de.dfki.lt.mdparser.features.Alphabet;
import de.dfki.lt.mdparser.features.FeatureVector;
import edu.northwestern.at.morphadorner.corpuslinguistics.tokenizer.DefaultWordTokenizer;

/**
 * Statistical sentence splitter. Tokenises raw text, POS-tags the token
 * stream, and uses a liblinear binary classifier to decide, at every token
 * that occurs in the configured end-of-sentence candidate set, whether a
 * sentence boundary is present.
 */
public class SSPredictor {

  /** Feature-name to liblinear feature-index mapping. */
  private Alphabet alpha;
  /** Trained liblinear model deciding boundary vs. non-boundary. */
  private Model model;
  /** External POS tagger; may stay null with the array constructor if no tagger file is given. */
  private Tagger tagger;
  /** Word lists consulted by the feature model (lexical evidence). */
  private Set<String> lowCaseSet;
  private Set<String> neSet;
  /** Candidate end-of-sentence tokens; only these trigger a classifier call. */
  private Set<String> endSet;

  /**
   * Creates a predictor from files on disk.
   *
   * @param modelFiles positional resources: [0] alphabet, [1] liblinear model,
   *     [2] lower-case word list, [3] named-entity list, [4] end-token list,
   *     [5] tagger model. Entries beyond index 1 are optional; the
   *     corresponding field stays null when absent.
   * @throws IOException if any resource cannot be read
   */
  public SSPredictor(String[] modelFiles) throws IOException {
    this.alpha = new Alphabet(modelFiles[0]);
    this.model = Linear.loadModel(new File(modelFiles[1]));
    if (modelFiles.length > 2) {
      this.lowCaseSet = readWords(modelFiles[2]);
    }
    if (modelFiles.length > 3) {
      this.neSet = readWords(modelFiles[3]);
    }
    if (modelFiles.length > 4) {
      this.endSet = readWords(modelFiles[4]);
    }
    if (modelFiles.length > 5) {
      this.tagger = new Tagger(modelFiles[5]);
    }
  }

  /**
   * Creates a predictor from already-opened streams plus a tagger model file.
   * The streams are read but intentionally not closed here; the caller owns
   * their lifecycle.
   *
   * @param modelFiles positional streams, same layout as the file-based
   *     constructor (indices 0-4)
   * @param taggerFile path to the POS tagger model
   * @throws IOException if any stream cannot be read
   */
  public SSPredictor(InputStream[] modelFiles, String taggerFile) throws IOException {
    this.alpha = new Alphabet(modelFiles[0]);
    this.model = Linear.loadModel(new InputStreamReader(modelFiles[1]));
    if (modelFiles.length > 2) {
      this.lowCaseSet = readWords(modelFiles[2]);
    }
    if (modelFiles.length > 3) {
      this.neSet = readWords(modelFiles[3]);
    }
    if (modelFiles.length > 4) {
      this.endSet = readWords(modelFiles[4]);
    }
    this.tagger = new Tagger(taggerFile);
  }

  /**
   * Reads an input file and returns its word material as one space-separated
   * string. Only the "conll" format is understood (word = second
   * tab-separated column of each non-empty line); for any other format the
   * file content is ignored and an empty string is returned, matching the
   * original behaviour.
   *
   * @param inputFile path to the input file (UTF-8)
   * @param inputFormat input format identifier; only "conll" is recognised
   * @return concatenated words, each followed by a single space
   * @throws IOException if the file cannot be read
   */
  public String readInput(String inputFile, String inputFormat) throws IOException {
    StringBuilder sb = new StringBuilder();
    // try-with-resources: the original leaked the reader on an IOException mid-read
    try (BufferedReader fr = new BufferedReader(
        new InputStreamReader(new FileInputStream(inputFile), StandardCharsets.UTF_8))) {
      if (inputFormat.equals("conll")) {
        String line;
        while ((line = fr.readLine()) != null) {
          if (line.length() > 0) {
            // CoNLL: column 1 (0-based) holds the word form
            sb.append(line.split("\t")[1]).append(' ');
          }
        }
      }
    }
    return sb.toString();
  }

  /**
   * Splits raw text into word tokens using the MorphAdorner default
   * tokenizer.
   *
   * @param inputString raw text
   * @return list of word tokens
   */
  public List<String> tokenise(String inputString) {
    DefaultWordTokenizer wordTokenizer = new DefaultWordTokenizer();
    return wordTokenizer.extractWords(inputString);
  }

  /**
   * Runs the given POS tagger over a whitespace-separated token string.
   *
   * @param in whitespace-separated tokens
   * @param t tagger to run
   * @return tagger output; each unit is "token:TAG" separated by spaces
   */
  public static String tag(String in, Tagger t) {
    Focus focus = new Focus();
    StringTokenizer str = new StringTokenizer(in);
    while (str.hasMoreTokens()) {
      focus.add(str.nextToken());
    }
    t.run(focus);
    return focus.toString();
  }

  /**
   * POS-tags a token list and returns the tag sequence, aligned with the
   * input tokens.
   */
  private List<String> posTag(List<String> tokens) {
    List<String> posTags = new ArrayList<String>(tokens.size());
    StringBuilder sb = new StringBuilder();
    for (String token : tokens) {
      sb.append(token).append(' ');
    }
    String taggedInput = tag(sb.toString(), this.tagger);
    for (String unit : taggedInput.split(" ")) {
      // tag follows the LAST ':' so tokens containing ':' are handled
      int splitPoint = unit.lastIndexOf(':');
      posTags.add(unit.substring(splitPoint + 1));
    }
    return posTags;
  }

  /**
   * Reads a UTF-8 word list file, one entry per line.
   */
  private Set<String> readWords(String inputFile) throws IOException {
    Set<String> set = new HashSet<String>();
    // try-with-resources: the original leaked the reader on an IOException mid-read
    try (BufferedReader fr = new BufferedReader(
        new InputStreamReader(new FileInputStream(inputFile), StandardCharsets.UTF_8))) {
      String line;
      while ((line = fr.readLine()) != null) {
        set.add(line);
      }
    }
    return set;
  }

  /**
   * Reads a UTF-8 word list from a stream, one entry per line. The stream is
   * deliberately not closed: it is owned by the caller (see the stream-based
   * constructor).
   */
  private Set<String> readWords(InputStream inputStream) throws IOException {
    BufferedReader fr =
        new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8));
    String line;
    Set<String> set = new HashSet<String>();
    while ((line = fr.readLine()) != null) {
      set.add(line);
    }
    return set;
  }

  /**
   * Splits a token list into sentences. Each token found in {@code endSet}
   * is classified; when the model answers "y" the current sentence is closed.
   *
   * <p>NOTE(review): tokens after the last detected boundary are silently
   * dropped (the trailing partial {@code curSent} is never added to the
   * result) — presumably intentional, but worth confirming against callers.
   * Also assumes {@code endSet} (and the other word sets) were supplied at
   * construction time; a predictor built without them will NPE here.
   *
   * @param tokensList tokenised input text
   * @return list of sentences, each a list of tokens
   */
  public List<List<String>> predict(List<String> tokensList) {
    List<List<String>> result = new ArrayList<List<String>>();
    List<String> curSent = new ArrayList<String>();
    SSFeatureModel fm = new SSFeatureModel();
    // Word sets handed to the feature model; order matters to SSFeatureModel.
    List<Set<String>> sets = new ArrayList<Set<String>>();
    sets.add(this.lowCaseSet);
    sets.add(this.neSet);
    sets.add(this.endSet);
    List<String> tagsList = posTag(tokensList);
    for (int i = 0; i < tokensList.size(); i++) {
      String curWord = tokensList.get(i);
      curSent.add(curWord);
      // Only candidate end tokens are worth a classifier call.
      if (this.endSet.contains(curWord)) {
        FeatureVector fv = fm.apply(false, i, tokensList, tagsList, sets, this.alpha);
        double[] probs = new double[2];
        int labelInt = (int) Linear.predictProbability(this.model,
            fv.getLiblinearRepresentation(false, false, this.alpha), probs);
        String label = this.alpha.getIndexLabelArray()[labelInt];
        // "y" = sentence boundary confirmed: flush the current sentence.
        // (Replaces the original's redundant end-flag, which was always
        // set and cleared within this same branch.)
        if (label.equals("y")) {
          result.add(curSent);
          curSent = new ArrayList<String>();
        }
      }
    }
    return result;
  }

  /**
   * Convenience overload: tokenises raw text, then splits into sentences.
   */
  public List<List<String>> predict(String text) {
    return predict(tokenise(text));
  }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.prestosql.type;

import com.google.common.collect.ImmutableList;
import io.prestosql.RowPagesBuilder;
import io.prestosql.Session;
import io.prestosql.metadata.Metadata;
import io.prestosql.operator.DriverYieldSignal;
import io.prestosql.operator.project.PageProcessor;
import io.prestosql.spi.Page;
import io.prestosql.spi.type.BigintType;
import io.prestosql.spi.type.DecimalType;
import io.prestosql.spi.type.DoubleType;
import io.prestosql.spi.type.SqlDecimal;
import io.prestosql.spi.type.Type;
import io.prestosql.sql.gen.ExpressionCompiler;
import io.prestosql.sql.gen.PageFunctionCompiler;
import io.prestosql.sql.parser.SqlParser;
import io.prestosql.sql.planner.Symbol;
import io.prestosql.sql.planner.TypeAnalyzer;
import io.prestosql.sql.planner.TypeProvider;
import io.prestosql.sql.relational.RowExpression;
import io.prestosql.sql.relational.SqlToRowExpressionTranslator;
import io.prestosql.sql.tree.Expression;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.RunnerException;
import org.openjdk.jmh.runner.options.Options;
import org.openjdk.jmh.runner.options.OptionsBuilder;
import org.openjdk.jmh.runner.options.VerboseMode;
import org.testng.annotations.Test;

import java.math.BigInteger;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Random;
import java.util.concurrent.TimeUnit;

import static com.google.common.collect.Iterables.getOnlyElement;
import static io.prestosql.RowPagesBuilder.rowPagesBuilder;
import static io.prestosql.SessionTestUtils.TEST_SESSION;
import static io.prestosql.memory.context.AggregatedMemoryContext.newSimpleAggregatedMemoryContext;
import static io.prestosql.metadata.MetadataManager.createTestMetadataManager;
import static io.prestosql.spi.type.BigintType.BIGINT;
import static io.prestosql.spi.type.DecimalType.createDecimalType;
import static io.prestosql.spi.type.DoubleType.DOUBLE;
import static io.prestosql.sql.ExpressionTestUtils.createExpression;
import static io.prestosql.testing.TestingConnectorSession.SESSION;
import static io.prestosql.testing.TestingSession.testSessionBuilder;
import static java.lang.String.format;
import static java.math.BigInteger.ONE;
import static java.math.BigInteger.ZERO;
import static java.util.stream.Collectors.toList;
import static org.openjdk.jmh.annotations.Scope.Thread;

/**
 * JMH benchmarks for decimal operators (casts, arithmetic, comparisons).
 * Each inner {@code *BenchmarkState} compiles one SQL expression over a page
 * of generated input and the corresponding {@code @Benchmark} method runs the
 * compiled {@link PageProcessor} over that page. The {@code @Test} methods
 * only smoke-test that each benchmark can set up and execute.
 */
@State(Scope.Thread)
@OutputTimeUnit(TimeUnit.MILLISECONDS)
@Fork(3)
@Warmup(iterations = 20, timeUnit = TimeUnit.MILLISECONDS)
@Measurement(iterations = 10, timeUnit = TimeUnit.MILLISECONDS)
public class BenchmarkDecimalOperators
{
    private static final int PAGE_SIZE = 30000;

    private static final DecimalType SHORT_DECIMAL_TYPE = createDecimalType(10, 0);
    private static final DecimalType LONG_DECIMAL_TYPE = createDecimalType(20, 0);

    private static final SqlParser SQL_PARSER = new SqlParser();

    @State(Thread)
    public static class CastDoubleToDecimalBenchmarkState
            extends BaseState
    {
        private static final int SCALE = 2;

        @Param({"10", "35", "BIGINT"})
        private String precision = "10";

        @Setup
        public void setup()
        {
            addSymbol("d1", DOUBLE);

            String expression;
            if (precision.equals("BIGINT")) {
                setDoubleMaxValue(Long.MAX_VALUE);
                expression = "CAST(d1 AS BIGINT)";
            }
            else {
                // Keep generated doubles small enough to fit the target precision.
                setDoubleMaxValue(Math.pow(9, Integer.parseInt(precision) - SCALE));
                expression = format("CAST(d1 AS DECIMAL(%s, %d))", precision, SCALE);
            }
            generateRandomInputPage();
            generateProcessor(expression);
        }
    }

    @Benchmark
    public Object castDoubleToDecimalBenchmark(CastDoubleToDecimalBenchmarkState state)
    {
        return execute(state);
    }

    @Test
    public void testCastDoubleToDecimalBenchmark()
    {
        CastDoubleToDecimalBenchmarkState state = new CastDoubleToDecimalBenchmarkState();
        state.setup();
        castDoubleToDecimalBenchmark(state);
    }

    @State(Thread)
    public static class CastDecimalToDoubleBenchmarkState
            extends BaseState
    {
        private static final int SCALE = 10;

        @Param({"15", "35"})
        private String precision = "15";

        @Setup
        public void setup()
        {
            addSymbol("v1", createDecimalType(Integer.parseInt(precision), SCALE));

            String expression = "CAST(v1 AS DOUBLE)";
            generateRandomInputPage();
            generateProcessor(expression);
        }
    }

    @Benchmark
    public Object castDecimalToDoubleBenchmark(CastDecimalToDoubleBenchmarkState state)
    {
        return execute(state);
    }

    @Test
    public void testCastDecimalToDoubleBenchmark()
    {
        CastDecimalToDoubleBenchmarkState state = new CastDecimalToDoubleBenchmarkState();
        state.setup();
        castDecimalToDoubleBenchmark(state);
    }

    @State(Thread)
    public static class CastDecimalToVarcharBenchmarkState
            extends BaseState
    {
        private static final int SCALE = 10;

        @Param({"15", "35"})
        private String precision = "35";

        @Setup
        public void setup()
        {
            addSymbol("v1", createDecimalType(Integer.parseInt(precision), SCALE));

            String expression = "CAST(v1 AS VARCHAR)";
            generateRandomInputPage();
            generateProcessor(expression);
        }
    }

    @Benchmark
    public Object castDecimalToVarcharBenchmark(CastDecimalToVarcharBenchmarkState state)
    {
        return execute(state);
    }

    @Test
    public void testCastDecimalToVarcharBenchmark()
    {
        CastDecimalToVarcharBenchmarkState state = new CastDecimalToVarcharBenchmarkState();
        state.setup();
        castDecimalToVarcharBenchmark(state);
    }

    @State(Thread)
    public static class AdditionBenchmarkState
            extends BaseState
    {
        @Param({"d1 + d2",
                "d1 + d2 + d3 + d4",
                "s1 + s2",
                "s1 + s2 + s3 + s4",
                "l1 + l2",
                "l1 + l2 + l3 + l4",
                "s2 + l3 + l1 + s4"})
        private String expression = "d1 + d2";

        @Setup
        public void setup()
        {
            addSymbol("d1", DOUBLE);
            addSymbol("d2", DOUBLE);
            addSymbol("d3", DOUBLE);
            addSymbol("d4", DOUBLE);

            addSymbol("s1", createDecimalType(10, 5));
            addSymbol("s2", createDecimalType(7, 2));
            addSymbol("s3", createDecimalType(12, 2));
            addSymbol("s4", createDecimalType(2, 1));

            addSymbol("l1", createDecimalType(35, 10));
            addSymbol("l2", createDecimalType(25, 5));
            addSymbol("l3", createDecimalType(20, 6));
            addSymbol("l4", createDecimalType(25, 8));

            generateRandomInputPage();
            generateProcessor(expression);
        }
    }

    @Benchmark
    public Object additionBenchmark(AdditionBenchmarkState state)
    {
        return execute(state);
    }

    @Test
    public void testAdditionBenchmark()
    {
        AdditionBenchmarkState state = new AdditionBenchmarkState();
        state.setup();
        additionBenchmark(state);
    }

    @State(Thread)
    public static class MultiplyBenchmarkState
            extends BaseState
    {
        @Param({"d1 * d2",
                "d1 * d2 * d3 * d4",
                "i1 * i2",
                // short short -> short
                "s1 * s2",
                "s1 * s2 * s5 * s6",
                // short short -> long
                "s3 * s4",
                // long short -> long
                "l2 * s2",
                "l2 * s2 * s5 * s6",
                // short long -> long
                "s1 * l2",
                // long long -> long
                "l1 * l2"})
        private String expression = "d1 * d2";

        @Setup
        public void setup()
        {
            addSymbol("d1", DOUBLE);
            addSymbol("d2", DOUBLE);
            addSymbol("d3", DOUBLE);
            addSymbol("d4", DOUBLE);

            addSymbol("i1", BIGINT);
            addSymbol("i2", BIGINT);

            addSymbol("s1", createDecimalType(5, 2));
            addSymbol("s2", createDecimalType(3, 1));
            addSymbol("s3", createDecimalType(10, 5));
            addSymbol("s4", createDecimalType(10, 2));
            addSymbol("s5", createDecimalType(3, 2));
            addSymbol("s6", createDecimalType(2, 1));

            addSymbol("l1", createDecimalType(19, 10));
            addSymbol("l2", createDecimalType(19, 5));

            generateRandomInputPage();
            generateProcessor(expression);
        }
    }

    @Benchmark
    public Object multiplyBenchmark(MultiplyBenchmarkState state)
    {
        return execute(state);
    }

    @Test
    public void testMultiplyBenchmark()
    {
        MultiplyBenchmarkState state = new MultiplyBenchmarkState();
        state.setup();
        multiplyBenchmark(state);
    }

    @State(Thread)
    public static class DivisionBenchmarkState
            extends BaseState
    {
        @Param({"d1 / d2",
                "d1 / d2 / d3 / d4",
                "i1 / i2",
                "i1 / i2 / i3 / i4",
                // short short -> short
                "s1 / s2",
                "s1 / s2 / s2 / s2",
                // short short -> long
                "s1 / s3",
                // short long -> short
                "s2 / l1",
                // long short -> long
                "l1 / s2",
                // short long -> long
                "s3 / l1",
                // long long -> long
                "l2 / l3",
                "l2 / l4 / l4 / l4",
                "l2 / s4 / s4 / s4"})
        private String expression = "d1 / d2";

        @Setup
        public void setup()
        {
            addSymbol("d1", DOUBLE);
            addSymbol("d2", DOUBLE);
            addSymbol("d3", DOUBLE);
            addSymbol("d4", DOUBLE);

            addSymbol("i1", BIGINT);
            addSymbol("i2", BIGINT);
            addSymbol("i3", BIGINT);
            addSymbol("i4", BIGINT);

            addSymbol("s1", createDecimalType(8, 3));
            addSymbol("s2", createDecimalType(6, 2));
            addSymbol("s3", createDecimalType(17, 7));
            addSymbol("s4", createDecimalType(3, 2));

            addSymbol("l1", createDecimalType(19, 3));
            addSymbol("l2", createDecimalType(20, 3));
            addSymbol("l3", createDecimalType(21, 10));
            addSymbol("l4", createDecimalType(19, 4));

            generateRandomInputPage();
            generateProcessor(expression);
        }
    }

    @Benchmark
    public Object divisionBenchmark(DivisionBenchmarkState state)
    {
        return execute(state);
    }

    @Test
    public void testDivisionBenchmark()
    {
        DivisionBenchmarkState state = new DivisionBenchmarkState();
        state.setup();
        divisionBenchmark(state);
    }

    @State(Thread)
    public static class ModuloBenchmarkState
            extends BaseState
    {
        @Param({"d1 % d2",
                "d1 % d2 % d3 % d4",
                "i1 % i2",
                "i1 % i2 % i3 % i4",
                // short short -> short
                "s1 % s2",
                "s1 % s2 % s2 % s2",
                // short long -> short
                "s2 % l2",
                // long short -> long
                "l3 % s3",
                // short long -> long
                "s4 % l3",
                // long long -> long
                "l2 % l3",
                "l2 % l3 % l4 % l1"})
        private String expression = "d1 % d2";

        @Setup
        public void setup()
        {
            addSymbol("d1", DOUBLE);
            addSymbol("d2", DOUBLE);
            addSymbol("d3", DOUBLE);
            addSymbol("d4", DOUBLE);

            addSymbol("i1", BIGINT);
            addSymbol("i2", BIGINT);
            addSymbol("i3", BIGINT);
            addSymbol("i4", BIGINT);

            addSymbol("s1", createDecimalType(8, 3));
            addSymbol("s2", createDecimalType(6, 2));
            addSymbol("s3", createDecimalType(9, 0));
            addSymbol("s4", createDecimalType(12, 2));

            addSymbol("l1", createDecimalType(19, 3));
            addSymbol("l2", createDecimalType(20, 3));
            addSymbol("l3", createDecimalType(21, 10));
            addSymbol("l4", createDecimalType(19, 4));

            generateRandomInputPage();
            generateProcessor(expression);
        }
    }

    @Benchmark
    public Object moduloBenchmark(ModuloBenchmarkState state)
    {
        return execute(state);
    }

    @Test
    public void testModuloBenchmark()
    {
        ModuloBenchmarkState state = new ModuloBenchmarkState();
        state.setup();
        moduloBenchmark(state);
    }

    @State(Thread)
    public static class InequalityBenchmarkState
            extends BaseState
    {
        @Param({"d1 < d2",
                "d1 < d2 AND d1 < d3 AND d1 < d4 AND d2 < d3 AND d2 < d4 AND d3 < d4",
                "s1 < s2",
                "s1 < s2 AND s1 < s3 AND s1 < s4 AND s2 < s3 AND s2 < s4 AND s3 < s4",
                "l1 < l2",
                "l1 < l2 AND l1 < l3 AND l1 < l4 AND l2 < l3 AND l2 < l4 AND l3 < l4"})
        private String expression = "d1 < d2";

        @Setup
        public void setup()
        {
            addSymbol("d1", DOUBLE);
            addSymbol("d2", DOUBLE);
            addSymbol("d3", DOUBLE);
            addSymbol("d4", DOUBLE);

            addSymbol("s1", SHORT_DECIMAL_TYPE);
            addSymbol("s2", SHORT_DECIMAL_TYPE);
            addSymbol("s3", SHORT_DECIMAL_TYPE);
            addSymbol("s4", SHORT_DECIMAL_TYPE);

            addSymbol("l1", LONG_DECIMAL_TYPE);
            addSymbol("l2", LONG_DECIMAL_TYPE);
            addSymbol("l3", LONG_DECIMAL_TYPE);
            addSymbol("l4", LONG_DECIMAL_TYPE);

            // Deterministic sequence pages: comparisons should not be dominated by RNG noise.
            generateInputPage(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11);
            generateProcessor(expression);
        }
    }

    @Benchmark
    public Object inequalityBenchmark(InequalityBenchmarkState state)
    {
        return execute(state);
    }

    @Test
    public void testInequalityBenchmark()
    {
        InequalityBenchmarkState state = new InequalityBenchmarkState();
        state.setup();
        inequalityBenchmark(state);
    }

    @State(Thread)
    public static class DecimalToShortDecimalCastBenchmarkState
            extends BaseState
    {
        @Param({"cast(l_38_30 as decimal(8, 0))",
                "cast(l_26_18 as decimal(8, 0))",
                "cast(l_20_12 as decimal(8, 0))",
                "cast(l_20_8 as decimal(8, 0))",
                "cast(s_17_9 as decimal(8, 0))"})
        private String expression = "cast(l_38_30 as decimal(8, 0))";

        @Setup
        public void setup()
        {
            addSymbol("l_38_30", createDecimalType(38, 30));
            addSymbol("l_26_18", createDecimalType(26, 18));
            addSymbol("l_20_12", createDecimalType(20, 12));
            addSymbol("l_20_8", createDecimalType(20, 8));
            addSymbol("s_17_9", createDecimalType(17, 9));

            generateInputPage(10000, 10000, 10000, 10000, 10000);
            generateProcessor(expression);
        }
    }

    @Benchmark
    public Object decimalToShortDecimalCastBenchmark(DecimalToShortDecimalCastBenchmarkState state)
    {
        return execute(state);
    }

    @Test
    public void testDecimalToShortDecimalCastBenchmark()
    {
        DecimalToShortDecimalCastBenchmarkState state = new DecimalToShortDecimalCastBenchmarkState();
        state.setup();
        decimalToShortDecimalCastBenchmark(state);
    }

    /**
     * Runs the state's compiled processor over its input page and materializes
     * all output pages (so the work cannot be dead-code eliminated).
     */
    private Object execute(BaseState state)
    {
        return ImmutableList.copyOf(
                state.getProcessor().process(
                        SESSION,
                        new DriverYieldSignal(),
                        newSimpleAggregatedMemoryContext().newLocalMemoryContext(PageProcessor.class.getSimpleName()),
                        state.getInputPage()));
    }

    /**
     * Shared plumbing for all benchmark states: symbol/type registration,
     * input page generation, and expression compilation.
     */
    private static class BaseState
    {
        private final Metadata metadata = createTestMetadataManager();
        private final TypeAnalyzer typeAnalyzer = new TypeAnalyzer(new SqlParser(), metadata);
        private final Session session = testSessionBuilder().build();
        private final Random random = new Random();

        protected final Map<String, Symbol> symbols = new HashMap<>();
        protected final Map<Symbol, Type> symbolTypes = new HashMap<>();
        private final Map<Symbol, Integer> sourceLayout = new HashMap<>();
        // ArrayList (was LinkedList): only appended and iterated, never spliced.
        protected final List<Type> types = new ArrayList<>();

        protected Page inputPage;
        private PageProcessor processor;
        // Bound for generated doubles; overridable so casts can stay in range.
        private double doubleMaxValue = 2L << 31;

        public Page getInputPage()
        {
            return inputPage;
        }

        public PageProcessor getProcessor()
        {
            return processor;
        }

        /** Registers a named input column of the given type at the next channel index. */
        protected void addSymbol(String name, Type type)
        {
            Symbol symbol = new Symbol(name);
            symbols.put(name, symbol);
            symbolTypes.put(symbol, type);
            sourceLayout.put(symbol, types.size());
            types.add(type);
        }

        /** Builds a PAGE_SIZE-row page of random values matching the registered types. */
        protected void generateRandomInputPage()
        {
            RowPagesBuilder buildPagesBuilder = rowPagesBuilder(types);
            for (int i = 0; i < PAGE_SIZE; i++) {
                Object[] values = types.stream()
                        .map(this::generateRandomValue)
                        .collect(toList()).toArray();
                buildPagesBuilder.row(values);
            }
            inputPage = getOnlyElement(buildPagesBuilder.build());
        }

        /** Builds a deterministic sequence page starting from the given per-column values. */
        protected void generateInputPage(int... initialValues)
        {
            RowPagesBuilder buildPagesBuilder = rowPagesBuilder(types);
            buildPagesBuilder.addSequencePage(PAGE_SIZE, initialValues);
            inputPage = getOnlyElement(buildPagesBuilder.build());
        }

        /** Compiles the SQL expression into a PageProcessor over the registered symbols. */
        protected void generateProcessor(String expression)
        {
            processor = new ExpressionCompiler(metadata, new PageFunctionCompiler(metadata, 0)).compilePageProcessor(Optional.empty(), ImmutableList.of(rowExpression(expression))).get();
        }

        protected void setDoubleMaxValue(double doubleMaxValue)
        {
            this.doubleMaxValue = doubleMaxValue;
        }

        private RowExpression rowExpression(String value)
        {
            Expression expression = createExpression(value, metadata, TypeProvider.copyOf(symbolTypes));
            return SqlToRowExpressionTranslator.translate(
                    expression,
                    typeAnalyzer.getTypes(TEST_SESSION, TypeProvider.copyOf(symbolTypes), expression),
                    sourceLayout,
                    metadata,
                    TEST_SESSION,
                    true);
        }

        private Object generateRandomValue(Type type)
        {
            if (type instanceof DoubleType) {
                return random.nextDouble() * (2L * doubleMaxValue) - doubleMaxValue;
            }
            if (type instanceof DecimalType) {
                return randomDecimal((DecimalType) type);
            }
            if (type instanceof BigintType) {
                int randomInt = random.nextInt();
                // never emit 0: it would fault the division/modulo benchmarks
                return randomInt == 0 ? 1 : randomInt;
            }
            throw new UnsupportedOperationException(type.toString());
        }

        private SqlDecimal randomDecimal(DecimalType type)
        {
            // Random unscaled value with at most `precision` decimal digits.
            int maxBits = (int) (Math.log(Math.pow(10, type.getPrecision())) / Math.log(2));
            BigInteger bigInteger = new BigInteger(maxBits, random);

            if (bigInteger.equals(ZERO)) {
                bigInteger = ONE;
            }

            if (random.nextBoolean()) {
                bigInteger = bigInteger.negate();
            }

            return new SqlDecimal(bigInteger, type.getPrecision(), type.getScale());
        }
    }

    public static void main(String[] args)
            throws RunnerException
    {
        Options options = new OptionsBuilder()
                .verbosity(VerboseMode.NORMAL)
                .include(".*" + BenchmarkDecimalOperators.class.getSimpleName() + ".*")
                .build();
        new Runner(options).run();
    }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.http.nio; import io.netty.buffer.ByteBufUtil; import io.netty.buffer.Unpooled; import io.netty.handler.codec.TooLongFrameException; import io.netty.handler.codec.http.DefaultFullHttpRequest; import io.netty.handler.codec.http.FullHttpRequest; import io.netty.handler.codec.http.FullHttpResponse; import io.netty.handler.codec.http.HttpHeaderNames; import io.netty.handler.codec.http.HttpHeaderValues; import io.netty.handler.codec.http.HttpMethod; import io.netty.handler.codec.http.HttpResponseStatus; import io.netty.handler.codec.http.HttpUtil; import io.netty.handler.codec.http.HttpVersion; import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import 
org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.MockPageCacheRecycler; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.http.BindHttpException; import org.elasticsearch.http.CorsHandler; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.http.HttpTransportSettings; import org.elasticsearch.http.NullDispatcher; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.nio.NioSocketChannel; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.rest.FakeRestRequest; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.nio.NioGroupFactory; import org.junit.After; import org.junit.Before; import java.io.IOException; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.util.Collections; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_ORIGIN; import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ENABLED; import static org.elasticsearch.rest.RestStatus.BAD_REQUEST; import static org.elasticsearch.rest.RestStatus.OK; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; /** * Tests for the {@link NioHttpServerTransport} class. 
*/ public class NioHttpServerTransportTests extends ESTestCase { private NetworkService networkService; private ThreadPool threadPool; private MockBigArrays bigArrays; private MockPageCacheRecycler pageRecycler; @Before public void setup() throws Exception { networkService = new NetworkService(Collections.emptyList()); threadPool = new TestThreadPool("test"); pageRecycler = new MockPageCacheRecycler(Settings.EMPTY); bigArrays = new MockBigArrays(pageRecycler, new NoneCircuitBreakerService()); } @After public void shutdown() throws Exception { if (threadPool != null) { threadPool.shutdownNow(); } threadPool = null; networkService = null; bigArrays = null; } /** * Test that {@link NioHttpServerTransport} supports the "Expect: 100-continue" HTTP header * @throws InterruptedException if the client communication with the server is interrupted */ public void testExpectContinueHeader() throws InterruptedException { final Settings settings = createSettings(); final int contentLength = randomIntBetween(1, HttpTransportSettings.SETTING_HTTP_MAX_CONTENT_LENGTH.get(settings).bytesAsInt()); runExpectHeaderTest(settings, HttpHeaderValues.CONTINUE.toString(), contentLength, HttpResponseStatus.CONTINUE); } /** * Test that {@link NioHttpServerTransport} responds to a * 100-continue expectation with too large a content-length * with a 413 status. 
* @throws InterruptedException if the client communication with the server is interrupted */ public void testExpectContinueHeaderContentLengthTooLong() throws InterruptedException { final String key = HttpTransportSettings.SETTING_HTTP_MAX_CONTENT_LENGTH.getKey(); final int maxContentLength = randomIntBetween(1, 104857600); final Settings settings = createBuilderWithPort().put(key, maxContentLength + "b").build(); final int contentLength = randomIntBetween(maxContentLength + 1, Integer.MAX_VALUE); runExpectHeaderTest( settings, HttpHeaderValues.CONTINUE.toString(), contentLength, HttpResponseStatus.REQUEST_ENTITY_TOO_LARGE); } /** * Test that {@link NioHttpServerTransport} responds to an unsupported expectation with a 417 status. * @throws InterruptedException if the client communication with the server is interrupted */ public void testExpectUnsupportedExpectation() throws InterruptedException { final Settings settings = createSettings(); runExpectHeaderTest(settings, "chocolate=yummy", 0, HttpResponseStatus.EXPECTATION_FAILED); } private void runExpectHeaderTest( final Settings settings, final String expectation, final int contentLength, final HttpResponseStatus expectedStatus) throws InterruptedException { final HttpServerTransport.Dispatcher dispatcher = new HttpServerTransport.Dispatcher() { @Override public void dispatchRequest(RestRequest request, RestChannel channel, ThreadContext threadContext) { channel.sendResponse(new BytesRestResponse(OK, BytesRestResponse.TEXT_CONTENT_TYPE, new BytesArray("done"))); } @Override public void dispatchBadRequest(RestChannel channel, ThreadContext threadContext, Throwable cause) { logger.error(new ParameterizedMessage("--> Unexpected bad request [{}]", FakeRestRequest.requestToString(channel.request())), cause); throw new AssertionError(); } }; try (NioHttpServerTransport transport = new NioHttpServerTransport(settings, networkService, bigArrays, pageRecycler, threadPool, xContentRegistry(), dispatcher, new 
NioGroupFactory(settings, logger), new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS))) { transport.start(); final TransportAddress remoteAddress = randomFrom(transport.boundAddress().boundAddresses()); try (NioHttpClient client = new NioHttpClient()) { final FullHttpRequest request = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.POST, "/"); request.headers().set(HttpHeaderNames.EXPECT, expectation); HttpUtil.setContentLength(request, contentLength); final FullHttpResponse response = client.send(remoteAddress.address(), request); try { assertThat(response.status(), equalTo(expectedStatus)); if (expectedStatus.equals(HttpResponseStatus.CONTINUE)) { final FullHttpRequest continuationRequest = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.POST, "/", Unpooled.EMPTY_BUFFER); final FullHttpResponse continuationResponse = client.send(remoteAddress.address(), continuationRequest); try { assertThat(continuationResponse.status(), is(HttpResponseStatus.OK)); assertThat( new String(ByteBufUtil.getBytes(continuationResponse.content()), StandardCharsets.UTF_8), is("done") ); } finally { continuationResponse.release(); } } } finally { response.release(); } } } } public void testBindUnavailableAddress() { final Settings initialSettings = createSettings(); try (NioHttpServerTransport transport = new NioHttpServerTransport(initialSettings, networkService, bigArrays, pageRecycler, threadPool, xContentRegistry(), new NullDispatcher(), new NioGroupFactory(Settings.EMPTY, logger), new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS))) { transport.start(); TransportAddress remoteAddress = randomFrom(transport.boundAddress().boundAddresses()); Settings settings = Settings.builder() .put("http.port", remoteAddress.getPort()) .put("network.host", remoteAddress.getAddress()) .build(); try (NioHttpServerTransport otherTransport = new NioHttpServerTransport(settings, networkService, bigArrays, pageRecycler, 
threadPool, xContentRegistry(), new NullDispatcher(), new NioGroupFactory(Settings.EMPTY, logger), new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS))) { BindHttpException bindHttpException = expectThrows(BindHttpException.class, () -> otherTransport.start()); assertEquals( "Failed to bind to " + NetworkAddress.format(remoteAddress.address()), bindHttpException.getMessage() ); } } } public void testCorsRequest() throws InterruptedException { final HttpServerTransport.Dispatcher dispatcher = new HttpServerTransport.Dispatcher() { @Override public void dispatchRequest(final RestRequest request, final RestChannel channel, final ThreadContext threadContext) { logger.error("--> Unexpected successful request [{}]", FakeRestRequest.requestToString(request)); throw new AssertionError(); } @Override public void dispatchBadRequest(final RestChannel channel, final ThreadContext threadContext, final Throwable cause) { logger.error(new ParameterizedMessage("--> Unexpected bad request [{}]", FakeRestRequest.requestToString(channel.request())), cause); throw new AssertionError(); } }; final Settings settings = createBuilderWithPort() .put(SETTING_CORS_ENABLED.getKey(), true) .put(SETTING_CORS_ALLOW_ORIGIN.getKey(), "elastic.co") .build(); try (NioHttpServerTransport transport = new NioHttpServerTransport(settings, networkService, bigArrays, pageRecycler, threadPool, xContentRegistry(), dispatcher, new NioGroupFactory(settings, logger), new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS))) { transport.start(); final TransportAddress remoteAddress = randomFrom(transport.boundAddress().boundAddresses()); // Test pre-flight request try (NioHttpClient client = new NioHttpClient()) { final FullHttpRequest request = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.OPTIONS, "/"); request.headers().add(CorsHandler.ORIGIN, "elastic.co"); request.headers().add(CorsHandler.ACCESS_CONTROL_REQUEST_METHOD, "POST"); final 
FullHttpResponse response = client.send(remoteAddress.address(), request); try { assertThat(response.status(), equalTo(HttpResponseStatus.OK)); assertThat(response.headers().get(CorsHandler.ACCESS_CONTROL_ALLOW_ORIGIN), equalTo("elastic.co")); assertThat(response.headers().get(CorsHandler.VARY), equalTo(CorsHandler.ORIGIN)); assertTrue(response.headers().contains(CorsHandler.DATE)); } finally { response.release(); } } // Test short-circuited request try (NioHttpClient client = new NioHttpClient()) { final FullHttpRequest request = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/"); request.headers().add(CorsHandler.ORIGIN, "elastic2.co"); final FullHttpResponse response = client.send(remoteAddress.address(), request); try { assertThat(response.status(), equalTo(HttpResponseStatus.FORBIDDEN)); } finally { response.release(); } } } } public void testLargeCompressedResponse() throws InterruptedException { final String responseString = randomAlphaOfLength(4 * 1024 * 1024); final String url = "/thing"; final HttpServerTransport.Dispatcher dispatcher = new HttpServerTransport.Dispatcher() { @Override public void dispatchRequest(final RestRequest request, final RestChannel channel, final ThreadContext threadContext) { if (url.equals(request.uri())) { channel.sendResponse(new BytesRestResponse(OK, responseString)); } else { logger.error("--> Unexpected successful uri [{}]", request.uri()); throw new AssertionError(); } } @Override public void dispatchBadRequest(final RestChannel channel, final ThreadContext threadContext, final Throwable cause) { logger.error(new ParameterizedMessage("--> Unexpected bad request [{}]", FakeRestRequest.requestToString(channel.request())), cause); throw new AssertionError(); } }; try (NioHttpServerTransport transport = new NioHttpServerTransport( Settings.EMPTY, networkService, bigArrays, pageRecycler, threadPool, xContentRegistry(), dispatcher, new NioGroupFactory(Settings.EMPTY, logger), new 
ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS))) { transport.start(); final TransportAddress remoteAddress = randomFrom(transport.boundAddress().boundAddresses()); try (NioHttpClient client = new NioHttpClient()) { DefaultFullHttpRequest request = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, url); request.headers().add(HttpHeaderNames.ACCEPT_ENCODING, randomFrom("deflate", "gzip")); final FullHttpResponse response = client.send(remoteAddress.address(), request); try { assertThat(response.status(), equalTo(HttpResponseStatus.OK)); byte[] bytes = new byte[response.content().readableBytes()]; response.content().readBytes(bytes); assertThat(new String(bytes, StandardCharsets.UTF_8), equalTo(responseString)); } finally { response.release(); } } } } public void testBadRequest() throws InterruptedException { final AtomicReference<Throwable> causeReference = new AtomicReference<>(); final HttpServerTransport.Dispatcher dispatcher = new HttpServerTransport.Dispatcher() { @Override public void dispatchRequest(final RestRequest request, final RestChannel channel, final ThreadContext threadContext) { logger.error("--> Unexpected successful request [{}]", FakeRestRequest.requestToString(request)); throw new AssertionError(); } @Override public void dispatchBadRequest(final RestChannel channel, final ThreadContext threadContext, final Throwable cause) { causeReference.set(cause); try { final ElasticsearchException e = new ElasticsearchException("you sent a bad request and you should feel bad"); channel.sendResponse(new BytesRestResponse(channel, BAD_REQUEST, e)); } catch (final IOException e) { throw new AssertionError(e); } } }; final Settings settings; final int maxInitialLineLength; final Setting<ByteSizeValue> httpMaxInitialLineLengthSetting = HttpTransportSettings.SETTING_HTTP_MAX_INITIAL_LINE_LENGTH; if (randomBoolean()) { maxInitialLineLength = httpMaxInitialLineLengthSetting.getDefault(Settings.EMPTY).bytesAsInt(); settings 
= createSettings(); } else { maxInitialLineLength = randomIntBetween(1, 8192); settings = createBuilderWithPort().put(httpMaxInitialLineLengthSetting.getKey(), maxInitialLineLength + "b").build(); } try (NioHttpServerTransport transport = new NioHttpServerTransport(settings, networkService, bigArrays, pageRecycler, threadPool, xContentRegistry(), dispatcher, new NioGroupFactory(settings, logger), new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS))) { transport.start(); final TransportAddress remoteAddress = randomFrom(transport.boundAddress().boundAddresses()); try (NioHttpClient client = new NioHttpClient()) { final String url = "/" + new String(new byte[maxInitialLineLength], Charset.forName("UTF-8")); final FullHttpRequest request = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, url); final FullHttpResponse response = client.send(remoteAddress.address(), request); try { assertThat(response.status(), equalTo(HttpResponseStatus.BAD_REQUEST)); assertThat( new String(response.content().array(), Charset.forName("UTF-8")), containsString("you sent a bad request and you should feel bad")); } finally { response.release(); } } } assertNotNull(causeReference.get()); assertThat(causeReference.get(), instanceOf(TooLongFrameException.class)); } public void testReadTimeout() throws Exception { final HttpServerTransport.Dispatcher dispatcher = new HttpServerTransport.Dispatcher() { @Override public void dispatchRequest(final RestRequest request, final RestChannel channel, final ThreadContext threadContext) { logger.error("--> Unexpected successful request [{}]", FakeRestRequest.requestToString(request)); throw new AssertionError("Should not have received a dispatched request"); } @Override public void dispatchBadRequest(final RestChannel channel, final ThreadContext threadContext, final Throwable cause) { logger.error(new ParameterizedMessage("--> Unexpected bad request [{}]", FakeRestRequest.requestToString(channel.request())), 
cause); throw new AssertionError("Should not have received a dispatched request"); } }; Settings settings = createBuilderWithPort() .put(HttpTransportSettings.SETTING_HTTP_READ_TIMEOUT.getKey(), new TimeValue(randomIntBetween(100, 300))) .build(); try (NioHttpServerTransport transport = new NioHttpServerTransport(settings, networkService, bigArrays, pageRecycler, threadPool, xContentRegistry(), dispatcher, new NioGroupFactory(settings, logger), new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS))) { transport.start(); final TransportAddress remoteAddress = randomFrom(transport.boundAddress().boundAddresses()); try (NioHttpClient client = new NioHttpClient()) { NioSocketChannel channel = null; try { CountDownLatch channelClosedLatch = new CountDownLatch(1); channel = client.connect(remoteAddress.address()); channel.addCloseListener((r, t) -> channelClosedLatch.countDown()); assertTrue("Channel should be closed due to read timeout", channelClosedLatch.await(1, TimeUnit.MINUTES)); } finally { if (channel != null) { channel.close(); } } } } } private Settings createSettings() { return createBuilderWithPort().build(); } private Settings.Builder createBuilderWithPort() { return Settings.builder().put(HttpTransportSettings.SETTING_HTTP_PORT.getKey(), getPortRange()); } }
/* ======================================================================== * PlantUML : a free UML diagram generator * ======================================================================== * * (C) Copyright 2009-2020, Arnaud Roques * * Project Info: https://plantuml.com * * If you like this project or if you find it useful, you can support us at: * * https://plantuml.com/patreon (only 1$ per month!) * https://plantuml.com/paypal * * This file is part of PlantUML. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
 *
 *
 * Original Author:  Arnaud Roques
 */
package net.sourceforge.plantuml.eggs;

import java.awt.geom.Dimension2D;
import java.awt.geom.Point2D;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;

import net.sourceforge.plantuml.BackSlash;
import net.sourceforge.plantuml.FileFormatOption;
import net.sourceforge.plantuml.PlainDiagram;
import net.sourceforge.plantuml.SpriteContainerEmpty;
import net.sourceforge.plantuml.core.DiagramDescription;
import net.sourceforge.plantuml.core.UmlSource;
import net.sourceforge.plantuml.cucadiagram.Display;
import net.sourceforge.plantuml.graphic.FontConfiguration;
import net.sourceforge.plantuml.graphic.HorizontalAlignment;
import net.sourceforge.plantuml.graphic.StringBounder;
import net.sourceforge.plantuml.graphic.TextBlock;
import net.sourceforge.plantuml.graphic.UDrawable;
import net.sourceforge.plantuml.ugraphic.UFont;
import net.sourceforge.plantuml.ugraphic.UGraphic;
import net.sourceforge.plantuml.ugraphic.UPolygon;
import net.sourceforge.plantuml.ugraphic.URectangle;
import net.sourceforge.plantuml.ugraphic.UTranslate;
import net.sourceforge.plantuml.ugraphic.color.HColor;
import net.sourceforge.plantuml.ugraphic.color.HColorSet;
import net.sourceforge.plantuml.ugraphic.color.HColorSimple;
import net.sourceforge.plantuml.ugraphic.color.HColorUtils;

// Hexagonal-grid math follows http://www.redblobgames.com/grids/hexagons/
// Easter-egg diagram that renders either the full named-color table, or a
// hexagonal palette of the 19 colors closest to a requested central color.
public class PSystemColors extends PlainDiagram implements UDrawable {

	// Cell size for the full color-table view.
	private final double rectangleHeight = 28;
	private final double rectangleWidth = 175;
	private final HColorSet colors = HColorSet.instance();
	// Central color of the palette view, or null for the full table ('#' prefix stripped).
	private final String paletteCentralColor;
	// Hexagon circumradius for the palette view.
	private final double size = 60;

	public PSystemColors(UmlSource source, String option) {
		super(source);
		if (option == null) {
			this.paletteCentralColor = null;
		} else {
			this.paletteCentralColor = option.replaceAll("\\#", "");
		}
	}

	@Override
	protected UDrawable getRootDrawable(FileFormatOption fileFormatOption) {
		return this;
	}

	public DiagramDescription getDescription() {
		return new DiagramDescription("(Colors)");
	}

	// Chooses the palette view only when the requested central color resolves to a simple color.
	public void drawU(UGraphic ug) {
		if (paletteCentralColor != null && colors.getColorOrWhite(paletteCentralColor) instanceof HColorSimple) {
			drawPalette(ug);
		} else {
			drawFull(ug);
		}
	}

	// Draws the central color surrounded by two rings of its closest neighbors
	// (axial offsets below enumerate the 6 + 12 surrounding hex cells).
	private void drawPalette(UGraphic ug) {
		double x = (centerHexa(2, 0).getX() + centerHexa(3, 0).getX()) / 2;
		double y = centerHexa(0, 2).getY() + corner(1).getY();
		ug = ug.apply(new UTranslate(x, y));
		final UPolygon hexa = getHexa();
		final List<String> friends = getColorsCloseTo(paletteCentralColor);
		int idx = 0;
		drawOneHexa(ug, friends.get(idx++), 0, 0, hexa);
		drawOneHexa(ug, friends.get(idx++), 1, 0, hexa);
		drawOneHexa(ug, friends.get(idx++), 0, 1, hexa);
		drawOneHexa(ug, friends.get(idx++), -1, 1, hexa);
		drawOneHexa(ug, friends.get(idx++), -1, 0, hexa);
		drawOneHexa(ug, friends.get(idx++), -1, -1, hexa);
		drawOneHexa(ug, friends.get(idx++), 0, -1, hexa);
		drawOneHexa(ug, friends.get(idx++), 2, 0, hexa);
		drawOneHexa(ug, friends.get(idx++), 1, 1, hexa);
		drawOneHexa(ug, friends.get(idx++), 1, 2, hexa);
		drawOneHexa(ug, friends.get(idx++), 0, 2, hexa);
		drawOneHexa(ug, friends.get(idx++), -1, 2, hexa);
		drawOneHexa(ug, friends.get(idx++), -2, 1, hexa);
		drawOneHexa(ug, friends.get(idx++), -2, 0, hexa);
		drawOneHexa(ug, friends.get(idx++), -2, -1, hexa);
		drawOneHexa(ug, friends.get(idx++), -1, -2, hexa);
		drawOneHexa(ug, friends.get(idx++), 0, -2, hexa);
		drawOneHexa(ug, friends.get(idx++), 1, -2, hexa);
		drawOneHexa(ug, friends.get(idx++), 1, -1, hexa);
	}

	// Center of hex cell (i, j); odd rows are shifted half a cell to the right.
	private Point2D centerHexa(int i, int j) {
		final double width = getWidth();
		final double x = width * i + (j % 2 == 0 ? 0 : width / 2);
		final double y = size * j * 1.5;
		return new Point2D.Double(x, y);
	}

	// Flat-to-flat width of a pointy-top hexagon: sqrt(3) * size.
	private double getWidth() {
		return Math.sqrt(3) / 2 * 2 * size;
	}

	// Fills one hexagon with the named color and centers its (possibly wrapped) name inside.
	private void drawOneHexa(UGraphic ug, String colorName, int i, int j, UPolygon hexa) {
		final HColorSimple color = (HColorSimple) colors.getColorOrWhite(colorName);
		ug = applyColor(ug, color);
		ug = ug.apply(new UTranslate(centerHexa(i, j)));
		ug.draw(hexa);

		final UFont font = UFont.sansSerif(14).bold();
		TextBlock tt = getTextName(font, colorName, color);
		Dimension2D dimText = tt.calculateDimension(ug.getStringBounder());
		if (dimText.getWidth() > getWidth()) {
			// Name too wide for the cell: break it at a camel-case boundary.
			tt = getTextName(font, findShortest(ug.getStringBounder(), font, colorName), color);
			dimText = tt.calculateDimension(ug.getStringBounder());
		}
		tt.drawU(ug.apply(new UTranslate(-dimText.getWidth() / 2, -dimText.getHeight() / 2)));
	}

	// Tries every camel-case split point and returns the two-line variant with minimal width.
	private String findShortest(StringBounder stringBounder, UFont font, String colorName) {
		String result = null;
		double min = Double.MAX_VALUE;
		for (int i = 1; i < colorName.length() - 1; i++) {
			if (Character.isLowerCase(colorName.charAt(i))) {
				continue;
			}
			final String candidat = colorName.substring(0, i) + BackSlash.BS_BS_N + colorName.substring(i);
			final TextBlock tt = getTextName(font, candidat, (HColorSimple) HColorUtils.BLACK);
			final double width = tt.calculateDimension(stringBounder).getWidth();
			if (width < min) {
				result = candidat;
				min = width;
			}
		}
		return result;
	}

	// Uses the color for both stroke and fill.
	private UGraphic applyColor(UGraphic ug, HColor color) {
		return ug.apply(color).apply(color.bg());
	}

	// i-th corner of a pointy-top hexagon (30-degree offset).
	private Point2D corner(int i) {
		double angle_deg = 60 * i + 30;
		double angle_rad = Math.PI / 180 * angle_deg;
		return new Point2D.Double(size * Math.cos(angle_rad), size * Math.sin(angle_rad));
	}

	private UPolygon getHexa() {
		final UPolygon result = new UPolygon();
		for (int i = 0; i < 6; i++) {
			result.addPoint(corner(i));
		}
		return result;
	}

	// All color names sorted by distance to `other`, with "Gray" spellings dropped
	// when an equivalent "Grey" spelling exists; `other` is appended if unknown.
	private List<String> getColorsCloseTo(String other) {
		final List<String> result = new ArrayList<>(colors.names());
		for (Iterator<String> it = result.iterator(); it.hasNext();) {
			final String candidat = it.next();
			final String similar = candidat.replaceAll("Gray", "Grey");
			if (candidat.equals(similar)) {
				continue;
			}
			if (result.contains(similar)) {
				it.remove();
			}
		}
		if (containsCaseInsensitive(result, other) == false) {
			result.add(other);
		}
		Collections.sort(result, closeComparator(paletteCentralColor));
		return result;
	}

	private boolean containsCaseInsensitive(Collection<String> source, String target) {
		for (String s : source) {
			if (s.equalsIgnoreCase(target)) {
				return true;
			}
		}
		return false;
	}

	// Orders color names by their perceptual distance to the given center color.
	private Comparator<String> closeComparator(String center) {
		final HColorSimple centerColor = (HColorSimple) colors.getColorOrWhite(center);
		return new Comparator<String>() {
			public int compare(String col1, String col2) {
				final double dist1 = centerColor.distance((HColorSimple) colors.getColorOrWhite(col1));
				final double dist2 = centerColor.distance((HColorSimple) colors.getColorOrWhite(col2));
				return (int) Math.signum(dist1 - dist2);
			}
		};
	}

	// Full table view: one labeled swatch per known color name, filled column by
	// column, 21 rows per column (j wraps after 20).
	private void drawFull(UGraphic ug) {
		final UFont font = UFont.sansSerif(14).bold();
		ug = ug.apply(HColorUtils.BLACK);
		int i = 0;
		int j = 0;
		for (String name : colors.names()) {
			UGraphic tmp = getPositioned(ug, i, j);
			final HColorSimple color = (HColorSimple) colors.getColorOrWhite(name);
			applyColor(tmp, color).draw(new URectangle(rectangleWidth, rectangleHeight));

			final TextBlock tt = getTextName(font, name, color);
			final Dimension2D dimText = tt.calculateDimension(ug.getStringBounder());
			final double dy = (rectangleHeight - dimText.getHeight()) / 2;
			final double dx = (rectangleWidth - dimText.getWidth()) / 2;
			tt.drawU(tmp.apply(new UTranslate(dx, dy)));
			if (j++ == 20) {
				j = 0;
				i++;
			}
		}
	}

	// Renders the name in the color's opposite, so the label stays readable on the swatch.
	private TextBlock getTextName(final UFont font, String name, final HColorSimple color) {
		final HColorSimple opposite = color.opposite();
		final FontConfiguration fc = new FontConfiguration(font, opposite, HColorUtils.BLUE, true);
		final TextBlock tt = Display.getWithNewlines(name).create(fc, HorizontalAlignment.CENTER, new SpriteContainerEmpty());
		return tt;
	}

	private UGraphic getPositioned(UGraphic ug, int i, int j) {
		return ug.apply(new UTranslate(rectangleWidth * i, rectangleHeight * j));
	}

}
package cop5555sp15; import java.util.*; import cop5555sp15.ast.*; import static cop5555sp15.TokenStream.Kind.AND; import static cop5555sp15.TokenStream.Kind.ARROW; import static cop5555sp15.TokenStream.Kind.ASSIGN; import static cop5555sp15.TokenStream.Kind.AT; import static cop5555sp15.TokenStream.Kind.BAR; import static cop5555sp15.TokenStream.Kind.BL_FALSE; import static cop5555sp15.TokenStream.Kind.BL_TRUE; import static cop5555sp15.TokenStream.Kind.COLON; import static cop5555sp15.TokenStream.Kind.COMMA; import static cop5555sp15.TokenStream.Kind.DIV; import static cop5555sp15.TokenStream.Kind.DOT; import static cop5555sp15.TokenStream.Kind.EOF; import static cop5555sp15.TokenStream.Kind.EQUAL; import static cop5555sp15.TokenStream.Kind.GE; import static cop5555sp15.TokenStream.Kind.GT; import static cop5555sp15.TokenStream.Kind.IDENT; import static cop5555sp15.TokenStream.Kind.INT_LIT; import static cop5555sp15.TokenStream.Kind.KW_BOOLEAN; import static cop5555sp15.TokenStream.Kind.KW_CLASS; import static cop5555sp15.TokenStream.Kind.KW_DEF; import static cop5555sp15.TokenStream.Kind.KW_ELSE; import static cop5555sp15.TokenStream.Kind.KW_IF; import static cop5555sp15.TokenStream.Kind.KW_IMPORT; import static cop5555sp15.TokenStream.Kind.KW_INT; import static cop5555sp15.TokenStream.Kind.KW_PRINT; import static cop5555sp15.TokenStream.Kind.KW_RETURN; import static cop5555sp15.TokenStream.Kind.KW_STRING; import static cop5555sp15.TokenStream.Kind.KW_WHILE; import static cop5555sp15.TokenStream.Kind.KW_SIZE; import static cop5555sp15.TokenStream.Kind.KW_KEY; import static cop5555sp15.TokenStream.Kind.KW_VALUE; import static cop5555sp15.TokenStream.Kind.LCURLY; import static cop5555sp15.TokenStream.Kind.LE; import static cop5555sp15.TokenStream.Kind.LPAREN; import static cop5555sp15.TokenStream.Kind.LSHIFT; import static cop5555sp15.TokenStream.Kind.LSQUARE; import static cop5555sp15.TokenStream.Kind.LT; import static cop5555sp15.TokenStream.Kind.MINUS; import 
static cop5555sp15.TokenStream.Kind.MOD;
import static cop5555sp15.TokenStream.Kind.NOT;
import static cop5555sp15.TokenStream.Kind.NOTEQUAL;
import static cop5555sp15.TokenStream.Kind.PLUS;
import static cop5555sp15.TokenStream.Kind.RANGE;
import static cop5555sp15.TokenStream.Kind.RCURLY;
import static cop5555sp15.TokenStream.Kind.RPAREN;
import static cop5555sp15.TokenStream.Kind.RSHIFT;
import static cop5555sp15.TokenStream.Kind.RSQUARE;
import static cop5555sp15.TokenStream.Kind.SEMICOLON;
import static cop5555sp15.TokenStream.Kind.STRING_LIT;
import static cop5555sp15.TokenStream.Kind.TIMES;
import cop5555sp15.TokenStream.Kind;
import cop5555sp15.TokenStream.Token;

/**
 * Recursive-descent parser for the COP5555 language.
 *
 * <p>One method per grammar nonterminal ({@code Program}, {@code Block},
 * {@code Expression}, ...). Errors are collected in {@link #exceptionList}
 * rather than aborting: inside {@code ImportList}, {@code Block} and
 * {@code Closure} the parser panic-skips to a synchronizing token
 * (usually {@code SEMICOLON}) and keeps going. {@link #parse()} returns a
 * {@code Program} only if no errors were recorded, otherwise {@code null}.
 *
 * <p>Not thread-safe: {@code t} / {@code tokens} are mutable parser state.
 */
public class Parser {

	// Predict sets (FIRST sets) used to decide whether to enter a production.
	public static final Set<Kind> fctrPredSt = new HashSet<Kind>(Arrays.asList(IDENT,INT_LIT,BL_TRUE,BL_FALSE,STRING_LIT,LPAREN,NOT,MINUS,KW_SIZE,KW_KEY,KW_VALUE,LCURLY,AT));
	public static final Set<Kind> relOpPredSt = new HashSet<Kind>(Arrays.asList(BAR,AND,EQUAL,NOTEQUAL,LT,GT,LE,GE));
	public static final Set<Kind> stmtPredSt = new HashSet<Kind>(Arrays.asList(IDENT,KW_PRINT,KW_WHILE,KW_IF,MOD,KW_RETURN,SEMICOLON));

	// Set to 1 to trace every production entry/exit on stdout.
	public static final int DEBUGMAXPARSER = 0;

	/**
	 * Parse error carrying the offending token plus either a free-form
	 * message or the list of token kinds that would have been acceptable.
	 */
	@SuppressWarnings("serial")
	public class SyntaxException extends Exception {
		Token t;
		Kind[] expected;
		String msg;

		SyntaxException(Token t, Kind expected) {
			this.t = t;
			msg = "";
			this.expected = new Kind[1];
			this.expected[0] = expected;
		}

		public SyntaxException(Token t, String msg) {
			// NOTE: this constructor leaves `expected` null.
			this.t = t;
			this.msg = msg;
		}

		public SyntaxException(Token t, Kind[] expected) {
			this.t = t;
			msg = "";
			this.expected = expected;
		}

		public String getMessage() {
			StringBuilder sb = new StringBuilder();
			sb.append(" error at token ").append(t.toString()).append(" ")
					.append(msg);
			sb.append(". Expected: ");
			// FIX: the (Token, String) constructor leaves `expected` null;
			// iterating it unconditionally threw NullPointerException from
			// parse()'s error reporting. Guard before looping.
			if (expected != null) {
				for (Kind kind : expected) {
					sb.append(kind).append(" ");
				}
			}
			return sb.toString();
		}
	}

	TokenStream tokens;     // source of tokens, never rewound
	Token t;                // one-token lookahead (current token)

	Parser(TokenStream tokens) {
		this.tokens = tokens;
		t = tokens.nextToken();
	}

	/** Consume the current token if it has the given kind, else throw. */
	private Kind match(Kind kind) throws SyntaxException {
		if (isKind(kind)) {
			if(DEBUGMAXPARSER==1) System.out.println("***"+t.toString()+"***");
			consume();
			return kind;
		}
		throw new SyntaxException(t, kind);
	}

	/** Consume the current token if it matches any of the given kinds, else throw. */
	private Kind match(Kind... kinds) throws SyntaxException {
		Kind kind = t.kind;
		if (isKind(kinds)) {
			consume();
			return kind;
		}
		StringBuilder sb = new StringBuilder();
		for (Kind kind1 : kinds) {
			// FIX: was sb.append(kind1).append(kind1) — every expected kind
			// appeared twice in the error message.
			sb.append(kind1).append(" ");
		}
		throw new SyntaxException(t, "expected one of " + sb.toString());
	}

	private boolean isKind(Kind kind) {
		return (t.kind == kind);
	}

	/** True if the lookahead token's kind is in the given predict set. */
	private boolean isInPredSt(Set predSt) {
		return (predSt.contains(t.kind));
	}

	/** Advance the lookahead; EOF is sticky (never consumed past). */
	private void consume() {
		if (t.kind != EOF)
			t = tokens.nextToken();
	}

	private boolean isKind(Kind... kinds) {
		for (Kind kind : kinds) {
			if (t.kind == kind)
				return true;
		}
		return false;
	}

	//This is a convenient way to represent fixed sets of
	//token kinds.  You can pass these to isKind.
	static final Kind[] REL_OPS = { BAR, AND, EQUAL, NOTEQUAL, LT, GT, LE, GE };
	static final Kind[] WEAK_OPS = { PLUS, MINUS };
	static final Kind[] STRONG_OPS = { TIMES, DIV };
	static final Kind[] VERY_STRONG_OPS = { LSHIFT, RSHIFT };

	// All syntax errors encountered so far; parse() fails if non-empty.
	List<SyntaxException> exceptionList = new ArrayList<SyntaxException>();

	/** Concatenated raw messages of all collected errors, one per line. */
	public String getErrors() {
		StringBuilder sb = new StringBuilder();
		for(SyntaxException e: exceptionList){
			sb.append(e.msg).append('\n');
		}
		return sb.toString();
	}

	public List<SyntaxException> getExceptionList() {
		return exceptionList;
	}

	/**
	 * Entry point: parse a whole program followed by EOF.
	 *
	 * @return the Program AST if no errors were recorded, otherwise null
	 *         (errors are printed to stderr and kept in exceptionList)
	 */
	public Program parse() throws SyntaxException {
		Program p = null;
		try {
			p = Program();
			if(p!=null)
				match(EOF);
		} catch (SyntaxException e) {
			exceptionList.add(e);
		}
		if(exceptionList.isEmpty())
			return p;
		else {
			System.err.println("exception list not empty; " + exceptionList.toArray().length + " exceptions");
			for(SyntaxException e : exceptionList)
				System.err.println(e.getMessage());
			return null;
		}
	}

	// <Program> ::= <ImportList> class IDENT <Block>
	private Program Program() throws SyntaxException {
		if(DEBUGMAXPARSER==1) System.out.println("programdown");
		Token ft = t;
		List<QualifiedName> impList = null;
		Block b = null;
		String className = null;
		try {
			impList = ImportList();
			match(KW_CLASS);
			if(isKind(IDENT))
				className = t.getText();
			match(IDENT);
			b = Block();
		} catch (SyntaxException e) {
			exceptionList.add(e);
		}
		Program p = new Program(ft, impList, className, b);
		if(DEBUGMAXPARSER==1) System.out.println("programup");
		return p;
	}

	// <ImportList> ::= (import IDENT (. IDENT)* ;)*
	// Dotted names are joined with '/' (JVM internal-name style).
	private List<QualifiedName> ImportList() throws SyntaxException {
		if(DEBUGMAXPARSER==1) System.out.println("importlistdown");
		List<QualifiedName> impList = new ArrayList<QualifiedName>();
		Token ft;
		try {
			while (isKind(KW_IMPORT)){
				try {
					ft = t;
					match(KW_IMPORT);
					StringBuilder impStSb = new StringBuilder();
					if(isKind(IDENT))
						impStSb.append(t.getText());
					match(IDENT);
					while(isKind(DOT)){
						impStSb.append("/");
						match(DOT);
						if(isKind(IDENT))
							impStSb.append(t.getText());
						match(IDENT);
					}
					match(SEMICOLON);
					impList.add(new QualifiedName(ft, impStSb.toString()));
				} catch (SyntaxException e) {
					//if any match inside import statement throws an exception
					//(except semicolon of course because that terminates import)
					//then eat everything until you hit the semicolon that terminates
					//the import statement. but make sure not to eat part of forthcoming
					//<Block>
					// FIX: also stop at EOF — consume() is a no-op on EOF, so
					// without the EOF check a truncated import spun forever.
					while(!isKind(SEMICOLON) && !isKind(KW_CLASS) && !isKind(KW_IMPORT) && !isKind(EOF))
						match(t.kind);
					if(isKind(SEMICOLON))
						match(SEMICOLON);
					exceptionList.add(e);
				}
			}
		} catch (SyntaxException e) {
			exceptionList.add(e);
		}
		if(DEBUGMAXPARSER==1) System.out.println("importlistup");
		return impList;
	}

	// <Block> ::= { (<Declaration> ; | <Statement> ;)* }
	private Block Block() throws SyntaxException {
		if(DEBUGMAXPARSER==1) System.out.println("blockdown");
		Token ft = t;   // NOTE(review): captured but the Block below is built with `t` — confirm intent
		Block b = null;
		BlockElem bElem = null;
		List<BlockElem> bElemList = new ArrayList<BlockElem>();
		try {
			match(LCURLY);
			while(isKind(KW_DEF) || isInPredSt(stmtPredSt)){
				try {
					if(isKind(KW_DEF)){
						bElem = Declaration();
						match(SEMICOLON);
					} else {
						bElem = Statement();
						match(SEMICOLON);
					}
					if(bElem!=null)
						bElemList.add(bElem);
				} catch (SyntaxException e) {
					//if any match inside any blocl elem throws an exception
					//(except semicolon of course because that terminates blockelem)
					//then eat everything until you hit the semicolon that terminates
					//the import statement. but make sure not to eat end of the block
					// FIX: also stop at EOF to avoid an infinite panic loop on
					// truncated input (consume() never advances past EOF).
					while(!isKind(SEMICOLON) && !isKind(RCURLY) && !isKind(EOF))
						match(t.kind);
					if(isKind(SEMICOLON))
						match(SEMICOLON);
					exceptionList.add(e);
				}
			}
			match(RCURLY);
		} catch (SyntaxException e) {
			exceptionList.add(e);
		}
		b = new Block(t, bElemList);
		if(DEBUGMAXPARSER==1) System.out.println("blockup");
		return b;
	}

	// <Statement> ::= <LValue> = <Expression> | print <Expression> | <While>
	//               | <If> | % <Expression> | return <Expression> | (empty)
	// Returns null for the empty statement (bare semicolon).
	private Statement Statement() throws SyntaxException {
		if(DEBUGMAXPARSER==1) System.out.println("statementdown");
		Token ft = t;   // NOTE(review): unused — statements are built with `t`
		Statement st = null;
		Expression exp = null;
		LValue lval = null;
		if(isKind(IDENT)){
			lval = LValue();
			match(ASSIGN);
			exp = Expression();
			st = new AssignmentStatement(t,lval,exp);
		}
		else if(isKind(KW_PRINT)){
			match(KW_PRINT);
			exp = Expression();
			st = new PrintStatement(t,exp);
		}
		else if(isKind(KW_WHILE))
			st = While();
		else if(isKind(KW_IF))
			st = If();
		else if(isKind(MOD)){
			match(MOD);
			exp = Expression();
			st = new ExpressionStatement(t,exp);
		}
		else if(isKind(KW_RETURN)){
			match(KW_RETURN);
			exp = Expression();
			st = new ReturnStatement(t,exp);
		}
		if(DEBUGMAXPARSER==1) System.out.println("statementup");
		return st;
	}

	// Tagged-union result of DecTail(): which declaration form followed the
	// identifier. `type` is one of the three string tags below.
	class DecTailClass {
		String type;
		Type tp;
		Closure cl;

		DecTailClass(Closure c){
			type = "ClosureDec";
			cl = c;
		}
		//vardec with type
		DecTailClass(Type typ){
			type = "VarDecWithType";
			tp = typ;
		}
		//vardec no type (dummy int disambiguates the overload)
		DecTailClass(int dummy){
			type = "VarDecNoType";
		}
	}

	// <Declaration> ::= def IDENT <DecTail>
	private Declaration Declaration() throws SyntaxException {
		if(DEBUGMAXPARSER==1) System.out.println("declaration");
		Declaration de = null;
		Token id = null;
		Token ft = t;
		match(KW_DEF);
		id = t;
		match(IDENT);
		DecTailClass d = DecTail();
		if(d.type.equals("ClosureDec"))
			de = new ClosureDec(ft,id,d.cl);
		else if(d.type.equals("VarDecWithType"))
			de = new VarDec(ft,id,d.tp);
		else
			de = new VarDec(ft,id, new UndeclaredType(id));
		if(DEBUGMAXPARSER==1) System.out.println("declaration");
		return de;
	}

	// <DecTail> ::= = <Closure> | : <Type> | (empty)
	private DecTailClass DecTail() throws SyntaxException {
		if(DEBUGMAXPARSER==1) System.out.println("dectaildown");
		Closure cl = null;
		Type tp = null;
		DecTailClass d = null;
		//ClosureDec
		if(isKind(ASSIGN)){
			match(ASSIGN);
			cl = Closure();
			d = new DecTailClass(cl);
		}//VarDec alternative
		else if(isKind(COLON)){
			match(COLON);
			tp = Type();
			d = new DecTailClass(tp);
		}
		else
			d = new DecTailClass(0);
		if(DEBUGMAXPARSER==1) System.out.println("dectailup");
		return d;
	}

	// <VarDec> ::= IDENT (: <Type>)?  — used for closure formal parameters.
	private VarDec VarDec() throws SyntaxException {
		if(DEBUGMAXPARSER==1) System.out.println("vardecdown");
		VarDec v = null;
		Token ft = t;
		Token id = t;
		Type t = null;  // NOTE: local Type shadows the Token field `t`; isKind()/match() still read the field
		match(IDENT);
		if(isKind(COLON)){
			match(COLON);
			t = Type();
			v = new VarDec(ft, id, t);
		}
		else
			v = new VarDec(ft, id,new UndeclaredType(id));
		if(DEBUGMAXPARSER==1) System.out.println("vardecup");
		return v;//epsilon
	}

	// Tagged-union result of CompositeValueType(): list type or key/value type.
	class CompositeValueTypeClass {
		String type;
		SimpleType st;
		Type t;

		CompositeValueTypeClass(Type tt) {
			type = "ListType";
			t = tt;
		}
		CompositeValueTypeClass(SimpleType stt, Type tt) {
			type = "KeyValueType";
			st = stt;
			t = tt;
		}
	}

	// <Type> ::= <SimpleType> | <CompositeValueType>
	private Type Type() throws SyntaxException {
		if(DEBUGMAXPARSER==1) System.out.println("typedown");
		Token ft = t;
		CompositeValueTypeClass c = null;
		Type tp = null;
		if(isKind(KW_INT)||isKind(KW_BOOLEAN)||isKind(KW_STRING))
			tp = SimpleType();
		else
			c = CompositeValueType();
		if(c!=null){
			switch(c.type) {
			case "ListType":
				tp = new ListType(ft, c.t);
				break;
			case "KeyValueType":
				tp = new KeyValueType(ft,c.st,c.t);
			}
		}
		if(DEBUGMAXPARSER==1) System.out.println("typeup");
		return tp;
	}

	// <SimpleType> ::= int | boolean | string
	private SimpleType SimpleType() throws SyntaxException {
		if(DEBUGMAXPARSER==1) System.out.println("simpletypedown");
		SimpleType s = new SimpleType(t,t);  // node built from the keyword token before it is consumed
		if(isKind(KW_INT)){
			match(KW_INT);
		}
		else if(isKind(KW_BOOLEAN)){
			match(KW_BOOLEAN);
		}
		else
			match(KW_STRING);
		if(DEBUGMAXPARSER==1) System.out.println("simpletypeup");
		return s;
	}

	// <CompositeValueType> ::= @[ <Type> ] | @@[ <SimpleType> : <Type> ]
	private CompositeValueTypeClass CompositeValueType() throws SyntaxException {
		if(DEBUGMAXPARSER==1) System.out.println("compositevaluetypedown");
		Type ty = null;
		SimpleType st = null;
		CompositeValueTypeClass c = null;
		match(AT);
		if(isKind(LSQUARE)){
			// list type: @[ <Type> ]
			match(LSQUARE);
			ty = Type();
			match(RSQUARE);
		}
		else{
			// key/value type: the second @ of @@[ <SimpleType> : <Type> ]
			match(AT);
			match(LSQUARE);
			st = SimpleType();
			match(COLON);
			ty = Type();
			match(RSQUARE);
		}
		if(st!=null)
			c = new CompositeValueTypeClass(st,ty);
		else
			c = new CompositeValueTypeClass(ty);
		if(DEBUGMAXPARSER==1) System.out.println("compositevaluetypeup");
		return c;
	}

	// <Closure> ::= { <FormalArgList> -> (<Statement> ;)* }
	private Closure Closure() throws SyntaxException {
		if(DEBUGMAXPARSER==1) System.out.println("closuredown");
		Token ft = t;
		Closure c = null;
		List<VarDec> l = null;
		List<Statement> s = new ArrayList<Statement>();
		match(LCURLY);
		l = FormalArgList();
		match(ARROW);
		while(isInPredSt(stmtPredSt)){
			try {
				Statement ss = Statement();
				if(ss!=null)
					s.add(ss);
				match(SEMICOLON);
			} catch (SyntaxException e) {
				// panic-skip to the statement terminator, but never past the
				// closing brace. FIX: also stop at EOF (see Block()).
				while(!isKind(SEMICOLON) && !isKind(RCURLY) && !isKind(EOF))
					match(t.kind);
				if(isKind(SEMICOLON))
					match(SEMICOLON);
				exceptionList.add(e);
			}
		}
		match(RCURLY);
		c = new Closure(ft,l,s);
		if(DEBUGMAXPARSER==1) System.out.println("closureup");
		return c;
	}

	// <FormalArgList> ::= (empty) | <VarDec> (, <VarDec>)*
	private List<VarDec> FormalArgList() throws SyntaxException {
		if(DEBUGMAXPARSER==1) System.out.println("formalarglistdown");
		List<VarDec> l = new ArrayList<VarDec>();
		if(isKind(IDENT)){
			l.add(VarDec());
			while(isKind(COMMA)){
				match(COMMA);
				VarDec v = VarDec();
				if(v!=null)
					l.add(v);
			}
		}
		if(DEBUGMAXPARSER==1) System.out.println("formalarglistup");
		return l;
	}

	// <If> ::= if ( <Expression> ) <Block> <Else>?
	private Statement If() throws SyntaxException {
		if(DEBUGMAXPARSER==1) System.out.println("ifdown");
		Statement st = null;
		Expression exp = null;
		Block ifBlock = null;
		Block elseBlock = null;
		Token ft = t;
		match(KW_IF);
		match(LPAREN);
		exp = Expression();
		match(RPAREN);
		ifBlock = Block();
		elseBlock = Else();
		if(elseBlock!=null)
			st = new IfElseStatement(t, exp, ifBlock, elseBlock);
		else
			st = new IfStatement(t,exp, ifBlock);
		if(DEBUGMAXPARSER==1) System.out.println("ifup");
		return st;
	}

	// <Else> ::= else <Block> | (empty) — returns null when absent.
	private Block Else() throws SyntaxException {
		if(DEBUGMAXPARSER==1) System.out.println("elsedown");
		Block b = null;
		if(isKind(KW_ELSE)){
			match(KW_ELSE);
			b = Block();
		}
		if(DEBUGMAXPARSER==1) System.out.println("elseup");
		return b;
	}

	// Tagged-union result of WhileStar(): single-expression or range form.
	class WhileStarClass {
		String type;
		Token t;
		Expression exp1;
		Expression exp2;

		WhileStarClass(Token tt, Expression exp){
			type="WhileStar";
			t = tt;
			exp1 = exp;
		}
		WhileStarClass(Token tt, Expression eexp1, Expression eexp2){
			type="WhileRange";
			t = tt;
			exp1 = eexp1;
			exp2 = eexp2;
		}
	}

	// <While> ::= while ( <Expression> ) <Block>
	//           | while * ( <Expression> (.. <Expression>)? ) <Block>
	private Statement While() throws SyntaxException {
		if(DEBUGMAXPARSER==1) System.out.println("whiledown");
		Token ft = t;
		Statement st = null;
		Expression exp = null;
		Block b = null;
		WhileStarClass wstar = null;
		match(KW_WHILE);
		if(isKind(TIMES))
			wstar = WhileStar();
		else{
			match(LPAREN);
			exp = Expression();
			match(RPAREN);
		}
		b = Block();
		if(wstar!=null){
			// RangeExpression has 3 fields and Expression has only 2
			if(wstar.type.equals("WhileRange")){
				RangeExpression r = new RangeExpression(wstar.t,wstar.exp1,wstar.exp2);
				st = new WhileRangeStatement(t,r,b);
			}
			else
				st = new WhileStarStatement(t,wstar.exp1,b);
		}
		else
			st = new WhileStatement(t,exp,b);
		if(DEBUGMAXPARSER==1) System.out.println("whiledown");
		return st;
	}

	// *(<Expression>) or *(<Expression>..<Expression>)
	private WhileStarClass WhileStar() throws SyntaxException {
		if(DEBUGMAXPARSER==1) System.out.println("whilestardown");
		Token ft = t;
		WhileStarClass w = null;
		Expression exp1 = null;
		Expression exp2 = null;
		match(TIMES);
		match(LPAREN);
		exp1 = Expression();
		if(isKind(RANGE)){
			match(RANGE);
			exp2 = Expression();
		}
		match(RPAREN);
		if(exp2!=null)
			w = new WhileStarClass(t, exp1, exp2);
		else
			w = new WhileStarClass(t,exp1);
		if(DEBUGMAXPARSER==1) System.out.println("whilestarup");
		return w;
	}

	// <LValue> ::= IDENT ([ <Expression> ])?
	private LValue LValue() throws SyntaxException {
		if(DEBUGMAXPARSER==1) System.out.println("lvaluedown");
		Token ft = t;
		Expression e = null;
		LValue l = null;
		Token id = t;
		match(IDENT);
		if(isKind(LSQUARE))
			e = LValueTail();
		if(e!=null)
			l = new ExpressionLValue(ft,id,e);
		else
			l = new IdentLValue(t,id);
		if(DEBUGMAXPARSER==1) System.out.println("lvalueup");
		return l;
	}

	// [ <Expression> ] — the subscript of an indexed lvalue.
	private Expression LValueTail() throws SyntaxException {
		if(DEBUGMAXPARSER==1) System.out.println("lrvaluedown");
		Expression e = null;
		match(LSQUARE);
		e = Expression();
		match(RSQUARE);
		if(DEBUGMAXPARSER==1) System.out.println("lrvalueup");
		return e;
	}

	// <ExpressionList> ::= (empty) | <Expression> (, <Expression>)*
	private List<Expression> ExpressionList() throws SyntaxException {
		if(DEBUGMAXPARSER==1) System.out.println("expressionlistdown");
		List<Expression> l = new ArrayList<Expression>();
		Expression tempE1 = null;
		Expression tempE2 = null;
		if(isInPredSt(fctrPredSt)){
			tempE1 = Expression();
			l.add(tempE1);
			while(isKind(COMMA)){
				match(COMMA);
				tempE2 = Expression();
				l.add(tempE2);
			}
		}
		if(DEBUGMAXPARSER==1) System.out.println("expressionlistup");
		return l;
	}

	// <KeyValueList> ::= (empty) | <KeyValueExpression> (, <KeyValueExpression>)*
	private List<KeyValueExpression> KeyValueList() throws SyntaxException {
		if(DEBUGMAXPARSER==1) System.out.println("keyvaluelistdown");
		List<KeyValueExpression> kvl = new ArrayList<KeyValueExpression>();
		if(isInPredSt(fctrPredSt)){
			kvl.add(KeyValueExpression());
			while(isKind(COMMA)){
				match(COMMA);
				kvl.add(KeyValueExpression());
			}
		}
		if(DEBUGMAXPARSER==1) System.out.println("keyvaluelistup");
		return kvl;
	}

	// <KeyValueExpression> ::= <Expression> : <Expression>
	private KeyValueExpression KeyValueExpression() throws SyntaxException {
		if(DEBUGMAXPARSER==1) System.out.println("keyvalueexpressiondown");
		Token ft = t;
		KeyValueExpression kv = null;
		Expression ky = null;
		Expression val = null;
		ky = Expression();
		match(COLON);
		val = Expression();
		kv = new KeyValueExpression(t, ky, val);
		if(DEBUGMAXPARSER==1) System.out.println("keyvalueexpressiondown");
		return kv;
	}

	//left to right associativity
	// Folds parallel operator/operand lists into a left-leaning
	// BinaryExpression tree: ((e0 op0 e1) op1 e2) ...
	private BinaryExpression bins(List<Token> ops,List<Expression> exps) {
		BinaryExpression b = null;
		if(exps.toArray().length<=2)
			b = new BinaryExpression(ops.get(0),exps.get(0),ops.get(0),exps.get(1));
		else
			b = new BinaryExpression(ops.get(0),
					bins(ops.subList(0,ops.toArray().length-1),exps.subList(0,exps.toArray().length-1)),
					ops.get(ops.toArray().length-1),
					exps.get(exps.toArray().length-1) );
		return b;
	}

	// <Expression> ::= <Term> (<RelOp> <Term>)*
	private Expression Expression() throws SyntaxException {
		if(DEBUGMAXPARSER==1) System.out.println("expressiondown");
		Expression e = null;
		Expression t1 = null;
		List<Expression> tL = new ArrayList<Expression>();
		List<Token> oL = new ArrayList<Token>();
		t1 = Term();
		while(isInPredSt(relOpPredSt)){
			oL.add(RelOp());
			tL.add(Term());
		}
		if(!tL.isEmpty()){
			tL.add(0,t1);
			e = bins(oL,tL);
		}
		else
			e = t1;
		if(DEBUGMAXPARSER==1) System.out.println("expressionup");
		return e;
	}

	// <Term> ::= <Elem> (<WeakOp> <Elem>)*
	private Expression Term() throws SyntaxException {
		if(DEBUGMAXPARSER==1) System.out.println("termdown");
		Expression e = null;
		Expression e1 = null;
		List<Expression> eL = new ArrayList<Expression>();
		List<Token> oL = new ArrayList<Token>();
		e1 = Elem();
		while(isKind(PLUS)||isKind(MINUS)){
			oL.add(WeakOp());
			eL.add(Elem());
		}
		if(!eL.isEmpty()){
			eL.add(0,e1);
			e = bins(oL,eL);
		}
		else
			e = e1;
		if(DEBUGMAXPARSER==1) System.out.println("termup");
		return e;
	}

	// <Elem> ::= <Thing> (<StrongOp> <Thing>)*
	private Expression Elem() throws SyntaxException {
		if(DEBUGMAXPARSER==1) System.out.println("elemdown");
		Expression e = null;
		Expression th1 = null;
		List<Expression> thL = new ArrayList<Expression>();
		List<Token> oL = new ArrayList<Token>();
		th1 = Thing();
		while(isKind(TIMES)||isKind(DIV)){
			oL.add(StrongOp());
			thL.add(Thing());
		}
		if(!thL.isEmpty()){
			thL.add(0,th1);
			e = bins(oL,thL);
		}
		else
			e = th1;
		if(DEBUGMAXPARSER==1) System.out.println("elemup");
		return e;
	}

	// <Thing> ::= <Factor> (<VeryStrongOp> <Factor>)*
	private Expression Thing() throws SyntaxException {
		if(DEBUGMAXPARSER==1) System.out.println("thingdown");
		Expression e = null;
		Expression f1 = null;
		List<Expression> fL = new ArrayList<Expression>();
		List<Token> oL = new ArrayList<Token>();
		f1 = Factor();
		while(isKind(LSHIFT)||isKind(RSHIFT)){
			oL.add(VeryStrongOp());
			fL.add(Factor());
		}
		if(!fL.isEmpty()){
			fL.add(0,f1);
			e = bins(oL,fL);
		}
		else
			e = f1;
		if(DEBUGMAXPARSER==1) System.out.println("thingup");
		return e;
	}

	//IdentExpression,ListOrMapElemExpression,ClosureEvalExpression
	// Tagged-union result of IdentInFactor(): which factor form followed
	// the leading identifier.
	class IdentInFactorClass {
		String type;
		Token id;
		Expression e;
		List<Expression> el;

		IdentInFactorClass(Token i){
			type = "IdentExpression";
			id = i;
		}
		IdentInFactorClass(Token i, Expression ex){
			type = "ListOrMapElemExpression";
			id = i;
			e = ex;
		}
		IdentInFactorClass(Token i, List<Expression> l){
			type = "ClosureEvalExpression";
			id = i;
			el = l;
		}
		// debug dump of this node to stdout
		void tostring() {
			System.out.println(type + "\n");
			System.out.println(id + "\n");
			switch(type){
			case "IdentExpression":
				break;
			case "ListOrMapElemExpression":
				System.out.println(e + "\n");
				break;
			case "ClosureEvalExpression":
				System.out.println(el.get(0) + "\n");
			}
		}
	}

	// <Factor> — literals, identifiers (plus subscript/call tails), unary
	// ops, size/key/value built-ins, closures, list/map literals, and
	// parenthesized expressions. Throws on any token outside fctrPredSt.
	private Expression Factor() throws SyntaxException {
		if(DEBUGMAXPARSER==1) System.out.println("factordown");
		Token ft = t;
		Expression f = null;
		Expression e = null;
		if(isKind(IDENT)) {
			IdentInFactorClass id = IdentInFactor();
			switch(id.type) {
			case "IdentExpression":
				f = new IdentExpression(ft,id.id);
				break;
			case "ListOrMapElemExpression":
				f = new ListOrMapElemExpression(ft,id.id,id.e);
				break;
			case "ClosureEvalExpression":
				f = new ClosureEvalExpression(ft,id.id,id.el);
			}
		}
		else if(isKind(INT_LIT)) {
			f = new IntLitExpression(ft,t.getIntVal());
			match(INT_LIT);
		}
		else if(isKind(BL_TRUE)){
			f = new BooleanLitExpression(ft, t.getBooleanVal());
			match(BL_TRUE);
		}
		else if(isKind(BL_FALSE)){
			f = new BooleanLitExpression(ft, t.getBooleanVal());
			match(BL_FALSE);
		}
		else if(isKind(STRING_LIT)){
			f = new StringLitExpression(ft, t.getText());
			match(STRING_LIT);
		}
		else if(isKind(NOT)){
			Token op = t;
			match(NOT);
			e = Factor();
			f = new UnaryExpression(ft, op, e);
		}
		else if(isKind(MINUS)){
			Token op = t;
			match(MINUS);
			e = Factor();
			f = new UnaryExpression(ft, op, e);
		}
		else if(isKind(KW_SIZE)){
			match(KW_SIZE);
			match(LPAREN);
			e = Expression();
			f = new SizeExpression(ft, e);
			match(RPAREN);
		}
		else if(isKind(KW_KEY)){
			match(KW_KEY);
			match(LPAREN);
			e = Expression();
			f = new KeyExpression(ft, e);
			match(RPAREN);
		}
		else if(isKind(KW_VALUE)){
			match(KW_VALUE);
			match(LPAREN);
			e = Expression();
			f = new ValueExpression(ft, e);
			match(RPAREN);
		}
		else if(isKind(LCURLY)){
			Closure cl = Closure();
			f = new ClosureExpression(ft, cl);
		}
		else if(isKind(AT)) {
			//List is my list, not hers
			ListClass l = List();
			switch(l.type){
			case "ListExpression":
				f = new ListExpression(ft, l.l);
				break;
			case "MapListExpression":
				f = new MapListExpression(ft, l.ml);
			}
		}
		else if(isKind(LPAREN)){
			match(LPAREN);
			f = Expression();
			match(RPAREN);
		}
		else{
			//not really the kind expected but since the exception class is stupid
			//need to pass some sort of kind
			throw new SyntaxException(t, KW_VALUE);
		}
		if(DEBUGMAXPARSER==1) System.out.println("factorup");
		return f;
	}

	// Tagged-union result of List(): plain list literal or map literal.
	class ListClass {
		String type;
		List<Expression> l;
		List<KeyValueExpression> ml;

		//dummy is just for diff signatures
		ListClass(List<Expression> ll, int dummy){
			type = "ListExpression";
			l = ll;
		}
		ListClass(List<KeyValueExpression> mll){
			type = "MapListExpression";
			ml = mll;
		}
	}

	// @[ <ExpressionList> ] or @@[ <KeyValueList> ]
	private ListClass List() throws SyntaxException {
		if(DEBUGMAXPARSER==1) System.out.println("listdown");
		List<KeyValueExpression> kvl = null;
		List<Expression> el = null;
		ListClass l = null;
		match(AT);
		// <MapList> i.e. @@[ <KeyValueList> ]
		if(isKind(AT)){
			match(AT);
			match(LSQUARE);
			kvl = KeyValueList();
			match(RSQUARE);
		}
		else{// <List> i.e. @[ <ExpressionList> ]
			match(LSQUARE);
			el = ExpressionList();
			match(RSQUARE);
		}
		if(kvl!=null)
			l = new ListClass(kvl);
		else
			l = new ListClass(el,1);
		if(DEBUGMAXPARSER==1) System.out.println("listup");
		return l;
	}

	// IDENT ([ <Expression> ] | ( <ExpressionList> ))?
	private IdentInFactorClass IdentInFactor() throws SyntaxException {
		if(DEBUGMAXPARSER==1) System.out.println("identinfactordown");
		IdentInFactorClass id = null;
		Expression e = null;
		List<Expression> l = null;
		Token idT = t;
		match(IDENT);
		if(isKind(LSQUARE)){
			match(LSQUARE);
			e = Expression();
			match(RSQUARE);
		}
		else if(isKind(LPAREN)){
			match(LPAREN);
			l = ExpressionList();
			match(RPAREN);
		}
		if(e!=null || l!=null){
			if(e!=null)
				id = new IdentInFactorClass(idT, e);
			else
				id = new IdentInFactorClass(idT, l);
		}
		else{
			id = new IdentInFactorClass(idT);
		}
		if(DEBUGMAXPARSER==1) System.out.println("identinfactorup");
		return id;
	}

	// Consume and return one relational/logical operator token.
	private Token RelOp() throws SyntaxException {
		if(DEBUGMAXPARSER==1) System.out.println("relopdown");
		Token op = t;
		if(isKind(BAR))
			match(BAR);
		else if(isKind(AND))
			match(AND);
		else if(isKind(EQUAL))
			match(EQUAL);
		else if(isKind(NOTEQUAL))
			match(NOTEQUAL);
		else if(isKind(LT))
			match(LT);
		else if(isKind(GT))
			match(GT);
		else if(isKind(LE))
			match(LE);
		else if(isKind(GE))
			match(GE);
		else{
			throw new SyntaxException(t,"relop exception");
		}
		if(DEBUGMAXPARSER==1) System.out.println("relopup");
		return op;
	}

	// Consume and return + or -.
	private Token WeakOp() throws SyntaxException {
		if(DEBUGMAXPARSER==1) System.out.println("weakopdown");
		Token op = t;
		if(isKind(PLUS))
			match(PLUS);
		else if(isKind(MINUS))
			match(MINUS);
		else{
			throw new SyntaxException(t,"weakop exception");
		}
		if(DEBUGMAXPARSER==1) System.out.println("weakdown");
		return op;
	}

	// Consume and return * or /.
	private Token StrongOp() throws SyntaxException {
		if(DEBUGMAXPARSER==1) System.out.println("strongopdown");
		Token op = t;
		if(isKind(TIMES))
			match(TIMES);
		else if(isKind(DIV))
			match(DIV);
		else{
			throw new SyntaxException(t,"strongop exception");
		}
		if(DEBUGMAXPARSER==1) System.out.println("strongopup");
		return op;
	}

	// Consume and return << or >>.
	private Token VeryStrongOp() throws SyntaxException {
		if(DEBUGMAXPARSER==1) System.out.println("verystrongopdown");
		Token op = t;
		if(isKind(LSHIFT))
			match(LSHIFT);
		else if(isKind(RSHIFT))
			match(RSHIFT);
		else{
			throw new SyntaxException(t,"verystrongop exception");
		}
		if(DEBUGMAXPARSER==1) System.out.println("verystrongopup");
		return op;
	}
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.operator; import com.facebook.airlift.log.Logger; import com.facebook.presto.common.Page; import com.facebook.presto.execution.FragmentResultCacheContext; import com.facebook.presto.execution.ScheduledSplit; import com.facebook.presto.execution.TaskSource; import com.facebook.presto.metadata.Split; import com.facebook.presto.spi.PrestoException; import com.facebook.presto.spi.UpdatablePageSource; import com.facebook.presto.spi.plan.PlanNodeId; import com.facebook.presto.split.RemoteSplit; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.VerifyException; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Sets; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.SettableFuture; import io.airlift.units.Duration; import javax.annotation.concurrent.GuardedBy; import java.io.Closeable; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.locks.ReentrantLock; import java.util.function.Supplier; import static com.facebook.airlift.concurrent.MoreFutures.getFutureValue; import static 
com.facebook.presto.operator.Operator.NOT_BLOCKED; import static com.facebook.presto.spi.StandardErrorCode.GENERIC_INTERNAL_ERROR; import static com.facebook.presto.spi.schedule.NodeSelectionStrategy.NO_PREFERENCE; import static com.facebook.presto.util.MoreUninterruptibles.tryLockUninterruptibly; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkState; import static com.google.common.base.Throwables.throwIfUnchecked; import static com.google.common.util.concurrent.MoreExecutors.directExecutor; import static java.lang.Boolean.TRUE; import static java.util.Objects.requireNonNull; // // NOTE: As a general strategy the methods should "stage" a change and only // process the actual change before lock release (DriverLockResult.close()). // The assures that only one thread will be working with the operators at a // time and state changer threads are not blocked. // public class Driver implements Closeable { private static final Logger log = Logger.get(Driver.class); private final DriverContext driverContext; private final Optional<FragmentResultCacheContext> fragmentResultCacheContext; private final List<Operator> activeOperators; // this is present only for debugging @SuppressWarnings("unused") private final List<Operator> allOperators; private final Optional<SourceOperator> sourceOperator; private final Optional<DeleteOperator> deleteOperator; // This variable acts as a staging area. When new splits (encapsulated in TaskSource) are // provided to a Driver, the Driver will not process them right away. Instead, the splits are // added to this staging area. This staging area will be drained asynchronously. That's when // the new splits get processed. 
private final AtomicReference<TaskSource> pendingTaskSourceUpdates = new AtomicReference<>(); private final Map<Operator, ListenableFuture<?>> revokingOperators = new HashMap<>(); private final AtomicReference<State> state = new AtomicReference<>(State.ALIVE); private final DriverLock exclusiveLock = new DriverLock(); @GuardedBy("exclusiveLock") private TaskSource currentTaskSource; private final AtomicReference<SettableFuture<?>> driverBlockedFuture = new AtomicReference<>(); private final AtomicReference<Optional<Iterator<Page>>> cachedResult = new AtomicReference<>(Optional.empty()); private final AtomicReference<Split> split = new AtomicReference<>(); private final List<Page> outputPages = new ArrayList<>(); private enum State { ALIVE, NEED_DESTRUCTION, DESTROYED } public static Driver createDriver(DriverContext driverContext, List<Operator> operators) { requireNonNull(driverContext, "driverContext is null"); requireNonNull(operators, "operators is null"); Driver driver = new Driver(driverContext, operators); driver.initialize(); return driver; } @VisibleForTesting public static Driver createDriver(DriverContext driverContext, Operator firstOperator, Operator... 
otherOperators) { requireNonNull(driverContext, "driverContext is null"); requireNonNull(firstOperator, "firstOperator is null"); requireNonNull(otherOperators, "otherOperators is null"); ImmutableList<Operator> operators = ImmutableList.<Operator>builder() .add(firstOperator) .add(otherOperators) .build(); return createDriver(driverContext, operators); } private Driver(DriverContext driverContext, List<Operator> operators) { this.driverContext = requireNonNull(driverContext, "driverContext is null"); this.fragmentResultCacheContext = driverContext.getPipelineContext().getTaskContext().getFragmentResultCacheContext(); this.allOperators = ImmutableList.copyOf(requireNonNull(operators, "operators is null")); checkArgument(allOperators.size() > 1, "At least two operators are required"); this.activeOperators = new ArrayList<>(operators); checkArgument(!operators.isEmpty(), "There must be at least one operator"); Optional<SourceOperator> sourceOperator = Optional.empty(); Optional<DeleteOperator> deleteOperator = Optional.empty(); for (Operator operator : operators) { if (operator instanceof SourceOperator) { checkArgument(!sourceOperator.isPresent(), "There must be at most one SourceOperator"); sourceOperator = Optional.of((SourceOperator) operator); } else if (operator instanceof DeleteOperator) { checkArgument(!deleteOperator.isPresent(), "There must be at most one DeleteOperator"); deleteOperator = Optional.of((DeleteOperator) operator); } } this.sourceOperator = sourceOperator; this.deleteOperator = deleteOperator; currentTaskSource = sourceOperator.map(operator -> new TaskSource(operator.getSourceId(), ImmutableSet.of(), false)).orElse(null); // initially the driverBlockedFuture is not blocked (it is completed) SettableFuture<?> future = SettableFuture.create(); future.set(null); driverBlockedFuture.set(future); } // the memory revocation request listeners are added here in a separate initialize() method // instead of the constructor to prevent leaking the "this" 
reference to // another thread, which will cause unsafe publication of this instance. private void initialize() { activeOperators.stream() .map(Operator::getOperatorContext) .forEach(operatorContext -> operatorContext.setMemoryRevocationRequestListener(() -> driverBlockedFuture.get().set(null))); } public DriverContext getDriverContext() { return driverContext; } public Optional<PlanNodeId> getSourceId() { return sourceOperator.map(SourceOperator::getSourceId); } @Override public void close() { // mark the service for destruction if (!state.compareAndSet(State.ALIVE, State.NEED_DESTRUCTION)) { return; } exclusiveLock.interruptCurrentOwner(); // if we can get the lock, attempt a clean shutdown; otherwise someone else will shutdown tryWithLock(() -> TRUE); } public boolean isFinished() { checkLockNotHeld("Can not check finished status while holding the driver lock"); // if we can get the lock, attempt a clean shutdown; otherwise someone else will shutdown Optional<Boolean> result = tryWithLock(this::isFinishedInternal); return result.orElseGet(() -> state.get() != State.ALIVE || driverContext.isDone()); } @GuardedBy("exclusiveLock") private boolean isFinishedInternal() { checkLockHeld("Lock must be held to call isFinishedInternal"); boolean finished = state.get() != State.ALIVE || driverContext.isDone() || activeOperators.isEmpty() || activeOperators.get(activeOperators.size() - 1).isFinished(); if (finished) { state.compareAndSet(State.ALIVE, State.NEED_DESTRUCTION); } return finished; } public void updateSource(TaskSource sourceUpdate) { checkLockNotHeld("Can not update sources while holding the driver lock"); checkArgument( sourceOperator.isPresent() && sourceOperator.get().getSourceId().equals(sourceUpdate.getPlanNodeId()), "sourceUpdate is for a canonicalPlan node that is different from this Driver's source node"); // stage the new updates pendingTaskSourceUpdates.updateAndGet(current -> current == null ? 
sourceUpdate : current.update(sourceUpdate)); // attempt to get the lock and process the updates we staged above // updates will be processed in close if and only if we got the lock tryWithLock(() -> TRUE); } @GuardedBy("exclusiveLock") private void processNewSources() { checkLockHeld("Lock must be held to call processNewSources"); // only update if the driver is still alive if (state.get() != State.ALIVE) { return; } TaskSource sourceUpdate = pendingTaskSourceUpdates.getAndSet(null); if (sourceUpdate == null) { return; } // merge the current source and the specified source update TaskSource newSource = currentTaskSource.update(sourceUpdate); // if the update contains no new data, just return if (newSource == currentTaskSource) { return; } // determine new splits to add Set<ScheduledSplit> newSplits = Sets.difference(newSource.getSplits(), currentTaskSource.getSplits()); // add new splits SourceOperator sourceOperator = this.sourceOperator.orElseThrow(VerifyException::new); for (ScheduledSplit newSplit : newSplits) { Split split = newSplit.getSplit(); if (fragmentResultCacheContext.isPresent() && !(split.getConnectorSplit() instanceof RemoteSplit)) { checkState(!this.cachedResult.get().isPresent()); this.cachedResult.set(fragmentResultCacheContext.get().getFragmentResultCacheManager().get(fragmentResultCacheContext.get().getCanonicalPlanFragment(), split)); this.split.set(split); } Supplier<Optional<UpdatablePageSource>> pageSource = sourceOperator.addSplit(split); deleteOperator.ifPresent(deleteOperator -> deleteOperator.setPageSource(pageSource)); } // set no more splits if (newSource.isNoMoreSplits()) { sourceOperator.noMoreSplits(); } currentTaskSource = newSource; } public ListenableFuture<?> processFor(Duration duration) { checkLockNotHeld("Can not process for a duration while holding the driver lock"); requireNonNull(duration, "duration is null"); // if the driver is blocked we don't need to continue SettableFuture<?> blockedFuture = 
driverBlockedFuture.get(); if (!blockedFuture.isDone()) { return blockedFuture; } long maxRuntime = duration.roundTo(TimeUnit.NANOSECONDS); Optional<ListenableFuture<?>> result = tryWithLock(100, TimeUnit.MILLISECONDS, () -> { OperationTimer operationTimer = createTimer(); driverContext.startProcessTimer(); driverContext.getYieldSignal().setWithDelay(maxRuntime, driverContext.getYieldExecutor()); try { long start = System.nanoTime(); do { ListenableFuture<?> future = processInternal(operationTimer); if (!future.isDone()) { return updateDriverBlockedFuture(future); } } while (System.nanoTime() - start < maxRuntime && !isFinishedInternal()); } finally { driverContext.getYieldSignal().reset(); driverContext.recordProcessed(operationTimer); } return NOT_BLOCKED; }); return result.orElse(NOT_BLOCKED); } public ListenableFuture<?> process() { checkLockNotHeld("Can not process while holding the driver lock"); // if the driver is blocked we don't need to continue SettableFuture<?> blockedFuture = driverBlockedFuture.get(); if (!blockedFuture.isDone()) { return blockedFuture; } Optional<ListenableFuture<?>> result = tryWithLock(100, TimeUnit.MILLISECONDS, () -> { ListenableFuture<?> future = processInternal(createTimer()); return updateDriverBlockedFuture(future); }); return result.orElse(NOT_BLOCKED); } private OperationTimer createTimer() { return new OperationTimer( driverContext.isCpuTimerEnabled(), driverContext.isCpuTimerEnabled() && driverContext.isPerOperatorCpuTimerEnabled(), driverContext.isAllocationTrackingEnabled(), driverContext.isAllocationTrackingEnabled() && driverContext.isPerOperatorAllocationTrackingEnabled()); } private ListenableFuture<?> updateDriverBlockedFuture(ListenableFuture<?> sourceBlockedFuture) { // driverBlockedFuture will be completed as soon as the sourceBlockedFuture is completed // or any of the operators gets a memory revocation request SettableFuture<?> newDriverBlockedFuture = SettableFuture.create(); 
driverBlockedFuture.set(newDriverBlockedFuture);
sourceBlockedFuture.addListener(() -> newDriverBlockedFuture.set(null), directExecutor());

// it's possible that memory revoking is requested for some operator
// before we update driverBlockedFuture above and we don't want to miss that
// notification, so we check to see whether that's the case before returning.
boolean memoryRevokingRequested = activeOperators.stream()
        .filter(operator -> !revokingOperators.containsKey(operator))
        .map(Operator::getOperatorContext)
        .anyMatch(OperatorContext::isMemoryRevokingRequested);

if (memoryRevokingRequested) {
    newDriverBlockedFuture.set(null);
}

return newDriverBlockedFuture;
}

// the fragment result cache is only consulted for splits with a node-selection preference
private boolean shouldUseFragmentResultCache()
{
    return fragmentResultCacheContext.isPresent() && split.get() != null && split.get().getConnectorSplit().getNodeSelectionStrategy() != NO_PREFERENCE;
}

/**
 * Performs one scheduling pass over the operator pipeline: applies pending source
 * updates, moves at most one page between each adjacent operator pair, retires
 * finished operators, and reports what (if anything) the driver is blocked on.
 *
 * @return {@code NOT_BLOCKED} if progress was made, otherwise a future that
 *         completes when the first blocked operator can continue
 */
@GuardedBy("exclusiveLock")
private ListenableFuture<?> processInternal(OperationTimer operationTimer)
{
    checkLockHeld("Lock must be held to call processInternal");

    handleMemoryRevoke();

    try {
        processNewSources();

        // If there is only one operator, finish it
        // Some operators (LookupJoinOperator and HashBuildOperator) are broken and require finish to be called continuously
        // TODO remove the second part of the if statement, when these operators are fixed
        // Note: finish should not be called on the natural source of the pipeline as this could cause the task to finish early
        if (!activeOperators.isEmpty() && activeOperators.size() != allOperators.size()) {
            Operator rootOperator = activeOperators.get(0);
            rootOperator.finish();
            rootOperator.getOperatorContext().recordFinish(operationTimer);
        }

        boolean movedPage = false;

        if (cachedResult.get().isPresent()) {
            // serve pages straight from the fragment result cache; bypass the pipeline
            Iterator<Page> remainingPages = cachedResult.get().get();
            Operator outputOperator = activeOperators.get(activeOperators.size() - 1);
            if (remainingPages.hasNext()) {
                Page outputPage = remainingPages.next();
                outputPages.add(outputPage);
                outputOperator.addInput(outputPage);
            }
            else {
                outputOperator.finish();
                outputOperator.getOperatorContext().recordFinish(operationTimer);
            }
        }
        else {
            for (int i = 0; i < activeOperators.size() - 1 && !driverContext.isDone(); i++) {
                Operator current = activeOperators.get(i);
                Operator next = activeOperators.get(i + 1);

                // skip blocked operator
                if (getBlockedFuture(current).isPresent()) {
                    continue;
                }

                // if the current operator is not finished and next operator isn't blocked and needs input...
                if (!current.isFinished() && !getBlockedFuture(next).isPresent() && next.needsInput()) {
                    // get an output page from current operator
                    Page page = current.getOutput();
                    current.getOperatorContext().recordGetOutput(operationTimer, page);

                    // For the last non-output operator, we keep the pages for caching purpose.
                    if (shouldUseFragmentResultCache() && i == activeOperators.size() - 2 && page != null) {
                        outputPages.add(page);
                    }

                    // if we got an output page, add it to the next operator
                    if (page != null && page.getPositionCount() != 0) {
                        next.addInput(page);
                        next.getOperatorContext().recordAddInput(operationTimer, page);
                        movedPage = true;
                    }

                    // source operators make progress by consuming splits, not pages
                    if (current instanceof SourceOperator) {
                        movedPage = true;
                    }
                }

                // if current operator is finished...
                if (current.isFinished()) {
                    // let next operator know there will be no more data
                    next.finish();
                    next.getOperatorContext().recordFinish(operationTimer);
                }
            }
        }

        // retire the longest finished prefix of the pipeline, scanning from the output end
        for (int index = activeOperators.size() - 1; index >= 0; index--) {
            if (activeOperators.get(index).isFinished()) {
                boolean outputOperatorFinished = index == activeOperators.size() - 1;

                // close and remove this operator and all source operators
                List<Operator> finishedOperators = this.activeOperators.subList(0, index + 1);
                Throwable throwable = closeAndDestroyOperators(finishedOperators);
                finishedOperators.clear();
                if (throwable != null) {
                    throwIfUnchecked(throwable);
                    throw new RuntimeException(throwable);
                }

                // publish collected pages to the fragment result cache once output is complete
                if (shouldUseFragmentResultCache() && outputOperatorFinished && !cachedResult.get().isPresent()) {
                    checkState(split.get() != null);
                    checkState(fragmentResultCacheContext.isPresent());
                    fragmentResultCacheContext.get().getFragmentResultCacheManager().put(fragmentResultCacheContext.get().getCanonicalPlanFragment(), split.get(), outputPages);
                }

                // Finish the next operator, which is now the first operator.
                if (!activeOperators.isEmpty()) {
                    Operator newRootOperator = activeOperators.get(0);
                    newRootOperator.finish();
                    newRootOperator.getOperatorContext().recordFinish(operationTimer);
                }
                break;
            }
        }

        // if we did not move any pages, check if we are blocked
        if (!movedPage) {
            List<Operator> blockedOperators = new ArrayList<>();
            List<ListenableFuture<?>> blockedFutures = new ArrayList<>();
            for (Operator operator : activeOperators) {
                Optional<ListenableFuture<?>> blocked = getBlockedFuture(operator);
                if (blocked.isPresent()) {
                    blockedOperators.add(operator);
                    blockedFutures.add(blocked.get());
                }
            }

            if (!blockedFutures.isEmpty()) {
                // unblock when the first future is complete
                ListenableFuture<?> blocked = firstFinishedFuture(blockedFutures);
                // driver records serial blocked time
                driverContext.recordBlocked(blocked);
                // each blocked operator is responsible for blocking the execution
                // until one of the operators can continue
                for (Operator operator : blockedOperators) {
                    operator.getOperatorContext().recordBlocked(blocked);
                }
                return blocked;
            }
        }

        return NOT_BLOCKED;
    }
    catch (Throwable t) {
        List<StackTraceElement> interrupterStack = exclusiveLock.getInterrupterStack();
        if (interrupterStack == null) {
            driverContext.failed(t);
            throw t;
        }

        // Driver thread was interrupted which should only happen if the task is already finished.
        // If this becomes the actual cause of a failed query there is a bug in the task state machine.
Exception exception = new Exception("Interrupted By");
exception.setStackTrace(interrupterStack.stream().toArray(StackTraceElement[]::new));
PrestoException newException = new PrestoException(GENERIC_INTERNAL_ERROR, "Driver was interrupted", exception);
newException.addSuppressed(t);
driverContext.failed(newException);
throw newException;
}
}

/**
 * Drives memory revocation for each active operator: completes revocations that
 * are done and starts new ones that have been requested.
 */
@GuardedBy("exclusiveLock")
private void handleMemoryRevoke()
{
    for (int i = 0; i < activeOperators.size() && !driverContext.isDone(); i++) {
        Operator operator = activeOperators.get(i);

        if (revokingOperators.containsKey(operator)) {
            checkOperatorFinishedRevoking(operator);
        }
        else if (operator.getOperatorContext().isMemoryRevokingRequested()) {
            ListenableFuture<?> future = operator.startMemoryRevoke();
            revokingOperators.put(operator, future);
            checkOperatorFinishedRevoking(operator);
        }
    }
}

// finalizes a revocation whose future has completed; only called for operators present in revokingOperators
@GuardedBy("exclusiveLock")
private void checkOperatorFinishedRevoking(Operator operator)
{
    ListenableFuture<?> future = revokingOperators.get(operator);
    if (future.isDone()) {
        getFutureValue(future); // propagate exception if there was some
        revokingOperators.remove(operator);
        operator.finishMemoryRevoke();
        operator.getOperatorContext().resetMemoryRevokingRequested();
    }
}

/**
 * Transitions the driver from NEED_DESTRUCTION to DESTROYED (at most once),
 * closing all operators and releasing driver resources.
 */
@GuardedBy("exclusiveLock")
private void destroyIfNecessary()
{
    checkLockHeld("Lock must be held to call destroyIfNecessary");

    // CAS guarantees destruction runs exactly once
    if (!state.compareAndSet(State.NEED_DESTRUCTION, State.DESTROYED)) {
        return;
    }

    // if we get an error while closing a driver, record it and we will throw it at the end
    Throwable inFlightException = null;
    try {
        inFlightException = closeAndDestroyOperators(activeOperators);
        if (driverContext.getMemoryUsage() > 0) {
            log.error("Driver still has memory reserved after freeing all operator memory.");
        }
        if (driverContext.getSystemMemoryUsage() > 0) {
            log.error("Driver still has system memory reserved after freeing all operator memory.");
        }
        if (driverContext.getRevocableMemoryUsage() > 0) {
            log.error("Driver still has revocable memory reserved after freeing all operator memory. Freeing it.");
        }
        driverContext.finished();
    }
    catch (Throwable t) {
        // this shouldn't happen but be safe
        inFlightException = addSuppressedException(
                inFlightException,
                t,
                "Error destroying driver for task %s",
                driverContext.getTaskId());
    }

    if (inFlightException != null) {
        // this will always be an Error or Runtime
        throwIfUnchecked(inFlightException);
        throw new RuntimeException(inFlightException);
    }
}

/**
 * Closes and destroys the given operators, collecting (rather than throwing)
 * failures so every operator gets a chance to release its resources.
 *
 * @return the first collected Error (with later ones suppressed), or null
 */
private Throwable closeAndDestroyOperators(List<Operator> operators)
{
    // record the current interrupted status (and clear the flag); we'll reset it later
    boolean wasInterrupted = Thread.interrupted();

    Throwable inFlightException = null;
    try {
        for (Operator operator : operators) {
            try {
                operator.close();
            }
            catch (InterruptedException t) {
                // don't record the stack
                wasInterrupted = true;
            }
            catch (Throwable t) {
                inFlightException = addSuppressedException(
                        inFlightException,
                        t,
                        "Error closing operator %s for task %s",
                        operator.getOperatorContext().getOperatorId(),
                        driverContext.getTaskId());
            }
            try {
                operator.getOperatorContext().destroy();
            }
            catch (Throwable t) {
                inFlightException = addSuppressedException(
                        inFlightException,
                        t,
                        "Error freeing all allocated memory for operator %s for task %s",
                        operator.getOperatorContext().getOperatorId(),
                        driverContext.getTaskId());
            }
        }
    }
    finally {
        // reset the interrupted flag
        if (wasInterrupted) {
            Thread.currentThread().interrupt();
        }
    }
    return inFlightException;
}

/**
 * Returns the future this operator is currently blocked on: an in-flight memory
 * revocation, the operator's own blocked future, or a pending memory grant.
 * Empty if the operator can make progress.
 */
private Optional<ListenableFuture<?>> getBlockedFuture(Operator operator)
{
    ListenableFuture<?> blocked = revokingOperators.get(operator);
    if (blocked != null) {
        // We mark operator as blocked regardless of blocked.isDone(), because finishMemoryRevoke has not been called yet.
return Optional.of(blocked);
}
blocked = operator.isBlocked();
if (!blocked.isDone()) {
    return Optional.of(blocked);
}
blocked = operator.getOperatorContext().isWaitingForMemory();
if (!blocked.isDone()) {
    return Optional.of(blocked);
}
blocked = operator.getOperatorContext().isWaitingForRevocableMemory();
if (!blocked.isDone()) {
    return Optional.of(blocked);
}
return Optional.empty();
}

/**
 * Folds a new failure into the in-flight exception chain: Errors are kept
 * (first one wins, later ones suppressed); ordinary exceptions are only logged.
 */
private static Throwable addSuppressedException(Throwable inFlightException, Throwable newException, String message, Object... args)
{
    if (newException instanceof Error) {
        if (inFlightException == null) {
            inFlightException = newException;
        }
        else {
            // Self-suppression not permitted
            if (inFlightException != newException) {
                inFlightException.addSuppressed(newException);
            }
        }
    }
    else {
        // log normal exceptions instead of rethrowing them
        log.error(newException, message, args);
    }
    return inFlightException;
}

private synchronized void checkLockNotHeld(String message)
{
    checkState(!exclusiveLock.isHeldByCurrentThread(), message);
}

@GuardedBy("exclusiveLock")
private synchronized void checkLockHeld(String message)
{
    checkState(exclusiveLock.isHeldByCurrentThread(), message);
}

// returns a future that completes as soon as ANY of the given futures completes
private static ListenableFuture<?> firstFinishedFuture(List<ListenableFuture<?>> futures)
{
    if (futures.size() == 1) {
        return futures.get(0);
    }

    SettableFuture<?> result = SettableFuture.create();

    for (ListenableFuture<?> future : futures) {
        future.addListener(() -> result.set(null), directExecutor());
    }

    return result;
}

// Note: task can not return null
private <T> Optional<T> tryWithLock(Supplier<T> task)
{
    return tryWithLock(0, TimeUnit.MILLISECONDS, task);
}

/**
 * Attempts to acquire the driver lock within the given timeout and, if acquired,
 * runs {@code task} followed by pending source updates and any required destruction.
 *
 * @return the task result, or empty if the lock could not be acquired
 */
// Note: task can not return null
private <T> Optional<T> tryWithLock(long timeout, TimeUnit unit, Supplier<T> task)
{
    checkLockNotHeld("Lock can not be reacquired");

    boolean acquired = exclusiveLock.tryLock(timeout, unit);
    if (!acquired) {
        return Optional.empty();
    }

    Optional<T> result;
    try {
        result = Optional.of(task.get());
    }
    finally {
        try {
            try {
processNewSources();
}
finally {
    // destruction must run even if applying source updates fails
    destroyIfNecessary();
}
}
finally {
    exclusiveLock.unlock();
}
}

// If there are more source updates available, attempt to reacquire the lock and process them.
// This can happen if new sources are added while we're holding the lock here doing work.
// NOTE: this is separate duplicate code to make debugging lock reacquisition easier
// The first condition is for processing the pending updates if this driver is still ALIVE
// The second condition is to destroy the driver if the state is NEED_DESTRUCTION
while (((pendingTaskSourceUpdates.get() != null && state.get() == State.ALIVE) || state.get() == State.NEED_DESTRUCTION) && exclusiveLock.tryLock()) {
    try {
        try {
            processNewSources();
        }
        finally {
            destroyIfNecessary();
        }
    }
    finally {
        exclusiveLock.unlock();
    }
}

return result;
}

/**
 * Non-reentrant lock guarding driver processing. Tracks the owning thread so
 * an interrupt can be delivered to whoever currently holds the lock, and records
 * the interrupter's stack trace for debugging.
 */
private static class DriverLock
{
    private final ReentrantLock lock = new ReentrantLock();

    // thread currently holding the lock, null when unowned
    @GuardedBy("this")
    private Thread currentOwner;

    // stack trace of the first thread that tried to interrupt the owner (debugging aid)
    @GuardedBy("this")
    private List<StackTraceElement> interrupterStack;

    public boolean isHeldByCurrentThread()
    {
        return lock.isHeldByCurrentThread();
    }

    public boolean tryLock()
    {
        checkState(!lock.isHeldByCurrentThread(), "Lock is not reentrant");
        boolean acquired = lock.tryLock();
        if (acquired) {
            setOwner();
        }
        return acquired;
    }

    public boolean tryLock(long timeout, TimeUnit unit)
    {
        checkState(!lock.isHeldByCurrentThread(), "Lock is not reentrant");
        boolean acquired = tryLockUninterruptibly(lock, timeout, unit);
        if (acquired) {
            setOwner();
        }
        return acquired;
    }

    private synchronized void setOwner()
    {
        checkState(lock.isHeldByCurrentThread(), "Current thread does not hold lock");
        currentOwner = Thread.currentThread();
        // NOTE: We do not use interrupted stack information to know that another
        // thread has attempted to interrupt the driver, and interrupt this new lock
        // owner. The interrupted stack information is for debugging purposes only.
        // In the case of interruption, the caller should (and does) have a separate
        // state to prevent further processing in the Driver.
    }

    public synchronized void unlock()
    {
        checkState(lock.isHeldByCurrentThread(), "Current thread does not hold lock");
        currentOwner = null;
        lock.unlock();
    }

    public synchronized List<StackTraceElement> getInterrupterStack()
    {
        return interrupterStack;
    }

    public synchronized void interruptCurrentOwner()
    {
        // there is a benign race condition here where the lock holder
        // can change between attempting to get the lock and grabbing
        // the synchronized lock here, but in either case we want to
        // interrupt the lock holder thread
        if (interrupterStack == null) {
            interrupterStack = ImmutableList.copyOf(Thread.currentThread().getStackTrace());
        }

        if (currentOwner != null) {
            currentOwner.interrupt();
        }
    }
}
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.server; import com.facebook.presto.client.QueryResults; import com.facebook.presto.execution.AddColumnTask; import com.facebook.presto.execution.CallTask; import com.facebook.presto.execution.CommitTask; import com.facebook.presto.execution.CreateSchemaTask; import com.facebook.presto.execution.CreateTableTask; import com.facebook.presto.execution.CreateViewTask; import com.facebook.presto.execution.DataDefinitionTask; import com.facebook.presto.execution.DeallocateTask; import com.facebook.presto.execution.DropSchemaTask; import com.facebook.presto.execution.DropTableTask; import com.facebook.presto.execution.DropViewTask; import com.facebook.presto.execution.ForQueryExecution; import com.facebook.presto.execution.GrantTask; import com.facebook.presto.execution.PrepareTask; import com.facebook.presto.execution.QueryExecution; import com.facebook.presto.execution.QueryExecutionMBean; import com.facebook.presto.execution.QueryIdGenerator; import com.facebook.presto.execution.QueryInfo; import com.facebook.presto.execution.QueryManager; import com.facebook.presto.execution.QueryQueueManager; import com.facebook.presto.execution.QueryQueueRule; import com.facebook.presto.execution.QueryQueueRuleFactory; import com.facebook.presto.execution.RemoteTaskFactory; import com.facebook.presto.execution.RenameColumnTask; import com.facebook.presto.execution.RenameSchemaTask; import 
com.facebook.presto.execution.RenameTableTask; import com.facebook.presto.execution.ResetSessionTask; import com.facebook.presto.execution.RevokeTask; import com.facebook.presto.execution.RollbackTask; import com.facebook.presto.execution.SetSessionTask; import com.facebook.presto.execution.SqlQueryManager; import com.facebook.presto.execution.SqlQueryQueueManager; import com.facebook.presto.execution.StartTransactionTask; import com.facebook.presto.execution.TaskInfo; import com.facebook.presto.execution.resourceGroups.InternalResourceGroupManager; import com.facebook.presto.execution.resourceGroups.LegacyResourceGroupConfigurationManagerFactory; import com.facebook.presto.execution.resourceGroups.ResourceGroupManager; import com.facebook.presto.execution.scheduler.AllAtOnceExecutionPolicy; import com.facebook.presto.execution.scheduler.ExecutionPolicy; import com.facebook.presto.execution.scheduler.PhasedExecutionPolicy; import com.facebook.presto.execution.scheduler.SplitSchedulerStats; import com.facebook.presto.memory.ClusterMemoryManager; import com.facebook.presto.memory.ForMemoryManager; import com.facebook.presto.operator.ForScheduler; import com.facebook.presto.server.remotetask.RemoteTaskStats; import com.facebook.presto.spi.memory.ClusterMemoryPoolManager; import com.facebook.presto.sql.analyzer.FeaturesConfig; import com.facebook.presto.sql.analyzer.QueryExplainer; import com.facebook.presto.sql.tree.AddColumn; import com.facebook.presto.sql.tree.Call; import com.facebook.presto.sql.tree.Commit; import com.facebook.presto.sql.tree.CreateSchema; import com.facebook.presto.sql.tree.CreateTable; import com.facebook.presto.sql.tree.CreateTableAsSelect; import com.facebook.presto.sql.tree.CreateView; import com.facebook.presto.sql.tree.Deallocate; import com.facebook.presto.sql.tree.Delete; import com.facebook.presto.sql.tree.DescribeInput; import com.facebook.presto.sql.tree.DescribeOutput; import com.facebook.presto.sql.tree.DropSchema; import 
com.facebook.presto.sql.tree.DropTable; import com.facebook.presto.sql.tree.DropView; import com.facebook.presto.sql.tree.Explain; import com.facebook.presto.sql.tree.Grant; import com.facebook.presto.sql.tree.Insert; import com.facebook.presto.sql.tree.Prepare; import com.facebook.presto.sql.tree.Query; import com.facebook.presto.sql.tree.RenameColumn; import com.facebook.presto.sql.tree.RenameSchema; import com.facebook.presto.sql.tree.RenameTable; import com.facebook.presto.sql.tree.ResetSession; import com.facebook.presto.sql.tree.Revoke; import com.facebook.presto.sql.tree.Rollback; import com.facebook.presto.sql.tree.SetSession; import com.facebook.presto.sql.tree.ShowCatalogs; import com.facebook.presto.sql.tree.ShowColumns; import com.facebook.presto.sql.tree.ShowCreate; import com.facebook.presto.sql.tree.ShowFunctions; import com.facebook.presto.sql.tree.ShowPartitions; import com.facebook.presto.sql.tree.ShowSchemas; import com.facebook.presto.sql.tree.ShowSession; import com.facebook.presto.sql.tree.ShowTables; import com.facebook.presto.sql.tree.StartTransaction; import com.facebook.presto.sql.tree.Statement; import com.facebook.presto.sql.tree.Use; import com.google.inject.Binder; import com.google.inject.Scopes; import com.google.inject.TypeLiteral; import com.google.inject.multibindings.MapBinder; import io.airlift.configuration.AbstractConfigurationAwareModule; import io.airlift.units.Duration; import java.util.List; import java.util.concurrent.ExecutorService; import static com.facebook.presto.execution.DataDefinitionExecution.DataDefinitionExecutionFactory; import static com.facebook.presto.execution.QueryExecution.QueryExecutionFactory; import static com.facebook.presto.execution.SqlQueryExecution.SqlQueryExecutionFactory; import static com.google.inject.multibindings.MapBinder.newMapBinder; import static io.airlift.concurrent.Threads.threadsNamed; import static io.airlift.discovery.client.DiscoveryBinder.discoveryBinder; import static 
io.airlift.http.client.HttpClientBinder.httpClientBinder;
import static io.airlift.http.server.HttpServerBinder.httpServerBinder;
import static io.airlift.jaxrs.JaxrsBinder.jaxrsBinder;
import static io.airlift.json.JsonCodecBinder.jsonCodecBinder;
import static java.util.concurrent.Executors.newCachedThreadPool;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.weakref.jmx.ObjectNames.generatedNameOf;
import static org.weakref.jmx.guice.ExportBinder.newExporter;

/**
 * Guice module that wires up coordinator-only services: HTTP/REST resources,
 * query and resource-group management, cluster memory management, remote task
 * scheduling, and the per-statement query-execution and data-definition factories.
 */
public class CoordinatorModule
        extends AbstractConfigurationAwareModule
{
    @Override
    protected void setup(Binder binder)
    {
        // serve the coordinator web UI from the bundled "webapp" resources
        httpServerBinder(binder).bindResource("/", "webapp").withWelcomeFile("index.html");

        // presto coordinator announcement
        discoveryBinder(binder).bindHttpAnnouncement("presto-coordinator");

        // statement resource
        jsonCodecBinder(binder).bindJsonCodec(QueryInfo.class);
        jsonCodecBinder(binder).bindJsonCodec(TaskInfo.class);
        jsonCodecBinder(binder).bindJsonCodec(QueryResults.class);
        jaxrsBinder(binder).bind(StatementResource.class);

        // query execution visualizer
        jaxrsBinder(binder).bind(QueryExecutionResource.class);

        // query manager
        jaxrsBinder(binder).bind(QueryResource.class);
        jaxrsBinder(binder).bind(StageResource.class);
        binder.bind(QueryIdGenerator.class).in(Scopes.SINGLETON);
        binder.bind(QueryManager.class).to(SqlQueryManager.class).in(Scopes.SINGLETON);
        binder.bind(InternalResourceGroupManager.class).in(Scopes.SINGLETON);
        binder.bind(ResourceGroupManager.class).to(InternalResourceGroupManager.class);
        binder.bind(LegacyResourceGroupConfigurationManagerFactory.class).in(Scopes.SINGLETON);
        // queueing is handled either by resource groups or by the legacy queue manager
        if (buildConfigObject(FeaturesConfig.class).isResourceGroupsEnabled()) {
            binder.bind(QueryQueueManager.class).to(InternalResourceGroupManager.class);
        }
        else {
            binder.bind(QueryQueueManager.class).to(SqlQueryQueueManager.class).in(Scopes.SINGLETON);
            binder.bind(new TypeLiteral<List<QueryQueueRule>>() {}).toProvider(QueryQueueRuleFactory.class).in(Scopes.SINGLETON);
        }
        newExporter(binder).export(QueryManager.class).withGeneratedName();

        // cluster memory manager
        binder.bind(ClusterMemoryManager.class).in(Scopes.SINGLETON);
        binder.bind(ClusterMemoryPoolManager.class).to(ClusterMemoryManager.class).in(Scopes.SINGLETON);
        httpClientBinder(binder).bindHttpClient("memoryManager", ForMemoryManager.class)
                .withTracing()
                .withConfigDefaults(config -> {
                    config.setIdleTimeout(new Duration(30, SECONDS));
                    config.setRequestTimeout(new Duration(10, SECONDS));
                });
        newExporter(binder).export(ClusterMemoryManager.class).withGeneratedName();

        // cluster statistics
        jaxrsBinder(binder).bind(ClusterStatsResource.class);

        // query explainer
        binder.bind(QueryExplainer.class).in(Scopes.SINGLETON);

        // execution scheduler
        binder.bind(RemoteTaskFactory.class).to(HttpRemoteTaskFactory.class).in(Scopes.SINGLETON);
        newExporter(binder).export(RemoteTaskFactory.class).withGeneratedName();
        binder.bind(RemoteTaskStats.class).in(Scopes.SINGLETON);
        newExporter(binder).export(RemoteTaskStats.class).withGeneratedName();
        httpClientBinder(binder).bindHttpClient("scheduler", ForScheduler.class)
                .withTracing()
                .withConfigDefaults(config -> {
                    config.setIdleTimeout(new Duration(30, SECONDS));
                    config.setRequestTimeout(new Duration(10, SECONDS));
                    config.setMaxConnectionsPerServer(250);
                });

        // query execution
        binder.bind(ExecutorService.class).annotatedWith(ForQueryExecution.class)
                .toInstance(newCachedThreadPool(threadsNamed("query-execution-%s")));
        binder.bind(QueryExecutionMBean.class).in(Scopes.SINGLETON);
        newExporter(binder).export(QueryExecutionMBean.class).as(generatedNameOf(QueryExecution.class));

        // maps each Statement type to the factory that can execute it
        MapBinder<Class<? extends Statement>, QueryExecutionFactory<?>> executionBinder = newMapBinder(binder,
                new TypeLiteral<Class<? extends Statement>>() {}, new TypeLiteral<QueryExecutionFactory<?>>() {});

        binder.bind(SplitSchedulerStats.class).in(Scopes.SINGLETON);
        newExporter(binder).export(SplitSchedulerStats.class).withGeneratedName();
        binder.bind(SqlQueryExecutionFactory.class).in(Scopes.SINGLETON);
        // statements executed by the SQL query engine
        executionBinder.addBinding(Query.class).to(SqlQueryExecutionFactory.class).in(Scopes.SINGLETON);
        executionBinder.addBinding(Explain.class).to(SqlQueryExecutionFactory.class).in(Scopes.SINGLETON);
        executionBinder.addBinding(ShowCreate.class).to(SqlQueryExecutionFactory.class).in(Scopes.SINGLETON);
        executionBinder.addBinding(ShowColumns.class).to(SqlQueryExecutionFactory.class).in(Scopes.SINGLETON);
        executionBinder.addBinding(ShowPartitions.class).to(SqlQueryExecutionFactory.class).in(Scopes.SINGLETON);
        executionBinder.addBinding(ShowFunctions.class).to(SqlQueryExecutionFactory.class).in(Scopes.SINGLETON);
        executionBinder.addBinding(ShowTables.class).to(SqlQueryExecutionFactory.class).in(Scopes.SINGLETON);
        executionBinder.addBinding(ShowSchemas.class).to(SqlQueryExecutionFactory.class).in(Scopes.SINGLETON);
        executionBinder.addBinding(ShowCatalogs.class).to(SqlQueryExecutionFactory.class).in(Scopes.SINGLETON);
        executionBinder.addBinding(Use.class).to(SqlQueryExecutionFactory.class).in(Scopes.SINGLETON);
        executionBinder.addBinding(ShowSession.class).to(SqlQueryExecutionFactory.class).in(Scopes.SINGLETON);
        executionBinder.addBinding(CreateTableAsSelect.class).to(SqlQueryExecutionFactory.class).in(Scopes.SINGLETON);
        executionBinder.addBinding(Insert.class).to(SqlQueryExecutionFactory.class).in(Scopes.SINGLETON);
        executionBinder.addBinding(Delete.class).to(SqlQueryExecutionFactory.class).in(Scopes.SINGLETON);
        executionBinder.addBinding(DescribeInput.class).to(SqlQueryExecutionFactory.class).in(Scopes.SINGLETON);
        executionBinder.addBinding(DescribeOutput.class).to(SqlQueryExecutionFactory.class).in(Scopes.SINGLETON);

        binder.bind(DataDefinitionExecutionFactory.class).in(Scopes.SINGLETON);
        // DDL and session-control statements executed by dedicated tasks
        bindDataDefinitionTask(binder, executionBinder, CreateSchema.class, CreateSchemaTask.class);
        bindDataDefinitionTask(binder, executionBinder, DropSchema.class, DropSchemaTask.class);
        bindDataDefinitionTask(binder, executionBinder, RenameSchema.class, RenameSchemaTask.class);
        bindDataDefinitionTask(binder, executionBinder, AddColumn.class, AddColumnTask.class);
        bindDataDefinitionTask(binder, executionBinder, CreateTable.class, CreateTableTask.class);
        bindDataDefinitionTask(binder, executionBinder, RenameTable.class, RenameTableTask.class);
        bindDataDefinitionTask(binder, executionBinder, RenameColumn.class, RenameColumnTask.class);
        bindDataDefinitionTask(binder, executionBinder, DropTable.class, DropTableTask.class);
        bindDataDefinitionTask(binder, executionBinder, CreateView.class, CreateViewTask.class);
        bindDataDefinitionTask(binder, executionBinder, DropView.class, DropViewTask.class);
        bindDataDefinitionTask(binder, executionBinder, SetSession.class, SetSessionTask.class);
        bindDataDefinitionTask(binder, executionBinder, ResetSession.class, ResetSessionTask.class);
        bindDataDefinitionTask(binder, executionBinder, StartTransaction.class, StartTransactionTask.class);
        bindDataDefinitionTask(binder, executionBinder, Commit.class, CommitTask.class);
        bindDataDefinitionTask(binder, executionBinder, Rollback.class, RollbackTask.class);
        bindDataDefinitionTask(binder, executionBinder, Call.class, CallTask.class);
        bindDataDefinitionTask(binder, executionBinder, Grant.class, GrantTask.class);
        bindDataDefinitionTask(binder, executionBinder, Revoke.class, RevokeTask.class);
        bindDataDefinitionTask(binder, executionBinder, Prepare.class, PrepareTask.class);
        bindDataDefinitionTask(binder, executionBinder, Deallocate.class, DeallocateTask.class);

        // named execution policies selectable by configuration
        MapBinder<String, ExecutionPolicy> executionPolicyBinder = newMapBinder(binder, String.class, ExecutionPolicy.class);
        executionPolicyBinder.addBinding("all-at-once").to(AllAtOnceExecutionPolicy.class);
        executionPolicyBinder.addBinding("phased").to(PhasedExecutionPolicy.class);
    }

    /**
     * Registers a data-definition statement: binds its task implementation and
     * routes the statement type to the shared DataDefinitionExecutionFactory.
     */
    private static <T extends Statement> void bindDataDefinitionTask(
            Binder binder,
            MapBinder<Class<? extends Statement>, QueryExecutionFactory<?>> executionBinder,
            Class<T> statement,
            Class<? extends DataDefinitionTask<T>> task)
    {
        MapBinder<Class<? extends Statement>, DataDefinitionTask<?>> taskBinder = newMapBinder(binder, new TypeLiteral<Class<? extends Statement>>() {}, new TypeLiteral<DataDefinitionTask<?>>() {});
        taskBinder.addBinding(statement).to(task).in(Scopes.SINGLETON);

        executionBinder.addBinding(statement).to(DataDefinitionExecutionFactory.class).in(Scopes.SINGLETON);
    }
}
// ======================================================================== // $Id: WebApplicationHandler.java,v 1.62 2006/01/04 13:55:31 gregwilkins Exp $ // Copyright 1996-2004 Mort Bay Consulting Pty. Ltd. // ------------------------------------------------------------------------ // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // ======================================================================== package net.lightbody.bmp.proxy.jetty.jetty.servlet; import net.lightbody.bmp.proxy.jetty.http.HttpContext; import net.lightbody.bmp.proxy.jetty.http.HttpResponse; import net.lightbody.bmp.proxy.jetty.http.PathMap; import net.lightbody.bmp.proxy.jetty.log.LogFactory; import net.lightbody.bmp.proxy.jetty.util.*; import org.apache.commons.logging.Log; import javax.servlet.*; import javax.servlet.http.*; import java.io.IOException; import java.util.*; /* --------------------------------------------------------------------- */ /** WebApp HttpHandler. * This handler extends the ServletHandler with security, filter and resource * capabilities to provide full J2EE web container support. 
* <p> * @since Jetty 4.1 * @see net.lightbody.bmp.proxy.jetty.jetty.servlet.WebApplicationContext * @version $Id: WebApplicationHandler.java,v 1.62 2006/01/04 13:55:31 gregwilkins Exp $ * @author Greg Wilkins */ public class WebApplicationHandler extends ServletHandler { private static Log log= LogFactory.getLog(WebApplicationHandler.class); private Map _filterMap= new HashMap(); private List _pathFilters= new ArrayList(); private List _filters= new ArrayList(); private MultiMap _servletFilterMap= new MultiMap(); private boolean _acceptRanges= true; private boolean _filterChainsCached=true; private transient WebApplicationContext _webApplicationContext; protected transient Object _requestListeners; protected transient Object _requestAttributeListeners; protected transient Object _sessionListeners; protected transient Object _contextAttributeListeners; protected transient FilterHolder jsr154FilterHolder; protected transient JSR154Filter jsr154Filter; protected transient HashMap _chainCache[]; protected transient HashMap _namedChainCache[]; /* ------------------------------------------------------------ */ public boolean isAcceptRanges() { return _acceptRanges; } /* ------------------------------------------------------------ */ /** Set if the handler accepts range requests. * Default is false; * @param ar True if the handler should accept ranges */ public void setAcceptRanges(boolean ar) { _acceptRanges= ar; } /* ------------------------------------------------------------ */ /** * @return Returns the jsr154Filter. 
*/ public JSR154Filter getJsr154Filter() { return jsr154Filter; } /* ------------------------------------------------------------ */ public FilterHolder defineFilter(String name, String className) { FilterHolder holder= newFilterHolder(name,className); addFilterHolder(holder); return holder; } /* ------------------------------------------------------------ */ protected FilterHolder newFilterHolder(String name, String className) { return new FilterHolder(this, name, className); } /* ------------------------------------------------------------ */ public void addFilterHolder(FilterHolder holder) { _filterMap.put(holder.getName(), holder); _filters.add(holder); addComponent(holder); } /* ------------------------------------------------------------ */ public FilterHolder getFilter(String name) { return (FilterHolder)_filterMap.get(name); } /* ------------------------------------------------------------ */ /** Add a mapping from a pathSpec to a Filter. * @param pathSpec The path specification * @param filterName The name of the filter (must already be added or defined) * @param dispatches An integer formed by the logical OR of FilterHolder.__REQUEST, * FilterHolder.__FORWARD,FilterHolder.__INCLUDE and/or FilterHolder.__ERROR. * @return The holder of the filter instance. */ public FilterHolder addFilterPathMapping(String pathSpec, String filterName, int dispatches) { FilterHolder holder = (FilterHolder)_filterMap.get(filterName); if (holder==null) throw new IllegalArgumentException("unknown filter: "+filterName); FilterMapping mapping = new FilterMapping(pathSpec,holder,dispatches); _pathFilters.add(mapping); return holder; } /* ------------------------------------------------------------ */ /** * Add a servlet filter mapping * @param servletName The name of the servlet to be filtered. * @param filterName The name of the filter. 
* @param dispatches An integer formed by the logical OR of FilterHolder.__REQUEST,
     * FilterHolder.__FORWARD,FilterHolder.__INCLUDE and/or FilterHolder.__ERROR.
     * @return The holder of the filter instance.
     */
    public FilterHolder addFilterServletMapping(String servletName, String filterName, int dispatches)
    {
        FilterHolder holder= (FilterHolder)_filterMap.get(filterName);
        if (holder == null)
            throw new IllegalArgumentException("Unknown filter :" + filterName);

        _servletFilterMap.add(servletName, new FilterMapping(null,holder,dispatches));
        return holder;
    }

    /* ------------------------------------------------------------ */
    /** @return the live list of all registered FilterHolders. */
    public List getFilters()
    {
        return _filters;
    }

    /* ------------------------------------------------------------ */
    /** Dispatch the listener to every LazyList collection matching its type(s),
     * forward session listeners to the session manager, then to the superclass.
     * A listener implementing several interfaces is added to each matching list.
     */
    public synchronized void addEventListener(EventListener listener) throws IllegalArgumentException
    {
        if ((listener instanceof HttpSessionActivationListener)
            || (listener instanceof HttpSessionAttributeListener)
            || (listener instanceof HttpSessionBindingListener)
            || (listener instanceof HttpSessionListener))
        {
            if (_sessionManager != null)
                _sessionManager.addEventListener(listener);
            _sessionListeners= LazyList.add(_sessionListeners, listener);
        }

        if (listener instanceof ServletRequestListener)
        {
            _requestListeners= LazyList.add(_requestListeners, listener);
        }

        if (listener instanceof ServletRequestAttributeListener)
        {
            _requestAttributeListeners= LazyList.add(_requestAttributeListeners, listener);
        }

        if (listener instanceof ServletContextAttributeListener)
        {
            _contextAttributeListeners= LazyList.add(_contextAttributeListeners, listener);
        }

        super.addEventListener(listener);
    }

    /* ------------------------------------------------------------ */
    /** Remove the listener from every listener collection it may be in. */
    public synchronized void removeEventListener(EventListener listener)
    {
        if (_sessionManager != null)
            _sessionManager.removeEventListener(listener);
        _sessionListeners= LazyList.remove(_sessionListeners, listener);
        _requestListeners= LazyList.remove(_requestListeners, listener);
        _requestAttributeListeners= LazyList.remove(_requestAttributeListeners, listener);
        _contextAttributeListeners= LazyList.remove(_contextAttributeListeners, listener);
        super.removeEventListener(listener);
    }

    /* ------------------------------------------------------------ */
    /** Replace the session manager, migrating registered session listeners from
     * the old manager to the new one. Only allowed while stopped.
     */
    public void setSessionManager(SessionManager sm)
    {
        if (isStarted())
            throw new IllegalStateException("Started");

        SessionManager old= getSessionManager();

        if (getHttpContext() != null)
        {
            // recover config and remove listeners from old session manager
            if (old != null && old != sm)
            {
                if (_sessionListeners != null)
                {
                    for (Iterator i= LazyList.iterator(_sessionListeners); i.hasNext();)
                    {
                        EventListener listener= (EventListener)i.next();
                        _sessionManager.removeEventListener(listener);
                    }
                }
            }

            // Set listeners and config on new listener.
            if (sm != null && old != sm)
            {
                if (_sessionListeners != null)
                {
                    for (Iterator i= LazyList.iterator(_sessionListeners); i.hasNext();)
                    {
                        EventListener listener= (EventListener)i.next();
                        sm.addEventListener(listener);
                    }
                }
            }
        }

        super.setSessionManager(sm);
    }

    /* ----------------------------------------------------------------- */
    /** Start the handler; captures the WebApplicationContext and allocates the
     * per-dispatch-type filter-chain caches when caching is enabled.
     */
    protected synchronized void doStart() throws Exception
    {
        // Start Servlet Handler
        super.doStart();
        if (log.isDebugEnabled())
            log.debug("Path Filters: " + _pathFilters);
        if (log.isDebugEnabled())
            log.debug("Servlet Filters: " + _servletFilterMap);
        if (getHttpContext() instanceof WebApplicationContext)
            _webApplicationContext= (WebApplicationContext)getHttpContext();

        if (_filterChainsCached)
        {
            _chainCache = getChainCache();
            _namedChainCache = getChainCache();
        }
    }

    /* ----------------------------------------------------------------- */
    /** Allocate an empty chain-cache array, one HashMap per dispatch type. */
    private HashMap[] getChainCache()
    {
        HashMap[] _chainCache=new HashMap[Dispatcher.__ERROR+1];
        _chainCache[Dispatcher.__REQUEST]=new HashMap();
        _chainCache[Dispatcher.__FORWARD]=new HashMap();
        _chainCache[Dispatcher.__INCLUDE]=new HashMap();
        _chainCache[Dispatcher.__ERROR]=new HashMap();
        return _chainCache;
    }

    /*
------------------------------------------------------------ */
    /** Start all filters, then servlets, collecting failures into a single
     * MultiException so one failing filter does not prevent the others starting.
     * Also wires the optional "jsr154" filter to the request(-attribute) listeners.
     */
    public void initializeServlets() throws Exception
    {
        // initialize Filters
        MultiException mex= new MultiException();
        Iterator iter= _filters.iterator();
        while (iter.hasNext())
        {
            FilterHolder holder= (FilterHolder)iter.next();
            try
            {
                holder.start();
            }
            catch (Exception e)
            {
                mex.add(e);
            }
        }

        // initialize Servlets
        try
        {
            super.initializeServlets();
        }
        catch (Exception e)
        {
            mex.add(e);
        }

        jsr154FilterHolder=getFilter("jsr154");
        if (jsr154FilterHolder!=null)
            jsr154Filter= (JSR154Filter)jsr154FilterHolder.getFilter();
        log.debug("jsr154filter="+jsr154Filter);

        if (LazyList.size(_requestAttributeListeners) > 0 || LazyList.size(_requestListeners) > 0)
        {
            if (jsr154Filter==null)
                log.warn("Filter jsr154 not defined for RequestAttributeListeners");
            else
            {
                jsr154Filter.setRequestAttributeListeners(_requestAttributeListeners);
                jsr154Filter.setRequestListeners(_requestListeners);
            }
        }

        mex.ifExceptionThrow();
    }

    /* ----------------------------------------------------------------- */
    /** Stop servlets, then filters in reverse order; always clears the
     * transient listener/context state in the finally block.
     */
    protected synchronized void doStop() throws Exception
    {
        try
        {
            // Stop servlets
            super.doStop();

            // Stop filters
            for (int i= _filters.size(); i-- > 0;)
            {
                FilterHolder holder= (FilterHolder)_filters.get(i);
                holder.stop();
            }
        }
        finally
        {
            _webApplicationContext= null;
            _sessionListeners= null;
            _requestListeners= null;
            _requestAttributeListeners= null;
            _contextAttributeListeners= null;
        }
    }

    /* ------------------------------------------------------------ */
    /** Resolve the error page for a status code and/or the exception recorded in
     * the request attributes: first the exact exception class, then its
     * superclasses, finally the numeric status code.
     * @return the error page path, or null if none is configured.
     */
    public String getErrorPage(int status, ServletHttpRequest request)
    {
        String error_page= null;
        Class exClass= (Class)request.getAttribute(__J_S_ERROR_EXCEPTION_TYPE);

        if (ServletException.class.equals(exClass))
        {
            error_page= _webApplicationContext.getErrorPage(exClass.getName());
            if (error_page == null)
            {
                // Unwrap ServletException to its root cause for a more specific match.
                Throwable th= (Throwable)request.getAttribute(__J_S_ERROR_EXCEPTION);
                while (th instanceof ServletException)
                    th= ((ServletException)th).getRootCause();
                if (th != null)
                    exClass= th.getClass();
            }
        }

        if (error_page == null && exClass != null)
        {
            while (error_page == null && exClass != null && _webApplicationContext != null)
            {
                error_page= _webApplicationContext.getErrorPage(exClass.getName());
                exClass= exClass.getSuperclass();
            }

            // NOTE(review): empty statement — dead code kept for token fidelity.
            if (error_page == null)
            {}
        }

        if (error_page == null && _webApplicationContext != null)
            error_page= _webApplicationContext.getErrorPage(TypeUtil.toString(status));

        return error_page;
    }

    /* ------------------------------------------------------------ */
    /** Dispatch a request to a servlet through its filter chain.
     * For initial (__REQUEST) dispatches: blocks /web-inf and /meta-inf paths and
     * applies security constraints. For other dispatch types: handles forwarded
     * j_security_check submissions. Then builds (or fetches from cache) the
     * filter chain and invokes it, or the servlet directly if no chain applies.
     */
    protected void dispatch(String pathInContext,
        HttpServletRequest request,
        HttpServletResponse response,
        ServletHolder servletHolder,
        int type)
        throws ServletException, UnavailableException, IOException
    {
        if (type == Dispatcher.__REQUEST)
        {
            // This is NOT a dispatched request (it it is an initial request)
            ServletHttpRequest servletHttpRequest= (ServletHttpRequest)request;
            ServletHttpResponse servletHttpResponse= (ServletHttpResponse)response;

            // protect web-inf and meta-inf
            if (StringUtil.startsWithIgnoreCase(pathInContext, "/web-inf")
                || StringUtil.startsWithIgnoreCase(pathInContext, "/meta-inf"))
            {
                response.sendError(HttpResponse.__404_Not_Found);
                return;
            }

            // Security Check
            if (!getHttpContext().checkSecurityConstraints(pathInContext,
                servletHttpRequest.getHttpRequest(),
                servletHttpResponse.getHttpResponse()))
                return;
        }
        else
        {
            // This is a dispatched request.
            // Handle dispatch to j_security_check
            HttpContext context= getHttpContext();
            if (context != null
                && context instanceof ServletHttpContext
                && pathInContext != null
                && pathInContext.endsWith(FormAuthenticator.__J_SECURITY_CHECK))
            {
                ServletHttpRequest servletHttpRequest= (ServletHttpRequest)context.getHttpConnection().getRequest().getWrapper();
                ServletHttpResponse servletHttpResponse= servletHttpRequest.getServletHttpResponse();
                ServletHttpContext servletContext= (ServletHttpContext)context;
                if (!servletContext.jSecurityCheck(pathInContext,servletHttpRequest.getHttpRequest(),servletHttpResponse.getHttpResponse()))
                    return;
            }
        }

        // Build and/or cache filter chain
        FilterChain chain=null;
        if (pathInContext != null)
        {
            chain = getChainForPath(type, pathInContext, servletHolder);
        }
        else
        {
            chain = getChainForName(type, servletHolder);
        }

        if (log.isDebugEnabled())
            log.debug("chain="+chain);

        // Do the handling thang
        if (chain!=null)
            chain.doFilter(request, response);
        else if (servletHolder != null)
            servletHolder.handle(request, response);
        else // Not found
            notFound(request, response);
    }

    /* ------------------------------------------------------------ */
    /** Build (or fetch from the named-chain cache) the filter chain for a named
     * dispatch to an explicit servlet; only servlet-name filter mappings apply.
     */
    private FilterChain getChainForName(int requestType, ServletHolder servletHolder)
    {
        if (servletHolder == null)
        {
            throw new IllegalStateException("Named dispatch must be to an explicitly named servlet");
        }

        if (_filterChainsCached)
        {
            synchronized(this)
            {
                // containsKey check allows caching a null (no-filter) chain.
                if (_namedChainCache[requestType].containsKey(servletHolder.getName()))
                    return (FilterChain)_namedChainCache[requestType].get(servletHolder.getName());
            }
        }

        // Build list of filters
        Object filters= null;

        if (jsr154Filter!=null)
        {
            // Slight hack for Named servlets
            // TODO query JSR how to apply filter to all dispatches
            filters=LazyList.add(filters,jsr154FilterHolder);
        }

        // Servlet filters
        if (_servletFilterMap.size() > 0)
        {
            Object o= _servletFilterMap.get(servletHolder.getName());
            for (int i=0; i<LazyList.size(o);i++)
            {
                FilterMapping mapping = (FilterMapping)LazyList.get(o,i);
                if (mapping.appliesTo(null,requestType))
                    filters=LazyList.add(filters,mapping.getHolder());
            }
        }

        FilterChain chain = null;
        if (_filterChainsCached)
        {
            synchronized(this)
            {
                if (LazyList.size(filters) > 0)
                    chain= new CachedChain(filters, servletHolder);
                _namedChainCache[requestType].put(servletHolder.getName(),chain);
            }
        }
        else if (LazyList.size(filters) > 0)
            chain = new Chain(filters, servletHolder);

        return chain;
    }

    /* ------------------------------------------------------------ */
    /** Build (or fetch from the path-chain cache) the filter chain for a path
     * dispatch: path-mapped filters first, then servlet-name mappings.
     */
    private FilterChain getChainForPath(int requestType, String pathInContext, ServletHolder servletHolder)
    {
        if (_filterChainsCached)
        {
            synchronized(this)
            {
                // containsKey check allows caching a null (no-filter) chain.
                if(_chainCache[requestType].containsKey(pathInContext))
                    return (FilterChain)_chainCache[requestType].get(pathInContext);
            }
        }

        // Build list of filters
        Object filters= null;

        // Path filters
        for (int i= 0; i < _pathFilters.size(); i++)
        {
            FilterMapping mapping = (FilterMapping)_pathFilters.get(i);
            if (mapping.appliesTo(pathInContext, requestType))
                filters= LazyList.add(filters, mapping.getHolder());
        }

        // Servlet filters
        if (servletHolder != null && _servletFilterMap.size() > 0)
        {
            Object o= _servletFilterMap.get(servletHolder.getName());
            for (int i=0; i<LazyList.size(o);i++)
            {
                FilterMapping mapping = (FilterMapping)LazyList.get(o,i);
                if (mapping.appliesTo(null,requestType))
                    filters=LazyList.add(filters,mapping.getHolder());
            }
        }

        FilterChain chain = null;
        if (_filterChainsCached)
        {
            synchronized(this)
            {
                if (LazyList.size(filters) > 0)
                    chain= new CachedChain(filters, servletHolder);
                _chainCache[requestType].put(pathInContext,chain);
            }
        }
        else if (LazyList.size(filters) > 0)
            chain = new Chain(filters, servletHolder);

        return chain;
    }

    /* ------------------------------------------------------------ */
    /** Set a context attribute and notify ServletContextAttributeListeners with
     * the appropriate added/removed/replaced event.
     */
    public synchronized void setContextAttribute(String name, Object value)
    {
        Object old= super.getContextAttribute(name);
        super.setContextAttribute(name, value);

        if (_contextAttributeListeners != null)
        {
            ServletContextAttributeEvent event= new ServletContextAttributeEvent(getServletContext(), name, old != null ? old : value);

            for (int i= 0; i < LazyList.size(_contextAttributeListeners); i++)
            {
                ServletContextAttributeListener l= (ServletContextAttributeListener)LazyList.get(_contextAttributeListeners, i);

                if (old == null)
                    l.attributeAdded(event);
                else if (value == null)
                    l.attributeRemoved(event);
                else
                    l.attributeReplaced(event);
            }
        }
    }

    /* ------------------------------------------------------------ */
    /** Remove a context attribute and notify listeners if it existed. */
    public synchronized void removeContextAttribute(String name)
    {
        Object old= super.getContextAttribute(name);
        super.removeContextAttribute(name);

        if (old != null && _contextAttributeListeners != null)
        {
            ServletContextAttributeEvent event= new ServletContextAttributeEvent(getServletContext(), name, old);

            for (int i= 0; i < LazyList.size(_contextAttributeListeners); i++)
            {
                ServletContextAttributeListener l= (ServletContextAttributeListener)LazyList.get(_contextAttributeListeners, i);
                l.attributeRemoved(event);
            }
        }
    }

    /* ------------------------------------------------------------ */
    /**
     * @return Returns the filterChainsCached.
     */
    public boolean isFilterChainsCached()
    {
        return _filterChainsCached;
    }

    /* ------------------------------------------------------------ */
    /** Cache filter chains.
     * If true, filter chains are cached by the URI path within the
     * context. Caching should not be used if the webapp encodes
     * information in URLs.
     * @param filterChainsCached The filterChainsCached to set.
*/ public void setFilterChainsCached(boolean filterChainsCached) { _filterChainsCached = filterChainsCached; } /* ------------------------------------------------------------ */ /** * @see net.lightbody.bmp.proxy.jetty.util.Container#addComponent(java.lang.Object) */ protected void addComponent(Object o) { if (_filterChainsCached && isStarted()) { synchronized(this) { for (int i=0;i<_chainCache.length;i++) if (_chainCache[i]!=null) _chainCache[i].clear(); } } super.addComponent(o); } /* ------------------------------------------------------------ */ /** * @see net.lightbody.bmp.proxy.jetty.util.Container#removeComponent(java.lang.Object) */ protected void removeComponent(Object o) { if (_filterChainsCached && isStarted()) { synchronized(this) { for (int i=0;i<_chainCache.length;i++) if (_chainCache[i]!=null) _chainCache[i].clear(); } } super.removeComponent(o); } /* ----------------------------------------------------------------- */ public void destroy() { Iterator iter = _filterMap.values().iterator(); while (iter.hasNext()) { Object sh=iter.next(); iter.remove(); removeComponent(sh); } } /* ------------------------------------------------------------ */ /* ------------------------------------------------------------ */ private static class FilterMapping { private String _pathSpec; private FilterHolder _holder; private int _dispatches; /* ------------------------------------------------------------ */ FilterMapping(String pathSpec,FilterHolder holder,int dispatches) { _pathSpec=pathSpec; _holder=holder; _dispatches=dispatches; } /* ------------------------------------------------------------ */ FilterHolder getHolder() { return _holder; } /* ------------------------------------------------------------ */ /** Check if this filter applies to a path. * @param path The path to check. * @param type The type of request: __REQUEST,__FORWARD,__INCLUDE or __ERROR. 
* @return True if this filter applies
         */
        boolean appliesTo(String path, int type)
        {
            // dispatches==0 means "unspecified", which defaults to REQUEST only.
            boolean b=((_dispatches&type)!=0 || (_dispatches==0 && type==Dispatcher.__REQUEST))
                && (_pathSpec==null || PathMap.match(_pathSpec, path,true));
            return b;
        }
    }

    /* ------------------------------------------------------------ */
    /* ------------------------------------------------------------ */
    /* ------------------------------------------------------------ */
    /** Uncached filter chain: walks the LazyList of holders by index, then
     * finally invokes the servlet (or notFound). One instance per request.
     */
    private class Chain implements FilterChain
    {
        int _filter= 0;
        Object _filters;
        ServletHolder _servletHolder;

        /* ------------------------------------------------------------ */
        Chain(Object filters, ServletHolder servletHolder)
        {
            _filters= filters;
            _servletHolder= servletHolder;
        }

        /* ------------------------------------------------------------ */
        public void doFilter(ServletRequest request, ServletResponse response)
            throws IOException, ServletException
        {
            if (log.isTraceEnabled())
                log.trace("doFilter " + _filter);

            // pass to next filter
            if (_filter < LazyList.size(_filters))
            {
                FilterHolder holder= (FilterHolder)LazyList.get(_filters, _filter++);
                if (log.isTraceEnabled())
                    log.trace("call filter " + holder);
                Filter filter= holder.getFilter();
                filter.doFilter(request, response, this);
                return;
            }

            // Call servlet
            if (_servletHolder != null)
            {
                if (log.isTraceEnabled())
                    log.trace("call servlet " + _servletHolder);
                _servletHolder.handle(request, response);
            }
            else // Not found
                notFound((HttpServletRequest)request, (HttpServletResponse)response);
        }

        public String toString()
        {
            StringBuffer b = new StringBuffer();
            for (int i=0; i<LazyList.size(_filters);i++)
            {
                b.append(LazyList.get(_filters, i).toString());
                b.append("->");
            }
            b.append(_servletHolder);
            return b.toString();
        }
    }

    /* ------------------------------------------------------------ */
    /* ------------------------------------------------------------ */
    /* ------------------------------------------------------------ */
    /** Cacheable filter chain: an immutable linked list built once per cached
     * path/name, so it can be reused concurrently without per-request state.
     */
    private class CachedChain implements FilterChain
    {
        FilterHolder _filterHolder;
        ServletHolder _servletHolder;
        CachedChain _next;

        /* ------------------------------------------------------------ */
        // Recursively consumes the LazyList head, linking one node per filter;
        // the terminal node holds only the servlet.
        CachedChain(Object filters, ServletHolder servletHolder)
        {
            if (LazyList.size(filters)>0)
            {
                _filterHolder=(FilterHolder)LazyList.get(filters, 0);
                filters=LazyList.remove(filters,0);
                _next=new CachedChain(filters,servletHolder);
            }
            else
                _servletHolder=servletHolder;
        }

        public void doFilter(ServletRequest request, ServletResponse response)
            throws IOException, ServletException
        {
            // pass to next filter
            if (_filterHolder!=null)
            {
                if (log.isTraceEnabled())
                    log.trace("call filter " + _filterHolder);
                Filter filter= _filterHolder.getFilter();
                filter.doFilter(request, response, _next);
                return;
            }

            // Call servlet
            if (_servletHolder != null)
            {
                if (log.isTraceEnabled())
                    log.trace("call servlet " + _servletHolder);
                _servletHolder.handle(request, response);
            }
            else // Not found
                notFound((HttpServletRequest)request, (HttpServletResponse)response);
        }

        public String toString()
        {
            if (_filterHolder!=null)
                return _filterHolder+"->"+_next.toString();
            if (_servletHolder!=null)
                return _servletHolder.toString();
            return "null";
        }
    }

    // NOTE(review): ad-hoc manual test left in production code; consider removal.
    public static void main(String[] arg)
    {
        ServletHandler mServletHandler = new ServletHandler();
        ServletHolder servletHolder = mServletHandler.addServlet("/mPath", "wicket.protocol.http.WicketServlet");
        servletHolder.getServletContext().setAttribute("webApplication", "mWebApplication");
        servletHolder.getServletContext().setAttribute("applicationContext", "mApplicationContext");

        WebApplicationHandler mWebApplicationHandler = new WebApplicationHandler();
        ServletHolder servletHolder2 = mWebApplicationHandler.addServlet("/mpath", "wicket.protocol.http.WicketServlet");
        servletHolder2.getServletContext().setAttribute("webApplication", "mWebApplication");
        servletHolder2.getServletContext().setAttribute("applicationContext", "mApplicationContext");
    }
}
package org.hisp.dhis.webapi.controller; /* * Copyright (c) 2004-2018, University of Oslo * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * Neither the name of the HISP project nor the names of its contributors may * be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ import com.google.common.collect.Sets; import org.apache.commons.lang3.StringUtils; import org.hisp.dhis.common.DhisApiVersion; import org.hisp.dhis.dxf2.webmessage.WebMessage; import org.hisp.dhis.dxf2.webmessage.WebMessageException; import org.hisp.dhis.dxf2.webmessage.WebMessageUtils; import org.hisp.dhis.user.CurrentUserService; import org.hisp.dhis.user.User; import org.hisp.dhis.user.UserGroup; import org.hisp.dhis.user.UserService; import org.hisp.dhis.user.UserSettingKey; import org.hisp.dhis.user.UserSettingService; import org.hisp.dhis.util.ObjectUtils; import org.hisp.dhis.webapi.mvc.annotation.ApiVersion; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.web.bind.annotation.DeleteMapping; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; import java.io.Serializable; import java.util.HashMap; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; /** * @author Lars Helge Overland */ @RestController @RequestMapping( "/userSettings" ) @ApiVersion( { DhisApiVersion.DEFAULT, DhisApiVersion.ALL } ) public class UserSettingController { @Autowired private UserSettingService userSettingService; @Autowired private UserService userService; @Autowired private CurrentUserService currentUserService; private static final Set<UserSettingKey> USER_SETTING_KEYS = Sets.newHashSet( UserSettingKey.values() ).stream().collect( Collectors.toSet() ); // ------------------------------------------------------------------------- // Resources // ------------------------------------------------------------------------- 
@GetMapping
    /**
     * Returns user settings for the resolved user. Without explicit keys, all
     * known settings are returned (with system fallback when requested).
     *
     * @param useFallback whether to fall back to system settings
     * @param username optional username of the target user
     * @param userId optional uid of the target user
     * @param keys optional set of setting key names to restrict the result
     * @return map of setting name to value
     * @throws WebMessageException if the user or a key cannot be resolved
     */
    public Map<String, Serializable> getAllUserSettings(
        @RequestParam( required = false, defaultValue = "true" ) boolean useFallback,
        @RequestParam( value = "user", required = false ) String username,
        @RequestParam( value = "userId", required = false ) String userId,
        @RequestParam( value = "key", required = false ) Set<String> keys )
        throws WebMessageException
    {
        User user = getUser( userId, username );

        if ( keys == null )
        {
            return userSettingService.getUserSettingsWithFallbackByUserAsMap( user, USER_SETTING_KEYS, useFallback );
        }

        Map<String, Serializable> result = new HashMap<>();

        for ( String key : keys )
        {
            UserSettingKey userSettingKey = getUserSettingKey( key );

            // NOTE(review): this overload reads the current user's setting and does
            // not honour the resolved 'user' or 'useFallback' — confirm intended.
            result.put( userSettingKey.getName(), userSettingService.getUserSetting( userSettingKey ) );
        }

        return result;
    }

    /**
     * Returns a single setting value (as a string) for the resolved user.
     *
     * @param key the setting key name
     * @param useFallback whether to fall back to system settings
     * @param username optional username of the target user
     * @param userId optional uid of the target user
     * @return string form of the setting value ("null" if absent)
     * @throws WebMessageException if the user or key cannot be resolved
     */
    @GetMapping( value = "/{key}" )
    public String getUserSettingByKey(
        @PathVariable( value = "key" ) String key,
        @RequestParam( required = false, defaultValue = "true" ) boolean useFallback,
        @RequestParam( value = "user", required = false ) String username,
        @RequestParam( value = "userId", required = false ) String userId )
        throws WebMessageException
    {
        UserSettingKey userSettingKey = getUserSettingKey( key );
        User user = getUser( userId, username );

        Serializable value = userSettingService
            .getUserSettingsWithFallbackByUserAsMap( user, Sets.newHashSet( userSettingKey ), useFallback )
            .get( key );

        return String.valueOf( value );
    }

    /**
     * Saves a setting value for the resolved user. The value may arrive as a
     * query parameter or as the raw request body (first non-null wins).
     *
     * @param key the setting key name
     * @param username optional username of the target user
     * @param userId optional uid of the target user
     * @param value optional value passed as request parameter
     * @param valuePayload optional value passed as request body
     * @return confirmation message
     * @throws WebMessageException if no value is given or resolution fails
     */
    @PostMapping( value = "/{key}" )
    public WebMessage setUserSettingByKey(
        @PathVariable( value = "key" ) String key,
        @RequestParam( value = "user", required = false ) String username,
        @RequestParam( value = "userId", required = false ) String userId,
        @RequestParam( required = false ) String value,
        @RequestBody( required = false ) String valuePayload )
        throws WebMessageException
    {
        UserSettingKey userSettingKey = getUserSettingKey( key );
        User user = getUser( userId, username );

        String newValue = ObjectUtils.firstNonNull( value, valuePayload );

        if ( StringUtils.isEmpty( newValue ) )
        {
            throw new WebMessageException( WebMessageUtils.conflict( "You need to specify a new value" ) );
        }

        userSettingService.saveUserSetting( userSettingKey, UserSettingKey.getAsRealClass( key, newValue ), user );

        return WebMessageUtils.ok( "User setting saved" );
    }

    /**
     * Deletes a setting for the resolved user.
     *
     * @param key the setting key name
     * @param username optional username of the target user
     * @param userId optional uid of the target user
     * @throws WebMessageException if the user or key cannot be resolved
     */
    @DeleteMapping( value = "/{key}" )
    public void deleteUserSettingByKey(
        @PathVariable( value = "key" ) String key,
        @RequestParam( value = "user", required = false ) String username,
        @RequestParam( value = "userId", required = false ) String userId )
        throws WebMessageException
    {
        UserSettingKey userSettingKey = getUserSettingKey( key );
        User user = getUser( userId, username );

        userSettingService.deleteUserSetting( userSettingKey, user );
    }

    /**
     * Attempts to resolve the UserSettingKey based on the name (key) supplied
     *
     * @param key the name of a UserSettingKey
     * @return the UserSettingKey
     * @throws WebMessageException throws an exception if no UserSettingKey was found
     */
    private UserSettingKey getUserSettingKey( String key )
        throws WebMessageException
    {
        Optional<UserSettingKey> userSettingKey = UserSettingKey.getByName( key );

        if ( !userSettingKey.isPresent() )
        {
            throw new WebMessageException( WebMessageUtils.notFound( "No user setting found with key: " + key ) );
        }

        return userSettingKey.get();
    }

    /**
     * Tries to find a user based on the uid or username. If none is supplied, currentUser will be returned.
     * If uid or username is found, it will also make sure the current user has access to the user.
     *
     * @param uid the user uid
     * @param username the user username
     * @return the user found with uid or username, or current user if no uid or username was specified
     * @throws WebMessageException throws an exception if user was not found, or current user don't have access
     */
    private User getUser( String uid, String username )
        throws WebMessageException
    {
        User currentUser = currentUserService.getCurrentUser();
        User user;

        if ( uid == null && username == null )
        {
            return currentUser;
        }

        if ( uid != null )
        {
            user = userService.getUser( uid );
        }
        else
        {
            // FIX: getUserCredentialsByUsername returns null for unknown usernames;
            // dereferencing it directly threw a NullPointerException instead of the
            // intended "Could not find user" conflict below.
            user = Optional.ofNullable( userService.getUserCredentialsByUsername( username ) )
                .map( credentials -> credentials.getUserInfo() )
                .orElse( null );
        }

        if ( user == null )
        {
            throw new WebMessageException( WebMessageUtils
                .conflict( "Could not find user '" + ObjectUtils.firstNonNull( uid, username ) + "'" ) );
        }
        else
        {
            Set<String> userGroups = user.getGroups().stream().map( UserGroup::getUid ).collect( Collectors.toSet() );

            if ( !userService.canAddOrUpdateUser( userGroups )
                && !currentUser.getUserCredentials().canModifyUser( user.getUserCredentials() ) )
            {
                throw new WebMessageException( WebMessageUtils.unathorized( "You are not authorized to access user: " + user.getUsername() ) );
            }
        }

        return user;
    }
}
// Protocol Buffers - Google's data interchange format // Copyright 2008 Google Inc. All rights reserved. // https://developers.google.com/protocol-buffers/ // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
package com.github.os72.protobuf261; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.UnsupportedEncodingException; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.List; import java.util.NoSuchElementException; /** * This class implements a {@link com.github.os72.protobuf261.ByteString} backed by a * single array of bytes, contiguous in memory. It supports substring by * pointing to only a sub-range of the underlying byte array, meaning that a * substring will reference the full byte-array of the string it's made from, * exactly as with {@link String}. * * @author carlanton@google.com (Carl Haverl) */ class LiteralByteString extends ByteString { protected final byte[] bytes; /** * Creates a {@code LiteralByteString} backed by the given array, without * copying. * * @param bytes array to wrap */ LiteralByteString(byte[] bytes) { this.bytes = bytes; } @Override public byte byteAt(int index) { // Unlike most methods in this class, this one is a direct implementation // ignoring the potential offset because we need to do range-checking in the // substring case anyway. 
return bytes[index]; } @Override public int size() { return bytes.length; } // ================================================================= // ByteString -> substring @Override public ByteString substring(int beginIndex, int endIndex) { if (beginIndex < 0) { throw new IndexOutOfBoundsException( "Beginning index: " + beginIndex + " < 0"); } if (endIndex > size()) { throw new IndexOutOfBoundsException("End index: " + endIndex + " > " + size()); } int substringLength = endIndex - beginIndex; if (substringLength < 0) { throw new IndexOutOfBoundsException( "Beginning index larger than ending index: " + beginIndex + ", " + endIndex); } ByteString result; if (substringLength == 0) { result = ByteString.EMPTY; } else { result = new BoundedByteString(bytes, getOffsetIntoBytes() + beginIndex, substringLength); } return result; } // ================================================================= // ByteString -> byte[] @Override protected void copyToInternal(byte[] target, int sourceOffset, int targetOffset, int numberToCopy) { // Optimized form, not for subclasses, since we don't call // getOffsetIntoBytes() or check the 'numberToCopy' parameter. 
System.arraycopy(bytes, sourceOffset, target, targetOffset, numberToCopy); } @Override public void copyTo(ByteBuffer target) { target.put(bytes, getOffsetIntoBytes(), size()); // Copies bytes } @Override public ByteBuffer asReadOnlyByteBuffer() { ByteBuffer byteBuffer = ByteBuffer.wrap(bytes, getOffsetIntoBytes(), size()); return byteBuffer.asReadOnlyBuffer(); } @Override public List<ByteBuffer> asReadOnlyByteBufferList() { // Return the ByteBuffer generated by asReadOnlyByteBuffer() as a singleton List<ByteBuffer> result = new ArrayList<ByteBuffer>(1); result.add(asReadOnlyByteBuffer()); return result; } @Override public void writeTo(OutputStream outputStream) throws IOException { outputStream.write(toByteArray()); } @Override void writeToInternal(OutputStream outputStream, int sourceOffset, int numberToWrite) throws IOException { outputStream.write(bytes, getOffsetIntoBytes() + sourceOffset, numberToWrite); } @Override public String toString(String charsetName) throws UnsupportedEncodingException { return new String(bytes, getOffsetIntoBytes(), size(), charsetName); } // ================================================================= // UTF-8 decoding @Override public boolean isValidUtf8() { int offset = getOffsetIntoBytes(); return Utf8.isValidUtf8(bytes, offset, offset + size()); } @Override protected int partialIsValidUtf8(int state, int offset, int length) { int index = getOffsetIntoBytes() + offset; return Utf8.partialIsValidUtf8(state, bytes, index, index + length); } // ================================================================= // equals() and hashCode() @Override public boolean equals(Object other) { if (other == this) { return true; } if (!(other instanceof ByteString)) { return false; } if (size() != ((ByteString) other).size()) { return false; } if (size() == 0) { return true; } if (other instanceof LiteralByteString) { return equalsRange((LiteralByteString) other, 0, size()); } else if (other instanceof RopeByteString) { return 
other.equals(this); } else { throw new IllegalArgumentException( "Has a new type of ByteString been created? Found " + other.getClass()); } } /** * Check equality of the substring of given length of this object starting at * zero with another {@code LiteralByteString} substring starting at offset. * * @param other what to compare a substring in * @param offset offset into other * @param length number of bytes to compare * @return true for equality of substrings, else false. */ boolean equalsRange(LiteralByteString other, int offset, int length) { if (length > other.size()) { throw new IllegalArgumentException( "Length too large: " + length + size()); } if (offset + length > other.size()) { throw new IllegalArgumentException( "Ran off end of other: " + offset + ", " + length + ", " + other.size()); } byte[] thisBytes = bytes; byte[] otherBytes = other.bytes; int thisLimit = getOffsetIntoBytes() + length; for (int thisIndex = getOffsetIntoBytes(), otherIndex = other.getOffsetIntoBytes() + offset; (thisIndex < thisLimit); ++thisIndex, ++otherIndex) { if (thisBytes[thisIndex] != otherBytes[otherIndex]) { return false; } } return true; } /** * Cached hash value. Intentionally accessed via a data race, which * is safe because of the Java Memory Model's "no out-of-thin-air values" * guarantees for ints. */ private int hash = 0; /** * Compute the hashCode using the traditional algorithm from {@link * ByteString}. 
* * @return hashCode value */ @Override public int hashCode() { int h = hash; if (h == 0) { int size = size(); h = partialHash(size, 0, size); if (h == 0) { h = 1; } hash = h; } return h; } @Override protected int peekCachedHashCode() { return hash; } @Override protected int partialHash(int h, int offset, int length) { return hashCode(h, bytes, getOffsetIntoBytes() + offset, length); } static int hashCode(int h, byte[] bytes, int offset, int length) { for (int i = offset; i < offset + length; i++) { h = h * 31 + bytes[i]; } return h; } static int hashCode(byte[] bytes) { int h = hashCode(bytes.length, bytes, 0, bytes.length); return h == 0 ? 1 : h; } // ================================================================= // Input stream @Override public InputStream newInput() { return new ByteArrayInputStream(bytes, getOffsetIntoBytes(), size()); // No copy } @Override public CodedInputStream newCodedInput() { // We trust CodedInputStream not to modify the bytes, or to give anyone // else access to them. return CodedInputStream.newInstance(this); } // ================================================================= // ByteIterator @Override public ByteIterator iterator() { return new LiteralByteIterator(); } private class LiteralByteIterator implements ByteIterator { private int position; private final int limit; private LiteralByteIterator() { position = 0; limit = size(); } public boolean hasNext() { return (position < limit); } public Byte next() { // Boxing calls Byte.valueOf(byte), which does not instantiate. 
return nextByte(); } public byte nextByte() { try { return bytes[position++]; } catch (ArrayIndexOutOfBoundsException e) { throw new NoSuchElementException(e.getMessage()); } } public void remove() { throw new UnsupportedOperationException(); } } // ================================================================= // Internal methods @Override protected int getTreeDepth() { return 0; } @Override protected boolean isBalanced() { return true; } /** * Offset into {@code bytes[]} to use, non-zero for substrings. * * @return always 0 for this class */ protected int getOffsetIntoBytes() { return 0; } }
// Copyright 2018 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.api.ads.adwords.jaxws.v201809.rm; import javax.xml.bind.JAXBElement; import javax.xml.bind.annotation.XmlElementDecl; import javax.xml.bind.annotation.XmlRegistry; import javax.xml.namespace.QName; import com.google.api.ads.adwords.jaxws.v201809.cm.ApiException; import com.google.api.ads.adwords.jaxws.v201809.cm.SoapHeader; import com.google.api.ads.adwords.jaxws.v201809.cm.SoapResponseHeader; /** * This object contains factory methods for each * Java content interface and Java element interface * generated in the com.google.api.ads.adwords.jaxws.v201809.rm package. * <p>An ObjectFactory allows you to programatically * construct new instances of the Java representation * for XML content. The Java representation of XML * content can consist of schema derived interfaces * and classes representing the binding of schema * type definitions, element declarations and model * groups. Factory methods for each of these are * provided in this class. 
 *
 */
@XmlRegistry
public class ObjectFactory {

    // QNames of the global XML elements this factory can wrap in JAXBElement
    // instances; all live in the AdWords rm/v201809 namespace.
    private final static QName _ApiExceptionFault_QNAME = new QName("https://adwords.google.com/api/adwords/rm/v201809", "ApiExceptionFault");
    private final static QName _ResponseHeader_QNAME = new QName("https://adwords.google.com/api/adwords/rm/v201809", "ResponseHeader");
    private final static QName _RequestHeader_QNAME = new QName("https://adwords.google.com/api/adwords/rm/v201809", "RequestHeader");

    /**
     * Create a new ObjectFactory that can be used to create new instances of schema derived classes for package: com.google.api.ads.adwords.jaxws.v201809.rm
     */
    public ObjectFactory() {
    }

    /** Create an instance of {@link RuleBasedUserList }. */
    public RuleBasedUserList createRuleBasedUserList() {
        return new RuleBasedUserList();
    }

    /** Create an instance of {@link SimilarUserList }. */
    public SimilarUserList createSimilarUserList() {
        return new SimilarUserList();
    }

    /** Create an instance of {@link OfflineDataUploadPage }. */
    public OfflineDataUploadPage createOfflineDataUploadPage() {
        return new OfflineDataUploadPage();
    }

    /** Create an instance of {@link DateKey }. */
    public DateKey createDateKey() {
        return new DateKey();
    }

    /** Create an instance of {@link LogicalUserListOperand }. */
    public LogicalUserListOperand createLogicalUserListOperand() {
        return new LogicalUserListOperand();
    }

    /** Create an instance of {@link CustomAffinityTokenOperation }. */
    public CustomAffinityTokenOperation createCustomAffinityTokenOperation() {
        return new CustomAffinityTokenOperation();
    }

    /** Create an instance of {@link CustomAffinity }. */
    public CustomAffinity createCustomAffinity() {
        return new CustomAffinity();
    }

    /** Create an instance of {@link UserListError }. */
    public UserListError createUserListError() {
        return new UserListError();
    }

    /** Create an instance of {@link RuleItem }. */
    public RuleItem createRuleItem() {
        return new RuleItem();
    }

    /** Create an instance of {@link OfflineDataUpload }. */
    public OfflineDataUpload createOfflineDataUpload() {
        return new OfflineDataUpload();
    }

    /** Create an instance of {@link Member }. */
    public Member createMember() {
        return new Member();
    }

    /** Create an instance of {@link StringRuleItem }. */
    public StringRuleItem createStringRuleItem() {
        return new StringRuleItem();
    }

    /** Create an instance of {@link UserListOperation }. */
    public UserListOperation createUserListOperation() {
        return new UserListOperation();
    }

    /** Create an instance of {@link CustomAffinityReturnValue }. */
    public CustomAffinityReturnValue createCustomAffinityReturnValue() {
        return new CustomAffinityReturnValue();
    }

    /** Create an instance of {@link CustomAffinityError }. */
    public CustomAffinityError createCustomAffinityError() {
        return new CustomAffinityError();
    }

    /** Create an instance of {@link RelativeDate }. */
    public RelativeDate createRelativeDate() {
        return new RelativeDate();
    }

    /** Create an instance of {@link MutateMembersOperand }. */
    public MutateMembersOperand createMutateMembersOperand() {
        return new MutateMembersOperand();
    }

    /** Create an instance of {@link OfflineDataUploadOperation }. */
    public OfflineDataUploadOperation createOfflineDataUploadOperation() {
        return new OfflineDataUploadOperation();
    }

    /** Create an instance of {@link Rule }. */
    public Rule createRule() {
        return new Rule();
    }

    /** Create an instance of {@link UserIdentifier }. */
    public UserIdentifier createUserIdentifier() {
        return new UserIdentifier();
    }

    /** Create an instance of {@link CustomAffinityPage }. */
    public CustomAffinityPage createCustomAffinityPage() {
        return new CustomAffinityPage();
    }

    /** Create an instance of {@link MoneyWithCurrency }. */
    public MoneyWithCurrency createMoneyWithCurrency() {
        return new MoneyWithCurrency();
    }

    /** Create an instance of {@link StoreSalesTransaction }. */
    public StoreSalesTransaction createStoreSalesTransaction() {
        return new StoreSalesTransaction();
    }

    /** Create an instance of {@link DataUploadResult }. */
    public DataUploadResult createDataUploadResult() {
        return new DataUploadResult();
    }

    /** Create an instance of {@link CustomAffinityTokenReturnValue }. */
    public CustomAffinityTokenReturnValue createCustomAffinityTokenReturnValue() {
        return new CustomAffinityTokenReturnValue();
    }

    /** Create an instance of {@link StringKey }. */
    public StringKey createStringKey() {
        return new StringKey();
    }

    /** Create an instance of {@link DateSpecificRuleUserList }. */
    public DateSpecificRuleUserList createDateSpecificRuleUserList() {
        return new DateSpecificRuleUserList();
    }

    /** Create an instance of {@link CustomAffinityToken }. */
    public CustomAffinityToken createCustomAffinityToken() {
        return new CustomAffinityToken();
    }

    /** Create an instance of {@link OfflineDataUploadError }. */
    public OfflineDataUploadError createOfflineDataUploadError() {
        return new OfflineDataUploadError();
    }

    /** Create an instance of {@link NumberRuleItem }. */
    public NumberRuleItem createNumberRuleItem() {
        return new NumberRuleItem();
    }

    /** Create an instance of {@link FirstPartyUploadMetadata }. */
    public FirstPartyUploadMetadata createFirstPartyUploadMetadata() {
        return new FirstPartyUploadMetadata();
    }

    /** Create an instance of {@link MutateMembersError }. */
    public MutateMembersError createMutateMembersError() {
        return new MutateMembersError();
    }

    /** Create an instance of {@link UploadMetadata }. */
    public UploadMetadata createUploadMetadata() {
        return new UploadMetadata();
    }

    /** Create an instance of {@link CustomAffinityOperation }. */
    public CustomAffinityOperation createCustomAffinityOperation() {
        return new CustomAffinityOperation();
    }

    /** Create an instance of {@link ExpressionRuleUserList }. */
    public ExpressionRuleUserList createExpressionRuleUserList() {
        return new ExpressionRuleUserList();
    }

    /** Create an instance of {@link UserList }. */
    public UserList createUserList() {
        return new UserList();
    }

    /** Create an instance of {@link UserListPage }. */
    public UserListPage createUserListPage() {
        return new UserListPage();
    }

    /** Create an instance of {@link OfflineData }. */
    public OfflineData createOfflineData() {
        return new OfflineData();
    }

    /** Create an instance of {@link OfflineDataUploadReturnValue }. */
    public OfflineDataUploadReturnValue createOfflineDataUploadReturnValue() {
        return new OfflineDataUploadReturnValue();
    }

    /** Create an instance of {@link DateRuleItem }. */
    public DateRuleItem createDateRuleItem() {
        return new DateRuleItem();
    }

    /** Create an instance of {@link LogicalUserList }. */
    public LogicalUserList createLogicalUserList() {
        return new LogicalUserList();
    }

    /** Create an instance of {@link BasicUserList }. */
    public BasicUserList createBasicUserList() {
        return new BasicUserList();
    }

    /** Create an instance of {@link CombinedRuleUserList }. */
    public CombinedRuleUserList createCombinedRuleUserList() {
        return new CombinedRuleUserList();
    }

    /** Create an instance of {@link RuleItemGroup }. */
    public RuleItemGroup createRuleItemGroup() {
        return new RuleItemGroup();
    }

    /** Create an instance of {@link UserListReturnValue }. */
    public UserListReturnValue createUserListReturnValue() {
        return new UserListReturnValue();
    }

    /** Create an instance of {@link AddressInfo }. */
    public AddressInfo createAddressInfo() {
        return new AddressInfo();
    }

    /** Create an instance of {@link UserListLogicalRule }. */
    public UserListLogicalRule createUserListLogicalRule() {
        return new UserListLogicalRule();
    }

    /** Create an instance of {@link CrmBasedUserList }. */
    public CrmBasedUserList createCrmBasedUserList() {
        return new CrmBasedUserList();
    }

    /** Create an instance of {@link MutateMembersOperation }. */
    public MutateMembersOperation createMutateMembersOperation() {
        return new MutateMembersOperation();
    }

    /** Create an instance of {@link CurrencyCodeError }. */
    public CurrencyCodeError createCurrencyCodeError() {
        return new CurrencyCodeError();
    }

    /** Create an instance of {@link ThirdPartyUploadMetadata }. */
    public ThirdPartyUploadMetadata createThirdPartyUploadMetadata() {
        return new ThirdPartyUploadMetadata();
    }

    /** Create an instance of {@link UserListConversionType }. */
    public UserListConversionType createUserListConversionType() {
        return new UserListConversionType();
    }

    /** Create an instance of {@link MutateMembersReturnValue }. */
    public MutateMembersReturnValue createMutateMembersReturnValue() {
        return new MutateMembersReturnValue();
    }

    /** Create an instance of {@link NumberKey }. */
    public NumberKey createNumberKey() {
        return new NumberKey();
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link ApiException }{@code >}}. */
    @XmlElementDecl(namespace = "https://adwords.google.com/api/adwords/rm/v201809", name = "ApiExceptionFault")
    public JAXBElement<ApiException> createApiExceptionFault(ApiException value) {
        return new JAXBElement<ApiException>(_ApiExceptionFault_QNAME, ApiException.class, null, value);
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link SoapResponseHeader }{@code >}}. */
    @XmlElementDecl(namespace = "https://adwords.google.com/api/adwords/rm/v201809", name = "ResponseHeader")
    public JAXBElement<SoapResponseHeader> createResponseHeader(SoapResponseHeader value) {
        return new JAXBElement<SoapResponseHeader>(_ResponseHeader_QNAME, SoapResponseHeader.class, null, value);
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link SoapHeader }{@code >}}. */
    @XmlElementDecl(namespace = "https://adwords.google.com/api/adwords/rm/v201809", name = "RequestHeader")
    public JAXBElement<SoapHeader> createRequestHeader(SoapHeader value) {
        return new JAXBElement<SoapHeader>(_RequestHeader_QNAME, SoapHeader.class, null, value);
    }

}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.maven.packaging; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.stream.Collectors; import org.apache.camel.spi.Metadata; import org.apache.camel.tooling.model.JsonMapper; import org.apache.camel.tooling.model.MainModel; import org.apache.camel.tooling.model.MainModel.MainGroupModel; import org.apache.camel.tooling.util.JavadocHelper; import org.apache.camel.tooling.util.PackageHelper; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.plugin.MojoFailureException; import org.apache.maven.plugins.annotations.LifecyclePhase; import org.apache.maven.plugins.annotations.Mojo; import org.apache.maven.plugins.annotations.Parameter; import org.apache.maven.plugins.annotations.ResolutionScope; import org.apache.maven.project.MavenProject; import org.apache.maven.project.MavenProjectHelper; import org.jboss.forge.roaster.Roaster; import org.jboss.forge.roaster.model.source.AnnotationSource; import org.jboss.forge.roaster.model.source.FieldSource; import org.jboss.forge.roaster.model.source.JavaClassSource; import org.jboss.forge.roaster.model.source.MethodSource; 
import org.sonatype.plexus.build.incremental.BuildContext;

/**
 * Prepares camel-main by generating Camel Main configuration metadata for tooling support.
 */
@Mojo(name = "prepare-main-doc", defaultPhase = LifecyclePhase.PROCESS_CLASSES, threadSafe = true,
      requiresDependencyResolution = ResolutionScope.COMPILE)
public class PrepareCamelMainMojo extends AbstractGeneratorMojo {

    /**
     * The output directory for the generated camel-main configuration metadata file.
     * Note: execute(MavenProject, ...) below overrides this to src/generated/resources.
     */
    @Parameter(defaultValue = "${project.basedir}/src/main/doc")
    protected File outFolder;

    /**
     * Parses the Camel Main configuration java source file.
     *
     * @param fileName path to the java source file
     * @return the configuration options discovered in the file
     * @throws IOException if the file cannot be read or parsed
     */
    public static List<MainModel.MainOptionModel> parseConfigurationSource(String fileName) throws IOException {
        return parseConfigurationSource(new File(fileName));
    }

    /**
     * Parses the Camel Main configuration java source file.
     *
     * Uses Roaster to parse the source and emits one option model per
     * non-static, non-final field that has a matching setter method.
     *
     * @param file the java source file
     * @return the configuration options discovered in the file
     * @throws IOException if the file cannot be read or parsed
     */
    public static List<MainModel.MainOptionModel> parseConfigurationSource(File file) throws IOException {
        final List<MainModel.MainOptionModel> answer = new ArrayList<>();

        JavaClassSource clazz = (JavaClassSource) Roaster.parse(file);
        List<FieldSource<JavaClassSource>> fields = clazz.getFields();
        // filter out final or static fields
        fields = fields.stream().filter(f -> !f.isFinal() && !f.isStatic()).collect(Collectors.toList());
        fields.forEach(f -> {
            AnnotationSource as = f.getAnnotation(Metadata.class);
            String name = f.getName();
            String javaType = f.getType().getQualifiedName();
            String sourceType = clazz.getQualifiedName();
            String defaultValue = f.getStringInitializer();
            // an explicit @Metadata defaultValue wins over the field initializer
            if (as != null) {
                defaultValue = as.getStringValue("defaultValue");
            }
            if (defaultValue != null && defaultValue.startsWith("new ")) {
                // skip constructors
                defaultValue = null;
            }

            // the field must have a setter
            String setterName = "set" + Character.toUpperCase(name.charAt(0)) + name.substring(1);
            MethodSource<?> setter = clazz.getMethod(setterName, javaType);
            if (setter != null) {
                // the setter javadoc is used as the option description
                String desc = setter.getJavaDoc().getFullText();
                // deprecated if either the whole class or the setter is @Deprecated
                boolean deprecated = clazz.getAnnotation(Deprecated.class) != null
                        || setter.getAnnotation(Deprecated.class) != null;
                String type = fromMainToType(javaType);
                MainModel.MainOptionModel model = new MainModel.MainOptionModel();
                model.setName(name);
                model.setType(type);
                model.setJavaType(javaType);
                model.setDescription(JavadocHelper.sanitizeDescription(desc, false));
                model.setSourceType(sourceType);
                model.setDefaultValue(asDefaultValue(type, defaultValue));
                model.setDeprecated(deprecated);
                List<String> enums = null;
                // add known enums
                if ("org.apache.camel.LoggingLevel".equals(javaType)) {
                    enums = Arrays.asList("ERROR,WARN,INFO,DEBUG,TRACE,OFF".split(","));
                } else if ("org.apache.camel.ManagementStatisticsLevel".equals(javaType)) {
                    enums = Arrays.asList("Extended,Default,RoutesOnly,Off".split(","));
                } else if ("org.apache.camel.spi.RestBindingMode".equals(javaType)) {
                    enums = Arrays.asList("auto,off,json,xml,json_xml".split(","));
                } else if ("org.apache.camel.spi.RestHostNameResolver".equals(javaType)) {
                    enums = Arrays.asList("allLocalIp,localIp,localHostName".split(","));
                } else if ("org.apache.camel.util.concurrent.ThreadPoolRejectedPolicy".equals(javaType)) {
                    enums = Arrays.asList("Abort,CallerRuns,DiscardOldest,Discard".split(","));
                }
                // otherwise fall back to enums declared on the @Metadata annotation
                if (enums == null && as != null) {
                    String text = as.getStringValue("enums");
                    if (text != null) {
                        enums = Arrays.asList(text.split(","));
                    }
                }
                model.setEnums(enums);
                answer.add(model);
            }
        });

        return answer;
    }

    /**
     * Maps a Java type name to the metadata schema type name
     * (boolean / integer / number / string / object).
     */
    private static String fromMainToType(String type) {
        if ("boolean".equals(type) || "java.lang.Boolean".equals(type)) {
            return "boolean";
        } else if ("int".equals(type) || "java.lang.Integer".equals(type)) {
            return "integer";
        } else if ("long".equals(type) || "java.lang.Long".equals(type)) {
            return "integer";
        } else if ("float".equals(type) || "java.lang.Float".equals(type)) {
            return "number";
        } else if ("double".equals(type) || "java.lang.Double".equals(type)) {
            return "number";
        } else if ("string".equals(type) || "java.lang.String".equals(type)) {
            return "string";
        } else {
            return "object";
        }
    }

    /**
     * Converts the textual default value to a typed value for the schema.
     * NOTE(review): a null boolean default is returned as the String "false",
     * not Boolean.FALSE — presumably intentional for the JSON output; confirm
     * before changing.
     */
    private static Object asDefaultValue(String type, String defaultValue) {
        if (defaultValue != null) {
            if ("boolean".equals(type)) {
                return Boolean.parseBoolean(defaultValue);
            } else if ("integer".equals(type)) {
                return Integer.parseInt(defaultValue);
            }
        }
        if (defaultValue == null && "boolean".equals(type)) {
            return "false";
        }
        return defaultValue;
    }

    /**
     * Entry point used by the build; redirects the output folder to
     * src/generated/resources before running the mojo.
     */
    @Override
    public void execute(MavenProject project, MavenProjectHelper projectHelper, BuildContext buildContext)
            throws MojoFailureException, MojoExecutionException {
        outFolder = new File(project.getBasedir(), "src/generated/resources");
        super.execute(project, projectHelper, buildContext);
    }

    @Override
    public void execute() throws MojoExecutionException, MojoFailureException {
        // scan for configuration files
        File[] files = new File(project.getBasedir(), "src/main/java/org/apache/camel/main")
                .listFiles(f -> f.isFile() && f.getName().endsWith("Properties.java"));
        if (files == null || files.length == 0) {
            return;
        }

        final List<MainModel.MainOptionModel> data = new ArrayList<>();
        for (File file : files) {
            getLog().info("Parsing Camel Main configuration file: " + file);
            try {
                List<MainModel.MainOptionModel> model = parseConfigurationSource(file);
                // compute prefix for name, dispatched on the file name
                String prefix;
                if (file.getName().contains("Hystrix")) {
                    prefix = "camel.hystrix.";
                } else if (file.getName().contains("Resilience")) {
                    prefix = "camel.resilience4j.";
                } else if (file.getName().contains("FaultTolerance")) {
                    prefix = "camel.faulttolerance.";
                } else if (file.getName().contains("Rest")) {
                    prefix = "camel.rest.";
                } else if (file.getName().contains("Health")) {
                    prefix = "camel.health.";
                } else if (file.getName().contains("Lra")) {
                    prefix = "camel.lra.";
                } else if (file.getName().contains("ThreadPoolProfileConfigurationProperties")) {
                    // skip this file
                    continue;
                } else if (file.getName().contains("ThreadPoolConfigurationProperties")) {
                    prefix = "camel.threadpool.";
                } else {
                    prefix = "camel.main.";
                }
                // effectively-final copy required for use inside the lambda
                final String namePrefix = prefix;
                model.forEach(m -> m.setName(namePrefix + m.getName()));
                data.addAll(model);
            } catch (Exception e) {
                throw new MojoFailureException("Error parsing file " + file + " due " + e.getMessage(), e);
            }
        }

        // include additional rest configuration from camel-api
        File camelApiDir = PackageHelper.findCamelDirectory(project.getBasedir(), "core/camel-api");
        File restConfig = new File(camelApiDir, "src/main/java/org/apache/camel/spi/RestConfiguration.java");
        try {
            List<MainModel.MainOptionModel> model = parseConfigurationSource(restConfig);
            model.forEach(m -> m.setName("camel.rest." + m.getName()));
            data.addAll(model);
        } catch (Exception e) {
            throw new MojoFailureException("Error parsing file " + restConfig + " due " + e.getMessage(), e);
        }

        // lets sort so they are always ordered (but camel.main in top)
        data.sort((o1, o2) -> {
            if (o1.getName().startsWith("camel.main.") && !o2.getName().startsWith("camel.main.")) {
                return -1;
            } else if (!o1.getName().startsWith("camel.main.") && o2.getName().startsWith("camel.main.")) {
                return 1;
            } else {
                return o1.getName().compareToIgnoreCase(o2.getName());
            }
        });

        if (!data.isEmpty()) {
            // assemble the model with one group per configuration area and
            // write it out as a JSON schema resource
            MainModel model = new MainModel();
            model.getOptions().addAll(data);
            model.getGroups().add(new MainGroupModel(
                    "camel.main", "camel-main configurations.", "org.apache.camel.main.DefaultConfigurationProperties"));
            model.getGroups()
                    .add(new MainGroupModel(
                            "camel.faulttolerance", "camel-fault-tolerance configurations.",
                            "org.apache.camel.main.FaultToleranceConfigurationProperties"));
            model.getGroups().add(new MainGroupModel(
                    "camel.hystrix", "camel-hystrix configurations.",
                    "org.apache.camel.main.HystrixConfigurationProperties"));
            model.getGroups()
                    .add(new MainGroupModel(
                            "camel.resilience4j", "camel-resilience4j configurations.",
                            "org.apache.camel.main.Resilience4jConfigurationProperties"));
            model.getGroups().add(
                    new MainGroupModel("camel.rest", "camel-rest configurations.", "org.apache.camel.spi.RestConfiguration"));
            model.getGroups().add(new MainGroupModel(
                    "camel.health", "camel-health configurations.",
                    "org.apache.camel.main.HealthConfigurationProperties"));
            model.getGroups().add(new MainGroupModel(
                    "camel.lra", "camel-lra configurations.", "org.apache.camel.main.LraConfigurationProperties"));
            model.getGroups()
                    .add(new MainGroupModel(
                            "camel.threadpool", "camel-threadpool configurations.",
                            "org.apache.camel.main.ThreadPoolConfigurationProperties"));
            String json = JsonMapper.createJsonSchema(model);
            updateResource(outFolder.toPath(), "META-INF/camel-main-configuration-metadata.json", json);
        }
    }
}