gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Copyright 2021 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.config.merge;
import com.thoughtworks.go.config.*;
import com.thoughtworks.go.config.remote.ConfigOrigin;
import com.thoughtworks.go.domain.BaseCollection;
import com.thoughtworks.go.domain.ConfigErrors;
import com.thoughtworks.go.domain.EnvironmentPipelineMatcher;
import com.thoughtworks.go.util.command.EnvironmentVariableContext;
import java.util.*;
import static com.thoughtworks.go.util.ExceptionUtils.bomb;
import static java.lang.String.format;
import static java.util.Arrays.asList;
/**
* Composite of many EnvironmentConfig instances. Hides elementary environment configurations.
*/
public class MergeEnvironmentConfig extends BaseCollection<EnvironmentConfig> implements EnvironmentConfig {
public static final String CONSISTENT_KV = "ConsistentEnvVariables";
private final ConfigErrors configErrors = new ConfigErrors();
public MergeEnvironmentConfig(EnvironmentConfig... configs) {
this(asList(configs));
}
public MergeEnvironmentConfig(List<EnvironmentConfig> configs) {
boolean allPartsDoesNotHaveSameName = configs.stream()
.peek(this::add)
.map(EnvironmentConfig::name)
.distinct()
.count() > 1;
if(allPartsDoesNotHaveSameName) {
throw new IllegalArgumentException("partial environment configs must all have the same name");
}
}
public EnvironmentConfig getFirstEditablePartOrNull() {
return this.stream().filter(this::isEditable).findFirst().orElse(null);
}
private boolean isEditable(EnvironmentConfig part) {
return part.getOrigin() == null || part.getOrigin().canEdit();
}
public EnvironmentConfig getFirstEditablePart() {
EnvironmentConfig found = getFirstEditablePartOrNull();
if (found == null)
throw bomb("No editable configuration part");
return found;
}
@Override
public void validate(ValidationContext validationContext) {
validateDuplicateEnvironmentVariables();
validateDuplicatePipelines();
validateDuplicateAgents();
}
private void validateDuplicateAgents() {
Set<String> uuids = new HashSet<>();
this.stream().flatMap(part -> part.getAgents().stream())
.map(EnvironmentAgentConfig::getUuid)
.filter(uuid -> !uuids.add(uuid))
.findFirst()
.ifPresent(uuid -> configErrors.add("agent", format("Environment agent '%s' is defined more than once.", uuid)));
}
private void validateDuplicateEnvironmentVariables() {
Set<String> envVariables = new HashSet<>();
this.stream().flatMap(part -> part.getVariables().stream())
.map(EnvironmentVariableConfig::getName)
.filter(varName -> !envVariables.add(varName))
.findFirst()
.ifPresent(varName -> configErrors.add(CONSISTENT_KV, format("Environment variable '%s' is defined more than once with different values", varName)));
}
private void validateDuplicatePipelines() {
Set<CaseInsensitiveString> pipelines = new HashSet<>();
this.stream().flatMap(part -> part.getPipelineNames().stream())
.filter(pipeline -> !pipelines.add(pipeline))
.findFirst()
.ifPresent(pipelineName -> configErrors.add(CONSISTENT_KV, format("Environment pipeline '%s' is defined more than once.", pipelineName)));
}
@Override
public ConfigErrors errors() {
return configErrors;
}
@Override
public void addError(String fieldName, String message) {
configErrors.add(fieldName, message);
}
@Override
public EnvironmentPipelineMatcher createMatcher() {
return new EnvironmentPipelineMatcher(this.name(), this.getAgents().getUuids(), this.getPipelines());
}
@Override
public boolean hasAgent(String uuid) {
return this.stream().anyMatch(part -> part.hasAgent(uuid));
}
@Override
public boolean validateContainsAgentUUIDsFrom(Set<String> uuids) {
return this.getAgents().stream().allMatch(envAgentConfig -> envAgentConfig.validateUuidPresent(this.name(), uuids));
}
@Override
public void validateContainsOnlyPipelines(List<CaseInsensitiveString> pipelineNames) {
this.getPipelines().validateContainsOnlyPipelines(this.name(), pipelineNames);
}
@Override
public boolean containsPipeline(CaseInsensitiveString pipelineName) {
return this.stream().anyMatch(part -> part.containsPipeline(pipelineName));
}
@Override
public void setConfigAttributes(Object attributes) {
if (attributes == null) {
return;
}
this.getFirstEditablePart().setConfigAttributes(attributes);
}
@Override
public void addEnvironmentVariable(String name, String value) {
this.getFirstEditablePart().addEnvironmentVariable(name, value);
}
@Override
public void addEnvironmentVariable(EnvironmentVariableConfig variableConfig) {
this.getFirstEditablePart().addEnvironmentVariable(variableConfig);
}
@Override
public void addAgent(String uuid) {
EnvironmentConfig editablePart = this.getFirstEditablePartOrNull();
if (editablePart != null) {
editablePart.addAgent(uuid);
}
}
@Override
public void addAgentIfNew(String uuid) {
boolean uuidExists = this.stream().anyMatch(part -> part.hasAgent(uuid));
if(!uuidExists){
this.stream().filter(this::isEditable).findFirst().ifPresent(envConfig -> envConfig.addAgentIfNew(uuid));
}
}
@Override
public void addPipeline(CaseInsensitiveString pipelineName) {
this.getFirstEditablePart().addPipeline(pipelineName);
}
@Override
public void removePipeline(CaseInsensitiveString pipelineName) {
this.getFirstEditablePart().removePipeline(pipelineName);
}
@Override
public void removeAgent(String uuid) {
for (EnvironmentConfig part : this) {
if (part.hasAgent(uuid)) {
if (isEditable(part))
part.removeAgent(uuid);
else
throw bomb("cannot remove agent defined in non-editable source");
}
}
}
@Override
public boolean hasName(CaseInsensitiveString environmentName) {
return this.name().equals(environmentName);
}
@Override
public boolean hasVariable(String variableName) {
for (EnvironmentConfig part : this) {
if (part.hasVariable(variableName))
return true;
}
return false;
}
@Override
public boolean contains(String pipelineName) {
for (EnvironmentConfig part : this) {
if (part.contains(pipelineName))
return true;
}
return false;
}
@Override
public CaseInsensitiveString name() {
return this.first().name();
}
@Override
public EnvironmentAgentsConfig getAgents() {
EnvironmentAgentsConfig allAgents = new EnvironmentAgentsConfig();
for (EnvironmentConfig part : this) {
for (EnvironmentAgentConfig partAgent : part.getAgents()) {
if (!allAgents.contains(partAgent))
allAgents.add(partAgent);
}
}
return allAgents;
}
@Override
public EnvironmentVariableContext createEnvironmentContext() {
EnvironmentVariableContext context = new EnvironmentVariableContext(
EnvironmentVariableContext.GO_ENVIRONMENT_NAME, CaseInsensitiveString.str(this.name()));
this.getVariables().addTo(context);
return context;
}
@Override
public List<CaseInsensitiveString> getPipelineNames() {
List<CaseInsensitiveString> allNames = new ArrayList<>();
for (EnvironmentConfig part : this) {
for (CaseInsensitiveString pipe : part.getPipelineNames()) {
if (!allNames.contains(pipe))
allNames.add(pipe);
}
}
return allNames;
}
@Override
public EnvironmentPipelinesConfig getPipelines() {
EnvironmentPipelinesConfig allPipelines = new EnvironmentPipelinesConfig();
for (EnvironmentConfig part : this) {
EnvironmentPipelinesConfig partPipes = part.getPipelines();
for (EnvironmentPipelineConfig partPipe : partPipes) {
if (!allPipelines.containsPipelineNamed(partPipe.getName()))
allPipelines.add(partPipe);
}
}
return allPipelines;
}
@Override
public EnvironmentVariablesConfig getVariables() {
EnvironmentVariablesConfig allVariables = new EnvironmentVariablesConfig();
for (EnvironmentConfig part : this) {
for (EnvironmentVariableConfig partVariable : part.getVariables()) {
if (!allVariables.contains(partVariable))
allVariables.add(partVariable);
}
}
return allVariables;
}
@Override
public EnvironmentVariablesConfig getPlainTextVariables() {
EnvironmentVariablesConfig allVariables = new EnvironmentVariablesConfig();
for (EnvironmentConfig part : this) {
for (EnvironmentVariableConfig partVariable : part.getPlainTextVariables()) {
if (!allVariables.contains(partVariable))
allVariables.add(partVariable);
}
}
return allVariables;
}
@Override
public EnvironmentVariablesConfig getSecureVariables() {
EnvironmentVariablesConfig allVariables = new EnvironmentVariablesConfig();
for (EnvironmentConfig part : this) {
for (EnvironmentVariableConfig partVariable : part.getSecureVariables()) {
if (!allVariables.contains(partVariable))
allVariables.add(partVariable);
}
}
return allVariables;
}
@Override
public EnvironmentConfig getLocal() {
for (EnvironmentConfig part : this) {
if (part.isLocal())
return part;
}
return null;
}
@Override
public boolean isLocal() {
for (EnvironmentConfig part : this) {
if (!part.isLocal())
return false;
}
return true;
}
@Override
public boolean isEnvironmentEmpty() {
for (EnvironmentConfig part : this) {
if (!part.isEnvironmentEmpty())
return false;
}
return true;
}
@Override
public EnvironmentPipelinesConfig getRemotePipelines() {
EnvironmentPipelinesConfig remotes = new EnvironmentPipelinesConfig();
for (EnvironmentConfig part : this) {
remotes.addAll(part.getRemotePipelines());
}
return remotes;
}
@Override
public EnvironmentAgentsConfig getLocalAgents() {
EnvironmentAgentsConfig locals = new EnvironmentAgentsConfig();
for (EnvironmentConfig part : this) {
locals.addAll(part.getLocalAgents());
}
return locals;
}
@Override
public boolean containsPipelineRemotely(CaseInsensitiveString pipelineName) {
for (EnvironmentConfig part : this) {
if (part.containsPipelineRemotely(pipelineName))
return true;
}
return false;
}
@Override
public boolean containsAgentRemotely(String uuid) {
for (EnvironmentConfig part : this) {
if (part.containsAgentRemotely(uuid)) {
return true;
}
}
return false;
}
@Override
public boolean containsEnvironmentVariableRemotely(String variableName) {
for (EnvironmentConfig part : this) {
if (part.containsEnvironmentVariableRemotely(variableName)) {
return true;
}
}
return false;
}
public ConfigOrigin getOriginForPipeline(CaseInsensitiveString pipelineName) {
for (EnvironmentConfig part : this) {
if (part.containsPipeline(pipelineName)) {
return part.getOrigin();
}
}
return null;
}
@Deprecated //To be merged with originForAgent. Use that instead
public ConfigOrigin getOriginForAgent(String agentUUID) {
for (EnvironmentConfig part : this) {
if (part.hasAgent(agentUUID)) {
return part.getOrigin();
}
}
return null;
}
@Override
public Optional<ConfigOrigin> originForAgent(String agentUuid) {
return Optional.ofNullable(getOriginForAgent(agentUuid));
}
public ConfigOrigin getOriginForEnvironmentVariable(String variableName) {
for (EnvironmentConfig part : this) {
if (part.getVariables().hasVariable(variableName)) {
return part.getOrigin();
}
}
return null;
}
@Override
public boolean validateTree(ConfigSaveValidationContext validationContext, CruiseConfig preprocessedConfig) {
validate(validationContext);
boolean isValid = ErrorCollector.getAllErrors(this).isEmpty();
for (EnvironmentConfig part : this) {
isValid = isValid && part.validateTree(validationContext, preprocessedConfig);
}
return isValid;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
EnvironmentConfig that = as(EnvironmentConfig.class, o);
if (that == null)
return false;
if (this.getAgents() != null ? !this.getAgents().equals(that.getAgents()) : that.getAgents() != null) {
return false;
}
if (this.name() != null ? !this.name().equals(that.name()) : that.name() != null) {
return false;
}
if (this.getPipelines() != null ? !this.getPipelines().equals(that.getPipelines()) : that.getPipelines() != null) {
return false;
}
if (this.getVariables() != null ? !this.getVariables().equals(that.getVariables()) : that.getVariables() != null) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = (this.name() != null ? this.name().hashCode() : 0);
result = 31 * result + (this.getAgents() != null ? this.getAgents().hashCode() : 0);
result = 31 * result + (this.getPipelines() != null ? this.getPipelines().hashCode() : 0);
EnvironmentVariablesConfig variablesConfig = this.getVariables();
result = 31 * result + (variablesConfig != null ? variablesConfig.hashCode() : 0);
return result;
}
private static <T> T as(Class<T> clazz, Object o) {
if (clazz.isInstance(o)) {
return clazz.cast(o);
}
return null;
}
@Override
public ConfigOrigin getOrigin() {
MergeConfigOrigin mergeConfigOrigin = new MergeConfigOrigin();
for (EnvironmentConfig part : this) {
mergeConfigOrigin.add(part.getOrigin());
}
return mergeConfigOrigin;
}
@Override
public void setOrigins(ConfigOrigin origins) {
throw bomb("Cannot set origins on merged config");
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.indices;
import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.cache.request.RequestCacheStats;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.hamcrest.ElasticsearchAssertions;
import org.elasticsearch.test.junit.annotations.TestLogging;
import org.joda.time.DateTimeZone;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Arrays;
import java.util.List;
import static org.elasticsearch.search.aggregations.AggregationBuilders.dateHistogram;
import static org.elasticsearch.search.aggregations.AggregationBuilders.dateRange;
import static org.elasticsearch.search.aggregations.AggregationBuilders.filter;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
@TestLogging(value = "org.elasticsearch.indices.IndicesRequestCache:TRACE")
public class IndicesRequestCacheIT extends ESIntegTestCase {
/**
 * One of the primary purposes of the request cache is to cache aggregation results.
 * Indexes two dated docs, runs a monthly date-histogram once to populate the cache,
 * then repeats the identical request and verifies the buckets match the first response.
 */
public void testCacheAggs() throws Exception {
Client client = client();
assertAcked(client.admin().indices().prepareCreate("index")
.addMapping("type", "f", "type=date")
.setSettings(Settings.builder().put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true)).get());
indexRandom(true,
client.prepareIndex("index", "type").setSource("f", "2014-03-10T00:00:00.000Z"),
client.prepareIndex("index", "type").setSource("f", "2014-05-13T00:00:00.000Z"));
ensureSearchable("index");
// This is not a random example: serialization with time zones writes shared strings
// which used to not work well with the query cache because of the handles stream output
// see #9500
final SearchResponse r1 = client.prepareSearch("index").setSize(0).setSearchType(SearchType.QUERY_THEN_FETCH)
.addAggregation(dateHistogram("histo").field("f").timeZone(DateTimeZone.forID("+01:00")).minDocCount(0)
.dateHistogramInterval(DateHistogramInterval.MONTH))
.get();
assertSearchResponse(r1);
// The cache is actually used: the first request must have left an entry behind.
assertThat(client.admin().indices().prepareStats("index").setRequestCache(true).get().getTotal().getRequestCache()
.getMemorySizeInBytes(), greaterThan(0L));
// Repeated identical requests must return bucket-for-bucket the same histogram.
for (int i = 0; i < 10; ++i) {
final SearchResponse r2 = client.prepareSearch("index").setSize(0)
.setSearchType(SearchType.QUERY_THEN_FETCH).addAggregation(dateHistogram("histo").field("f")
.timeZone(DateTimeZone.forID("+01:00")).minDocCount(0).dateHistogramInterval(DateHistogramInterval.MONTH))
.get();
assertSearchResponse(r2);
Histogram h1 = r1.getAggregations().get("histo");
Histogram h2 = r2.getAggregations().get("histo");
final List<? extends Bucket> buckets1 = h1.getBuckets();
final List<? extends Bucket> buckets2 = h2.getBuckets();
assertEquals(buckets1.size(), buckets2.size());
// Compare key and doc count of every bucket pair.
for (int j = 0; j < buckets1.size(); ++j) {
final Bucket b1 = buckets1.get(j);
final Bucket b2 = buckets2.get(j);
assertEquals(b1.getKey(), b2.getKey());
assertEquals(b1.getDocCount(), b2.getDocCount());
}
}
}
/**
 * Checks how per-shard query rewriting interacts with the request cache on a
 * five-shard index (docs routed to three shards). Cache counters are checked via
 * {@code assertCacheState(client, index, x, y)} — the helper is defined later in
 * this class (not visible in this chunk); the arguments are presumably
 * (hits, misses) — confirm against the helper.
 */
public void testQueryRewrite() throws Exception {
Client client = client();
assertAcked(client.admin().indices().prepareCreate("index").addMapping("type", "s", "type=date")
.setSettings(Settings.builder().put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true)
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 5).put("index.number_of_routing_shards", 5)
.put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)).get());
indexRandom(true, client.prepareIndex("index", "type", "1").setRouting("1").setSource("s", "2016-03-19"),
client.prepareIndex("index", "type", "2").setRouting("1").setSource("s", "2016-03-20"),
client.prepareIndex("index", "type", "3").setRouting("1").setSource("s", "2016-03-21"),
client.prepareIndex("index", "type", "4").setRouting("2").setSource("s", "2016-03-22"),
client.prepareIndex("index", "type", "5").setRouting("2").setSource("s", "2016-03-23"),
client.prepareIndex("index", "type", "6").setRouting("2").setSource("s", "2016-03-24"),
client.prepareIndex("index", "type", "7").setRouting("3").setSource("s", "2016-03-25"),
client.prepareIndex("index", "type", "8").setRouting("3").setSource("s", "2016-03-26"),
client.prepareIndex("index", "type", "9").setRouting("3").setSource("s", "2016-03-27"));
ensureSearchable("index");
assertCacheState(client, "index", 0, 0);
// Force merge the index to ensure there can be no background merges during the subsequent searches that would invalidate the cache
ForceMergeResponse forceMergeResponse = client.admin().indices().prepareForceMerge("index").setFlush(true).get();
ElasticsearchAssertions.assertAllSuccessful(forceMergeResponse);
refresh();
ensureSearchable("index");
assertCacheState(client, "index", 0, 0);
// First range request: all five shards record a miss.
final SearchResponse r1 = client.prepareSearch("index").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0)
.setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-19").lte("2016-03-25")).setPreFilterShardSize(Integer.MAX_VALUE).get();
ElasticsearchAssertions.assertAllSuccessful(r1);
assertThat(r1.getHits().getTotalHits(), equalTo(7L));
assertCacheState(client, "index", 0, 5);
// Shifted range: some per-shard rewritten forms coincide with cached entries.
final SearchResponse r2 = client.prepareSearch("index").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0)
.setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-20").lte("2016-03-26"))
.setPreFilterShardSize(Integer.MAX_VALUE).get();
ElasticsearchAssertions.assertAllSuccessful(r2);
assertThat(r2.getHits().getTotalHits(), equalTo(7L));
assertCacheState(client, "index", 3, 7);
// Shifted again: more cached per-shard entries are reusable.
final SearchResponse r3 = client.prepareSearch("index").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0)
.setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-21").lte("2016-03-27")).setPreFilterShardSize(Integer.MAX_VALUE)
.get();
ElasticsearchAssertions.assertAllSuccessful(r3);
assertThat(r3.getHits().getTotalHits(), equalTo(7L));
assertCacheState(client, "index", 6, 9);
}
/**
 * Same range query repeated on a single-shard index where one doc (id 7) lacks the
 * "s" field entirely: total hits are 8, and the identical request records one miss
 * followed by hits on each repetition — the missing value must not defeat caching.
 * Cache counters checked via assertCacheState (helper not visible in this chunk;
 * presumably (hits, misses)).
 */
public void testQueryRewriteMissingValues() throws Exception {
Client client = client();
assertAcked(client.admin().indices().prepareCreate("index").addMapping("type", "s", "type=date")
.setSettings(Settings.builder().put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true)
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)).get());
indexRandom(true, client.prepareIndex("index", "type", "1").setSource("s", "2016-03-19"),
client.prepareIndex("index", "type", "2").setSource("s", "2016-03-20"),
client.prepareIndex("index", "type", "3").setSource("s", "2016-03-21"),
client.prepareIndex("index", "type", "4").setSource("s", "2016-03-22"),
client.prepareIndex("index", "type", "5").setSource("s", "2016-03-23"),
client.prepareIndex("index", "type", "6").setSource("s", "2016-03-24"),
client.prepareIndex("index", "type", "7").setSource("other", "value"),
client.prepareIndex("index", "type", "8").setSource("s", "2016-03-26"),
client.prepareIndex("index", "type", "9").setSource("s", "2016-03-27"));
ensureSearchable("index");
assertCacheState(client, "index", 0, 0);
// Force merge the index to ensure there can be no background merges during the subsequent searches that would invalidate the cache
ForceMergeResponse forceMergeResponse = client.admin().indices().prepareForceMerge("index").setFlush(true).get();
ElasticsearchAssertions.assertAllSuccessful(forceMergeResponse);
refresh();
ensureSearchable("index");
assertCacheState(client, "index", 0, 0);
// First execution: one miss populates the cache.
final SearchResponse r1 = client.prepareSearch("index").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0)
.setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-19").lte("2016-03-28")).get();
ElasticsearchAssertions.assertAllSuccessful(r1);
assertThat(r1.getHits().getTotalHits(), equalTo(8L));
assertCacheState(client, "index", 0, 1);
// Identical request: served from the cache.
final SearchResponse r2 = client.prepareSearch("index").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0)
.setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-19").lte("2016-03-28")).get();
ElasticsearchAssertions.assertAllSuccessful(r2);
assertThat(r2.getHits().getTotalHits(), equalTo(8L));
assertCacheState(client, "index", 1, 1);
// And again: a second cache hit, no new miss.
final SearchResponse r3 = client.prepareSearch("index").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0)
.setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-19").lte("2016-03-28")).get();
ElasticsearchAssertions.assertAllSuccessful(r3);
assertThat(r3.getHits().getTotalHits(), equalTo(8L));
assertCacheState(client, "index", 2, 1);
}
/**
 * Repeats a date-range query whose upper bound is "now" against a single-shard index.
 * The asserted cache states (one miss, then hits) show the request is still cached —
 * presumably because the range is rewritten per-shard into a cacheable form before
 * caching is decided; confirm against the request-cache rewrite logic. Counters
 * checked via assertCacheState (helper not visible in this chunk).
 */
public void testQueryRewriteDates() throws Exception {
Client client = client();
assertAcked(client.admin().indices().prepareCreate("index").addMapping("type", "d", "type=date")
.setSettings(Settings.builder().put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true)
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)).get());
indexRandom(true, client.prepareIndex("index", "type", "1").setSource("d", "2014-01-01T00:00:00"),
client.prepareIndex("index", "type", "2").setSource("d", "2014-02-01T00:00:00"),
client.prepareIndex("index", "type", "3").setSource("d", "2014-03-01T00:00:00"),
client.prepareIndex("index", "type", "4").setSource("d", "2014-04-01T00:00:00"),
client.prepareIndex("index", "type", "5").setSource("d", "2014-05-01T00:00:00"),
client.prepareIndex("index", "type", "6").setSource("d", "2014-06-01T00:00:00"),
client.prepareIndex("index", "type", "7").setSource("d", "2014-07-01T00:00:00"),
client.prepareIndex("index", "type", "8").setSource("d", "2014-08-01T00:00:00"),
client.prepareIndex("index", "type", "9").setSource("d", "2014-09-01T00:00:00"));
ensureSearchable("index");
assertCacheState(client, "index", 0, 0);
// Force merge the index to ensure there can be no background merges during the subsequent searches that would invalidate the cache
ForceMergeResponse forceMergeResponse = client.admin().indices().prepareForceMerge("index").setFlush(true).get();
ElasticsearchAssertions.assertAllSuccessful(forceMergeResponse);
refresh();
ensureSearchable("index");
assertCacheState(client, "index", 0, 0);
// First execution: one miss populates the cache.
final SearchResponse r1 = client.prepareSearch("index").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0)
.setQuery(QueryBuilders.rangeQuery("d").gte("2013-01-01T00:00:00").lte("now"))
.get();
ElasticsearchAssertions.assertAllSuccessful(r1);
assertThat(r1.getHits().getTotalHits(), equalTo(9L));
assertCacheState(client, "index", 0, 1);
// Identical request: served from the cache.
final SearchResponse r2 = client.prepareSearch("index").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0)
.setQuery(QueryBuilders.rangeQuery("d").gte("2013-01-01T00:00:00").lte("now"))
.get();
ElasticsearchAssertions.assertAllSuccessful(r2);
assertThat(r2.getHits().getTotalHits(), equalTo(9L));
assertCacheState(client, "index", 1, 1);
// And again: a second cache hit, no new miss.
final SearchResponse r3 = client.prepareSearch("index").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0)
.setQuery(QueryBuilders.rangeQuery("d").gte("2013-01-01T00:00:00").lte("now"))
.get();
ElasticsearchAssertions.assertAllSuccessful(r3);
assertThat(r3.getHits().getTotalHits(), equalTo(9L));
assertCacheState(client, "index", 2, 1);
}
/**
 * Spreads nine docs (now .. now-8d) over three indices and queries all of them with a
 * "now"-bounded range. index-1 and index-2 fall entirely inside the range, so their
 * shard-level queries can be rewritten to a cacheable form (miss then hits); index-3
 * intersects the range boundary, keeps the literal "now", and is never cached.
 * Counters checked via assertCacheState (helper not visible in this chunk;
 * presumably (hits, misses)).
 */
public void testQueryRewriteDatesWithNow() throws Exception {
Client client = client();
Settings settings = Settings.builder().put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true)
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0).build();
assertAcked(client.admin().indices().prepareCreate("index-1").addMapping("type", "d", "type=date")
.setSettings(settings).get());
assertAcked(client.admin().indices().prepareCreate("index-2").addMapping("type", "d", "type=date")
.setSettings(settings).get());
assertAcked(client.admin().indices().prepareCreate("index-3").addMapping("type", "d", "type=date")
.setSettings(settings).get());
ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC);
indexRandom(true, client.prepareIndex("index-1", "type", "1").setSource("d", now),
client.prepareIndex("index-1", "type", "2").setSource("d", now.minusDays(1)),
client.prepareIndex("index-1", "type", "3").setSource("d", now.minusDays(2)),
client.prepareIndex("index-2", "type", "4").setSource("d", now.minusDays(3)),
client.prepareIndex("index-2", "type", "5").setSource("d", now.minusDays(4)),
client.prepareIndex("index-2", "type", "6").setSource("d", now.minusDays(5)),
client.prepareIndex("index-3", "type", "7").setSource("d", now.minusDays(6)),
client.prepareIndex("index-3", "type", "8").setSource("d", now.minusDays(7)),
client.prepareIndex("index-3", "type", "9").setSource("d", now.minusDays(8)));
ensureSearchable("index-1", "index-2", "index-3");
assertCacheState(client, "index-1", 0, 0);
assertCacheState(client, "index-2", 0, 0);
assertCacheState(client, "index-3", 0, 0);
// Force merge the index to ensure there can be no background merges during the subsequent searches that would invalidate the cache
ForceMergeResponse forceMergeResponse = client.admin().indices().prepareForceMerge("index-1", "index-2", "index-3").setFlush(true)
.get();
ElasticsearchAssertions.assertAllSuccessful(forceMergeResponse);
refresh();
ensureSearchable("index-1", "index-2", "index-3");
assertCacheState(client, "index-1", 0, 0);
assertCacheState(client, "index-2", 0, 0);
assertCacheState(client, "index-3", 0, 0);
// First execution: index-1 and index-2 record a miss each.
final SearchResponse r1 = client.prepareSearch("index-*").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0)
.setQuery(QueryBuilders.rangeQuery("d").gte("now-7d/d").lte("now")).get();
ElasticsearchAssertions.assertAllSuccessful(r1);
assertThat(r1.getHits().getTotalHits(), equalTo(8L));
assertCacheState(client, "index-1", 0, 1);
assertCacheState(client, "index-2", 0, 1);
// Because the query will INTERSECT with the 3rd index it will not be
// rewritten and will still contain `now` so won't be recorded as a
// cache miss or cache hit since queries containing now can't be cached
assertCacheState(client, "index-3", 0, 0);
// Identical request: the two rewritable indices hit the cache, index-3 still uncached.
final SearchResponse r2 = client.prepareSearch("index-*").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0)
.setQuery(QueryBuilders.rangeQuery("d").gte("now-7d/d").lte("now")).get();
ElasticsearchAssertions.assertAllSuccessful(r2);
assertThat(r2.getHits().getTotalHits(), equalTo(8L));
assertCacheState(client, "index-1", 1, 1);
assertCacheState(client, "index-2", 1, 1);
assertCacheState(client, "index-3", 0, 0);
// And once more: second hit on the cached indices.
final SearchResponse r3 = client.prepareSearch("index-*").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0)
.setQuery(QueryBuilders.rangeQuery("d").gte("now-7d/d").lte("now")).get();
ElasticsearchAssertions.assertAllSuccessful(r3);
assertThat(r3.getHits().getTotalHits(), equalTo(8L));
assertCacheState(client, "index-1", 2, 1);
assertCacheState(client, "index-2", 2, 1);
assertCacheState(client, "index-3", 0, 0);
}
public void testCanCache() throws Exception {
Client client = client();
Settings settings = Settings.builder().put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true)
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 2).put("index.number_of_routing_shards", 2)
.put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0).build();
assertAcked(client.admin().indices().prepareCreate("index").addMapping("type", "s", "type=date")
.setSettings(settings)
.get());
indexRandom(true, client.prepareIndex("index", "type", "1").setRouting("1").setSource("s", "2016-03-19"),
client.prepareIndex("index", "type", "2").setRouting("1").setSource("s", "2016-03-20"),
client.prepareIndex("index", "type", "3").setRouting("1").setSource("s", "2016-03-21"),
client.prepareIndex("index", "type", "4").setRouting("2").setSource("s", "2016-03-22"),
client.prepareIndex("index", "type", "5").setRouting("2").setSource("s", "2016-03-23"),
client.prepareIndex("index", "type", "6").setRouting("2").setSource("s", "2016-03-24"),
client.prepareIndex("index", "type", "7").setRouting("3").setSource("s", "2016-03-25"),
client.prepareIndex("index", "type", "8").setRouting("3").setSource("s", "2016-03-26"),
client.prepareIndex("index", "type", "9").setRouting("3").setSource("s", "2016-03-27"));
ensureSearchable("index");
assertCacheState(client, "index", 0, 0);
// Force merge the index to ensure there can be no background merges during the subsequent searches that would invalidate the cache
ForceMergeResponse forceMergeResponse = client.admin().indices().prepareForceMerge("index").setFlush(true).get();
ElasticsearchAssertions.assertAllSuccessful(forceMergeResponse);
refresh();
ensureSearchable("index");
assertCacheState(client, "index", 0, 0);
// If size > 0 we should no cache by default
final SearchResponse r1 = client.prepareSearch("index").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(1)
.setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-19").lte("2016-03-25")).get();
ElasticsearchAssertions.assertAllSuccessful(r1);
assertThat(r1.getHits().getTotalHits(), equalTo(7L));
assertCacheState(client, "index", 0, 0);
// If search type is DFS_QUERY_THEN_FETCH we should not cache
final SearchResponse r2 = client.prepareSearch("index").setSearchType(SearchType.DFS_QUERY_THEN_FETCH).setSize(0)
.setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-20").lte("2016-03-26")).get();
ElasticsearchAssertions.assertAllSuccessful(r2);
assertThat(r2.getHits().getTotalHits(), equalTo(7L));
assertCacheState(client, "index", 0, 0);
// If search type is DFS_QUERY_THEN_FETCH we should not cache even if
// the cache flag is explicitly set on the request
final SearchResponse r3 = client.prepareSearch("index").setSearchType(SearchType.DFS_QUERY_THEN_FETCH).setSize(0)
.setRequestCache(true).setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-20").lte("2016-03-26")).get();
ElasticsearchAssertions.assertAllSuccessful(r3);
assertThat(r3.getHits().getTotalHits(), equalTo(7L));
assertCacheState(client, "index", 0, 0);
        // If the request has a non-filter aggregation containing now we should not cache
final SearchResponse r5 = client.prepareSearch("index").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0)
.setRequestCache(true).setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-20").lte("2016-03-26"))
.addAggregation(dateRange("foo").field("s").addRange("now-10y", "now")).get();
ElasticsearchAssertions.assertAllSuccessful(r5);
assertThat(r5.getHits().getTotalHits(), equalTo(7L));
assertCacheState(client, "index", 0, 0);
        // If size > 0 and the cache flag is explicitly set on the request we should cache
final SearchResponse r6 = client.prepareSearch("index").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(1)
.setRequestCache(true).setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-21").lte("2016-03-27")).get();
ElasticsearchAssertions.assertAllSuccessful(r6);
assertThat(r6.getHits().getTotalHits(), equalTo(7L));
assertCacheState(client, "index", 0, 2);
// If the request has a filter aggregation containing now we should cache since it gets rewritten
final SearchResponse r4 = client.prepareSearch("index").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0)
.setRequestCache(true).setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-20").lte("2016-03-26"))
.addAggregation(filter("foo", QueryBuilders.rangeQuery("s").from("now-10y").to("now"))).get();
ElasticsearchAssertions.assertAllSuccessful(r4);
assertThat(r4.getHits().getTotalHits(), equalTo(7L));
assertCacheState(client, "index", 0, 4);
}
public void testCacheWithFilteredAlias() {
Client client = client();
Settings settings = Settings.builder().put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true)
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0).build();
assertAcked(client.admin().indices().prepareCreate("index").addMapping("type", "created_at", "type=date")
.setSettings(settings)
.addAlias(new Alias("last_week").filter(QueryBuilders.rangeQuery("created_at").gte("now-7d/d")))
.get());
ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC);
client.prepareIndex("index", "type", "1").setRouting("1").setSource("created_at",
DateTimeFormatter.ISO_LOCAL_DATE.format(now)).get();
refresh();
// Force merge the index to ensure there can be no background merges during the subsequent searches that would invalidate the cache
ForceMergeResponse forceMergeResponse = client.admin().indices().prepareForceMerge("index").setFlush(true).get();
ElasticsearchAssertions.assertAllSuccessful(forceMergeResponse);
assertCacheState(client, "index", 0, 0);
SearchResponse r1 = client.prepareSearch("index").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0)
.setQuery(QueryBuilders.rangeQuery("created_at").gte("now-7d/d")).get();
ElasticsearchAssertions.assertAllSuccessful(r1);
assertThat(r1.getHits().getTotalHits(), equalTo(1L));
assertCacheState(client, "index", 0, 1);
r1 = client.prepareSearch("index").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0)
.setQuery(QueryBuilders.rangeQuery("created_at").gte("now-7d/d")).get();
ElasticsearchAssertions.assertAllSuccessful(r1);
assertThat(r1.getHits().getTotalHits(), equalTo(1L));
assertCacheState(client, "index", 1, 1);
r1 = client.prepareSearch("last_week").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0).get();
ElasticsearchAssertions.assertAllSuccessful(r1);
assertThat(r1.getHits().getTotalHits(), equalTo(1L));
assertCacheState(client, "index", 1, 2);
r1 = client.prepareSearch("last_week").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0).get();
ElasticsearchAssertions.assertAllSuccessful(r1);
assertThat(r1.getHits().getTotalHits(), equalTo(1L));
assertCacheState(client, "index", 2, 2);
}
private static void assertCacheState(Client client, String index, long expectedHits, long expectedMisses) {
RequestCacheStats requestCacheStats = client.admin().indices().prepareStats(index).setRequestCache(true).get().getTotal()
.getRequestCache();
// Check the hit count and miss count together so if they are not
// correct we can see both values
assertEquals(Arrays.asList(expectedHits, expectedMisses, 0L),
Arrays.asList(requestCacheStats.getHitCount(), requestCacheStats.getMissCount(), requestCacheStats.getEvictions()));
}
}
| |
/**
* Copyright 2017 Hortonworks.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package com.hortonworks.streamline.streams.metrics.storm.ambari;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.tomakehurst.wiremock.junit.WireMockRule;
import com.hortonworks.streamline.streams.metrics.TimeSeriesQuerier;
import org.apache.commons.lang3.tuple.Pair;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static com.github.tomakehurst.wiremock.client.WireMock.aResponse;
import static com.github.tomakehurst.wiremock.client.WireMock.equalTo;
import static com.github.tomakehurst.wiremock.client.WireMock.get;
import static com.github.tomakehurst.wiremock.client.WireMock.getRequestedFor;
import static com.github.tomakehurst.wiremock.client.WireMock.stubFor;
import static com.github.tomakehurst.wiremock.client.WireMock.urlPathEqualTo;
import static com.github.tomakehurst.wiremock.client.WireMock.verify;
import static com.hortonworks.streamline.streams.metrics.storm.ambari.AmbariMetricsServiceWithStormQuerier.COLLECTOR_API_URL;
import static com.hortonworks.streamline.streams.metrics.storm.ambari.AmbariMetricsServiceWithStormQuerier.DEFAULT_APP_ID;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
public class AmbariMetricsServiceWithStormQuerierTest {
private final String TEST_COLLECTOR_API_PATH = "/ws/v1/timeline/metrics";
@Rule
public WireMockRule wireMockRule = new WireMockRule(18089);
private AmbariMetricsServiceWithStormQuerier querier;
@Before
public void setUp() throws Exception {
querier = new AmbariMetricsServiceWithStormQuerier();
Map<String, String> conf = new HashMap<>();
conf.put(COLLECTOR_API_URL, "http://localhost:18089" + TEST_COLLECTOR_API_PATH);
querier.init(conf);
}
@After
public void tearDown() throws Exception {
}
@Test
public void getMetrics() throws Exception {
stubMetricUrl();
String topologyName = "testTopology";
String componentId = "testComponent";
String metricName = "__test.metric.name";
TimeSeriesQuerier.AggregateFunction aggrFunction = TimeSeriesQuerier.AggregateFunction.SUM;
long from = 1234L;
long to = 5678L;
Map<Long, Double> metrics = querier.getMetrics(topologyName, componentId, metricName, aggrFunction, from, to);
assertResult(metrics, aggrFunction);
verify(getRequestedFor(urlPathEqualTo(TEST_COLLECTOR_API_PATH))
.withQueryParam("appId", equalTo(DEFAULT_APP_ID))
.withQueryParam("hostname", equalTo(""))
.withQueryParam("metricNames", equalTo("topology.testTopology.testComponent.%.--test.metric.name"))
.withQueryParam("startTime", equalTo("1234"))
.withQueryParam("endTime", equalTo("5678")));
}
@Test
public void getMetricsWithStreamAggregation() throws Exception {
stubMetricUrl();
String topologyName = "testTopology";
String componentId = "testComponent";
// this is one of metric which needs stream aggregation
String metricName = "__emit-count";
TimeSeriesQuerier.AggregateFunction aggrFunction = TimeSeriesQuerier.AggregateFunction.SUM;
long from = 1234L;
long to = 5678L;
Map<Long, Double> metrics = querier.getMetrics(topologyName, componentId, metricName, aggrFunction, from, to);
assertResult(metrics, aggrFunction);
verify(getRequestedFor(urlPathEqualTo(TEST_COLLECTOR_API_PATH))
.withQueryParam("appId", equalTo(DEFAULT_APP_ID))
.withQueryParam("hostname", equalTo(""))
.withQueryParam("metricNames", equalTo("topology.testTopology.testComponent.%.--emit-count.%"))
.withQueryParam("startTime", equalTo("1234"))
.withQueryParam("endTime", equalTo("5678")));
}
@Test
public void testAggregateWithWeightedAverage() {
Map<Long, List<Pair<String, Double>>> keyMetric = new HashMap<>();
List<Pair<String, Double>> keyPoints1 = new ArrayList<>();
keyPoints1.add(Pair.of("stream1", 10.0));
keyPoints1.add(Pair.of("stream2", 20.0));
keyMetric.put(1L, keyPoints1);
List<Pair<String, Double>> keyPoints2 = new ArrayList<>();
keyPoints2.add(Pair.of("stream1", 10.0));
keyPoints2.add(Pair.of("stream2", 20.0));
keyMetric.put(2L, keyPoints2);
Map<Long, List<Pair<String, Double>>> weightMetric = new HashMap<>();
List<Pair<String, Double>> weightPoints1 = new ArrayList<>();
weightPoints1.add(Pair.of("stream1", 10.0));
weightPoints1.add(Pair.of("stream2", 5.0));
weightMetric.put(1L, weightPoints1);
List<Pair<String, Double>> weightPoints2 = new ArrayList<>();
weightPoints2.add(Pair.of("stream1", 5.0));
weightPoints2.add(Pair.of("stream2", 10.0));
weightMetric.put(2L, weightPoints2);
Map<Long, Double> ret = querier.aggregateWithApplyingWeightedAverage(keyMetric, weightMetric);
Assert.assertEquals(2, ret.size());
Double aggregated = ret.get(1L);
Assert.assertNotNull(aggregated);
Double expected = 10.0 * 10.0 / (10.0 + 5.0) + 20.0 * 5.0 / (10.0 + 5.0);
Assert.assertEquals(expected, aggregated, 0.00001d);
aggregated = ret.get(2L);
Assert.assertNotNull(aggregated);
expected = 10.0 * 5.0 / (5.0 + 10.0) + 20.0 * 10.0 / (5.0 + 10.0);
Assert.assertEquals(expected, aggregated, 0.00001d);
}
@Test
public void testAggregateWithWeightedAverageLacksWeightInformation() {
Map<Long, List<Pair<String, Double>>> keyMetric = new HashMap<>();
List<Pair<String, Double>> keyPoints1 = new ArrayList<>();
keyPoints1.add(Pair.of("stream1", 10.0));
keyPoints1.add(Pair.of("stream2", 20.0));
keyMetric.put(1L, keyPoints1);
List<Pair<String, Double>> keyPoints2 = new ArrayList<>();
keyPoints2.add(Pair.of("stream1", 10.0));
keyPoints2.add(Pair.of("stream2", 20.0));
keyMetric.put(2L, keyPoints2);
List<Pair<String, Double>> keyPoints3 = new ArrayList<>();
keyPoints3.add(Pair.of("stream1", 10.0));
keyPoints3.add(Pair.of("stream2", 20.0));
keyMetric.put(3L, keyPoints3);
Map<Long, List<Pair<String, Double>>> weightMetric = new HashMap<>();
// no weight for 1L
// total weight is zero for 2L
List<Pair<String, Double>> weightPoints2 = new ArrayList<>();
weightPoints2.add(Pair.of("stream1", 0.0));
weightPoints2.add(Pair.of("stream2", 0.0));
weightMetric.put(2L, weightPoints2);
// no weight for 3L - stream2
List<Pair<String, Double>> weightPoints3 = new ArrayList<>();
weightPoints3.add(Pair.of("stream1", 10.0));
weightMetric.put(3L, weightPoints3);
Map<Long, Double> ret = querier.aggregateWithApplyingWeightedAverage(keyMetric, weightMetric);
Assert.assertEquals(3, ret.size());
Double aggregated = ret.get(1L);
Assert.assertNotNull(aggregated);
Assert.assertEquals(0.0d, aggregated, 0.00001d);
aggregated = ret.get(2L);
Assert.assertNotNull(aggregated);
Assert.assertEquals(0.0d, aggregated, 0.00001d);
aggregated = ret.get(3L);
Assert.assertNotNull(aggregated);
// only weight and value for stream1 is considered
Assert.assertEquals(10.0d, aggregated, 0.00001d);
}
@Test
public void getRawMetrics() throws Exception {
stubMetricUrlForRawMetric();
String metricName = "metric";
String parameters = "precision=seconds,appId=appId";
long from = 1234L;
long to = 5678L;
Map<String, Map<Long, Double>> metrics = querier.getRawMetrics(metricName, parameters, from, to);
assertRawMetricResult(metrics.get("metric"));
verify(getRequestedFor(urlPathEqualTo(TEST_COLLECTOR_API_PATH))
.withQueryParam("appId", equalTo("appId"))
.withQueryParam("metricNames", equalTo("metric"))
.withQueryParam("startTime", equalTo("1234"))
.withQueryParam("endTime", equalTo("5678")));
}
private void stubMetricUrl() throws JsonProcessingException {
Map<String, List<Map<String, ?>>> stubBodyMap = new HashMap<>();
List<Map<String, ?>> metrics = new ArrayList<>();
// system stream
Map<String, Object> metric1 = new HashMap<>();
metric1.put("metricname", "topology.streamline-1-Topology.1-RULE.host1.6700.-1.--emit-count.--ack-ack");
metric1.put("metrics", getTestTimestampToValueMap());
metrics.add(metric1);
// system stream
Map<String, Object> metric2 = new HashMap<>();
metric2.put("metricname", "topology.streamline-1-Topology.1-RULE.host1.6700.-1.--emit-count.--system");
metric2.put("metrics", getTestTimestampToValueMap());
metrics.add(metric2);
// system stream
Map<String, Object> metric3 = new HashMap<>();
metric3.put("metricname", "topology.streamline-1-Topology.1-RULE.host1.6700.-1.--emit-count.--ack-init");
metric3.put("metrics", getTestTimestampToValueMap());
metrics.add(metric3);
// system stream
Map<String, Object> metric4 = new HashMap<>();
metric4.put("metricname", "topology.streamline-1-Topology.1-RULE.host1.6700.-1.--emit-count.--metric");
metric4.put("metrics", getTestTimestampToValueMap());
metrics.add(metric4);
// non-system stream
Map<String, Object> metric5 = new HashMap<>();
metric5.put("metricname", "topology.streamline-1-Topology.1-RULE.host1.6700.-1.--emit-count.stream1");
metric5.put("metrics", getTestTimestampToValueMap());
metrics.add(metric5);
// non-system stream
Map<String, Object> metric6 = new HashMap<>();
metric6.put("metricname", "topology.streamline-1-Topology.1-RULE.host1.6700.-1.--emit-count.stream2");
metric6.put("metrics", getTestTimestampToValueMap());
metrics.add(metric6);
stubBodyMap.put("metrics", metrics);
ObjectMapper objectMapper = new ObjectMapper();
String body = objectMapper.writeValueAsString(stubBodyMap);
stubFor(get(urlPathEqualTo(TEST_COLLECTOR_API_PATH))
.withHeader("Accept", equalTo("application/json"))
.willReturn(aResponse()
.withStatus(200)
.withHeader("Content-Type", "application/json")
.withBody(body)));
}
private void stubMetricUrlForRawMetric() {
stubFor(get(urlPathEqualTo(TEST_COLLECTOR_API_PATH))
.withHeader("Accept", equalTo("application/json"))
.willReturn(aResponse()
.withStatus(200)
.withHeader("Content-Type", "application/json")
.withBody("{\"metrics\": [ {\"metricname\": \"metric\", \"metrics\": { \"123456\": 456.789, \"567890\": 890.123 } } ] }")));
}
private void assertRawMetricResult(Map<Long, Double> metrics) {
assertTrue(metrics.containsKey(123456L));
assertTrue(metrics.containsKey(567890L));
assertEquals(456.789, metrics.get(123456L), 0.00001);
assertEquals(890.123, metrics.get(567890L), 0.00001);
}
private void assertResult(Map<Long, Double> metrics, TimeSeriesQuerier.AggregateFunction aggrFunction) {
assertTrue(metrics.containsKey(123456L));
assertTrue(metrics.containsKey(567890L));
switch (aggrFunction) {
case SUM:
assertEquals(123.456 * 2, metrics.get(123456L), 0.00001);
assertEquals(456.789 * 2, metrics.get(567890L), 0.00001);
break;
case AVG:
case MAX:
case MIN:
assertEquals(123.456, metrics.get(123456L), 0.00001);
assertEquals(456.789, metrics.get(567890L), 0.00001);
break;
default:
throw new IllegalArgumentException("Not supported aggregated function.");
}
}
private Map<String, Double> getTestTimestampToValueMap() {
Map<String, Double> timestampToValueMap1 = new HashMap<>();
timestampToValueMap1.put("123456", 123.456);
timestampToValueMap1.put("567890", 456.789);
return timestampToValueMap1;
}
}
| |
/*
* The University of Wales, Cardiff Triana Project Software License (Based
* on the Apache Software License Version 1.1)
*
* Copyright (c) 2007 University of Wales, Cardiff. All rights reserved.
*
* Redistribution and use of the software in source and binary forms, with
* or without modification, are permitted provided that the following
* conditions are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* 3. The end-user documentation included with the redistribution, if any,
* must include the following acknowledgment: "This product includes
* software developed by the University of Wales, Cardiff for the Triana
* Project (http://www.trianacode.org)." Alternately, this
* acknowledgment may appear in the software itself, if and wherever
* such third-party acknowledgments normally appear.
*
* 4. The names "Triana" and "University of Wales, Cardiff" must not be
* used to endorse or promote products derived from this software
* without prior written permission. For written permission, please
* contact triana@trianacode.org.
*
* 5. Products derived from this software may not be called "Triana," nor
* may Triana appear in their name, without prior written permission of
* the University of Wales, Cardiff.
*
* 6. This software may not be sold, used or incorporated into any product
* for sale to third parties.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
* NO EVENT SHALL UNIVERSITY OF WALES, CARDIFF OR ITS CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
* THE POSSIBILITY OF SUCH DAMAGE.
*
* ------------------------------------------------------------------------
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Triana Project. For more information on the
* Triana Project, please see. http://www.trianacode.org.
*
* This license is based on the BSD license as adopted by the Apache
* Foundation and is governed by the laws of England and Wales.
*
*/
package org.trianacode.gui.toolmaker;
import org.trianacode.gui.util.Env;
import org.trianacode.gui.windows.WizardInterface;
import org.trianacode.gui.windows.WizardPanel;
import javax.swing.*;
import javax.swing.border.EmptyBorder;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.Vector;
/**
* The final tool wizard panel prompting the user to press finish to generate code
*
* @author Ian Wang
* @version $Revision: 4048 $
*/
public class FinalPanel extends JPanel implements WizardPanel, ChangeListener, ActionListener {
/**
* a reference to the tool panel used for extracting class name etc.
*/
private UnitPanel toolpanel;
private GUIPanel guiPanel;
/**
* the labels displaying tool name, tool box and author
*/
private JLabel toolname = new JLabel("", JLabel.LEFT);
private JLabel toolbox = new JLabel("", JLabel.LEFT);
private JLabel packageName = new JLabel("", JLabel.LEFT);
private JLabel author = new JLabel("", JLabel.LEFT);
private JPanel fileListPanel = null;
private Vector dirItems = null;
private Vector fileItems = null;
private String helpFileName = "";
private String srcFileName = "";
private String srcFileDir = "";
private JCheckBox placeholderChk = new JCheckBox(Env.getString("genToolPlace"), true);
private JTextField toolPlaceHolder = new JTextField(20);
private String currentPlaceHolder = "";
private String guiFileName = "";
private String baseToolboxPath;
private String basePackagePath;
/**
* a flag indicating whether the final panel is displayed
*/
private boolean displayed = false;
/**
* an interface to the main wizard window
*/
private WizardInterface wizard;
/**
* Constructs a panel for editing general properties of a tool.
*/
public FinalPanel(UnitPanel toolpanel, GUIPanel guiPanel) {
this.toolpanel = toolpanel;
this.guiPanel = guiPanel;
initLayout();
}
public boolean isPlaceholderChecked() {
return placeholderChk.isSelected();
}
/**
* Lays out the panel
*/
private void initLayout() {
setLayout(new BorderLayout());
JPanel toolDetails = new JPanel(new BorderLayout());
JPanel instruct = new JPanel(new GridLayout(2, 1));
instruct.add(new JLabel(Env.getString("toolDefComplete"), JLabel.CENTER));
instruct.add(new JLabel(Env.getString("selectToGenerate"), JLabel.CENTER));
instruct.setBorder(new EmptyBorder(0, 0, 10, 0));
toolDetails.add(instruct, BorderLayout.NORTH);
JPanel labels = new JPanel(new GridLayout(4, 1));
labels.add(new JLabel(Env.getString("toolname") + ": ", JLabel.LEFT));
labels.add(new JLabel(Env.getString("toolboxpath") + ": ", JLabel.LEFT));
labels.add(new JLabel(Env.getString("unitPackage") + ": ", JLabel.LEFT));
labels.add(new JLabel(Env.getString("author") + ": ", JLabel.LEFT));
labels.setBorder(new EmptyBorder(0, 0, 0, 3));
toolDetails.add(labels, BorderLayout.WEST);
JPanel contain = new JPanel(new GridLayout(4, 1));
contain.add(toolname);
contain.add(toolbox);
contain.add(packageName);
contain.add(author);
toolDetails.add(contain, BorderLayout.CENTER);
add(toolDetails, BorderLayout.NORTH);
JPanel placeholderPanel = new JPanel(new BorderLayout());
placeholderChk.setToolTipText(Env.getString("genToolPlaceTip"));
placeholderChk.addChangeListener(this);
placeholderPanel.add(placeholderChk, BorderLayout.NORTH);
JPanel inner = new JPanel(new BorderLayout());
inner.add(toolPlaceHolder, BorderLayout.NORTH);
toolPlaceHolder.addActionListener(this);
placeholderPanel.add(inner, BorderLayout.CENTER);
add(placeholderPanel, BorderLayout.CENTER);
fileListPanel = new JPanel();
fileListPanel.setBorder(new EmptyBorder(10, 0, 0, 0));
add(fileListPanel, BorderLayout.SOUTH);
}
public void setWizardInterface(WizardInterface wizard) {
this.wizard = wizard;
}
public WizardInterface getWizardInterface() {
return wizard;
}
public boolean isFinishEnabled() {
return displayed;
}
public boolean isNextEnabled() {
return false;
}
public void panelDisplayed() {
toolname.setText(toolpanel.getUnitName());
toolbox.setText(toolpanel.getToolBox());
packageName.setText(toolpanel.getUnitPackage());
author.setText(toolpanel.getAuthor());
if (toolPlaceHolder.getText().equals("")) {
if (toolpanel.getUnitPackage().equals("")) {
currentPlaceHolder = toolpanel.getUnitName();
} else {
currentPlaceHolder = toolpanel.getUnitPackage() + "." + toolpanel.getUnitName();
}
toolPlaceHolder.setText(currentPlaceHolder);
currentPlaceHolder = getPlaceHolderFile();
}
createFileList();
displayed = true;
wizard.notifyButtonStateChange();
}
public void panelHidden() {
displayed = false;
wizard.notifyButtonStateChange();
}
/**
* @return An array in order of the directories and sub directories that need to be created
*/
public String[] getDirectoriesToCreate() {
return (String[]) dirItems.toArray(new String[dirItems.size()]);
}
/**
* @return an array item[0] is the source code file name, item[1] is the help file name
*/
public String[] getFilesToCreate() {
return (String[]) fileItems.toArray(new String[fileItems.size()]);
}
public String getHelpFileName() {
return helpFileName;
}
public String getSrcFileName() {
return srcFileName;
}
public String getSrcFileDir() {
return srcFileDir;
}
public String getGuiFileName() {
return guiFileName;
}
public String getPlaceHolderToolName() {
return toolPlaceHolder.getText();
}
/**
* repacks the builder window to preferred size;
*/
private void repack() {
Component comp = getParent();
while ((comp != null) && (!(comp instanceof Window))) {
comp = comp.getParent();
}
((Window) comp).pack();
}
/**
* Parse the directories and files to be created and display them in the list component
*/
private void createFileList() {
dirItems = new Vector();
fileItems = new Vector();
baseToolboxPath = toolpanel.getToolBox();
if (!baseToolboxPath.endsWith(Env.separator())) {
baseToolboxPath = baseToolboxPath + Env.separator();
}
String[] splitter = toolpanel.getUnitPackage().split(".");
basePackagePath = baseToolboxPath;
for (int i = 0; i < splitter.length; i++) {
basePackagePath = basePackagePath + splitter[i] + Env.separator();
checkAndAddDir(basePackagePath);
}
if (placeholderChk.isSelected()) {
checkAndAddFile(getPlaceHolderFile());
currentPlaceHolder = getPlaceHolderFile();
}
String baseSrcFileDir = basePackagePath + "src" + Env.separator();
checkAndAddDir(baseSrcFileDir);
srcFileName = baseSrcFileDir + toolpanel.getUnitName() + ".java";
checkAndAddFile(srcFileName);
if (guiPanel.isGenerateCustomPanel()) {
guiFileName = baseSrcFileDir + guiPanel.getCustomPanelName() + ".java";
checkAndAddFile(guiFileName);
}
String pathname = basePackagePath + "classes" + Env.separator();
checkAndAddDir(pathname);
pathname = basePackagePath + "help" + Env.separator();
checkAndAddDir(pathname);
helpFileName = pathname + toolpanel.getHelpFile();
checkAndAddFile(helpFileName);
addFilesToPanel();
}
public String getPlaceHolderFile() {
return baseToolboxPath + toolPlaceHolder.getText().replace('.', File.separatorChar) + ".xml";
}
private void checkAndAddDir(String dirName) {
if (!dirItems.contains(dirName)) {
if (!(new File(dirName)).exists()) {
dirItems.add(dirName);
}
}
}
private void checkAndAddFile(String fileName) {
if (!fileItems.contains(fileName)) {
fileItems.add(fileName);
}
}
private void addFilesToPanel() {
fileListPanel.removeAll();
ArrayList files = new ArrayList(dirItems);
files.addAll(fileItems);
Collections.sort(files);
fileListPanel.setLayout(new GridLayout(1 + files.size(), 1));
JPanel labelpanel = new JPanel(new BorderLayout());
fileListPanel.add(labelpanel);
boolean overwritten = false;
for (Iterator iterator = files.iterator(); iterator.hasNext();) {
String s = (String) iterator.next();
File file = new File(s);
JLabel label = new JLabel(s);
if ((file.exists()) && (!file.isDirectory())) {
label.setForeground(Color.red);
overwritten = true;
}
fileListPanel.add(label);
}
if (overwritten) {
labelpanel.add(new JLabel(Env.getString("createFollowing") + "/"), BorderLayout.WEST);
JLabel label = new JLabel(Env.getString("overwritten"));
label.setForeground(Color.red);
labelpanel.add(label, BorderLayout.CENTER);
} else {
labelpanel.add(new JLabel(Env.getString("createFollowing")), BorderLayout.WEST);
}
repack();
}
public void stateChanged(ChangeEvent e) {
toolPlaceHolder.setEnabled(placeholderChk.isSelected());
String placeNameStr = getPlaceHolderFile();
if (placeholderChk.isSelected()) {
fileItems.remove(currentPlaceHolder);
if (!fileItems.contains(placeNameStr)) {
fileItems.add(placeNameStr);
}
} else {
fileItems.remove(placeNameStr);
}
addFilesToPanel();
}
public void actionPerformed(ActionEvent e) {
fileItems.remove(currentPlaceHolder);
currentPlaceHolder = getPlaceHolderFile();
fileItems.add(currentPlaceHolder);
addFilesToPanel();
}
}
| |
package org.apache.helix.integration.task;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.apache.helix.HelixManager;
import org.apache.helix.HelixManagerFactory;
import org.apache.helix.InstanceType;
import org.apache.helix.api.id.StateModelDefId;
import org.apache.helix.manager.zk.MockParticipant;
import org.apache.helix.manager.zk.MockController;
import org.apache.helix.participant.StateMachineEngine;
import org.apache.helix.task.JobConfig;
import org.apache.helix.task.Task;
import org.apache.helix.task.TaskCallbackContext;
import org.apache.helix.task.TaskDriver;
import org.apache.helix.task.TaskFactory;
import org.apache.helix.task.TaskResult;
import org.apache.helix.task.TaskState;
import org.apache.helix.task.TaskStateModelFactory;
import org.apache.helix.task.Workflow;
import org.apache.helix.testutil.ZkTestBase;
import org.apache.helix.tools.ClusterStateVerifier;
import org.apache.log4j.Logger;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import com.google.common.collect.ImmutableMap;
public class TestTaskRebalancerStopResume extends ZkTestBase {
private static final Logger LOG = Logger.getLogger(TestTaskRebalancerStopResume.class);
private static final int n = 5;
private static final int START_PORT = 12918;
private static final String MASTER_SLAVE_STATE_MODEL = "MasterSlave";
private static final String TIMEOUT_CONFIG = "Timeout";
private static final String TGT_DB = "TestDB";
private static final String JOB_RESOURCE = "SomeJob";
private static final int NUM_PARTITIONS = 20;
private static final int NUM_REPLICAS = 3;
private final String CLUSTER_NAME = "TestTaskRebalancerStopResume";
private final MockParticipant[] _participants = new MockParticipant[n];
private MockController _controller;
private HelixManager _manager;
private TaskDriver _driver;
@BeforeClass
public void beforeClass() throws Exception {
    // Start from a clean ZooKeeper namespace for this cluster.
    String clusterPath = "/" + CLUSTER_NAME;
    if (_zkclient.exists(clusterPath)) {
        _zkclient.deleteRecursive(clusterPath);
    }
    _setupTool.addCluster(CLUSTER_NAME, true);
    for (int i = 0; i < n; i++) {
        _setupTool.addInstanceToCluster(CLUSTER_NAME, "localhost_" + (START_PORT + i));
    }
    // Create and rebalance the target database resource.
    _setupTool.addResourceToCluster(CLUSTER_NAME, TGT_DB, NUM_PARTITIONS, MASTER_SLAVE_STATE_MODEL);
    _setupTool.rebalanceStorageCluster(CLUSTER_NAME, TGT_DB, NUM_REPLICAS);
    // Single registered task type: "Reindex".
    Map<String, TaskFactory> factories = new HashMap<String, TaskFactory>();
    factories.put("Reindex", new TaskFactory() {
        @Override
        public Task createNewTask(TaskCallbackContext context) {
            return new ReindexTask(context);
        }
    });
    // Launch the dummy participants, each with the Task state model registered.
    for (int i = 0; i < n; i++) {
        MockParticipant participant =
                new MockParticipant(_zkaddr, CLUSTER_NAME, "localhost_" + (START_PORT + i));
        participant.getStateMachineEngine().registerStateModelFactory(StateModelDefId.from("Task"),
                new TaskStateModelFactory(participant, factories));
        participant.syncStart();
        _participants[i] = participant;
    }
    // Launch the controller.
    _controller = new MockController(_zkaddr, CLUSTER_NAME, "controller_0");
    _controller.syncStart();
    // Administrator connection used to drive workflows from the tests.
    _manager = HelixManagerFactory.getZKHelixManager(CLUSTER_NAME, "Admin",
            InstanceType.ADMINISTRATOR, _zkaddr);
    _manager.connect();
    _driver = new TaskDriver(_manager);
    // Block until the cluster converges before any test runs.
    Assert.assertTrue(ClusterStateVerifier.verifyByZkCallback(
            new ClusterStateVerifier.MasterNbInExtViewVerifier(_zkaddr, CLUSTER_NAME)));
    Assert.assertTrue(ClusterStateVerifier.verifyByZkCallback(
            new ClusterStateVerifier.BestPossAndExtViewZkVerifier(_zkaddr, CLUSTER_NAME)));
}
@AfterClass
public void afterClass() throws Exception {
    // Tear down in reverse order of startup: controller, participants, then the admin connection.
    _controller.syncStop();
    for (MockParticipant participant : _participants) {
        participant.syncStop();
    }
    _manager.disconnect();
}
@Test
public void stopAndResume() throws Exception {
    // Single-job workflow whose task is configured to run for 100ms.
    Workflow flow = WorkflowGenerator.generateDefaultSingleJobWorkflowBuilderWithExtraConfigs(
            JOB_RESOURCE, ImmutableMap.of(TIMEOUT_CONFIG, String.valueOf(100))).build();
    LOG.info("Starting flow " + flow.getName());
    _driver.start(flow);
    TestUtil.pollForWorkflowState(_manager, JOB_RESOURCE, TaskState.IN_PROGRESS);
    // Pause while in flight, verify it actually stops ...
    LOG.info("Pausing job");
    _driver.stop(JOB_RESOURCE);
    TestUtil.pollForWorkflowState(_manager, JOB_RESOURCE, TaskState.STOPPED);
    // ... then resume and expect it to run to completion.
    LOG.info("Resuming job");
    _driver.resume(JOB_RESOURCE);
    TestUtil.pollForWorkflowState(_manager, JOB_RESOURCE, TaskState.COMPLETED);
}
@Test
public void stopAndResumeWorkflow() throws Exception {
    // Multi-job (repeated) workflow variant of stopAndResume().
    final String workflowName = "SomeWorkflow";
    Workflow flow = WorkflowGenerator.generateDefaultRepeatedJobWorkflowBuilder(workflowName).build();
    LOG.info("Starting flow " + workflowName);
    _driver.start(flow);
    TestUtil.pollForWorkflowState(_manager, workflowName, TaskState.IN_PROGRESS);
    // Pause while in flight, verify it actually stops ...
    LOG.info("Pausing workflow");
    _driver.stop(workflowName);
    TestUtil.pollForWorkflowState(_manager, workflowName, TaskState.STOPPED);
    // ... then resume and expect it to run to completion.
    LOG.info("Resuming workflow");
    _driver.resume(workflowName);
    TestUtil.pollForWorkflowState(_manager, workflowName, TaskState.COMPLETED);
}
/**
 * Dummy task that busy-waits for a configured delay, polling a cancellation flag
 * every 50ms. Reports the remaining time (clamped at zero) as its result info.
 */
public static class ReindexTask implements Task {
    // Total run time in milliseconds, read from the job command config.
    private final long _delay;
    // Set asynchronously by the framework via cancel(); volatile so run() sees it.
    private volatile boolean _canceled;

    public ReindexTask(TaskCallbackContext context) {
        JobConfig jobCfg = context.getJobConfig();
        Map<String, String> cfg = jobCfg.getJobCommandConfigMap();
        if (cfg == null) {
            cfg = Collections.emptyMap();
        }
        // Default to 200ms when no Timeout entry is configured.
        _delay = cfg.containsKey(TIMEOUT_CONFIG) ? Long.parseLong(cfg.get(TIMEOUT_CONFIG)) : 200L;
    }

    @Override
    public TaskResult run() {
        long expiry = System.currentTimeMillis() + _delay;
        while (System.currentTimeMillis() < expiry) {
            if (_canceled) {
                return new TaskResult(TaskResult.Status.CANCELED, remaining(expiry));
            }
            sleep(50);
        }
        return new TaskResult(TaskResult.Status.COMPLETED, remaining(expiry));
    }

    @Override
    public void cancel() {
        _canceled = true;
    }

    /** Time left until {@code expiry} in ms, clamped at zero, as a string. */
    private static String remaining(long expiry) {
        long timeLeft = expiry - System.currentTimeMillis();
        return String.valueOf(timeLeft < 0 ? 0 : timeLeft);
    }

    private static void sleep(long d) {
        try {
            Thread.sleep(d);
        } catch (InterruptedException e) {
            // BUGFIX: restore the interrupt status instead of swallowing it with
            // printStackTrace(), so the interruption remains observable to callers.
            Thread.currentThread().interrupt();
        }
    }
}
}
| |
/*
This file is part of the GhostDriver by Ivan De Marino <http://ivandemarino.me>.
Copyright (c) 2012-2014, Ivan De Marino <http://ivandemarino.me>
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package ghostdriver;
import ghostdriver.server.EmptyPageHttpRequestCallback;
import ghostdriver.server.HttpRequestCallback;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.openqa.selenium.Cookie;
import org.openqa.selenium.InvalidCookieDomainException;
import org.openqa.selenium.JavascriptExecutor;
import org.openqa.selenium.WebDriver;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.Date;
import static org.junit.Assert.*;
/**
 * Cookie-handling tests for GhostDriver: getting, adding, modifying and deleting
 * cookies through the WebDriver API, backed by a local test HTTP server.
 */
public class CookieTest extends BaseTestWithServer {
    private WebDriver driver;

    /** Serves an empty page that sets two cookies: "test" (with expiry) and "test2" (session-only). */
    private final static HttpRequestCallback COOKIE_SETTING_CALLBACK = new EmptyPageHttpRequestCallback() {
        @Override
        public void call(HttpServletRequest req, HttpServletResponse res) throws IOException {
            super.call(req, res);
            javax.servlet.http.Cookie cookie = new javax.servlet.http.Cookie("test", "test");
            cookie.setDomain(".localhost");
            cookie.setMaxAge(360);
            res.addCookie(cookie);
            cookie = new javax.servlet.http.Cookie("test2", "test2");
            cookie.setDomain(".localhost");
            res.addCookie(cookie);
        }
    };

    /** Serves an empty page without setting any cookie. */
    private final static HttpRequestCallback EMPTY_CALLBACK = new EmptyPageHttpRequestCallback();

    @Before
    public void setup() {
        driver = getDriver();
    }

    @After
    public void cleanUp() {
        driver.manage().deleteAllCookies();
    }

    private void goToPage(String path) {
        driver.get(server.getBaseUrl() + path);
    }

    private void goToPage() {
        goToPage("");
    }

    private Cookie[] getCookies() {
        return driver.manage().getCookies().toArray(new Cookie[]{});
    }

    @Test
    public void gettingAllCookies() {
        server.setHttpHandler("GET", COOKIE_SETTING_CALLBACK);
        goToPage();
        Cookie[] cookies = getCookies();
        assertEquals(2, cookies.length);
        Cookie cookie = driver.manage().getCookieNamed("test");
        assertEquals("test", cookie.getName());
        assertEquals("test", cookie.getValue());
        assertEquals(".localhost", cookie.getDomain());
        assertEquals("/", cookie.getPath());
        assertNotNull(cookie.getExpiry());
        assertFalse(cookie.isSecure());
        Cookie cookie2 = driver.manage().getCookieNamed("test2");
        assertEquals("test2", cookie2.getName());
        assertEquals("test2", cookie2.getValue());
        assertEquals(".localhost", cookie2.getDomain());
        assertEquals("/", cookie2.getPath());
        assertFalse(cookie2.isSecure());
        // Session cookie: no expiry set by the server.
        assertNull(cookie2.getExpiry());
    }

    @Test
    public void gettingAllCookiesOnANonCookieSettingPage() {
        server.setHttpHandler("GET", EMPTY_CALLBACK);
        goToPage();
        assertEquals(0, getCookies().length);
    }

    @Test
    public void deletingAllCookies() {
        server.setHttpHandler("GET", COOKIE_SETTING_CALLBACK);
        goToPage();
        driver.manage().deleteAllCookies();
        assertEquals(0, getCookies().length);
    }

    @Test
    public void deletingOneCookie() {
        server.setHttpHandler("GET", COOKIE_SETTING_CALLBACK);
        goToPage();
        driver.manage().deleteCookieNamed("test");
        Cookie[] cookies = getCookies();
        assertEquals(1, cookies.length);
        assertEquals("test2", cookies[0].getName());
    }

    @Test
    public void addingACookie() {
        server.setHttpHandler("GET", EMPTY_CALLBACK);
        goToPage();
        driver.manage().addCookie(new Cookie("newCookie", "newValue", ".localhost", "/", null, false, false));
        Cookie[] cookies = getCookies();
        assertEquals(1, cookies.length);
        assertEquals("newCookie", cookies[0].getName());
        assertEquals("newValue", cookies[0].getValue());
        assertEquals(".localhost", cookies[0].getDomain());
        assertEquals("/", cookies[0].getPath());
        assertFalse(cookies[0].isSecure());
        assertFalse(cookies[0].isHttpOnly());
    }

    @Test
    public void modifyingACookie() {
        server.setHttpHandler("GET", COOKIE_SETTING_CALLBACK);
        goToPage();
        // Re-adding "test" with a new value should overwrite the server-set cookie.
        driver.manage().addCookie(new Cookie("test", "newValue", "localhost", "/", null, false));
        Cookie[] cookies = getCookies();
        assertEquals(2, cookies.length);
        assertEquals("test", cookies[1].getName());
        assertEquals("newValue", cookies[1].getValue());
        assertEquals(".localhost", cookies[1].getDomain());
        assertEquals("/", cookies[1].getPath());
        assertFalse(cookies[1].isSecure());
        assertEquals("test2", cookies[0].getName());
        assertEquals("test2", cookies[0].getValue());
        assertEquals(".localhost", cookies[0].getDomain());
        assertEquals("/", cookies[0].getPath());
        assertFalse(cookies[0].isSecure());
    }

    @Test
    public void shouldRetainCookieInfo() {
        server.setHttpHandler("GET", EMPTY_CALLBACK);
        goToPage();
        // Added cookie (in a sub-path - allowed)
        Cookie addedCookie =
                new Cookie.Builder("fish", "cod")
                        .expiresOn(new Date(System.currentTimeMillis() + 100 * 1000)) //< now + 100sec
                        .path("/404")
                        .domain("localhost")
                        .build();
        driver.manage().addCookie(addedCookie);
        // Search cookie on the root-path and fail to find it
        Cookie retrieved = driver.manage().getCookieNamed("fish");
        assertNull(retrieved);
        // Go to the "/404" sub-path (to find the cookie)
        goToPage("404");
        retrieved = driver.manage().getCookieNamed("fish");
        assertNotNull(retrieved);
        // Check that it all matches
        assertEquals(addedCookie.getName(), retrieved.getName());
        assertEquals(addedCookie.getValue(), retrieved.getValue());
        assertEquals(addedCookie.getExpiry(), retrieved.getExpiry());
        assertEquals(addedCookie.isSecure(), retrieved.isSecure());
        assertEquals(addedCookie.getPath(), retrieved.getPath());
        assertTrue(retrieved.getDomain().contains(addedCookie.getDomain()));
    }

    @Test(expected = InvalidCookieDomainException.class)
    public void shouldNotAllowToCreateCookieOnDifferentDomain() {
        goToPage();
        // Added cookie (in a sub-path) on a foreign domain: must be rejected.
        Cookie addedCookie = new Cookie.Builder("fish", "cod")
                .expiresOn(new Date(System.currentTimeMillis() + 100 * 1000)) //< now + 100sec
                .path("/404")
                .domain("github.com")
                .build();
        driver.manage().addCookie(addedCookie);
    }

    @Test
    public void shouldAllowToDeleteCookiesEvenIfNotSet() {
        WebDriver d = getDriver();
        d.get("https://github.com/");
        // Clear all cookies
        assertTrue(d.manage().getCookies().size() > 0);
        d.manage().deleteAllCookies();
        // BUGFIX: JUnit assertEquals takes (expected, actual) — the original had them reversed.
        assertEquals(0, d.manage().getCookies().size());
        // All cookies deleted, call deleteAllCookies again. Should be a no-op.
        d.manage().deleteAllCookies();
        d.manage().deleteCookieNamed("non_existing_cookie");
        assertEquals(0, d.manage().getCookies().size());
    }

    @Test
    public void shouldAllowToSetCookieThatIsAlreadyExpired() {
        WebDriver d = getDriver();
        d.get("https://github.com/");
        // Clear all cookies
        assertTrue(d.manage().getCookies().size() > 0);
        d.manage().deleteAllCookies();
        assertEquals(0, d.manage().getCookies().size());
        // Added cookie that expires in the past: should be discarded immediately.
        Cookie addedCookie = new Cookie.Builder("expired", "yes")
                .expiresOn(new Date(System.currentTimeMillis() - 1000)) //< now - 1 second
                .build();
        d.manage().addCookie(addedCookie);
        Cookie cookie = d.manage().getCookieNamed("expired");
        assertNull(cookie);
    }

    @Test(expected = Exception.class)
    public void shouldThrowExceptionIfAddingCookieBeforeLoadingAnyUrl() {
        // NOTE: At the time of writing, this test doesn't pass with FirefoxDriver.
        // ChromeDriver is fine instead.
        String xval = "123456789101112"; //< detro: I buy you a beer if you guess what am I quoting here
        WebDriver d = getDriver();
        // Set cookie, without opening any page: should throw an exception
        d.manage().addCookie(new Cookie("x", xval));
    }

    @Test
    public void shouldBeAbleToCreateCookieViaJavascriptOnGoogle() {
        String ckey = "cookiekey";
        String cval = "cookieval";
        WebDriver d = getDriver();
        d.get("http://www.google.com");
        JavascriptExecutor js = (JavascriptExecutor) d;
        // Of course, no cookie yet(!)
        Cookie c = d.manage().getCookieNamed(ckey);
        assertNull(c);
        // Attempt to create cookie on multiple Google domains
        js.executeScript("javascript:(" +
                "function() {" +
                " cook = document.cookie;" +
                " begin = cook.indexOf('"+ckey+"=');" +
                " var val;" +
                " if (begin !== -1) {" +
                " var end = cook.indexOf(\";\",begin);" +
                " if (end === -1)" +
                " end=cook.length;" +
                " val=cook.substring(begin+11,end);" +
                " }" +
                " val = ['"+cval+"'];" +
                " if (val) {" +
                " var d=Array('com','co.jp','ca','fr','de','co.uk','it','es','com.br');" +
                " for (var i = 0; i < d.length; i++) {" +
                " document.cookie = '"+ckey+"='+val+';path=/;domain=.google.'+d[i]+'; ';" +
                " }" +
                " }" +
                "})();");
        c = d.manage().getCookieNamed(ckey);
        assertNotNull(c);
        assertEquals(cval, c.getValue());
        // Set cookie as empty
        js.executeScript("javascript:(" +
                "function() {" +
                " var d = Array('com','co.jp','ca','fr','de','co.uk','it','cn','es','com.br');" +
                " for(var i = 0; i < d.length; i++) {" +
                " document.cookie='"+ckey+"=;path=/;domain=.google.'+d[i]+'; ';" +
                " }" +
                "})();");
        c = d.manage().getCookieNamed(ckey);
        assertNotNull(c);
        assertEquals("", c.getValue());
    }

    @Test
    public void addingACookieWithDefaults() {
        server.setHttpHandler("GET", EMPTY_CALLBACK);
        goToPage();
        long startTime = new Date().getTime();
        driver.manage().addCookie(new Cookie("newCookie", "newValue"));
        Cookie[] cookies = getCookies();
        assertEquals(1, cookies.length);
        assertEquals("newCookie", cookies[0].getName());
        assertEquals("newValue", cookies[0].getValue());
        assertEquals(".localhost", cookies[0].getDomain());
        assertEquals("/", cookies[0].getPath());
        assertFalse(cookies[0].isSecure());
        assertFalse(cookies[0].isHttpOnly());
        // Default expiry: roughly 19 years (599184000000 ms) in the future.
        assertTrue(startTime + 599184000000L <= cookies[0].getExpiry().getTime());
    }
}
| |
package net.mostlyoriginal.game.system.tutorial;
import com.artemis.Aspect;
import com.artemis.ComponentMapper;
import com.artemis.Entity;
import com.artemis.annotations.Wire;
import com.artemis.systems.EntityProcessingSystem;
import com.artemis.utils.EntityBuilder;
import net.mostlyoriginal.api.component.basic.Pos;
import net.mostlyoriginal.api.component.graphics.Anim;
import net.mostlyoriginal.api.component.graphics.Invisible;
import net.mostlyoriginal.api.component.graphics.Renderable;
import net.mostlyoriginal.api.event.common.Subscribe;
import net.mostlyoriginal.game.component.ship.ShipComponent;
import net.mostlyoriginal.game.component.tutorial.TutorialStep;
import net.mostlyoriginal.game.component.ui.Button;
import net.mostlyoriginal.game.system.event.SelectConstructionEvent;
import net.mostlyoriginal.game.system.event.StartConstructionEvent;
import net.mostlyoriginal.game.system.ship.ProductionSimulationSystem;
import net.mostlyoriginal.game.system.ui.ButtonSystem;
import net.mostlyoriginal.game.system.ui.ConstructionSystem;
import net.mostlyoriginal.game.system.ui.DilemmaSystem;
/**
* @author Daan van Yperen
*/
@Wire
public class TutorialSystem extends EntityProcessingSystem {
protected ComponentMapper<Pos> mPos;
protected ComponentMapper<Anim> mAnim;
protected ComponentMapper<Button> mButton;
int step = 0;
private Step currentStep = null;
public Entity arrow;
private ConstructionSystem constructionSystem;
private ButtonSystem buttonSystem;
public float payoutDelay = 0;
public Step payoutStepNr = null;
private ProductionSimulationSystem productionSimulationSystem;
private DilemmaSystem dilemmaSystem;
public int requiredRepeats = 0;
public TutorialSystem() {
super(Aspect.getAspectForAll(TutorialStep.class));
}
@Override
protected void initialize() {
super.initialize();
arrow = new EntityBuilder(world).with(new Pos(), new Anim("arrow"), new Renderable(9000), new Invisible()).build();
}
public void activateNextStep() {
if (currentStep == null) {
currentStep = Step.SELECT_ENGINE;
delayedInitStep(currentStep);
} else {
final int nextStep = currentStep.ordinal() + 1;
if (nextStep < Step.values().length) {
arrow.edit().add(new Invisible());
payoutDelay = 0.5f;
payoutStepNr = currentStep;
currentStep = Step.values()[nextStep];
initStep(currentStep);
}
}
}
private void enableAllConstructButtons() {
for (Entity entity : constructionSystem.constructionButton.values()) {
setConstructionButton(entity, true);
}
}
private void enableConstructionButton(Entity entity) {
setConstructionButton(entity, true);
}
private void setConstructionButton(Entity entity, boolean enable) {
final Button button = mButton.get(entity);
button.manualDisable = !enable;
}
private void disableAllConstructButtons() {
for (Entity entity : constructionSystem.constructionButton.values()) {
setConstructionButton(entity, false);
}
}
private void updateHint(Step currentStep) {
String text = null;
if ( currentStep == null ) return;
switch (currentStep) {
case SELECT_ENGINE:
text = "Select the engine.";
break;
case PLACE_ENGINE:
text = "Place engine by clicking any of the red indicators.";
break;
case SELECT_STORAGEPOD:
text = "Storage pods hold fuel, food and biogel.";
break;
case PLACE_STORAGEPOD:
text = "Place "+requiredRepeats+" more storage pod(s).";
break;
case FINISHED:
break;
}
if ( text != null ) {
buttonSystem.hintlabel.text = text;
}
}
private void payoutStep(Step step) {
delayedInitStep(currentStep);
switch (step) {
case SELECT_ENGINE:
break;
case PLACE_ENGINE:
productionSimulationSystem.finishAllConstruction();
break;
case SELECT_STORAGEPOD:
break;
case PLACE_STORAGEPOD:
productionSimulationSystem.finishAllConstruction();
break;
case FINISHED:
break;
}
}
/** Called immediately after a step becomes active */
private void initStep(Step step) {
requiredRepeats = 1;
switch (step) {
case PLACE_ENGINE:
disableAllConstructButtons();
break;
case PLACE_STORAGEPOD:
requiredRepeats = 3;
break;
}
}
/** called with a short delay after step becomes active. */
private void delayedInitStep(Step step) {
switch (step) {
case SELECT_ENGINE:
disableAllConstructButtons();
highlightConstructButton(ShipComponent.Type.ENGINE);
break;
case PLACE_ENGINE:
break;
case SELECT_STORAGEPOD:
highlightConstructButton(ShipComponent.Type.STORAGEPOD);
break;
case PLACE_STORAGEPOD:
highlightConstructButton(ShipComponent.Type.STORAGEPOD);
break;
case FINISHED:
enableAllConstructButtons();
dilemmaSystem.afterTutorialDilemma();
break;
}
}
private void highlightConstructButton(ShipComponent.Type type) {
if ( arrow == null ) return;
final Entity button = constructionSystem.constructionButton.get(type);
final Pos pos = mPos.get(button);
final Pos arrowPos = mPos.get(arrow);
arrowPos.x = pos.x + 4;
arrowPos.y = pos.y + 16;
enableConstructionButton(button);
arrow.edit().remove(Invisible.class);
}
public void complete(Step step) {
if (step == currentStep) {
requiredRepeats--;
if (requiredRepeats <= 0) {
activateNextStep();
}
}
}
@Override
protected void begin() {
updateHint(currentStep);
if (payoutDelay > 0) {
payoutDelay -= world.delta;
if (payoutDelay <= 0) {
payoutStep(payoutStepNr);
}
}
}
@Override
protected void process(Entity e) {
}
/** Construction started on parts. */
@Subscribe
public void handleConstruction( StartConstructionEvent event )
{
switch ( event.type ) {
case ENGINE : complete(TutorialSystem.Step.PLACE_ENGINE);
case STORAGEPOD : complete(TutorialSystem.Step.PLACE_STORAGEPOD);
}
}
/** Constructable selected in UI. */
@Subscribe
public void handleConstructableSelection ( SelectConstructionEvent event )
{
switch ( event.type ) {
case ENGINE : complete(TutorialSystem.Step.SELECT_ENGINE);
case STORAGEPOD : complete(TutorialSystem.Step.SELECT_STORAGEPOD);
}
}
public static enum Step {
SELECT_ENGINE,
PLACE_ENGINE,
SELECT_STORAGEPOD,
PLACE_STORAGEPOD,
FINISHED;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.felix.ipojo.handler.temporal;
import java.lang.reflect.*;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.apache.felix.ipojo.FieldInterceptor;
import org.apache.felix.ipojo.MethodInterceptor;
import org.apache.felix.ipojo.Nullable;
import org.apache.felix.ipojo.PrimitiveHandler;
import org.apache.felix.ipojo.handler.temporal.ServiceUsage.Usage;
import org.apache.felix.ipojo.handlers.dependency.NullableObject;
import org.apache.felix.ipojo.util.DependencyModel;
import org.osgi.framework.BundleContext;
import org.osgi.framework.Filter;
import org.osgi.framework.ServiceReference;
/**
 * Temporal dependency. A temporal dependency waits (blocks) for the availability
 * of the service. If no provider arrives in the specified amount of time, a
 * runtime exception is thrown.
* @author <a href="mailto:dev@felix.apache.org">Felix Project Team</a>
*/
public class TemporalDependency extends DependencyModel implements
FieldInterceptor, MethodInterceptor {
    /**
     * The timeout, in milliseconds, to wait for a provider before applying
     * the onTimeout policy.
     */
    private long m_timeout;
    /**
     * The default-implementation class name (used with the
     * Default-Implementation policy).
     */
    private String m_di;
    /**
     * The {@link Nullable} object or Default-Implementation instance if used.
     */
    private Object m_nullableObject;
    /**
     * The handler managing this dependency.
     */
    private PrimitiveHandler m_handler;
    /**
     * The timeout policy.
     * Null injects null; {@link Nullable} injects a nullable object or
     * an array with a nullable object; Default-Implementation injects an object
     * created from the specified implementation class (or an array with it);
     * Empty array injects an empty array (must be an aggregate dependency);
     * no policy (0) throws a runtime exception when the timeout occurs.
     */
    private int m_policy;
    /**
     * Whether the dependency is injected as a collection.
     * The field must be of the {@link Collection} type.
     */
    private boolean m_collection;
    /**
     * Enables the proxy mode.
     */
    private boolean m_proxy;
    /**
     * Service Usage (Thread Local); only initialized when proxy mode is disabled.
     */
    private ServiceUsage m_usage;
    /**
     * The proxy object.
     * This field is used for scalar proxied temporal dependency.
     */
    private Object m_proxyObject;
    /**
     * Creates a temporal dependency.
     * @param spec the service specification
     * @param agg is the dependency aggregate?
     * @param collection true if the dependency field is a collection
     * @param proxy enables the proxy-mode
     * @param filter the LDAP filter
     * @param context the service context
     * @param timeout the timeout in milliseconds
     * @param policy the onTimeout policy
     * @param defaultImpl the class used as default-implementation
     * @param handler the handler managing this dependency
     */
    public TemporalDependency(Class spec, boolean agg, boolean collection, boolean proxy, Filter filter,
            BundleContext context, long timeout, int policy,
            String defaultImpl, TemporalHandler handler) {
        super(spec, agg, true, filter, null,
                DependencyModel.DYNAMIC_BINDING_POLICY, context, handler, handler.getInstanceManager());
        m_di = defaultImpl;
        m_policy = policy;
        m_timeout = timeout;
        m_handler = handler;
        m_collection = collection;
        m_proxy = proxy;
        if (! proxy) { // No proxy => initialize the Thread local.
            m_usage = new ServiceUsage();
        } else if (proxy && ! agg) { // Scalar proxy => Create the proxy.
            ProxyFactory proxyFactory = new ProxyFactory(this.getClass().getClassLoader());
            m_proxyObject = proxyFactory.getProxy(getSpecification(), this);
        }
    }
    /**
     * The dependency has been reconfigured. Not supported by temporal
     * dependencies: always throws.
     * @param arg0 new service references
     * @param arg1 old service references
     * @throws UnsupportedOperationException always
     * @see org.apache.felix.ipojo.util.DependencyModel#onDependencyReconfiguration(org.osgi.framework.ServiceReference[],
     * org.osgi.framework.ServiceReference[])
     */
    public void onDependencyReconfiguration(ServiceReference[] arg0,
            ServiceReference[] arg1) {
        throw new UnsupportedOperationException(
                "Reconfiguration not yet supported");
    }
    /**
     * A provider arrives. Wakes up every thread blocked in
     * {@link #onGet(Object, String, Object)} or {@link #getService()} waiting
     * for a provider.
     * @param ref service reference of the new provider.
     * @see org.apache.felix.ipojo.util.DependencyModel#onServiceArrival(org.osgi.framework.ServiceReference)
     */
    public synchronized void onServiceArrival(ServiceReference ref) {
        // Notify if a thread is waiting.
        notifyAll();
    }
    /**
     * A provider leaves. Nothing to do: waiters time out on their own.
     * @param arg0 the leaving service reference.
     * @see org.apache.felix.ipojo.util.DependencyModel#onServiceDeparture(org.osgi.framework.ServiceReference)
     */
    public void onServiceDeparture(ServiceReference arg0) { }
    /**
     * A provider is modified. Nothing to do.
     * @param arg0 the modified service reference.
     */
    public void onServiceModification(ServiceReference arg0) { }
    /**
     * The code requires a value for the monitored field. If providers are
     * available, the method returns the service object(s) immediately. Else, the
     * thread is blocked until an arrival. If no provider arrives during the
     * specified amount of time, the onTimeout policy is applied.
     * @param arg0 POJO instance asking for the service
     * @param arg1 field name
     * @param arg2 previous value
     * @return the object to inject.
     * @see org.apache.felix.ipojo.FieldInterceptor#onGet(java.lang.Object, java.lang.String, java.lang.Object)
     */
    public synchronized Object onGet(Object arg0, String arg1, Object arg2) {
        // Non-proxy mode: if the thread-local usage already caches an object
        // (we are inside a method flow), reuse it.
        if (! m_proxy) {
            Usage usage = (Usage) m_usage.get();
            if (usage.m_stack > 0) {
                return usage.m_object;
            }
        }
        ServiceReference[] refs = getServiceReferences();
        if (refs != null) {
            // Immediate return.
            return getServiceObjects(refs);
        } else {
            // Begin to wait ...
            long enter = System.currentTimeMillis();
            boolean exhausted = false;
            // NOTE(review): the method is already synchronized, so this inner block is
            // re-entrant; the loop polls with wait(1) until a provider arrives or the
            // timeout budget is exhausted.
            synchronized (this) {
                while (getServiceReference() == null && !exhausted) {
                    try {
                        wait(1);
                    } catch (InterruptedException e) {
                        // Interrupted: keep waiting until the timeout is exhausted.
                    } finally {
                        long end = System.currentTimeMillis();
                        exhausted = (end - enter) > m_timeout;
                    }
                }
            }
            // Check whether we woke up because of the timeout or an arrival.
            if (exhausted) {
                return onTimeout();
            } else {
                // NOTE(review): refs is re-read here; if the provider departs between the
                // wake-up and this call, refs could be null again — confirm upstream guarantees.
                refs = getServiceReferences();
                return getServiceObjects(refs);
            }
        }
    }
    /**
     * A POJO method will be invoked. Increments the thread-local usage stack
     * (non-proxy mode only) so cached service objects survive nested calls.
     * @param pojo : Pojo object
     * @param method : called method
     * @param args : arguments
     * @see org.apache.felix.ipojo.MethodInterceptor#onEntry(java.lang.Object, java.lang.reflect.Member, java.lang.Object[])
     */
    public void onEntry(Object pojo, Member method, Object[] args) {
        // m_usage is null in proxy mode; nothing to track in that case.
        if (m_usage != null) {
            Usage usage = (Usage) m_usage.get();
            if (usage.m_stack > 0) {
                usage.inc();
                m_usage.set(usage); // Set the Thread local as value has been modified
            }
        }
    }
    /**
     * A POJO method has thrown an error.
     * This method does nothing; cleanup happens in {@link #onFinally(Object, Member)}.
     * @param pojo : POJO object.
     * @param method : Method object.
     * @param throwable : thrown error
     * @see org.apache.felix.ipojo.MethodInterceptor#onError(java.lang.Object, java.lang.reflect.Member, java.lang.Throwable)
     */
    public void onError(Object pojo, Member method, Throwable throwable) {
        // Nothing to do : wait onFinally
    }
    /**
     * A POJO method has returned.
     * This method does nothing; cleanup happens in {@link #onFinally(Object, Member)}.
     * @param pojo : POJO object.
     * @param member : Method object.
     * @param returnedObj : returned object (null for void method)
     * @see org.apache.felix.ipojo.MethodInterceptor#onExit(java.lang.Object, java.lang.reflect.Member, java.lang.Object)
     */
    public void onExit(Object pojo, Member member, Object returnedObj) {
        // Nothing to do : wait onFinally
    }
    /**
     * A POJO method is finished. Decrements the thread-local usage stack and,
     * when the outermost method flow exits, releases the cached service objects.
     * @param pojo : POJO object.
     * @param method : Method object.
     * @see org.apache.felix.ipojo.MethodInterceptor#onFinally(java.lang.Object, java.lang.reflect.Member)
     */
    public void onFinally(Object pojo, Member method) {
        // m_usage is null in proxy mode; nothing to release in that case.
        if (m_usage != null) {
            Usage usage = (Usage) m_usage.get();
            if (usage.m_stack > 0) {
                if (usage.dec()) {
                    // Exit the method flow => Release all objects
                    usage.clear();
                    m_usage.set(usage); // Set the Thread local as value has been modified
                }
            }
        }
    }
    /**
     * Creates and returns the object to inject in the dependency.
     * This method handles aggregate, collection and proxy cases.
     * @param refs the available service references; assumed non-null and
     * non-empty ({@code refs[0]} is read in the scalar case)
     * @return the object to inject. Can be a 'simple' object, a proxy,
     * a collection or an array.
     */
    private Object getServiceObjects(ServiceReference [] refs) {
        if (m_proxy) {
            // The constructor only pre-builds m_proxyObject for scalar proxies,
            // so a null here means the aggregate (collection) proxy case.
            if (m_proxyObject == null) {
                return new ServiceCollection(this);
            } else {
                return m_proxyObject;
            }
        } else {
            // Initialize the thread-local usage if not already touched.
            Usage usage = (Usage) m_usage.get();
            if (usage.m_stack == 0) { // uninitialized usage.
                if (isAggregate()) {
                    if (m_collection) {
                        Collection svc = new ArrayList(refs.length); // Use an array list as collection implementation.
                        for (int i = 0; i < refs.length; i++) {
                            svc.add(getService(refs[i]));
                        }
                        usage.m_object = svc;
                    } else {
                        Object[] svc = (Object[]) Array.newInstance(getSpecification(),
                                refs.length);
                        for (int i = 0; i < svc.length; i++) {
                            svc[i] = getService(refs[i]);
                        }
                        usage.m_object = svc;
                    }
                } else {
                    usage.m_object = getService(refs[0]);
                }
                usage.inc(); // Start the caching, so set the stack level to 1
                m_usage.set(usage);
            }
            return usage.m_object;
        }
    }
    /**
     * Called by the proxy to get a service object to delegate a method.
     * This method manages the waited time and the onTimeout policies.
     * @return a service object or a nullable/default-implementation object.
     * @throws RuntimeException when the timeout occurs and the policy yields no object.
     */
    public Object getService() {
        ServiceReference ref = getServiceReference();
        if (ref != null) {
            return getService(ref); // Return immediately the service object.
        } else {
            // Begin to wait ...
            long enter = System.currentTimeMillis();
            boolean exhausted = false;
            // Poll with wait(1) until a provider arrives or the timeout is exhausted.
            synchronized (this) {
                while (ref == null && !exhausted) {
                    try {
                        wait(1);
                    } catch (InterruptedException e) {
                        // Interrupted: keep waiting until the timeout is exhausted.
                    } finally {
                        long end = System.currentTimeMillis();
                        exhausted = (end - enter) > m_timeout;
                        ref = getServiceReference();
                    }
                }
            }
            // Check whether we woke up because of the timeout or an arrival.
            if (exhausted) {
                Object obj = onTimeout(); // May throw the Runtime Exception
                if (obj == null) {
                    throw new RuntimeException("No service available"); // Runtime Exception to be consistent with iPOJO Core.
                } else {
                    return obj; // Return a nullable or DI
                }
            } else {
                // If not exhausted, ref is not null.
                return getService(ref);
            }
        }
    }
/**
* Start method. Initializes the nullable object.
* @see org.apache.felix.ipojo.util.DependencyModel#start()
*/
public void start() {
super.start();
switch (m_policy) {
case TemporalHandler.NULL:
m_nullableObject = null;
break;
case TemporalHandler.NULLABLE:
// To load the proxy we use the POJO class loader. Indeed, this
// classloader imports iPOJO (so can access to Nullable) and has
// access to the service specification.
try {
m_nullableObject = Proxy.newProxyInstance(m_handler
.getInstanceManager().getClazz().getClassLoader(),
new Class[] { getSpecification(), Nullable.class },
new NullableObject()); // NOPMD
if (isAggregate()) {
if (m_collection) {
List list = new ArrayList(1);
list.add(m_nullableObject);
m_nullableObject = list;
} else {
Object[] array = (Object[]) Array.newInstance(
getSpecification(), 1);
array[0] = m_nullableObject;
m_nullableObject = array;
}
}
} catch (NoClassDefFoundError e) {
// A NoClassDefFoundError is thrown if the specification
// uses a
// class not accessible by the actual instance.
// It generally comes from a missing import.
throw new IllegalStateException(
"Cannot create the Nullable object, a referenced class cannot be loaded: "
+ e.getMessage());
}
break;
case TemporalHandler.DEFAULT_IMPLEMENTATION:
// Create the default-implementation object.
try {
Class clazz = m_handler.getInstanceManager().getContext()
.getBundle().loadClass(m_di);
m_nullableObject = clazz.newInstance();
} catch (IllegalAccessException e) {
throw new IllegalStateException(
"Cannot load the default-implementation " + m_di
+ " : " + e.getMessage());
} catch (InstantiationException e) {
throw new IllegalStateException(
"Cannot load the default-implementation " + m_di
+ " : " + e.getMessage());
} catch (ClassNotFoundException e) {
throw new IllegalStateException(
"Cannot load the default-implementation " + m_di
+ " : " + e.getMessage());
}
if (isAggregate()) {
if (m_collection) {
List list = new ArrayList(1);
list.add(m_nullableObject);
m_nullableObject = list;
} else {
Object[] array = (Object[]) Array.newInstance(
getSpecification(), 1);
array[0] = m_nullableObject;
m_nullableObject = array;
}
}
break;
case TemporalHandler.EMPTY:
if (! m_collection) {
m_nullableObject = Array.newInstance(getSpecification(), 0);
} else { // Empty collection
m_nullableObject = new ArrayList(0);
}
break;
default: // Cannot occurs
break;
}
}
/**
* Stop method. Just releases the reference on the nullable object.
* @see org.apache.felix.ipojo.util.DependencyModel#stop()
*/
public void stop() {
super.stop();
m_nullableObject = null;
m_proxyObject = null;
}
/**
* The monitored field receives a value. Nothing to do.
* @param arg0 POJO setting the value.
* @param arg1 field name
* @param arg2 received value
* @see org.apache.felix.ipojo.FieldInterceptor#onSet(java.lang.Object, java.lang.String, java.lang.Object)
*/
public void onSet(Object arg0, String arg1, Object arg2) { }
/**
* Implements the timeout policy according to the specified configuration.
* @return the object to return when the timeout occurs.
*/
Object onTimeout() {
switch (m_policy) {
case TemporalHandler.NULL:
case TemporalHandler.NULLABLE:
case TemporalHandler.DEFAULT_IMPLEMENTATION:
case TemporalHandler.EMPTY:
return m_nullableObject;
default:
// Throws a runtime exception
throw new RuntimeException("Service "
+ getSpecification().getName()
+ " unavailable : timeout");
}
}
long getTimeout() {
return m_timeout;
}
/**
* Creates proxy object for proxied scalar dependencies.
*/
private class ProxyFactory extends ClassLoader {
/**
* Handler classloader, used to load the temporal dependency class.
*/
private ClassLoader m_handlerCL;
/**
* Creates the proxy classloader.
* @param parent the handler classloader.
*/
public ProxyFactory(ClassLoader parent) {
this.m_handlerCL = parent;
}
/**
* Loads a proxy class generated for the given (interface) class.
* @param clazz the service specification to proxy
* @return the Class object of the proxy.
*/
protected Class getProxyClass(Class clazz) {
byte[] clz = ProxyGenerator.dumpProxy(clazz); // Generate the proxy.
return defineClass(clazz.getName() + "$$Proxy", clz, 0, clz.length);
}
/**
* Create a proxy object for the given specification. The proxy
* uses the given temporal dependency to get the service object.
* @param spec the service specification (interface)
* @param dep the temporal dependency used to get the service
* @return the proxy object.
*/
public Object getProxy(Class spec, TemporalDependency dep) {
try {
Class clazz = getProxyClass(getSpecification());
Constructor constructor = clazz.getConstructor(new Class[] {dep.getClass()}); // The proxy constructor
return constructor.newInstance(new Object[] {dep});
} catch (Throwable e) {
m_handler.error("Cannot create the proxy object", e);
m_handler.getInstanceManager().stop();
return null;
}
}
/**
* Loads the given class.
* This class use the classloader of the specification class
* or the handler class loader.
* @param name the class name
* @return the class object
* @throws ClassNotFoundException if the class is not found by the two classloaders.
* @see java.lang.ClassLoader#loadClass(java.lang.String)
*/
public Class loadClass(String name) throws ClassNotFoundException {
try {
return m_handler.getInstanceManager().getContext().getBundle().loadClass(name);
} catch (ClassNotFoundException e) {
return m_handlerCL.loadClass(name);
}
}
}
}
| |
/*
* Copyright 2010-2013 Ning, Inc.
* Copyright 2014-2015 Groupon, Inc
* Copyright 2014-2015 The Billing Project, LLC
*
* The Billing Project licenses this file to you under the Apache License, version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.killbill.billing.beatrix.extbus;
import java.util.UUID;
import javax.annotation.Nullable;
import javax.inject.Inject;
import javax.inject.Named;
import org.killbill.billing.ObjectType;
import org.killbill.billing.callcontext.InternalCallContext;
import org.killbill.billing.entitlement.EntitlementTransitionType;
import org.killbill.billing.entitlement.api.BlockingStateType;
import org.killbill.billing.events.AccountChangeInternalEvent;
import org.killbill.billing.events.AccountCreationInternalEvent;
import org.killbill.billing.events.BlockingTransitionInternalEvent;
import org.killbill.billing.events.BroadcastInternalEvent;
import org.killbill.billing.events.BusInternalEvent;
import org.killbill.billing.events.BusInternalEvent.BusInternalEventType;
import org.killbill.billing.events.ControlTagCreationInternalEvent;
import org.killbill.billing.events.ControlTagDeletionInternalEvent;
import org.killbill.billing.events.CustomFieldCreationEvent;
import org.killbill.billing.events.CustomFieldDeletionEvent;
import org.killbill.billing.events.EntitlementInternalEvent;
import org.killbill.billing.events.InvoiceAdjustmentInternalEvent;
import org.killbill.billing.events.InvoiceCreationInternalEvent;
import org.killbill.billing.events.InvoiceNotificationInternalEvent;
import org.killbill.billing.events.OverdueChangeInternalEvent;
import org.killbill.billing.events.PaymentErrorInternalEvent;
import org.killbill.billing.events.PaymentInfoInternalEvent;
import org.killbill.billing.events.PaymentPluginErrorInternalEvent;
import org.killbill.billing.events.SubscriptionInternalEvent;
import org.killbill.billing.events.TenantConfigChangeInternalEvent;
import org.killbill.billing.events.TenantConfigDeletionInternalEvent;
import org.killbill.billing.events.UserTagCreationInternalEvent;
import org.killbill.billing.events.UserTagDeletionInternalEvent;
import org.killbill.billing.lifecycle.glue.BusModule;
import org.killbill.billing.notification.plugin.api.BroadcastMetadata;
import org.killbill.billing.notification.plugin.api.ExtBusEventType;
import org.killbill.billing.subscription.api.SubscriptionBaseTransitionType;
import org.killbill.billing.util.callcontext.CallOrigin;
import org.killbill.billing.util.callcontext.InternalCallContextFactory;
import org.killbill.billing.util.callcontext.TenantContext;
import org.killbill.billing.util.callcontext.UserType;
import org.killbill.bus.api.BusEvent;
import org.killbill.bus.api.PersistentBus;
import org.killbill.bus.api.PersistentBus.EventBusException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.datatype.joda.JodaModule;
import com.google.common.eventbus.AllowConcurrentEvents;
import com.google.common.eventbus.Subscribe;
/**
 * Listens to every internal Kill Bill bus event and, for the events that have
 * an external representation, posts a {@code DefaultBusExternalEvent} on the
 * external (plugin-facing) bus. Events without a mapping are silently ignored.
 */
public class BeatrixListener {

    private static final Logger log = LoggerFactory.getLogger(BeatrixListener.class);

    private final PersistentBus externalBus;
    private final InternalCallContextFactory internalCallContextFactory;

    // Mapper used to serialize metadata payloads (e.g. broadcast events).
    protected final ObjectMapper objectMapper;

    @Inject
    public BeatrixListener(@Named(BusModule.EXTERNAL_BUS_NAMED) final PersistentBus externalBus,
                           final InternalCallContextFactory internalCallContextFactory) {
        this.externalBus = externalBus;
        this.internalCallContextFactory = internalCallContextFactory;
        // Dedicated mapper: registers Joda types and writes dates as ISO strings
        // rather than numeric timestamps.
        this.objectMapper = new ObjectMapper();
        objectMapper.registerModule(new JodaModule());
        objectMapper.disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS);
    }

    /**
     * Bus callback invoked for every internal event. Translates the event and
     * posts it on the external bus. Failures are logged, never propagated, so
     * a bad event cannot break internal bus dispatching.
     * @param event the internal bus event
     */
    @AllowConcurrentEvents
    @Subscribe
    public void handleAllInternalKillbillEvents(final BusInternalEvent event) {
        final InternalCallContext internalContext = internalCallContextFactory.createInternalCallContext(event.getSearchKey2(), event.getSearchKey1(), "BeatrixListener", CallOrigin.INTERNAL, UserType.SYSTEM, event.getUserToken());
        try {
            final BusEvent externalEvent = computeExtBusEventEntryFromBusInternalEvent(event, internalContext);
            if (externalEvent != null) {
                externalBus.post(externalEvent);
            }
        } catch (final EventBusException | JsonProcessingException e) {
            // Multi-catch: both failure modes get the same best-effort handling.
            log.warn("Failed to dispatch external bus events", e);
        }
    }

    /**
     * Maps an internal bus event to its external representation.
     * @param event the internal event
     * @param context the internal call context created for this event
     * @return the external event, or {@code null} when the internal event has
     *         no external mapping (eventBusType stays null)
     * @throws JsonProcessingException if the metadata payload cannot be serialized
     */
    private BusEvent computeExtBusEventEntryFromBusInternalEvent(final BusInternalEvent event, final InternalCallContext context) throws JsonProcessingException {
        ObjectType objectType = null;
        UUID objectId = null;
        ExtBusEventType eventBusType = null;
        String metaData = null;
        UUID accountId = null;
        switch (event.getBusEventType()) {
            case ACCOUNT_CREATE:
                final AccountCreationInternalEvent realEventACR = (AccountCreationInternalEvent) event;
                objectType = ObjectType.ACCOUNT;
                objectId = realEventACR.getId();
                eventBusType = ExtBusEventType.ACCOUNT_CREATION;
                break;

            case ACCOUNT_CHANGE:
                final AccountChangeInternalEvent realEventACH = (AccountChangeInternalEvent) event;
                objectType = ObjectType.ACCOUNT;
                objectId = realEventACH.getAccountId();
                eventBusType = ExtBusEventType.ACCOUNT_CHANGE;
                break;

            case SUBSCRIPTION_TRANSITION:
                final SubscriptionInternalEvent realEventST = (SubscriptionInternalEvent) event;
                objectType = ObjectType.SUBSCRIPTION;
                objectId = realEventST.getSubscriptionId();
                if (realEventST.getTransitionType() == SubscriptionBaseTransitionType.CREATE ||
                    realEventST.getTransitionType() == SubscriptionBaseTransitionType.RE_CREATE ||
                    realEventST.getTransitionType() == SubscriptionBaseTransitionType.TRANSFER ||
                    realEventST.getTransitionType() == SubscriptionBaseTransitionType.MIGRATE_ENTITLEMENT) {
                    eventBusType = ExtBusEventType.SUBSCRIPTION_CREATION;
                } else if (realEventST.getTransitionType() == SubscriptionBaseTransitionType.CANCEL) {
                    eventBusType = ExtBusEventType.SUBSCRIPTION_CANCEL;
                } else if (realEventST.getTransitionType() == SubscriptionBaseTransitionType.PHASE) {
                    eventBusType = ExtBusEventType.SUBSCRIPTION_PHASE;
                } else if (realEventST.getTransitionType() == SubscriptionBaseTransitionType.CHANGE) {
                    eventBusType = ExtBusEventType.SUBSCRIPTION_CHANGE;
                } else if (realEventST.getTransitionType() == SubscriptionBaseTransitionType.UNCANCEL) {
                    eventBusType = ExtBusEventType.SUBSCRIPTION_UNCANCEL;
                }
                break;

            case BLOCKING_STATE:
                final BlockingTransitionInternalEvent realEventBS = (BlockingTransitionInternalEvent) event;
                if (realEventBS.getBlockingType() == BlockingStateType.ACCOUNT) {
                    objectType = ObjectType.ACCOUNT;
                } else if (realEventBS.getBlockingType() == BlockingStateType.SUBSCRIPTION_BUNDLE) {
                    objectType = ObjectType.BUNDLE;
                } else if (realEventBS.getBlockingType() == BlockingStateType.SUBSCRIPTION) {
                    objectType = ObjectType.SUBSCRIPTION;
                }
                objectId = realEventBS.getBlockableId();
                // NOTE(review): eventBusType is intentionally left null here, so
                // no external event is posted for blocking-state transitions.
                // Probably we should serialize the isTransitionedTo* from
                // BlockingTransitionInternalEvent into the metadata section.
                break;

            case ENTITLEMENT_TRANSITION:
                final EntitlementInternalEvent realEventET = (EntitlementInternalEvent) event;
                objectType = ObjectType.BUNDLE;
                objectId = realEventET.getBundleId();
                if (realEventET.getTransitionType() == EntitlementTransitionType.BLOCK_BUNDLE) {
                    eventBusType = ExtBusEventType.BUNDLE_PAUSE;
                } else if (realEventET.getTransitionType() == EntitlementTransitionType.UNBLOCK_BUNDLE) {
                    eventBusType = ExtBusEventType.BUNDLE_RESUME;
                }
                break;

            case INVOICE_CREATION:
                final InvoiceCreationInternalEvent realEventInv = (InvoiceCreationInternalEvent) event;
                objectType = ObjectType.INVOICE;
                objectId = realEventInv.getInvoiceId();
                eventBusType = ExtBusEventType.INVOICE_CREATION;
                break;

            case INVOICE_NOTIFICATION:
                final InvoiceNotificationInternalEvent realEventInvNotification = (InvoiceNotificationInternalEvent) event;
                objectType = ObjectType.INVOICE;
                objectId = null;
                accountId = realEventInvNotification.getAccountId(); // has to be set here because objectId is null with a dryRun Invoice
                eventBusType = ExtBusEventType.INVOICE_NOTIFICATION;
                break;

            case INVOICE_ADJUSTMENT:
                final InvoiceAdjustmentInternalEvent realEventInvAdj = (InvoiceAdjustmentInternalEvent) event;
                objectType = ObjectType.INVOICE;
                objectId = realEventInvAdj.getInvoiceId();
                eventBusType = ExtBusEventType.INVOICE_ADJUSTMENT;
                break;

            case PAYMENT_INFO:
                final PaymentInfoInternalEvent realEventPay = (PaymentInfoInternalEvent) event;
                objectType = ObjectType.PAYMENT;
                objectId = realEventPay.getPaymentId();
                eventBusType = ExtBusEventType.PAYMENT_SUCCESS;
                break;

            case PAYMENT_ERROR:
                final PaymentErrorInternalEvent realEventPayErr = (PaymentErrorInternalEvent) event;
                objectType = ObjectType.PAYMENT;
                objectId = realEventPayErr.getPaymentId();
                eventBusType = ExtBusEventType.PAYMENT_FAILED;
                accountId = realEventPayErr.getAccountId();
                break;

            case PAYMENT_PLUGIN_ERROR:
                final PaymentPluginErrorInternalEvent realEventPayPluginErr = (PaymentPluginErrorInternalEvent) event;
                objectType = ObjectType.PAYMENT;
                objectId = realEventPayPluginErr.getPaymentId();
                eventBusType = ExtBusEventType.PAYMENT_FAILED;
                break;

            case OVERDUE_CHANGE:
                final OverdueChangeInternalEvent realEventOC = (OverdueChangeInternalEvent) event;
                objectType = ObjectType.ACCOUNT;
                objectId = realEventOC.getOverdueObjectId();
                eventBusType = ExtBusEventType.OVERDUE_CHANGE;
                break;

            case USER_TAG_CREATION:
                final UserTagCreationInternalEvent realUserTagEventCr = (UserTagCreationInternalEvent) event;
                objectType = ObjectType.TAG;
                objectId = realUserTagEventCr.getTagId();
                eventBusType = ExtBusEventType.TAG_CREATION;
                break;

            case CONTROL_TAG_CREATION:
                final ControlTagCreationInternalEvent realTagEventCr = (ControlTagCreationInternalEvent) event;
                objectType = ObjectType.TAG;
                objectId = realTagEventCr.getTagId();
                eventBusType = ExtBusEventType.TAG_CREATION;
                break;

            case USER_TAG_DELETION:
                final UserTagDeletionInternalEvent realUserTagEventDel = (UserTagDeletionInternalEvent) event;
                objectType = ObjectType.TAG;
                objectId = realUserTagEventDel.getTagId();
                eventBusType = ExtBusEventType.TAG_DELETION;
                break;

            case CONTROL_TAG_DELETION:
                final ControlTagDeletionInternalEvent realTagEventDel = (ControlTagDeletionInternalEvent) event;
                objectType = ObjectType.TAG;
                objectId = realTagEventDel.getTagId();
                eventBusType = ExtBusEventType.TAG_DELETION;
                break;

            case CUSTOM_FIELD_CREATION:
                final CustomFieldCreationEvent realCustomFieldEventCr = (CustomFieldCreationEvent) event;
                objectType = ObjectType.CUSTOM_FIELD;
                objectId = realCustomFieldEventCr.getCustomFieldId();
                eventBusType = ExtBusEventType.CUSTOM_FIELD_CREATION;
                break;

            case CUSTOM_FIELD_DELETION:
                final CustomFieldDeletionEvent realCustomFieldEventDel = (CustomFieldDeletionEvent) event;
                objectType = ObjectType.CUSTOM_FIELD;
                objectId = realCustomFieldEventDel.getCustomFieldId();
                eventBusType = ExtBusEventType.CUSTOM_FIELD_DELETION;
                break;

            case TENANT_CONFIG_CHANGE:
                final TenantConfigChangeInternalEvent realTenantConfigEventChg = (TenantConfigChangeInternalEvent) event;
                objectType = ObjectType.TENANT_KVS;
                objectId = realTenantConfigEventChg.getId();
                eventBusType = ExtBusEventType.TENANT_CONFIG_CHANGE;
                metaData = realTenantConfigEventChg.getKey();
                break;

            case TENANT_CONFIG_DELETION:
                final TenantConfigDeletionInternalEvent realTenantConfigEventDel = (TenantConfigDeletionInternalEvent) event;
                objectType = ObjectType.TENANT_KVS;
                objectId = null;
                eventBusType = ExtBusEventType.TENANT_CONFIG_DELETION;
                metaData = realTenantConfigEventDel.getKey();
                break;

            case BROADCAST_SERVICE:
                final BroadcastInternalEvent realBroadcastEvent = (BroadcastInternalEvent) event;
                objectType = ObjectType.SERVICE_BROADCAST;
                objectId = null;
                eventBusType = ExtBusEventType.BROADCAST_SERVICE;
                final BroadcastMetadata metaDataObj = new BroadcastMetadata(realBroadcastEvent.getServiceName(), realBroadcastEvent.getType(), realBroadcastEvent.getJsonEvent());
                metaData = objectMapper.writeValueAsString(metaDataObj);
                break;

            default:
                // Internal event with no external representation: ignored.
                break;
        }

        final TenantContext tenantContext = internalCallContextFactory.createTenantContext(context);
        // See #275
        accountId = (accountId == null) ?
                    getAccountId(event.getBusEventType(), objectId, objectType, tenantContext) :
                    accountId;

        return eventBusType != null ?
               new DefaultBusExternalEvent(objectId, objectType, eventBusType, accountId, tenantContext.getTenantId(), metaData, context.getAccountRecordId(), context.getTenantRecordId(), context.getUserToken()) :
               null;
    }

    /**
     * Resolves the account id associated with the event's target object.
     * @param eventType the internal event type
     * @param objectId the target object id (may be null)
     * @param objectType the target object type
     * @param context the tenant context used for the lookup
     * @return the account id, or {@code null} when it cannot/should not be resolved
     */
    private UUID getAccountId(final BusInternalEventType eventType, @Nullable final UUID objectId, final ObjectType objectType, final TenantContext context) {
        // accountRecord_id is not set for ACCOUNT_CREATE event as we are in the transaction and value is not known yet
        if (eventType == BusInternalEventType.ACCOUNT_CREATE) {
            return objectId;
        } else if (eventType == BusInternalEventType.TENANT_CONFIG_CHANGE || eventType == BusInternalEventType.TENANT_CONFIG_DELETION) {
            // Tenant-level events are not tied to an account.
            return null;
        } else if (objectId == null) {
            return null;
        } else {
            return internalCallContextFactory.getAccountId(objectId, objectType, context);
        }
    }
}
| |
/*
* Copyright 2015 Ben Manes. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.benmanes.caffeine.cache;
import static com.github.benmanes.caffeine.cache.Specifications.ACCESS_ORDER_DEQUE;
import static com.github.benmanes.caffeine.cache.Specifications.BUILDER_PARAM;
import static com.github.benmanes.caffeine.cache.Specifications.CACHE_LOADER;
import static com.github.benmanes.caffeine.cache.Specifications.CACHE_LOADER_PARAM;
import static com.github.benmanes.caffeine.cache.Specifications.REMOVAL_LISTENER;
import static com.github.benmanes.caffeine.cache.Specifications.STATS_COUNTER;
import static com.github.benmanes.caffeine.cache.Specifications.TICKER;
import static com.github.benmanes.caffeine.cache.Specifications.UNSAFE_ACCESS;
import static com.github.benmanes.caffeine.cache.Specifications.WRITE_ORDER_DEQUE;
import static com.github.benmanes.caffeine.cache.Specifications.WRITE_QUEUE;
import static com.github.benmanes.caffeine.cache.Specifications.kRefQueueType;
import static com.github.benmanes.caffeine.cache.Specifications.kTypeVar;
import static com.github.benmanes.caffeine.cache.Specifications.newFieldOffset;
import static com.github.benmanes.caffeine.cache.Specifications.offsetName;
import static com.github.benmanes.caffeine.cache.Specifications.vRefQueueType;
import static com.github.benmanes.caffeine.cache.Specifications.vTypeVar;
import java.util.Set;
import java.util.concurrent.Executor;
import javax.lang.model.element.Modifier;
import com.squareup.javapoet.FieldSpec;
import com.squareup.javapoet.MethodSpec;
import com.squareup.javapoet.TypeName;
import com.squareup.javapoet.TypeSpec;
/**
* Generates a cache implementation.
*
* @author ben.manes@gmail.com (Ben Manes)
*/
public final class LocalCacheGenerator {
  // Modifier combinations reused by every generated field/method below.
  private final Modifier[] publicFinalModifiers = { Modifier.PUBLIC, Modifier.FINAL };
  private final Modifier[] protectedFinalModifiers = { Modifier.PROTECTED, Modifier.FINAL };
  private final Modifier[] privateFinalModifiers = { Modifier.PRIVATE, Modifier.FINAL };
  private final Modifier[] privateVolatileModifiers = { Modifier.PRIVATE, Modifier.VOLATILE };

  // Simple name of the generated class (also used to compute Unsafe field offsets).
  private final String className;
  // Builder accumulating the generated type's fields and methods.
  private final TypeSpec.Builder cache;
  // Builder accumulating the generated constructor's statements.
  private final MethodSpec.Builder constructor;
  // Features already provided by the superclass (skipped during generation).
  private final Set<Feature> parentFeatures;
  // Features this generated class must provide itself.
  private final Set<Feature> generateFeatures;

  /**
   * Creates a generator for one cache variant.
   *
   * @param superClass the class the generated cache extends
   * @param className the simple name of the generated class
   * @param isFinal whether the generated class is a leaf (no further subclassing)
   * @param parentFeatures features inherited from the superclass
   * @param generateFeatures features to emit in this class
   */
  LocalCacheGenerator(TypeName superClass, String className, boolean isFinal,
      Set<Feature> parentFeatures, Set<Feature> generateFeatures) {
    this.className = className;
    this.parentFeatures = parentFeatures;
    this.generateFeatures = generateFeatures;
    this.cache = TypeSpec.classBuilder(className)
        .superclass(superClass)
        .addModifiers(Modifier.STATIC);
    this.constructor = MethodSpec.constructorBuilder();
    if (isFinal) {
      cache.addModifiers(Modifier.FINAL);
    }
  }

  /**
   * Emits the complete cache implementation: type variables, the constructor
   * (delegating to the superclass), and one field/method group per feature.
   */
  public TypeSpec generate() {
    cache
        .addTypeVariable(kTypeVar)
        .addTypeVariable(vTypeVar);
    constructor
        .addParameter(BUILDER_PARAM)
        .addParameter(CACHE_LOADER_PARAM)
        .addParameter(boolean.class, "async")
        .addStatement(parentFeatures.isEmpty()
            ? "super(builder, async)"
            : "super(builder, cacheLoader, async)");
    addKeyStrength();
    addValueStrength();
    addCacheLoader();
    addRemovalListener();
    addExecutor();
    addStats();
    addTicker();
    addMaximum();
    addAccessOrderDeque();
    addExpireAfterAccess();
    addExpireAfterWrite();
    addRefreshAfterWrite();
    addWriteOrderDeque();
    addWriteQueue();
    return cache.addMethod(constructor.build()).build();
  }

  /** Adds the key reference queue when keys are weakly held. */
  private void addKeyStrength() {
    if (generateFeatures.contains(Feature.WEAK_KEYS)) {
      addStrength("collectKeys", "keyReferenceQueue", kRefQueueType);
    }
  }

  /** Adds the value reference queue when values are weakly/softly held. */
  private void addValueStrength() {
    if (generateFeatures.contains(Feature.INFIRM_VALUES)) {
      addStrength("collectValues", "valueReferenceQueue", vRefQueueType);
    }
  }

  /** Adds the removal listener field, its accessor, and hasRemovalListener(). */
  private void addRemovalListener() {
    if (!generateFeatures.contains(Feature.LISTENING)) {
      return;
    }
    cache.addField(
        FieldSpec.builder(REMOVAL_LISTENER, "removalListener", privateFinalModifiers).build());
    constructor.addStatement("this.removalListener = builder.getRemovalListener(async)");
    cache.addMethod(MethodSpec.methodBuilder("removalListener")
        .addModifiers(publicFinalModifiers)
        .addStatement("return removalListener")
        .returns(REMOVAL_LISTENER)
        .build());
    cache.addMethod(MethodSpec.methodBuilder("hasRemovalListener")
        .addModifiers(protectedFinalModifiers)
        .addStatement("return true")
        .returns(boolean.class)
        .build());
  }

  /** Adds the executor field and its accessor. */
  private void addExecutor() {
    if (!generateFeatures.contains(Feature.EXECUTOR)) {
      return;
    }
    cache.addField(FieldSpec.builder(Executor.class, "executor", privateFinalModifiers).build());
    constructor.addStatement("this.executor = builder.getExecutor()");
    cache.addMethod(MethodSpec.methodBuilder("executor")
        .addModifiers(publicFinalModifiers)
        .addStatement("return executor")
        .returns(Executor.class)
        .build());
  }

  /** Adds the cache loader field (assigned from the constructor parameter) and accessor. */
  private void addCacheLoader() {
    if (!generateFeatures.contains(Feature.LOADING)) {
      return;
    }
    constructor.addStatement("this.cacheLoader = cacheLoader");
    cache.addField(FieldSpec.builder(CACHE_LOADER, "cacheLoader", privateFinalModifiers).build());
    cache.addMethod(MethodSpec.methodBuilder("cacheLoader")
        .addModifiers(protectedFinalModifiers)
        .addStatement("return cacheLoader")
        .returns(CACHE_LOADER)
        .build());
  }

  /** Adds the stats counter field, its accessor, and isRecordingStats(). */
  private void addStats() {
    if (!generateFeatures.contains(Feature.STATS)) {
      return;
    }
    constructor.addStatement("this.statsCounter = builder.getStatsCounterSupplier().get()");
    cache.addField(FieldSpec.builder(STATS_COUNTER, "statsCounter", privateFinalModifiers).build());
    cache.addMethod(MethodSpec.methodBuilder("statsCounter")
        .addModifiers(publicFinalModifiers)
        .addStatement("return statsCounter")
        .returns(STATS_COUNTER)
        .build());
    cache.addMethod(MethodSpec.methodBuilder("isRecordingStats")
        .addModifiers(publicFinalModifiers)
        .addStatement("return true")
        .returns(boolean.class)
        .build());
  }

  /** Adds the ticker field and accessor, unless a parent already supplies it. */
  private void addTicker() {
    if (Feature.usesTicker(parentFeatures) || !Feature.usesTicker(generateFeatures)) {
      return;
    }
    constructor.addStatement("this.ticker = builder.getTicker()");
    cache.addField(FieldSpec.builder(TICKER, "ticker", privateFinalModifiers).build());
    cache.addMethod(MethodSpec.methodBuilder("ticker")
        .addModifiers(publicFinalModifiers)
        .addStatement("return ticker")
        .returns(TICKER)
        .build());
  }

  /**
   * Adds the eviction support: evicts(), plus the volatile maximum and
   * weightedSize fields with Unsafe-based lazy getters/setters (ordered writes
   * via putOrderedLong and the generated field offsets).
   */
  private void addMaximum() {
    if (Feature.usesMaximum(parentFeatures) || !Feature.usesMaximum(generateFeatures)) {
      return;
    }
    cache.addMethod(MethodSpec.methodBuilder("evicts")
        .addModifiers(protectedFinalModifiers)
        .addStatement("return true")
        .returns(boolean.class)
        .build());

    constructor.addStatement(
        "this.maximum = $T.min(builder.getMaximumWeight(), MAXIMUM_CAPACITY)", Math.class);
    cache.addField(FieldSpec.builder(long.class, "maximum", privateVolatileModifiers).build());
    cache.addField(newFieldOffset(className, "maximum"));
    cache.addMethod(MethodSpec.methodBuilder("maximum")
        .addModifiers(protectedFinalModifiers)
        .addStatement("return $T.UNSAFE.getLong(this, $N)", UNSAFE_ACCESS, offsetName("maximum"))
        .returns(long.class)
        .build());
    cache.addMethod(MethodSpec.methodBuilder("lazySetMaximum")
        .addModifiers(protectedFinalModifiers)
        .addStatement("$T.UNSAFE.putOrderedLong(this, $N, $N)",
            UNSAFE_ACCESS, offsetName("maximum"), "maximum")
        .addParameter(long.class, "maximum")
        .build());

    cache.addField(FieldSpec.builder(long.class, "weightedSize", privateVolatileModifiers).build());
    cache.addField(newFieldOffset(className, "weightedSize"));
    cache.addMethod(MethodSpec.methodBuilder("weightedSize")
        .addModifiers(protectedFinalModifiers)
        .addStatement("return $T.UNSAFE.getLong(this, $N)",
            UNSAFE_ACCESS, offsetName("weightedSize"))
        .returns(long.class)
        .build());
    cache.addMethod(MethodSpec.methodBuilder("lazySetWeightedSize")
        .addModifiers(protectedFinalModifiers)
        .addStatement("$T.UNSAFE.putOrderedLong(this, $N, $N)",
            UNSAFE_ACCESS, offsetName("weightedSize"), "weightedSize")
        .addParameter(long.class, "weightedSize")
        .build());
  }

  /** Adds the expire-after-access duration field with its predicate/getter/setter. */
  private void addExpireAfterAccess() {
    if (!generateFeatures.contains(Feature.EXPIRE_ACCESS)) {
      return;
    }
    constructor.addStatement("this.expiresAfterAccessNanos = builder.getExpiresAfterAccessNanos()");
    cache.addField(FieldSpec.builder(long.class, "expiresAfterAccessNanos",
        privateVolatileModifiers).build());
    cache.addMethod(MethodSpec.methodBuilder("expiresAfterAccess")
        .addModifiers(protectedFinalModifiers)
        .addStatement("return true")
        .returns(boolean.class)
        .build());
    cache.addMethod(MethodSpec.methodBuilder("expiresAfterAccessNanos")
        .addModifiers(protectedFinalModifiers)
        .addStatement("return expiresAfterAccessNanos")
        .returns(long.class)
        .build());
    cache.addMethod(MethodSpec.methodBuilder("setExpiresAfterAccessNanos")
        .addStatement("this.expiresAfterAccessNanos = expiresAfterAccessNanos")
        .addParameter(long.class, "expiresAfterAccessNanos")
        .addModifiers(protectedFinalModifiers)
        .build());
  }

  /** Adds the expire-after-write duration field with its predicate/getter/setter. */
  private void addExpireAfterWrite() {
    if (!generateFeatures.contains(Feature.EXPIRE_WRITE)) {
      return;
    }
    constructor.addStatement("this.expiresAfterWriteNanos = builder.getExpiresAfterWriteNanos()");
    cache.addField(FieldSpec.builder(long.class, "expiresAfterWriteNanos",
        privateVolatileModifiers).build());
    cache.addMethod(MethodSpec.methodBuilder("expiresAfterWrite")
        .addModifiers(protectedFinalModifiers)
        .addStatement("return true")
        .returns(boolean.class)
        .build());
    cache.addMethod(MethodSpec.methodBuilder("expiresAfterWriteNanos")
        .addModifiers(protectedFinalModifiers)
        .addStatement("return expiresAfterWriteNanos")
        .returns(long.class)
        .build());
    cache.addMethod(MethodSpec.methodBuilder("setExpiresAfterWriteNanos")
        .addStatement("this.expiresAfterWriteNanos = expiresAfterWriteNanos")
        .addParameter(long.class, "expiresAfterWriteNanos")
        .addModifiers(protectedFinalModifiers)
        .build());
  }

  /** Adds the refresh-after-write duration field with its predicate/getter/setter. */
  private void addRefreshAfterWrite() {
    if (!generateFeatures.contains(Feature.REFRESH_WRITE)) {
      return;
    }
    constructor.addStatement("this.refreshAfterWriteNanos = builder.getRefreshAfterWriteNanos()");
    cache.addField(FieldSpec.builder(long.class, "refreshAfterWriteNanos",
        privateVolatileModifiers).build());
    cache.addMethod(MethodSpec.methodBuilder("refreshAfterWrite")
        .addModifiers(protectedFinalModifiers)
        .addStatement("return true")
        .returns(boolean.class)
        .build());
    cache.addMethod(MethodSpec.methodBuilder("refreshAfterWriteNanos")
        .addModifiers(protectedFinalModifiers)
        .addStatement("return refreshAfterWriteNanos")
        .returns(long.class)
        .build());
    cache.addMethod(MethodSpec.methodBuilder("setRefreshAfterWriteNanos")
        .addStatement("this.refreshAfterWriteNanos = refreshAfterWriteNanos")
        .addParameter(long.class, "refreshAfterWriteNanos")
        .addModifiers(protectedFinalModifiers)
        .build());
  }

  /** Adds the access-order deque field and accessor, unless a parent supplies it. */
  private void addAccessOrderDeque() {
    if (Feature.usesAccessOrderDeque(parentFeatures)
        || !Feature.usesAccessOrderDeque(generateFeatures)) {
      return;
    }
    constructor.addStatement("this.accessOrderDeque = new $T()", ACCESS_ORDER_DEQUE);
    cache.addField(
        FieldSpec.builder(ACCESS_ORDER_DEQUE, "accessOrderDeque", privateFinalModifiers).build());
    cache.addMethod(MethodSpec.methodBuilder("accessOrderDeque")
        .addModifiers(protectedFinalModifiers)
        .addStatement("return accessOrderDeque")
        .returns(ACCESS_ORDER_DEQUE)
        .build());
  }

  /** Adds the write-order deque field and accessor, unless a parent supplies it. */
  private void addWriteOrderDeque() {
    if (Feature.usesWriteOrderDeque(parentFeatures)
        || !Feature.usesWriteOrderDeque(generateFeatures)) {
      return;
    }
    constructor.addStatement("this.writeOrderDeque = new $T()", WRITE_ORDER_DEQUE);
    cache.addField(
        FieldSpec.builder(WRITE_ORDER_DEQUE, "writeOrderDeque", privateFinalModifiers).build());
    cache.addMethod(MethodSpec.methodBuilder("writeOrderDeque")
        .addModifiers(protectedFinalModifiers)
        .addStatement("return writeOrderDeque")
        .returns(WRITE_ORDER_DEQUE)
        .build());
  }

  /** Adds the write queue field, its accessor, and buffersWrites(), unless inherited. */
  private void addWriteQueue() {
    if (Feature.usesWriteQueue(parentFeatures)
        || !Feature.usesWriteQueue(generateFeatures)) {
      return;
    }
    constructor.addStatement("this.writeQueue = new $T()", WRITE_QUEUE);
    cache.addField(FieldSpec.builder(WRITE_QUEUE, "writeQueue", privateFinalModifiers).build());
    cache.addMethod(MethodSpec.methodBuilder("writeQueue")
        .addModifiers(protectedFinalModifiers)
        .addStatement("return writeQueue")
        .returns(WRITE_QUEUE)
        .build());
    cache.addMethod(MethodSpec.methodBuilder("buffersWrites")
        .addModifiers(protectedFinalModifiers)
        .addStatement("return true")
        .returns(boolean.class)
        .build());
  }

  /** Adds the reference strength methods for the key or value. */
  private void addStrength(String collectName, String queueName, TypeName type) {
    cache.addMethod(MethodSpec.methodBuilder(queueName)
        .addModifiers(protectedFinalModifiers)
        .returns(type)
        .addStatement("return $N", queueName)
        .build());
    cache.addField(FieldSpec.builder(type, queueName, privateFinalModifiers)
        .initializer("new $T()", type)
        .build());
    cache.addMethod(MethodSpec.methodBuilder(collectName)
        .addModifiers(protectedFinalModifiers)
        .addStatement("return true")
        .returns(boolean.class)
        .build());
  }
}
| |
// This file is part of OpenTSDB.
// Copyright (C) 2010-2012 The OpenTSDB Authors.
//
// This program is free software: you can redistribute it and/or modify it
// under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 2.1 of the License, or (at your
// option) any later version. This program is distributed in the hope that it
// will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty
// of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser
// General Public License for more details. You should have received a copy
// of the GNU Lesser General Public License along with this program. If not,
// see <http://www.gnu.org/licenses/>.
package net.opentsdb.stats;
import java.util.Arrays;
/**
* A histogram to keep track of the approximation of a distribution of values.
* <p/>
* This is not a general purpose implementation of histogram. It's
* specifically designed for "small" values (close to 0) as the primary
* use case is latency histograms.
* <p/>
* All values must be positive ({@code >= 0}).
* <p/>
* The histogram is linear (fixed size buckets) up to a given cutoff
* point. Beyond that point, the histogram becomes exponential (each
* bucket is twice as large as the previous one). This gives good
* granularity for lower values while still allowing a rough
* classification for the "long tail" of larger values.
* <p/>
* Note that this implementation doesn't allow you to directly control
* the number of buckets in the histogram. The number will depend on
* the arguments given to the constructor.
* <p/>
* This class is not synchronized.
*/
public final class Histogram {
/**
* Interval between each bucket for the linear part of the histogram.
*/
private final short interval;
/**
* Inclusive value beyond which we switch to exponential buckets.
*/
private final int cutoff;
/**
* How many linear buckets we have.
* Technically we don't need to store this value but we do in order to
* avoid having to re-compute it in the fast path each time we save a
* new value.
*/
private final short num_linear_buckets;
/**
* The power of 2 used by the first exponential bucket.
* Technically we don't need to store this value but we do in order to
* avoid having to re-compute it in the fast path each time we save a
* new value.
*/
private final short exp_bucket_shift;
/**
* Buckets where we actually store the values.
*/
private final int[] buckets;
/**
* Constructor.
*
* @param max The maximum value of the histogram. Any value greater
* than this will be considered to be "infinity".
* @param interval The interval (size) of each linear bucket.
* @param cutoff The value beyond which to switch to exponential
* buckets. The histogram may actually use this value or a value up
* to {@code interval} greater.
* @throws IllegalArgumentException if any of following conditions are
* not met:
* <pre>
* 0 < interval <= max
* 0 <= cutoff <= max
* </pre>
*/
public Histogram(final int max,
final short interval, final int cutoff) {
if (interval > max) {
throw new IllegalArgumentException("interval > max! interval="
+ interval + ", max=" + max);
} else if (cutoff > max) {
throw new IllegalArgumentException("cutoff > max! cutoff="
+ cutoff + ", max=" + max);
} else if (interval < 1) {
throw new IllegalArgumentException("interval < 1! interval=" + interval);
} else if (cutoff < 0) {
throw new IllegalArgumentException("cutoff < 0! interval=" + cutoff);
}
this.interval = interval;
// One linear bucket every `interval' up to `cutoff'.
num_linear_buckets = (short) (cutoff / interval);
this.cutoff = num_linear_buckets * interval;
this.exp_bucket_shift = (short) log2rounddown(interval);
this.buckets = new int[num_linear_buckets
// Find how many exponential buckets we need, starting from the
// first power of 2 that's less than or equal to `interval'.
+ log2roundup((max - cutoff) >> exp_bucket_shift)
// Add an extra overflow bucket at the end.
+ 1];
}
/**
* Computes the logarithm base 2 (rounded up) of an integer.
* <p/>
* This is essentially equivalent to
* {@code Math.ceil(Math.log(n) / Math.log(2))}
* except it's 3 times faster.
*
* @param n A strictly positive integer.
* @return The logarithm base 2. As a special case, if the integer
* given in argument is 0, this function returns 0. If the integer
* given in argument is negative, the return value is undefined.
* @see #log2rounddown
*/
static final int log2roundup(final int n) {
int log2 = 0;
while (n > 1 << log2) {
log2++;
}
return log2;
}
/**
* Computes the logarithm base 2 (rounded down) of an integer.
* <p/>
* This is essentially equivalent to
* {@code Math.floor(Math.log(n) / Math.log(2))}
* except it's 4.5 times faster. This function is also almost 70%
* faster than {@link #log2roundup}.
*
* @param n A strictly positive integer.
* @return The logarithm base 2. As a special case, if the integer
* given in argument is 0, this function returns 0. If the integer
* given in argument is negative, the return value is undefined.
* @see #log2roundup
*/
static final int log2rounddown(int n) {
int log2 = 0;
while (n > 1) {
n >>>= 1;
log2++;
}
return log2;
}
/**
* Returns the number of buckets in this histogram.
*/
public int buckets() {
return buckets.length;
}
/**
* Adds a value to the histogram.
* <p/>
* This method works in {@code O(1)}.
*
* @param value The value to save.
* @throws IllegalArgumentException if the value given is negative.
*/
public void add(final int value) {
if (value < 0) {
throw new IllegalArgumentException("negative value: " + value);
}
buckets[bucketIndexFor(value)]++;
}
/**
* Returns the value of the <i>p</i>th percentile in this histogram.
* <p/>
* This method works in {@code O(N)} where {@code N} is the number of
* {@link #buckets buckets}.
*
* @param p A strictly positive integer in the range {@code [1; 100]}
* @throws IllegalArgumentException if {@code p} is not valid.
*/
public int percentile(int p) {
if (p < 1 || p > 100) {
throw new IllegalArgumentException("invalid percentile: " + p);
}
int count = 0; // Count of values in the histogram.
for (int i = 0; i < buckets.length; i++) {
count += buckets[i];
}
if (count == 0) { // Empty histogram. Need to special-case it, otherwise
return 0; // the `if (count <= p)' below will be erroneously true.
}
// Find the number of elements at or below which the pth percentile is.
p = count * p / 100;
// Now walk the array backwards and decrement the count until it reaches p.
for (int i = buckets.length - 1; i >= 0; i--) {
count -= buckets[i];
if (count <= p) {
return bucketHighInterval(i);
}
}
return 0;
}
/**
* Prints this histogram in a human readable ASCII format.
* <p/>
* This is equivalent to calling {@link #printAsciiBucket} on every
* bucket.
*
* @param out The buffer to which to write the output.
*/
public void printAscii(final StringBuilder out) {
for (int i = 0; i < buckets.length; i++) {
printAsciiBucket(out, i);
}
}
/**
* Prints a bucket of this histogram in a human readable ASCII format.
*
* @param out The buffer to which to write the output.
* @see #printAscii
*/
final void printAsciiBucket(final StringBuilder out, final int i) {
out.append('[')
.append(bucketLowInterval(i))
.append('-')
.append(i == buckets.length - 1 ? "Inf" : bucketHighInterval(i))
.append("): ")
.append(buckets[i])
.append('\n');
}
/**
* Helper for unit tests that returns the value in the given bucket.
*/
final int valueInBucket(final int index) {
return buckets[index];
}
/**
* Finds the index of the bucket in which the given value should be.
*/
private int bucketIndexFor(final int value) {
if (value < cutoff) {
return value / interval;
}
int bucket = num_linear_buckets // Skip all linear buckets.
// And find which bucket the rest (after `cutoff') should be in.
// Reminder: the first exponential bucket ends at 2^exp_bucket_shift.
+ log2rounddown((value - cutoff) >> exp_bucket_shift);
if (bucket >= buckets.length) {
return buckets.length - 1;
}
return bucket;
}
/**
* Returns the low interval (inclusive) of the given bucket.
*/
private int bucketLowInterval(final int index) {
if (index <= num_linear_buckets) {
return index * interval;
} else {
return cutoff + (1 << (index - num_linear_buckets + exp_bucket_shift));
}
}
/**
* Returns the high interval (exclusive) of the given bucket.
*/
private int bucketHighInterval(final int index) {
if (index == buckets.length - 1) {
return Integer.MAX_VALUE;
} else {
return bucketLowInterval(index + 1);
}
}
public String toString() {
return "Histogram(interval=" + interval + ", cutoff=" + cutoff
+ ", num_linear_buckets=" + num_linear_buckets
+ ", exp_bucket_shift=" + exp_bucket_shift
+ ", buckets=" + Arrays.toString(buckets) + ')';
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.axis2.transport.xmpp;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import org.apache.axiom.om.OMElement;
import org.apache.axis2.AxisFault;
import org.apache.axis2.addressing.EndpointReference;
import org.apache.axis2.context.ConfigurationContext;
import org.apache.axis2.context.MessageContext;
import org.apache.axis2.context.SessionContext;
import org.apache.axis2.description.Parameter;
import org.apache.axis2.description.ParameterIncludeImpl;
import org.apache.axis2.description.TransportInDescription;
import org.apache.axis2.transport.TransportListener;
import org.apache.axis2.transport.xmpp.util.XMPPConnectionFactory;
import org.apache.axis2.transport.xmpp.util.XMPPConstants;
import org.apache.axis2.transport.xmpp.util.XMPPPacketListener;
import org.apache.axis2.transport.xmpp.util.XMPPServerCredentials;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jivesoftware.smack.Roster;
import org.jivesoftware.smack.XMPPConnection;
import org.jivesoftware.smack.Roster.SubscriptionMode;
public class XMPPListener implements TransportListener {
    /**
    Uncomment this to enable XMPP logging, this is useful for testing.
    static {
        XMPPConnection.DEBUG_ENABLED = true;
    }
    **/
    private static Log log = LogFactory.getLog(XMPPListener.class);

    private ConfigurationContext configurationContext = null;

    /** Credentials of the most recently configured XMPP server; also used to build EPRs. */
    private XMPPServerCredentials serverCredentials;

    /**
     * A Map containing the connection factories managed by this listener,
     * keyed by userName-at-jabberServerURL.
     */
    private Map<String, XMPPConnectionFactory> connectionFactories =
            new HashMap<String, XMPPConnectionFactory>();

    /** Worker pool for incoming packets; created in start(), shut down in stop(). */
    private ExecutorService workerPool;
    private static final int WORKERS_MAX_THREADS = 5;
    private static final long WORKER_KEEP_ALIVE = 60L;
    private static final TimeUnit TIME_UNIT = TimeUnit.SECONDS;

    // NOTE(review): never assigned in this class, so destroy() is effectively
    // a no-op — confirm whether this should track a live connection.
    private XMPPConnection xmppConnection = null;

    public XMPPListener() {
    }

    /**
     * Initializing the XMPPListener. Retrieve connection details provided in
     * xmpp transport receiver, connect to those servers & start listening in
     * for messages.
     *
     * @param configurationCtx Axis2 configuration context
     * @param transportIn transport-in description carrying the XMPP server parameters
     * @throws AxisFault if connecting to a configured XMPP server fails
     */
    public void init(ConfigurationContext configurationCtx, TransportInDescription transportIn)
            throws AxisFault {
        log.info("Initializing XMPPListener...");
        //allow anyone to send message to listening account
        Roster.setDefaultSubscriptionMode(SubscriptionMode.accept_all);
        configurationContext = configurationCtx;
        initializeConnectionFactories(transportIn);
        if (connectionFactories.isEmpty()) {
            log.warn("No XMPP connection factories defined." +
                    "Will not listen for any XMPP messages");
        }
    }

    /**
     * Extract connection details & connect to those xmpp servers.
     *
     * @see #init(ConfigurationContext, TransportInDescription)
     * @param transportIn transport-in description listing one parameter block per server
     * @throws AxisFault if a connection to an XMPP server cannot be established
     */
    private void initializeConnectionFactories(TransportInDescription transportIn) throws AxisFault {
        Iterator serversToListenOn = transportIn.getParameters().iterator();
        while (serversToListenOn.hasNext()) {
            Parameter connection = (Parameter) serversToListenOn.next();
            log.info("Trying to establish connection for : " + connection.getName());
            ParameterIncludeImpl pi = new ParameterIncludeImpl();
            try {
                pi.deserializeParameters((OMElement) connection.getValue());
            } catch (AxisFault axisFault) {
                // Best effort: keep processing remaining servers, but preserve
                // the cause in the log instead of swallowing it.
                log.error("Error reading parameters", axisFault);
            }
            Iterator params = pi.getParameters().iterator();
            serverCredentials = new XMPPServerCredentials();
            while (params.hasNext()) {
                Parameter param = (Parameter) params.next();
                String name = param.getName();
                if (XMPPConstants.XMPP_SERVER_URL.equals(name)) {
                    serverCredentials.setServerUrl((String) param.getValue());
                } else if (XMPPConstants.XMPP_SERVER_USERNAME.equals(name)) {
                    serverCredentials.setAccountName((String) param.getValue());
                } else if (XMPPConstants.XMPP_SERVER_PASSWORD.equals(name)) {
                    serverCredentials.setPassword((String) param.getValue());
                } else if (XMPPConstants.XMPP_SERVER_TYPE.equals(name)) {
                    serverCredentials.setServerType((String) param.getValue());
                } else if (XMPPConstants.XMPP_DOMAIN_NAME.equals(name)) {
                    serverCredentials.setDomainName((String) param.getValue());
                }
            }
            XMPPConnectionFactory xmppConnectionFactory = new XMPPConnectionFactory();
            xmppConnectionFactory.connect(serverCredentials);
            connectionFactories.put(serverCredentials.getAccountName() + "@"
                    + serverCredentials.getServerUrl(), xmppConnectionFactory);
        }
    }

    /**
     * Stop XMPP listener & disconnect from all XMPP Servers.
     */
    public void stop() {
        if (workerPool != null && !workerPool.isShutdown()) {
            workerPool.shutdown();
        }
        //TODO : Iterate through all connections in connectionFactories & call disconnect()
    }

    /**
     * Returns Default EPR for a given Service name & IP.
     *
     * @param serviceName name of the service
     * @param ip ip address (unused; kept for interface compatibility)
     */
    public EndpointReference getEPRForService(String serviceName, String ip) throws AxisFault {
        return getEPRsForService(serviceName, ip)[0];
    }

    /**
     * Returns all EPRs for a given Service name & IP.
     *
     * @param serviceName name of the service
     * @param ip ip address (unused; kept for interface compatibility)
     */
    public EndpointReference[] getEPRsForService(String serviceName, String ip) throws AxisFault {
        // Prefer the XMPP domain name; fall back to the raw server URL.
        String domainName = serverCredentials.getDomainName() != null ? serverCredentials.getDomainName()
                : serverCredentials.getServerUrl();
        return new EndpointReference[]{new EndpointReference(XMPPConstants.XMPP_PREFIX +
                serverCredentials.getAccountName() + "@" + domainName + "/services/" + serviceName)};
    }

    /** XMPP transport keeps no per-client session state. */
    public SessionContext getSessionContext(MessageContext messageContext) {
        return null;
    }

    public void destroy() {
        if (xmppConnection != null && xmppConnection.isConnected()) {
            xmppConnection.disconnect();
        }
    }

    /**
     * Start a pool of Workers. For each connection in connectionFactories,
     * assign a packet listener. This packet listener will trigger when a
     * message arrives.
     */
    public void start() throws AxisFault {
        // BUG FIX: assign the pool to the field instead of a shadowing local;
        // otherwise stop() sees a null field and the pool is never shut down.
        workerPool = new ThreadPoolExecutor(
                1,
                WORKERS_MAX_THREADS, WORKER_KEEP_ALIVE, TIME_UNIT,
                new LinkedBlockingQueue<Runnable>(),
                new org.apache.axis2.util.threadpool.DefaultThreadFactory(
                        new ThreadGroup("XMPP Worker thread group"),
                        "XMPPWorker"));
        Iterator iter = connectionFactories.values().iterator();
        while (iter.hasNext()) {
            XMPPConnectionFactory connectionFactory = (XMPPConnectionFactory) iter.next();
            XMPPPacketListener xmppPacketListener =
                    new XMPPPacketListener(connectionFactory, this.configurationContext, workerPool);
            connectionFactory.listen(xmppPacketListener);
        }
    }
}
| |
package org.jabref.model.entry;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import org.jabref.model.entry.specialfields.SpecialField;
/**
* Handling of bibtex fields.
* All bibtex-field related stuff should be placed here!
* Because we can export this information into additional
* config files -> simple extension and definition of new fields
*
* TODO:
* - handling of identically fields with different names (https://github.com/JabRef/jabref/issues/521)
* e.g. LCCN = lib-congress, journaltitle = journal
* - group id for each fields, e.g. standard, jurabib, bio, ...
* - add additional properties functionality to the BibtexSingleField class
*/
public class InternalBibtexFields {
/**
* These are the fields JabRef always displays as default
* {@link org.jabref.preferences.JabRefPreferences#setLanguageDependentDefaultValues()}
*
* A user can change them. The change is currently stored in the preferences only and not explicitly exposed as a separate preferences object.
*/
public static final List<String> DEFAULT_GENERAL_FIELDS = Arrays.asList(FieldName.CROSSREF, FieldName.KEYWORDS, FieldName.FILE, FieldName.DOI, FieldName.URL, FieldName.COMMENT, FieldName.OWNER, FieldName.TIMESTAMP);
// contains all bibtex-field objects (BibtexSingleField)
private final Map<String, BibtexSingleField> fieldSet;
// the name with the current time stamp field, needed in case we want to change it
private String timeStampField;
// Lists of fields with special properties
private static final List<String> INTEGER_FIELDS = Arrays.asList(FieldName.CTLMAX_NAMES_FORCED_ETAL,
FieldName.CTLNAMES_SHOW_ETAL, FieldName.CTLALT_STRETCH_FACTOR, FieldName.VOLUMES, FieldName.PMID);
private static final List<String> IEEETRANBSTCTL_YES_NO_FIELDS = Arrays.asList(FieldName.CTLUSE_ARTICLE_NUMBER,
FieldName.CTLUSE_PAPER, FieldName.CTLUSE_URL, FieldName.CTLUSE_FORCED_ETAL, FieldName.CTLUSE_ALT_SPACING,
FieldName.CTLDASH_REPEATED_NAMES);
private static final List<String> BIBLATEX_DATE_FIELDS = Arrays.asList(FieldName.DATE, FieldName.EVENTDATE,
FieldName.ORIGDATE, FieldName.URLDATE);
private static final List<String> BIBLATEX_PERSON_NAME_FIELDS = Arrays.asList(FieldName.AUTHOR, FieldName.EDITOR,
FieldName.EDITORA, FieldName.EDITORB, FieldName.EDITORC, FieldName.TRANSLATOR, FieldName.ANNOTATOR,
FieldName.COMMENTATOR, FieldName.INTRODUCTION, FieldName.FOREWORD, FieldName.AFTERWORD,
FieldName.BOOKAUTHOR, FieldName.HOLDER, FieldName.SHORTAUTHOR, FieldName.SHORTEDITOR, FieldName.SORTNAME,
FieldName.NAMEADDON, FieldName.ASSIGNEE);
private static final List<String> BIBLATEX_EDITOR_TYPE_FIELDS = Arrays.asList(FieldName.EDITORTYPE,
FieldName.EDITORATYPE, FieldName.EDITORBTYPE, FieldName.EDITORCTYPE);
private static final List<String> BIBLATEX_PAGINATION_FIELDS = Arrays.asList(FieldName.PAGINATION,
FieldName.BOOKPAGINATION);
private static final List<String> BIBLATEX_JOURNAL_NAME_FIELDS = Arrays.asList(FieldName.JOURNAL,
FieldName.JOURNALTITLE, FieldName.JOURNALSUBTITLE);
private static final List<String> BIBLATEX_BOOK_NAME_FIELDS = Arrays.asList(FieldName.BOOKTITLE,
FieldName.MAINTITLE, FieldName.MAINSUBTITLE, FieldName.MAINTITLEADDON, FieldName.BOOKSUBTITLE,
FieldName.BOOKTITLEADDON);
private static final List<String> BIBLATEX_LANGUAGE_FIELDS = Arrays.asList(FieldName.LANGUAGE,
FieldName.ORIGLANGUAGE);
private static final List<String> BIBLATEX_MULTI_KEY_FIELDS = Arrays.asList(FieldName.RELATED, FieldName.ENTRYSET);
private static final List<String> VERBATIM_FIELDS = Arrays.asList(FieldName.URL, FieldName.FILE,
FieldName.CTLNAME_FORMAT_STRING, FieldName.CTLNAME_LATEX_CMD, FieldName.CTLNAME_URL_PREFIX);
private static final List<String> SPECIAL_FIELDS = Arrays.asList(SpecialField.PRINTED.getFieldName(),
SpecialField.PRIORITY.getFieldName(), SpecialField.QUALITY.getFieldName(),
SpecialField.RANKING.getFieldName(), SpecialField.READ_STATUS.getFieldName(),
SpecialField.RELEVANCE.getFieldName());
// singleton instance
private static InternalBibtexFields RUNTIME = new InternalBibtexFields(FieldName.TIMESTAMP);
/**
 * Registers every known field descriptor into {@link #fieldSet}.
 * <p>
 * Ordering matters: {@code add} overwrites any existing entry with the same
 * name, and the property loops near the end deliberately fetch an already
 * registered field (via {@code fieldSet.get}) before tagging it, falling back
 * to creating a new one only when the field is not yet known.
 *
 * @param timeStampFieldName the field name to register (and remember) as the
 *        timestamp field; stored in {@link #timeStampField}
 */
private InternalBibtexFields(String timeStampFieldName) {
    fieldSet = new HashMap<>();
    // Scratch variable reused for every field that needs extra configuration.
    BibtexSingleField dummy;
    // FIRST: all standard fields
    // These are the fields that BibTeX might want to treat, so these
    // must conform to BibTeX rules.
    add(new BibtexSingleField(FieldName.ADDRESS, true, BibtexSingleField.SMALL_W));
    // An annotation. It is not used by the standard bibliography styles,
    // but may be used by others that produce an annotated bibliography.
    // http://www.ecst.csuchico.edu/~jacobsd/bib/formats/bibtex.html
    add(new BibtexSingleField(FieldName.ANNOTE, true, BibtexSingleField.LARGE_W));
    add(new BibtexSingleField(FieldName.AUTHOR, true, BibtexSingleField.MEDIUM_W, 280));
    add(new BibtexSingleField(FieldName.BOOKTITLE, true, 175));
    add(new BibtexSingleField(FieldName.CHAPTER, true, BibtexSingleField.SMALL_W));
    dummy = new BibtexSingleField(FieldName.CROSSREF, true, BibtexSingleField.LARGE_W);
    dummy.setExtras(EnumSet.of(FieldProperty.CROSSREF, FieldProperty.SINGLE_ENTRY_LINK));
    add(dummy);
    add(new BibtexSingleField(FieldName.EDITION, true, BibtexSingleField.SMALL_W));
    add(new BibtexSingleField(FieldName.EDITOR, true, BibtexSingleField.MEDIUM_W, 280));
    dummy = new BibtexSingleField(FieldName.EPRINT, true, BibtexSingleField.SMALL_W);
    dummy.setExtras(EnumSet.of(FieldProperty.EPRINT));
    add(dummy);
    add(new BibtexSingleField(FieldName.HOWPUBLISHED, true, BibtexSingleField.MEDIUM_W));
    add(new BibtexSingleField(FieldName.INSTITUTION, true, BibtexSingleField.MEDIUM_W));
    dummy = new BibtexSingleField(FieldName.ISBN, true, BibtexSingleField.SMALL_W);
    dummy.setExtras(EnumSet.of(FieldProperty.ISBN));
    add(dummy);
    add(new BibtexSingleField(FieldName.ISSN, true, BibtexSingleField.SMALL_W));
    dummy = new BibtexSingleField(FieldName.JOURNAL, true, BibtexSingleField.SMALL_W);
    dummy.setExtras(EnumSet.of(FieldProperty.JOURNAL_NAME));
    add(dummy);
    dummy = new BibtexSingleField(FieldName.JOURNALTITLE, true, BibtexSingleField.SMALL_W);
    dummy.setExtras(EnumSet.of(FieldProperty.JOURNAL_NAME));
    add(dummy);
    add(new BibtexSingleField(FieldName.KEY, true));
    dummy = new BibtexSingleField(FieldName.MONTH, true, BibtexSingleField.SMALL_W);
    dummy.setExtras(EnumSet.of(FieldProperty.MONTH));
    add(dummy);
    add(new BibtexSingleField(FieldName.NOTE, true, BibtexSingleField.MEDIUM_W));
    add(new BibtexSingleField(FieldName.NUMBER, true, BibtexSingleField.SMALL_W, 60).setNumeric(true));
    add(new BibtexSingleField(FieldName.ORGANIZATION, true, BibtexSingleField.MEDIUM_W));
    add(new BibtexSingleField(FieldName.PAGES, true, BibtexSingleField.SMALL_W));
    add(new BibtexSingleField(FieldName.PUBLISHER, true, BibtexSingleField.MEDIUM_W));
    add(new BibtexSingleField(FieldName.SCHOOL, true, BibtexSingleField.MEDIUM_W));
    add(new BibtexSingleField(FieldName.SERIES, true, BibtexSingleField.SMALL_W));
    add(new BibtexSingleField(FieldName.TITLE, true, 400));
    dummy = new BibtexSingleField(FieldName.TYPE, true, BibtexSingleField.SMALL_W);
    dummy.getFieldProperties().add(FieldProperty.TYPE);
    add(dummy);
    add(new BibtexSingleField(FieldName.LANGUAGE, true, BibtexSingleField.SMALL_W));
    add(new BibtexSingleField(FieldName.VOLUME, true, BibtexSingleField.SMALL_W, 60).setNumeric(true));
    add(new BibtexSingleField(FieldName.YEAR, true, BibtexSingleField.SMALL_W, 60).setNumeric(true));
    // custom fields not displayed at editor, but as columns in the UI
    // (hidden/private until updateSpecialFields() flips them)
    for (String fieldName : SPECIAL_FIELDS) {
        dummy = new BibtexSingleField(fieldName, false);
        dummy.setPrivate();
        dummy.setWriteable(false);
        dummy.setDisplayable(false);
        add(dummy);
    }
    // some semi-standard fields
    dummy = new BibtexSingleField(BibEntry.KEY_FIELD, true);
    dummy.setPrivate();
    add(dummy);
    dummy = new BibtexSingleField(FieldName.DOI, true, BibtexSingleField.SMALL_W);
    dummy.setExtras(EnumSet.of(FieldProperty.DOI));
    add(dummy);
    add(new BibtexSingleField(FieldName.EID, true, BibtexSingleField.SMALL_W));
    dummy = new BibtexSingleField(FieldName.DATE, true);
    dummy.setExtras(EnumSet.of(FieldProperty.DATE));
    add(dummy);
    add(new BibtexSingleField(FieldName.PMID, false, BibtexSingleField.SMALL_W, 60).setNumeric(true));
    // additional fields ------------------------------------------------------
    add(new BibtexSingleField(FieldName.LOCATION, false));
    add(new BibtexSingleField(FieldName.ABSTRACT, false, BibtexSingleField.LARGE_W, 400));
    dummy = new BibtexSingleField(FieldName.URL, false, BibtexSingleField.SMALL_W);
    dummy.setExtras(EnumSet.of(FieldProperty.EXTERNAL, FieldProperty.VERBATIM));
    add(dummy);
    add(new BibtexSingleField(FieldName.COMMENT, false, BibtexSingleField.MEDIUM_W));
    add(new BibtexSingleField(FieldName.KEYWORDS, false, BibtexSingleField.SMALL_W));
    dummy = new BibtexSingleField(FieldName.FILE, false);
    dummy.setExtras(EnumSet.of(FieldProperty.FILE_EDITOR, FieldProperty.VERBATIM));
    add(dummy);
    dummy = new BibtexSingleField(FieldName.RELATED, false);
    dummy.setExtras(EnumSet.of(FieldProperty.MULTIPLE_ENTRY_LINK));
    add(dummy);
    // some biblatex fields
    dummy = new BibtexSingleField(FieldName.GENDER, true, BibtexSingleField.SMALL_W);
    dummy.getFieldProperties().add(FieldProperty.GENDER);
    add(dummy);
    dummy = new BibtexSingleField(FieldName.PUBSTATE, true, BibtexSingleField.SMALL_W);
    dummy.getFieldProperties().add(FieldProperty.PUBLICATION_STATE);
    add(dummy);
    // some internal fields ----------------------------------------------
    dummy = new BibtexSingleField(FieldName.NUMBER_COL, false, 32);
    dummy.setPrivate();
    dummy.setWriteable(false);
    dummy.setDisplayable(false);
    add(dummy);
    dummy = new BibtexSingleField(FieldName.OWNER, false, BibtexSingleField.SMALL_W);
    dummy.setExtras(EnumSet.of(FieldProperty.OWNER));
    dummy.setPrivate();
    add(dummy);
    // Remember which field currently acts as the timestamp field
    // (updateTimeStampField() can rename it later).
    timeStampField = timeStampFieldName;
    dummy = new BibtexSingleField(timeStampFieldName, false, BibtexSingleField.SMALL_W);
    dummy.setExtras(EnumSet.of(FieldProperty.DATE));
    dummy.setPrivate();
    add(dummy);
    dummy = new BibtexSingleField(BibEntry.TYPE_HEADER, false, 75);
    dummy.setPrivate();
    add(dummy);
    dummy = new BibtexSingleField(FieldName.SEARCH_INTERNAL, false);
    dummy.setPrivate();
    dummy.setWriteable(false);
    dummy.setDisplayable(false);
    add(dummy);
    dummy = new BibtexSingleField(FieldName.GROUPSEARCH_INTERNAL, false);
    dummy.setPrivate();
    dummy.setWriteable(false);
    dummy.setDisplayable(false);
    add(dummy);
    dummy = new BibtexSingleField(FieldName.MARKED_INTERNAL, false);
    dummy.setPrivate();
    dummy.setWriteable(true); // This field must be written to file!
    dummy.setDisplayable(false);
    add(dummy);
    // IEEEtranBSTCTL fields that should be "yes" or "no"
    for (String yesNoField : IEEETRANBSTCTL_YES_NO_FIELDS) {
        dummy = new BibtexSingleField(yesNoField, false);
        dummy.setExtras(EnumSet.of(FieldProperty.YES_NO));
        add(dummy);
    }
    // Fields that should be an integer value
    // (reuse the already registered descriptor when one exists)
    for (String numericField : INTEGER_FIELDS) {
        BibtexSingleField field = fieldSet.get(numericField);
        if (field == null) {
            field = new BibtexSingleField(numericField, true, BibtexSingleField.SMALL_W).setNumeric(true);
        }
        field.getFieldProperties().add(FieldProperty.INTEGER);
        add(field);
    }
    // Fields that should be treated as verbatim, so no formatting requirements
    for (String fieldText : VERBATIM_FIELDS) {
        BibtexSingleField field = fieldSet.get(fieldText);
        if (field == null) {
            field = new BibtexSingleField(fieldText, true, BibtexSingleField.SMALL_W);
        }
        field.getFieldProperties().add(FieldProperty.VERBATIM);
        add(field);
    }
    // Set all fields with person names
    for (String fieldText : BIBLATEX_PERSON_NAME_FIELDS) {
        BibtexSingleField field = fieldSet.get(fieldText);
        if (field == null) {
            field = new BibtexSingleField(fieldText, true, BibtexSingleField.SMALL_W);
        }
        field.getFieldProperties().add(FieldProperty.PERSON_NAMES);
        add(field);
    }
    // Set all fields which should contain editor types
    for (String fieldText : BIBLATEX_EDITOR_TYPE_FIELDS) {
        BibtexSingleField field = fieldSet.get(fieldText);
        if (field == null) {
            field = new BibtexSingleField(fieldText, true, BibtexSingleField.SMALL_W);
        }
        field.getFieldProperties().add(FieldProperty.EDITOR_TYPE);
        add(field);
    }
    // Set all fields which are pagination fields
    for (String fieldText : BIBLATEX_PAGINATION_FIELDS) {
        BibtexSingleField field = fieldSet.get(fieldText);
        if (field == null) {
            field = new BibtexSingleField(fieldText, true, BibtexSingleField.SMALL_W);
        }
        field.getFieldProperties().add(FieldProperty.PAGINATION);
        add(field);
    }
    // Set all fields with dates (tagged both DATE and ISO_DATE)
    for (String fieldText : BIBLATEX_DATE_FIELDS) {
        BibtexSingleField field = fieldSet.get(fieldText);
        if (field == null) {
            field = new BibtexSingleField(fieldText, true, BibtexSingleField.SMALL_W);
        }
        field.getFieldProperties().add(FieldProperty.DATE);
        field.getFieldProperties().add(FieldProperty.ISO_DATE);
        add(field);
    }
    // Set all fields with journal names
    for (String fieldText : BIBLATEX_JOURNAL_NAME_FIELDS) {
        BibtexSingleField field = fieldSet.get(fieldText);
        if (field == null) {
            field = new BibtexSingleField(fieldText, true, BibtexSingleField.SMALL_W);
        }
        field.getFieldProperties().add(FieldProperty.JOURNAL_NAME);
        add(field);
    }
    // Set all fields with book names
    for (String fieldText : BIBLATEX_BOOK_NAME_FIELDS) {
        BibtexSingleField field = fieldSet.get(fieldText);
        if (field == null) {
            field = new BibtexSingleField(fieldText, true, BibtexSingleField.SMALL_W);
        }
        field.getFieldProperties().add(FieldProperty.BOOK_NAME);
        add(field);
    }
    // Set all fields containing a language
    for (String fieldText : BIBLATEX_LANGUAGE_FIELDS) {
        BibtexSingleField field = fieldSet.get(fieldText);
        if (field == null) {
            field = new BibtexSingleField(fieldText, true, BibtexSingleField.SMALL_W);
        }
        field.getFieldProperties().add(FieldProperty.LANGUAGE);
        add(field);
    }
    // Set all fields with multiple key links
    for (String fieldText : BIBLATEX_MULTI_KEY_FIELDS) {
        BibtexSingleField field = fieldSet.get(fieldText);
        if (field == null) {
            field = new BibtexSingleField(fieldText, true, BibtexSingleField.SMALL_W);
        }
        field.getFieldProperties().add(FieldProperty.MULTIPLE_ENTRY_LINK);
        add(field);
    }
}
/**
 * Renames the currently registered timestamp field (when present) and
 * records the new name as the active timestamp field.
 */
public static void updateTimeStampField(String timeStampFieldName) {
    getField(RUNTIME.timeStampField).ifPresent(timeStamp -> {
        timeStamp.setName(timeStampFieldName);
        RUNTIME.timeStampField = timeStampFieldName;
    });
}
/**
 * Makes every special field public/writeable/displayable when
 * {@code serializeSpecialFields} is true, and private/hidden otherwise.
 */
public static void updateSpecialFields(boolean serializeSpecialFields) {
    SPECIAL_FIELDS.forEach(fieldName -> getField(fieldName).ifPresent(specialField -> {
        if (serializeSpecialFields) {
            specialField.setPublic();
        } else {
            specialField.setPrivate();
        }
        specialField.setWriteable(serializeSpecialFields);
        specialField.setDisplayable(serializeSpecialFields);
    }));
}
/**
 * Read the "numericFields" string array from preferences, and activate numeric
 * sorting for all fields listed in the array. If an unknown field name is included,
 * add a field descriptor for the new field.
 *
 * @param numFields names of fields that should sort numerically; may be empty
 */
public static void setNumericFields(List<String> numFields) {
    if (numFields.isEmpty()) {
        return;
    }
    // Build a Set of field names for the fields that should be sorted numerically:
    Set<String> numericNames = new HashSet<>(numFields);
    // Look through all registered fields, and activate numeric sorting if necessary.
    // remove() doubles as the membership test and clears registered names from the
    // set, so only genuinely unknown fields remain afterwards.
    for (Map.Entry<String, BibtexSingleField> entry : InternalBibtexFields.RUNTIME.fieldSet.entrySet()) {
        if (numericNames.remove(entry.getKey())) {
            entry.getValue().setNumeric(true);
        }
    }
    // Whatever is left must be non-standard fields. Add descriptors for them:
    for (String fieldName : numericNames) {
        BibtexSingleField field = new BibtexSingleField(fieldName, false);
        field.setNumeric(true);
        InternalBibtexFields.RUNTIME.fieldSet.put(fieldName, field);
    }
}
/**
 * Inserts a field into the internal map, keyed (and overwriting any previous
 * descriptor) by its field name.
 *
 * @param field descriptor to register; must not be null
 * @throws NullPointerException if {@code field} is null
 */
private void add(BibtexSingleField field) {
    // The original "// field == null check" comment documented a check that
    // was never written; fail fast with a message instead of an anonymous NPE.
    if (field == null) {
        throw new NullPointerException("field must not be null");
    }
    fieldSet.put(field.getFieldName(), field);
}
// --------------------------------------------------------------------------
// the "static area"
// --------------------------------------------------------------------------
/**
 * Looks up a field descriptor by (case-insensitive) name.
 *
 * @param name field name, may be null
 * @return the descriptor, or empty when the name is null or unregistered
 */
private static Optional<BibtexSingleField> getField(String name) {
    if (name == null) {
        return Optional.empty();
    }
    String normalizedName = name.toLowerCase(Locale.ENGLISH);
    return Optional.ofNullable(InternalBibtexFields.RUNTIME.fieldSet.get(normalizedName));
}
/** Returns the registered properties of the named field, or an empty set for unknown fields. */
public static Set<FieldProperty> getFieldProperties(String name) {
    return InternalBibtexFields.getField(name)
            .map(BibtexSingleField::getFieldProperties)
            .orElseGet(() -> EnumSet.noneOf(FieldProperty.class));
}
public static double getFieldWeight(String name) {
Optional<BibtexSingleField> sField = InternalBibtexFields.getField(name);
if (sField.isPresent()) {
return sField.get().getWeight();
}
return BibtexSingleField.DEFAULT_FIELD_WEIGHT;
}
public static void setFieldWeight(String fieldName, double weight) {
Optional<BibtexSingleField> sField = InternalBibtexFields.getField(fieldName);
if (sField.isPresent()) {
sField.get().setWeight(weight);
}
}
public static int getFieldLength(String name) {
Optional<BibtexSingleField> sField = InternalBibtexFields.getField(name);
if (sField.isPresent()) {
return sField.get().getLength();
}
return BibtexSingleField.DEFAULT_FIELD_LENGTH;
}
public static boolean isWriteableField(String field) {
Optional<BibtexSingleField> sField = InternalBibtexFields.getField(field);
return !sField.isPresent() || sField.get().isWriteable();
}
public static boolean isDisplayableField(String field) {
Optional<BibtexSingleField> sField = InternalBibtexFields.getField(field);
return !sField.isPresent() || sField.get().isDisplayable();
}
/**
* Returns true if the given field is a standard Bibtex field.
*
* @param field a <code>String</code> value
* @return a <code>boolean</code> value
*/
public static boolean isStandardField(String field) {
Optional<BibtexSingleField> sField = InternalBibtexFields.getField(field);
return sField.isPresent() && sField.get().isStandard();
}
public static boolean isNumeric(String field) {
Optional<BibtexSingleField> sField = InternalBibtexFields.getField(field);
return sField.isPresent() && sField.get().isNumeric();
}
public static boolean isInternalField(String field) {
return field.startsWith("__");
}
/**
* returns a List with all fieldnames
*/
public static List<String> getAllPublicFieldNames() {
// collect all public fields
List<String> publicFields = new ArrayList<>();
for (BibtexSingleField sField : InternalBibtexFields.RUNTIME.fieldSet.values()) {
if (!sField.isPrivate()) {
publicFields.add(sField.getFieldName());
// or export the complete BibtexSingleField ?
// BibtexSingleField.toString() { return fieldname ; }
}
}
// sort the entries
Collections.sort(publicFields);
return publicFields;
}
/**
* returns a List with all fieldnames incl. internal fieldnames
*/
public static List<String> getAllPublicAndInternalFieldNames() {
//add the internal field names to public fields
List<String> publicAndInternalFields = new ArrayList<>();
publicAndInternalFields.addAll(InternalBibtexFields.getAllPublicFieldNames());
publicAndInternalFields.add(FieldName.INTERNAL_ALL_FIELD);
publicAndInternalFields.add(FieldName.INTERNAL_ALL_TEXT_FIELDS_FIELD);
// sort the entries
Collections.sort(publicAndInternalFields);
return publicAndInternalFields;
}
public static List<String> getJournalNameFields() {
return InternalBibtexFields.getAllPublicFieldNames().stream().filter(
fieldName -> InternalBibtexFields.getFieldProperties(fieldName).contains(FieldProperty.JOURNAL_NAME))
.collect(Collectors.toList());
}
public static List<String> getBookNameFields() {
return InternalBibtexFields.getAllPublicFieldNames().stream()
.filter(fieldName -> InternalBibtexFields.getFieldProperties(fieldName).contains(FieldProperty.BOOK_NAME))
.collect(Collectors.toList());
}
public static List<String> getPersonNameFields() {
return InternalBibtexFields.getAllPublicFieldNames().stream().filter(
fieldName -> InternalBibtexFields.getFieldProperties(fieldName).contains(FieldProperty.PERSON_NAMES))
.collect(Collectors.toList());
}
    /**
     * Returns the IEEEtran BSTctl yes/no field names.
     * NOTE(review): the shared constant list is returned directly — presumably it
     * is unmodifiable; confirm, otherwise this leaks internal mutable state.
     */
    public static List<String> getIEEETranBSTctlYesNoFields() {
        return IEEETRANBSTCTL_YES_NO_FIELDS;
    }
}
| |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.gamelift.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
* Properties describing a custom game build.
* </p>
* <p>
* <b>Related operations</b>
* </p>
* <ul>
* <li>
* <p>
* <a>CreateBuild</a>
* </p>
* </li>
* <li>
* <p>
* <a>ListBuilds</a>
* </p>
* </li>
* <li>
* <p>
* <a>DescribeBuild</a>
* </p>
* </li>
* <li>
* <p>
* <a>UpdateBuild</a>
* </p>
* </li>
* <li>
* <p>
* <a>DeleteBuild</a>
* </p>
* </li>
* </ul>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/gamelift-2015-10-01/Build" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class Build implements Serializable, Cloneable, StructuredPojo {
    // NOTE: generated by the AWS SDK code generator (see @Generated above).
    // Regenerate from the service model rather than hand-editing this class.
    /**
     * <p>
     * Unique identifier for a build.
     * </p>
     */
    private String buildId;
    /**
     * <p>
     * Descriptive label that is associated with a build. Build names do not need to be unique. It can be set using
     * <a>CreateBuild</a> or <a>UpdateBuild</a>.
     * </p>
     */
    private String name;
    /**
     * <p>
     * Version that is associated with a build or script. Version strings do not need to be unique. This value can be
     * set using <a>CreateBuild</a> or <a>UpdateBuild</a>.
     * </p>
     */
    private String version;
    /**
     * <p>
     * Current status of the build.
     * </p>
     * <p>
     * Possible build statuses include the following:
     * </p>
     * <ul>
     * <li>
     * <p>
     * <b>INITIALIZED</b> -- A new build has been defined, but no files have been uploaded. You cannot create fleets for
     * builds that are in this status. When a build is successfully created, the build status is set to this value.
     * </p>
     * </li>
     * <li>
     * <p>
     * <b>READY</b> -- The game build has been successfully uploaded. You can now create new fleets for this build.
     * </p>
     * </li>
     * <li>
     * <p>
     * <b>FAILED</b> -- The game build upload failed. You cannot create new fleets for this build.
     * </p>
     * </li>
     * </ul>
     */
    private String status;
    /**
     * <p>
     * File size of the uploaded game build, expressed in bytes. When the build status is <code>INITIALIZED</code>, this
     * value is 0.
     * </p>
     */
    private Long sizeOnDisk;
    /**
     * <p>
     * Operating system that the game server binaries are built to run on. This value determines the type of fleet
     * resources that you can use for this build.
     * </p>
     */
    private String operatingSystem;
    /**
     * <p>
     * Time stamp indicating when this data object was created. Format is a number expressed in Unix time as
     * milliseconds (for example "1469498468.057").
     * </p>
     */
    private java.util.Date creationTime;
    /**
     * <p>
     * Unique identifier for a build.
     * </p>
     *
     * @param buildId
     *        Unique identifier for a build.
     */
    public void setBuildId(String buildId) {
        this.buildId = buildId;
    }
    /**
     * <p>
     * Unique identifier for a build.
     * </p>
     *
     * @return Unique identifier for a build.
     */
    public String getBuildId() {
        return this.buildId;
    }
    /**
     * <p>
     * Unique identifier for a build.
     * </p>
     *
     * @param buildId
     *        Unique identifier for a build.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Build withBuildId(String buildId) {
        setBuildId(buildId);
        return this;
    }
    /**
     * <p>
     * Descriptive label that is associated with a build. Build names do not need to be unique. It can be set using
     * <a>CreateBuild</a> or <a>UpdateBuild</a>.
     * </p>
     *
     * @param name
     *        Descriptive label that is associated with a build. Build names do not need to be unique. It can be set
     *        using <a>CreateBuild</a> or <a>UpdateBuild</a>.
     */
    public void setName(String name) {
        this.name = name;
    }
    /**
     * <p>
     * Descriptive label that is associated with a build. Build names do not need to be unique. It can be set using
     * <a>CreateBuild</a> or <a>UpdateBuild</a>.
     * </p>
     *
     * @return Descriptive label that is associated with a build. Build names do not need to be unique. It can be set
     *         using <a>CreateBuild</a> or <a>UpdateBuild</a>.
     */
    public String getName() {
        return this.name;
    }
    /**
     * <p>
     * Descriptive label that is associated with a build. Build names do not need to be unique. It can be set using
     * <a>CreateBuild</a> or <a>UpdateBuild</a>.
     * </p>
     *
     * @param name
     *        Descriptive label that is associated with a build. Build names do not need to be unique. It can be set
     *        using <a>CreateBuild</a> or <a>UpdateBuild</a>.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Build withName(String name) {
        setName(name);
        return this;
    }
    /**
     * <p>
     * Version that is associated with a build or script. Version strings do not need to be unique. This value can be
     * set using <a>CreateBuild</a> or <a>UpdateBuild</a>.
     * </p>
     *
     * @param version
     *        Version that is associated with a build or script. Version strings do not need to be unique. This value
     *        can be set using <a>CreateBuild</a> or <a>UpdateBuild</a>.
     */
    public void setVersion(String version) {
        this.version = version;
    }
    /**
     * <p>
     * Version that is associated with a build or script. Version strings do not need to be unique. This value can be
     * set using <a>CreateBuild</a> or <a>UpdateBuild</a>.
     * </p>
     *
     * @return Version that is associated with a build or script. Version strings do not need to be unique. This value
     *         can be set using <a>CreateBuild</a> or <a>UpdateBuild</a>.
     */
    public String getVersion() {
        return this.version;
    }
    /**
     * <p>
     * Version that is associated with a build or script. Version strings do not need to be unique. This value can be
     * set using <a>CreateBuild</a> or <a>UpdateBuild</a>.
     * </p>
     *
     * @param version
     *        Version that is associated with a build or script. Version strings do not need to be unique. This value
     *        can be set using <a>CreateBuild</a> or <a>UpdateBuild</a>.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Build withVersion(String version) {
        setVersion(version);
        return this;
    }
    /**
     * <p>
     * Current status of the build.
     * </p>
     * <p>
     * Possible build statuses include the following:
     * </p>
     * <ul>
     * <li>
     * <p>
     * <b>INITIALIZED</b> -- A new build has been defined, but no files have been uploaded. You cannot create fleets for
     * builds that are in this status. When a build is successfully created, the build status is set to this value.
     * </p>
     * </li>
     * <li>
     * <p>
     * <b>READY</b> -- The game build has been successfully uploaded. You can now create new fleets for this build.
     * </p>
     * </li>
     * <li>
     * <p>
     * <b>FAILED</b> -- The game build upload failed. You cannot create new fleets for this build.
     * </p>
     * </li>
     * </ul>
     *
     * @param status
     *        Current status of the build.</p>
     *        <p>
     *        Possible build statuses include the following:
     *        </p>
     *        <ul>
     *        <li>
     *        <p>
     *        <b>INITIALIZED</b> -- A new build has been defined, but no files have been uploaded. You cannot create
     *        fleets for builds that are in this status. When a build is successfully created, the build status is set
     *        to this value.
     *        </p>
     *        </li>
     *        <li>
     *        <p>
     *        <b>READY</b> -- The game build has been successfully uploaded. You can now create new fleets for this
     *        build.
     *        </p>
     *        </li>
     *        <li>
     *        <p>
     *        <b>FAILED</b> -- The game build upload failed. You cannot create new fleets for this build.
     *        </p>
     *        </li>
     * @see BuildStatus
     */
    public void setStatus(String status) {
        this.status = status;
    }
    /**
     * <p>
     * Current status of the build.
     * </p>
     * <p>
     * Possible build statuses include the following:
     * </p>
     * <ul>
     * <li>
     * <p>
     * <b>INITIALIZED</b> -- A new build has been defined, but no files have been uploaded. You cannot create fleets for
     * builds that are in this status. When a build is successfully created, the build status is set to this value.
     * </p>
     * </li>
     * <li>
     * <p>
     * <b>READY</b> -- The game build has been successfully uploaded. You can now create new fleets for this build.
     * </p>
     * </li>
     * <li>
     * <p>
     * <b>FAILED</b> -- The game build upload failed. You cannot create new fleets for this build.
     * </p>
     * </li>
     * </ul>
     *
     * @return Current status of the build.</p>
     *         <p>
     *         Possible build statuses include the following:
     *         </p>
     *         <ul>
     *         <li>
     *         <p>
     *         <b>INITIALIZED</b> -- A new build has been defined, but no files have been uploaded. You cannot create
     *         fleets for builds that are in this status. When a build is successfully created, the build status is set
     *         to this value.
     *         </p>
     *         </li>
     *         <li>
     *         <p>
     *         <b>READY</b> -- The game build has been successfully uploaded. You can now create new fleets for this
     *         build.
     *         </p>
     *         </li>
     *         <li>
     *         <p>
     *         <b>FAILED</b> -- The game build upload failed. You cannot create new fleets for this build.
     *         </p>
     *         </li>
     * @see BuildStatus
     */
    public String getStatus() {
        return this.status;
    }
    /**
     * <p>
     * Current status of the build.
     * </p>
     * <p>
     * Possible build statuses include the following:
     * </p>
     * <ul>
     * <li>
     * <p>
     * <b>INITIALIZED</b> -- A new build has been defined, but no files have been uploaded. You cannot create fleets for
     * builds that are in this status. When a build is successfully created, the build status is set to this value.
     * </p>
     * </li>
     * <li>
     * <p>
     * <b>READY</b> -- The game build has been successfully uploaded. You can now create new fleets for this build.
     * </p>
     * </li>
     * <li>
     * <p>
     * <b>FAILED</b> -- The game build upload failed. You cannot create new fleets for this build.
     * </p>
     * </li>
     * </ul>
     *
     * @param status
     *        Current status of the build.</p>
     *        <p>
     *        Possible build statuses include the following:
     *        </p>
     *        <ul>
     *        <li>
     *        <p>
     *        <b>INITIALIZED</b> -- A new build has been defined, but no files have been uploaded. You cannot create
     *        fleets for builds that are in this status. When a build is successfully created, the build status is set
     *        to this value.
     *        </p>
     *        </li>
     *        <li>
     *        <p>
     *        <b>READY</b> -- The game build has been successfully uploaded. You can now create new fleets for this
     *        build.
     *        </p>
     *        </li>
     *        <li>
     *        <p>
     *        <b>FAILED</b> -- The game build upload failed. You cannot create new fleets for this build.
     *        </p>
     *        </li>
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see BuildStatus
     */
    public Build withStatus(String status) {
        setStatus(status);
        return this;
    }
    /**
     * <p>
     * Current status of the build.
     * </p>
     * <p>
     * Possible build statuses include the following:
     * </p>
     * <ul>
     * <li>
     * <p>
     * <b>INITIALIZED</b> -- A new build has been defined, but no files have been uploaded. You cannot create fleets for
     * builds that are in this status. When a build is successfully created, the build status is set to this value.
     * </p>
     * </li>
     * <li>
     * <p>
     * <b>READY</b> -- The game build has been successfully uploaded. You can now create new fleets for this build.
     * </p>
     * </li>
     * <li>
     * <p>
     * <b>FAILED</b> -- The game build upload failed. You cannot create new fleets for this build.
     * </p>
     * </li>
     * </ul>
     *
     * @param status
     *        Current status of the build.</p>
     *        <p>
     *        Possible build statuses include the following:
     *        </p>
     *        <ul>
     *        <li>
     *        <p>
     *        <b>INITIALIZED</b> -- A new build has been defined, but no files have been uploaded. You cannot create
     *        fleets for builds that are in this status. When a build is successfully created, the build status is set
     *        to this value.
     *        </p>
     *        </li>
     *        <li>
     *        <p>
     *        <b>READY</b> -- The game build has been successfully uploaded. You can now create new fleets for this
     *        build.
     *        </p>
     *        </li>
     *        <li>
     *        <p>
     *        <b>FAILED</b> -- The game build upload failed. You cannot create new fleets for this build.
     *        </p>
     *        </li>
     * @see BuildStatus
     */
    public void setStatus(BuildStatus status) {
        // Delegates to withStatus(BuildStatus), which converts the enum via
        // toString(); passing null therefore throws a NullPointerException there.
        withStatus(status);
    }
    /**
     * <p>
     * Current status of the build.
     * </p>
     * <p>
     * Possible build statuses include the following:
     * </p>
     * <ul>
     * <li>
     * <p>
     * <b>INITIALIZED</b> -- A new build has been defined, but no files have been uploaded. You cannot create fleets for
     * builds that are in this status. When a build is successfully created, the build status is set to this value.
     * </p>
     * </li>
     * <li>
     * <p>
     * <b>READY</b> -- The game build has been successfully uploaded. You can now create new fleets for this build.
     * </p>
     * </li>
     * <li>
     * <p>
     * <b>FAILED</b> -- The game build upload failed. You cannot create new fleets for this build.
     * </p>
     * </li>
     * </ul>
     *
     * @param status
     *        Current status of the build.</p>
     *        <p>
     *        Possible build statuses include the following:
     *        </p>
     *        <ul>
     *        <li>
     *        <p>
     *        <b>INITIALIZED</b> -- A new build has been defined, but no files have been uploaded. You cannot create
     *        fleets for builds that are in this status. When a build is successfully created, the build status is set
     *        to this value.
     *        </p>
     *        </li>
     *        <li>
     *        <p>
     *        <b>READY</b> -- The game build has been successfully uploaded. You can now create new fleets for this
     *        build.
     *        </p>
     *        </li>
     *        <li>
     *        <p>
     *        <b>FAILED</b> -- The game build upload failed. You cannot create new fleets for this build.
     *        </p>
     *        </li>
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see BuildStatus
     */
    public Build withStatus(BuildStatus status) {
        this.status = status.toString();
        return this;
    }
    /**
     * <p>
     * File size of the uploaded game build, expressed in bytes. When the build status is <code>INITIALIZED</code>, this
     * value is 0.
     * </p>
     *
     * @param sizeOnDisk
     *        File size of the uploaded game build, expressed in bytes. When the build status is
     *        <code>INITIALIZED</code>, this value is 0.
     */
    public void setSizeOnDisk(Long sizeOnDisk) {
        this.sizeOnDisk = sizeOnDisk;
    }
    /**
     * <p>
     * File size of the uploaded game build, expressed in bytes. When the build status is <code>INITIALIZED</code>, this
     * value is 0.
     * </p>
     *
     * @return File size of the uploaded game build, expressed in bytes. When the build status is
     *         <code>INITIALIZED</code>, this value is 0.
     */
    public Long getSizeOnDisk() {
        return this.sizeOnDisk;
    }
    /**
     * <p>
     * File size of the uploaded game build, expressed in bytes. When the build status is <code>INITIALIZED</code>, this
     * value is 0.
     * </p>
     *
     * @param sizeOnDisk
     *        File size of the uploaded game build, expressed in bytes. When the build status is
     *        <code>INITIALIZED</code>, this value is 0.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Build withSizeOnDisk(Long sizeOnDisk) {
        setSizeOnDisk(sizeOnDisk);
        return this;
    }
    /**
     * <p>
     * Operating system that the game server binaries are built to run on. This value determines the type of fleet
     * resources that you can use for this build.
     * </p>
     *
     * @param operatingSystem
     *        Operating system that the game server binaries are built to run on. This value determines the type of
     *        fleet resources that you can use for this build.
     * @see OperatingSystem
     */
    public void setOperatingSystem(String operatingSystem) {
        this.operatingSystem = operatingSystem;
    }
    /**
     * <p>
     * Operating system that the game server binaries are built to run on. This value determines the type of fleet
     * resources that you can use for this build.
     * </p>
     *
     * @return Operating system that the game server binaries are built to run on. This value determines the type of
     *         fleet resources that you can use for this build.
     * @see OperatingSystem
     */
    public String getOperatingSystem() {
        return this.operatingSystem;
    }
    /**
     * <p>
     * Operating system that the game server binaries are built to run on. This value determines the type of fleet
     * resources that you can use for this build.
     * </p>
     *
     * @param operatingSystem
     *        Operating system that the game server binaries are built to run on. This value determines the type of
     *        fleet resources that you can use for this build.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see OperatingSystem
     */
    public Build withOperatingSystem(String operatingSystem) {
        setOperatingSystem(operatingSystem);
        return this;
    }
    /**
     * <p>
     * Operating system that the game server binaries are built to run on. This value determines the type of fleet
     * resources that you can use for this build.
     * </p>
     *
     * @param operatingSystem
     *        Operating system that the game server binaries are built to run on. This value determines the type of
     *        fleet resources that you can use for this build.
     * @see OperatingSystem
     */
    public void setOperatingSystem(OperatingSystem operatingSystem) {
        // Delegates to withOperatingSystem(OperatingSystem), which converts the
        // enum via toString(); passing null therefore throws a NullPointerException.
        withOperatingSystem(operatingSystem);
    }
    /**
     * <p>
     * Operating system that the game server binaries are built to run on. This value determines the type of fleet
     * resources that you can use for this build.
     * </p>
     *
     * @param operatingSystem
     *        Operating system that the game server binaries are built to run on. This value determines the type of
     *        fleet resources that you can use for this build.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see OperatingSystem
     */
    public Build withOperatingSystem(OperatingSystem operatingSystem) {
        this.operatingSystem = operatingSystem.toString();
        return this;
    }
    /**
     * <p>
     * Time stamp indicating when this data object was created. Format is a number expressed in Unix time as
     * milliseconds (for example "1469498468.057").
     * </p>
     *
     * @param creationTime
     *        Time stamp indicating when this data object was created. Format is a number expressed in Unix time as
     *        milliseconds (for example "1469498468.057").
     */
    public void setCreationTime(java.util.Date creationTime) {
        this.creationTime = creationTime;
    }
    /**
     * <p>
     * Time stamp indicating when this data object was created. Format is a number expressed in Unix time as
     * milliseconds (for example "1469498468.057").
     * </p>
     *
     * @return Time stamp indicating when this data object was created. Format is a number expressed in Unix time as
     *         milliseconds (for example "1469498468.057").
     */
    public java.util.Date getCreationTime() {
        return this.creationTime;
    }
    /**
     * <p>
     * Time stamp indicating when this data object was created. Format is a number expressed in Unix time as
     * milliseconds (for example "1469498468.057").
     * </p>
     *
     * @param creationTime
     *        Time stamp indicating when this data object was created. Format is a number expressed in Unix time as
     *        milliseconds (for example "1469498468.057").
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Build withCreationTime(java.util.Date creationTime) {
        setCreationTime(creationTime);
        return this;
    }
    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Only non-null properties are appended.
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getBuildId() != null)
            sb.append("BuildId: ").append(getBuildId()).append(",");
        if (getName() != null)
            sb.append("Name: ").append(getName()).append(",");
        if (getVersion() != null)
            sb.append("Version: ").append(getVersion()).append(",");
        if (getStatus() != null)
            sb.append("Status: ").append(getStatus()).append(",");
        if (getSizeOnDisk() != null)
            sb.append("SizeOnDisk: ").append(getSizeOnDisk()).append(",");
        if (getOperatingSystem() != null)
            sb.append("OperatingSystem: ").append(getOperatingSystem()).append(",");
        if (getCreationTime() != null)
            sb.append("CreationTime: ").append(getCreationTime());
        sb.append("}");
        return sb.toString();
    }
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (obj instanceof Build == false)
            return false;
        // Generated field-by-field comparison: for each property, the XOR
        // detects the "exactly one side is null" case, then the non-null
        // values are compared with equals().
        Build other = (Build) obj;
        if (other.getBuildId() == null ^ this.getBuildId() == null)
            return false;
        if (other.getBuildId() != null && other.getBuildId().equals(this.getBuildId()) == false)
            return false;
        if (other.getName() == null ^ this.getName() == null)
            return false;
        if (other.getName() != null && other.getName().equals(this.getName()) == false)
            return false;
        if (other.getVersion() == null ^ this.getVersion() == null)
            return false;
        if (other.getVersion() != null && other.getVersion().equals(this.getVersion()) == false)
            return false;
        if (other.getStatus() == null ^ this.getStatus() == null)
            return false;
        if (other.getStatus() != null && other.getStatus().equals(this.getStatus()) == false)
            return false;
        if (other.getSizeOnDisk() == null ^ this.getSizeOnDisk() == null)
            return false;
        if (other.getSizeOnDisk() != null && other.getSizeOnDisk().equals(this.getSizeOnDisk()) == false)
            return false;
        if (other.getOperatingSystem() == null ^ this.getOperatingSystem() == null)
            return false;
        if (other.getOperatingSystem() != null && other.getOperatingSystem().equals(this.getOperatingSystem()) == false)
            return false;
        if (other.getCreationTime() == null ^ this.getCreationTime() == null)
            return false;
        if (other.getCreationTime() != null && other.getCreationTime().equals(this.getCreationTime()) == false)
            return false;
        return true;
    }
    @Override
    public int hashCode() {
        // Standard 31-based rolling hash over all properties; null contributes 0.
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + ((getBuildId() == null) ? 0 : getBuildId().hashCode());
        hashCode = prime * hashCode + ((getName() == null) ? 0 : getName().hashCode());
        hashCode = prime * hashCode + ((getVersion() == null) ? 0 : getVersion().hashCode());
        hashCode = prime * hashCode + ((getStatus() == null) ? 0 : getStatus().hashCode());
        hashCode = prime * hashCode + ((getSizeOnDisk() == null) ? 0 : getSizeOnDisk().hashCode());
        hashCode = prime * hashCode + ((getOperatingSystem() == null) ? 0 : getOperatingSystem().hashCode());
        hashCode = prime * hashCode + ((getCreationTime() == null) ? 0 : getCreationTime().hashCode());
        return hashCode;
    }
    @Override
    public Build clone() {
        // Shallow copy via Object.clone(); all fields are immutable or treated as such.
        try {
            return (Build) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        // Wire-format serialization is delegated to the generated marshaller singleton.
        com.amazonaws.services.gamelift.model.transform.BuildMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| |
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package restaurant.redland;
import global.BusinessAgent;
import global.actions.Action;
import global.roles.Role;
import interfaces.Building;
import interfaces.Person;
import restaurant.redland.UtilityClasses.*;
import restaurant.redland.interfaces.*;
import restaurant.redland.roles.RedlandCashierRole;
import restaurant.redland.roles.RedlandCookRole;
import restaurant.redland.roles.RedlandCustomerRole;
import restaurant.redland.roles.RedlandHostRole;
import restaurant.redland.roles.RedlandWaiterRole;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import market.MarketAgent;
import market.TruckDriverRole;
import market.MarketAgent.ManagerState;
import market.MarketAgent.MarketState;
import market.interfaces.Market;
import bank.BankAgent;
/**Redland Restaurant
* @author Redland
*/
public class RedlandRestaurantAgent extends BusinessAgent implements Building {
/** Data */
public int currentTime;
public String name;
public float balance;
public float startingWage;
public boolean isOpen;
public MarketAgent market;
public BankAgent bank;
public List<MyCustomer> customers;
public List<MyWaiter> waiters;
public MyHost host;
public MyCook cook;
public MyCashier cashier;
public List<Role> peopleInTheBuilding;
public enum RedlandHostState {Pending, Waiting, AskedForBreak, OnBreak, GoingOnBreak, Working, ShiftOver, NotPresent};
public enum RedlandCookState {Pending, Waiting, AskedForBreak, OnBreak, GoingOnBreak, Working, ShiftOver, NotPresent};
public enum RedlandCashierState {Pending, Waiting, AskedForBreak, OnBreak, GoingOnBreak, Working, ShiftOver, NotPresent};
public enum RedlandWaiterState {Pending, Waiting, AskedForBreak, OnBreak, GoingOnBreak, Working, ShiftOver, NotPresent};
public enum RedlandCustomerState {Pending, Waiting, BeingSeated, Seated, ReadyToOrder, AskedToOrder, Ordered, Eating, Leaving, NotPresent};
public enum RedlandRestaurantState {pending, closing, closed, opening, checkIfOpen, open}
RedlandRestaurantState state;
public RedlandRestaurantAgent( String name, MarketAgent market, BankAgent bank){
super();
this.name = name;
//initialize variables
currentTime = 0;
balance = 1000.0f;
startingWage = 10.0f;
this.market = market;
this.bank = bank;
customers = new ArrayList<MyCustomer>();
waiters = new ArrayList<MyWaiter>();
host = null;
cook = null;
cashier = null;
peopleInTheBuilding = new ArrayList<Role>();
isOpen = false;
this.state = RedlandRestaurantState.closed;
}
    /** Messages */
    /**
     * A person p has arrived at the restaurant acting in role r. New arrivals
     * are recorded; returning people have their record's state reset to
     * Pending so the scheduler processes them again.
     *
     * @param p       the arriving person
     * @param r       the role the person plays in this restaurant
     * @param actions the person's pending actions (not used by this method)
     */
    public void msgAtLocation(Person p, Role r, List<Action> actions) {
        // NOTE(review): this branch tests the RedlandCustomer interface while the
        // branches below test concrete *Role classes — confirm the asymmetry is intended.
        if( r instanceof RedlandCustomer ){
            boolean flag = true;
            for( MyCustomer customer : customers ){
                if( customer.person == p ){
                    flag = false;//to check if this is a new customer
                    customer.state = RedlandCustomerState.Pending;
                    break;
                }
            }
            if( flag ){
                customers.add( new MyCustomer( (RedlandCustomerRole) r, p ) );
            }
        }
        if( r instanceof RedlandWaiterRole ){
            boolean flag = true;
            for( MyWaiter waiter : waiters ){
                if( waiter.person == p ){
                    flag = false;//to check if this is a new employee
                    waiter.state = RedlandWaiterState.Pending;
                    break;
                }
            }
            if( flag ){
                waiters.add( new MyWaiter( (RedlandWaiterRole) r, p, startingWage ) );
            }
        }
        // Singleton staff slots: lazily create the record, mark it Pending if the
        // same person returned, otherwise replace the record with the newcomer.
        if( r instanceof RedlandHostRole ){
            if( host == null ) host = new MyHost( (RedlandHostRole) r, p, startingWage );//better way to do this?
            if( host.person == p ){
                host.state = RedlandHostState.Pending;
            }
            else host = new MyHost( (RedlandHostRole) r, p, startingWage );
        }
        if( r instanceof RedlandCookRole ){
            if( cook == null ) cook = new MyCook( (RedlandCookRole) r, p, startingWage );//better way to do this?
            if( cook.person == p ){
                cook.state = RedlandCookState.Pending;
            }
            else cook = new MyCook( (RedlandCookRole) r, p, startingWage );
        }
        if( r instanceof RedlandCashierRole ){
            if( cashier == null ) cashier = new MyCashier( (RedlandCashierRole) r, p, startingWage );//better way to do this?
            if( cashier.person == p ){
                cashier.state = RedlandCashierState.Pending;
            }
            else cashier = new MyCashier( (RedlandCashierRole) r, p, startingWage );
        }
        // Track occupancy and wake the scheduler.
        peopleInTheBuilding.add( r );
        stateChanged();
    }
    /**
     * A market delivery has arrived with the ordered items and the bill.
     * TODO(review): stub — delivery handling and bill payment are not implemented yet.
     *
     * @param order  map of item name to quantity delivered
     * @param market the market that fulfilled the order
     * @param driver the driver who delivered it
     * @param bill   amount owed for the delivery
     */
    public void msgOrderDelivered( Map<String,Integer> order, Market market, TruckDriverRole driver, float bill ){
        //TODO: finish
    }
public void msgUpdateTime( int time ) {
this.currentTime = time;
if( currentTime == startTime ) state = RedlandRestaurantState.opening;
if( currentTime == closeTime ) state = RedlandRestaurantState.closing;
if( currentTime < startTime || currentTime > closeTime ){
state = RedlandRestaurantState.closed;
isOpen = false;
}
else{
state = RedlandRestaurantState.open;
isOpen = true;
}
stateChanged();
}
public void msgPersonHasLeft( Person p, Role r ){
if( r instanceof RedlandCustomerRole ){
for( MyCustomer customer : customers ){
if( customer.person == p ){
customer.state = RedlandCustomerState.Leaving;
break;
}
}
}
if( r instanceof RedlandWaiterRole ){
for( MyWaiter waiter : waiters ){
if( waiter.person == p ){
waiter.state = RedlandWaiterState.ShiftOver;
break;
}
}
}
if( r instanceof RedlandCashierRole ){
host.state = RedlandHostState.ShiftOver;
if( balance > 1000 ){
float cashToDeposit = this.balance - 1000;//anything over 1000
host.person.AddTaskDepositEarnings( this, cashToDeposit );
this.balance = 1000.0f;
//keep track of bank account?
}
}
if( r instanceof RedlandCookRole ){
cook.state = RedlandCookState.ShiftOver;
}
if( r instanceof RedlandHostRole ){
host.state = RedlandHostState.ShiftOver;
}
peopleInTheBuilding.remove( r );
stateChanged();
}
/********** Scheduler **********/
/**
 * Agent scheduler. Rules are checked in priority order: restaurant open/close
 * transitions, host setup, then introducing newly arrived customers, waiters,
 * the cook, and the cashier to the host. Returns true when a rule fired (run
 * the scheduler again) and false when there is nothing left to do.
 */
public boolean pickAndExecuteAnAction(){
//Rules pertaining to restaurant
if( state == RedlandRestaurantState.opening ){
// Only ask once the host is on duty; 'pending' blocks re-asking every pass.
if( host.state == RedlandHostState.Working ){
AskHostIfReady();
state = RedlandRestaurantState.pending;
return true;
}
}
if( state == RedlandRestaurantState.closing ){
TellHostToClose();
//anything else?
return true;
}
//Rules pertaining to host
if( host.state == RedlandHostState.Pending ){
PrepareHostForWork();
return true;
}
//Rules pertaining to customers
for( MyCustomer customer : customers ){
if( customer.state == RedlandCustomerState.Pending ){
TellHostNewCustomer( customer );
return true;
}
}
//Rules pertaining to waiters
for( MyWaiter waiter : waiters ){
if( waiter.state == RedlandWaiterState.Pending ){
TellHostNewWaiter( waiter );
return true;
}
}
//Rules pertaining to cooks
if( cook.state == RedlandCookState.Pending ){
TellHostNewCook();
return true;
}
//Rules pertaining to cashiers
if( cashier.state == RedlandCashierState.Pending ){
TellHostNewCashier();
return true;
}
// No rule applied.
return false;
}
/********** Actions **********/
// Introduces a newly arrived customer to the host; Waiting prevents re-announcing.
private void TellHostNewCustomer( MyCustomer customer ){
customer.state = RedlandCustomerState.Waiting;
host.host.msgIWantFood( customer.customer );
}
// Registers a newly arrived waiter with the host.
private void TellHostNewWaiter( MyWaiter waiter ){
waiter.state = RedlandWaiterState.Waiting;
host.host.msgAddWaiter( waiter.waiter );
}
// Registers the cook with the host.
private void TellHostNewCook(){
cook.state = RedlandCookState.Waiting;
host.host.msgAddCook( cook.cook );
}
// Registers the cashier with the host.
private void TellHostNewCashier(){
cashier.state = RedlandCashierState.Waiting;
host.host.msgAddCashier( cashier.cashier );
}
/**
 * Puts the host on duty and hands over the current staff: the cook, the
 * cashier, and the roles of every waiter known to the restaurant.
 */
private void PrepareHostForWork(){
    // Collect the role of each known waiter for the host's starting roster.
    List<RedlandWaiterRole> waiterRoster = new ArrayList<RedlandWaiterRole>();
    for( MyWaiter w : waiters ){
        waiterRoster.add( w.waiter );
    }
    // Mark the host as working before sending the staff handoff message.
    host.state = RedlandHostState.Working;
    host.host.msgGetReadyForWork( this, cook.cook, cashier.cashier, waiterRoster );
}
// Tells the host to begin closing procedures. Not implemented yet.
private void TellHostToClose(){
}
// Asks the host whether the restaurant is ready to open. Not implemented yet.
private void AskHostIfReady(){
}
/********** Utilities **********/
// Where this restaurant is in the world.
public String getLocation() {
return this.location;
}
// The restaurant's display name.
public String getName() {
return this.name;
}
// Opening hour (world-clock units).
public int getStartTime() {
return this.startTime;
}
public void setStartTime(int t) {
this.startTime = t;
}
// Closing hour (world-clock units).
public int getCloseTime() {
return this.closeTime;
}
public void setCloseTime(int t) {
this.closeTime = t;
}
// Every role currently inside the building (customers and staff).
public List<Role> getPeopleInTheBuilding() {
return this.peopleInTheBuilding;
}
// Accessors unwrap the My* bookkeeping records to the underlying roles.
public RedlandCashierRole getCashier(){
return this.cashier.cashier;
}
public RedlandHostRole getHost(){
return this.host.host;
}
public RedlandCookRole getCook(){
return this.cook.cook;
}
// Bookkeeping record pairing a customer role with its person, money owed, and scheduler state.
private class MyCustomer{
RedlandCustomerRole customer;
Person person;
float debt;
RedlandCustomerState state;
public MyCustomer( RedlandCustomerRole customer, Person person ){
this.customer = customer;
this.person = person;
debt = 0.0f;  // no debt until the customer is billed
state = RedlandCustomerState.Pending;  // Pending => not yet introduced to the host
}
}
// Bookkeeping record for a waiter: role, person, wage, and scheduler state.
private class MyWaiter{
RedlandWaiterRole waiter;
Person person;
float wage;
RedlandWaiterState state;
public MyWaiter( RedlandWaiterRole waiter, Person person, float wage ){
this.waiter = waiter;
this.person = person;
this.wage = wage;
state = RedlandWaiterState.Pending;
}
}
// Bookkeeping record for the host: role, person, wage, and scheduler state.
private class MyHost{
RedlandHostRole host;
Person person;
float wage;
RedlandHostState state;
public MyHost( RedlandHostRole host, Person person, float wage ){
this.host = host;
this.person = person;
this.wage = wage;
state = RedlandHostState.Pending;
}
}
// Bookkeeping record for the cook: role, person, wage, and scheduler state.
private class MyCook{
RedlandCookRole cook;
Person person;
float wage;
RedlandCookState state;
public MyCook( RedlandCookRole cook, Person person, float wage ){
this.cook = cook;
this.person = person;
this.wage = wage;
state = RedlandCookState.Pending;
}
}
// Bookkeeping record for the cashier: role, person, wage, and scheduler state.
private class MyCashier{
RedlandCashierRole cashier;
Person person;
float wage;
RedlandCashierState state;
public MyCashier( RedlandCashierRole cashier, Person person, float wage ){
this.cashier = cashier;
this.person = person;
this.wage = wage;
state = RedlandCashierState.Pending;
}
}
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.wm.impl;
import com.intellij.ide.IdeEventQueue;
import com.intellij.ide.UiActivity;
import com.intellij.ide.UiActivityMonitor;
import com.intellij.internal.focus.FocusTracesAction;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.actionSystem.PlatformDataKeys;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationActivationListener;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.application.ex.ApplicationManagerEx;
import com.intellij.openapi.components.impl.ServiceManagerImpl;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.ui.popup.JBPopup;
import com.intellij.openapi.util.*;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.wm.*;
import com.intellij.openapi.wm.ex.IdeFocusTraversalPolicy;
import com.intellij.openapi.wm.ex.LayoutFocusTraversalPolicyExt;
import com.intellij.reference.SoftReference;
import com.intellij.ui.FocusTrackback;
import com.intellij.util.containers.WeakValueHashMap;
import com.intellij.util.ui.UIUtil;
import gnu.trove.TIntIntHashMap;
import gnu.trove.TIntIntProcedure;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.awt.event.FocusEvent;
import java.awt.event.KeyEvent;
import java.awt.event.WindowEvent;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.lang.ref.Reference;
import java.lang.ref.WeakReference;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.List;
public class FocusManagerImpl extends IdeFocusManager implements Disposable {
private static final Logger LOG = Logger.getInstance(FocusManagerImpl.class);
// UI activities published while focus requests / typeahead are in flight.
private static final UiActivity FOCUS = new UiActivity.Focus("awtFocusRequest");
private static final UiActivity TYPEAHEAD = new UiActivity.Focus("typeahead");
private final Application myApp;
// The focus command currently being executed (at most one at a time).
private FocusCommand myRequestFocusCmd;
// All commands queued or running; guarded by EDT access via invokeAndWaitIfNeeded.
private final List<FocusCommand> myFocusRequests = new ArrayList<FocusCommand>();
// Key events captured during a focus transfer, replayed once focus settles (typeahead).
private final List<KeyEvent> myToDispatchOnDone = new ArrayList<KeyEvent>();
// Weak ref to the last forced request; used to reject weaker competing requests.
private Reference<FocusCommand> myLastForcedRequest;
// A command deferred because the application was inactive, plus its callback.
private FocusCommand myFocusCommandOnAppActivation;
private ActionCallback myCallbackOnActivation;
private final boolean isInternalMode = ApplicationManagerEx.getApplicationEx().isInternal();
// Bounded history of focus requests for the FocusTraces debugging action.
private final LinkedList<FocusRequestInfo> myRequests = new LinkedList<FocusRequestInfo>();
private final IdeEventQueue myQueue;
private final KeyProcessorContext myKeyProcessorContext = new KeyProcessorContext();
// Monotonic stamps; bumping them expires outstanding requestors (see getTimestamp).
private long myCmdTimestamp;
private long myForcedCmdTimestamp;
private final EdtAlarm myFocusedComponentAlarm;
private final EdtAlarm myForcedFocusRequestsAlarm;
private final SimpleTimer myTimer = SimpleTimer.newInstance("FocusManager timer");
private final EdtAlarm myIdleAlarm;
// Runnables to execute once focus settles down; insertion order preserved.
private final Set<Runnable> myIdleRequests = new LinkedHashSet<Runnable>();
private boolean myFlushWasDelayedToFixFocus;
// One-shot runnable used to re-establish a valid focus owner.
private ExpirableRunnable myFocusRevalidator;
private final Set<FurtherRequestor> myValidFurtherRequestors = new HashSet<FurtherRequestor>();
private final Set<ActionCallback> myTypeAheadRequestors = new HashSet<ActionCallback>();
private final UiActivityMonitor myActivityMonitor;
private boolean myTypeaheadEnabled = true;
// Modality snapshot taken when the last forced request ran (see isUnforcedRequestAllowed).
private int myModalityStateForLastForcedRequest;
// Periodic EDT task: flushes idle requests when focus has settled, otherwise
// tries a pending focus revalidation and re-arms the idle alarm.
private class IdleRunnable extends EdtRunnable {
@Override
public void runEdt() {
if (canFlushIdleRequests()) {
flushIdleRequests();
}
else {
// Try to restore a valid focus owner; if that made transfer ready, flush now.
if (processFocusRevalidation()) {
if (isFocusTransferReady()) {
flushIdleRequests();
}
}
restartIdleAlarm();
}
}
}
// True when idle requests may run now: focus is settled, there is work queued,
// no focus event is being dispatched, and (if there is no focus owner) no
// requestor/revalidator is still expected to produce one.
private boolean canFlushIdleRequests() {
Component focusOwner = getFocusOwner();
return isFocusTransferReady()
&& !isIdleQueueEmpty()
&& !IdeEventQueue.getInstance().isDispatchingFocusEvent()
&& !(focusOwner == null && (!myValidFurtherRequestors.isEmpty() || myFocusRevalidator != null && !myFocusRevalidator.isExpired()));
}
// Last focused component per frame (weak values: components may be disposed).
private final Map<IdeFrame, Component> myLastFocused = new WeakValueHashMap<IdeFrame, Component>();
// Focus owner captured when the application was deactivated, per frame.
private final Map<IdeFrame, Component> myLastFocusedAtDeactivation = new WeakValueHashMap<IdeFrame, Component>();
// Non-null while runOnOwnContext() executes; overrides focus-owner lookups.
private DataContext myRunContext;
// Per-modality-level nesting counter of flushIdleRequests (see incFlushingRequests).
private final TIntIntHashMap myModalityCount2FlushCount = new TIntIntHashMap();
private IdeFrame myLastFocusedFrame;
@SuppressWarnings("UnusedParameters") // the dependencies are needed to ensure correct loading order
public FocusManagerImpl(ServiceManagerImpl serviceManager, WindowManager wm, UiActivityMonitor monitor) {
myApp = ApplicationManager.getApplication();
myQueue = IdeEventQueue.getInstance();
myActivityMonitor = monitor;
myFocusedComponentAlarm = new EdtAlarm();
myForcedFocusRequestsAlarm = new EdtAlarm();
myIdleAlarm = new EdtAlarm();
final AppListener myAppListener = new AppListener();
myApp.getMessageBus().connect().subscribe(ApplicationActivationListener.TOPIC, myAppListener);
// Track focus/window events to maintain the per-frame "last focused component" maps.
IdeEventQueue.getInstance().addDispatcher(new IdeEventQueue.EventDispatcher() {
@Override
public boolean dispatch(AWTEvent e) {
if (e instanceof FocusEvent) {
final FocusEvent fe = (FocusEvent)e;
final Component c = fe.getComponent();
// Whole windows are not useful focus targets; remember only inner components.
if (c instanceof Window || c == null) return false;
Component parent = UIUtil.findUltimateParent(c);
if (parent instanceof IdeFrame) {
myLastFocused.put((IdeFrame)parent, c);
}
}
else if (e instanceof WindowEvent) {
Window wnd = ((WindowEvent)e).getWindow();
if (e.getID() == WindowEvent.WINDOW_CLOSED) {
// Frame is gone: drop stale entries so we never restore focus into it.
if (wnd instanceof IdeFrame) {
myLastFocused.remove(wnd);
myLastFocusedAtDeactivation.remove(wnd);
}
}
}
// Never consume the event; this dispatcher only observes.
return false;
}
}, this);
KeyboardFocusManager.getCurrentKeyboardFocusManager().addPropertyChangeListener("focusedWindow", new PropertyChangeListener() {
@Override
public void propertyChange(PropertyChangeEvent evt) {
if (evt.getNewValue() instanceof IdeFrame) {
myLastFocusedFrame = (IdeFrame)evt.getNewValue();
}
}
});
}
@Override
public IdeFrame getLastFocusedFrame() {
return myLastFocusedFrame;
}
// Convenience overload: wraps the component in a ByComponent command.
@Override
@NotNull
public ActionCallback requestFocus(@NotNull final Component c, final boolean forced) {
return requestFocus(new FocusCommand.ByComponent(c, new Exception()), forced);
}
/**
 * Queues a focus command. Forced commands run immediately on the EDT; unforced
 * commands are registered now (so typeahead can capture keys) but executed via
 * invokeLater, giving forced commands a chance to supersede them.
 */
@Override
@NotNull
public ActionCallback requestFocus(@NotNull final FocusCommand command, final boolean forced) {
assertDispatchThread();
if (isInternalMode) {
recordCommand(command, new Throwable(), forced);
}
final ActionCallback result = new ActionCallback();
// Mark a focus transfer as active for the whole application.
myActivityMonitor.addActivity(FOCUS, ModalityState.any());
if (!forced) {
UIUtil.invokeAndWaitIfNeeded(new Runnable() {
@Override
public void run() {
if (!myFocusRequests.contains(command)) {
myFocusRequests.add(command);
}
}
});
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
// De-register first; _requestFocus re-registers it via setCommand.
resetUnforcedCommand(command);
_requestFocus(command, forced, result);
}
});
}
else {
_requestFocus(command, forced, result);
}
result.doWhenProcessed(new Runnable() {
@Override
public void run() {
restartIdleAlarm();
}
});
return result;
}
// History of recorded focus requests (for the FocusTraces debugging UI).
@NotNull
public List<FocusRequestInfo> getRequests() {
return myRequests;
}
public void recordFocusRequest(Component c, boolean forced) {
myRequests.add(new FocusRequestInfo(c, new Throwable(), forced));
// Keep the history bounded.
if (myRequests.size() > 200) {
myRequests.removeFirst();
}
}
// Records only while the FocusTraces action is capturing.
private void recordCommand(@NotNull FocusCommand command, @NotNull Throwable trace, boolean forced) {
if (FocusTracesAction.isActive()) {
recordFocusRequest(command.getDominationComponent(), forced);
}
}
/**
 * Core focus-command executor: validates the command, installs it as the
 * current one, then (one EDT turn later) runs it under a timeout, wiring its
 * done/rejected outcomes into {@code result}.
 */
private void _requestFocus(@NotNull final FocusCommand command, final boolean forced, @NotNull final ActionCallback result) {
result.doWhenProcessed(new Runnable() {
@Override
public void run() {
maybeRemoveFocusActivity();
}
});
if (checkForRejectOrByPass(command, forced, result)) return;
setCommand(command);
command.setCallback(result);
if (forced) {
myForcedFocusRequestsAlarm.cancelAllRequests();
setLastEffectiveForcedRequest(command);
}
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
// Re-check: state may have changed during the invokeLater hop.
if (checkForRejectOrByPass(command, forced, result)) return;
// Only run if no newer command displaced us in the meantime.
if (myRequestFocusCmd == command) {
final TimedOutCallback focusTimeout =
new TimedOutCallback(Registry.intValue("actionSystem.commandProcessingTimeout"),
"Focus command timed out, cmd=" + command, command.getAllocation(), true) {
@Override
protected void onTimeout() {
forceFinishFocusSettleDown(command, result);
}
};
// Bumping the timestamps expires requestors created before this command.
if (command.invalidatesRequestors()) {
myCmdTimestamp++;
}
revalidateFurtherRequestors();
if (forced) {
if (command.invalidatesRequestors()) {
myForcedCmdTimestamp++;
}
revalidateFurtherRequestors();
}
command.setForced(forced);
command.run().doWhenDone(new Runnable() {
@Override
public void run() {
UIUtil.invokeLaterIfNeeded(new Runnable() {
@Override
public void run() {
resetCommand(command, false);
result.setDone();
}
});
}
}).doWhenRejected(new Runnable() {
@Override
public void run() {
result.setRejected();
resetCommand(command, true);
}
}).doWhenProcessed(new Runnable() {
@Override
public void run() {
// Let the forced request stop dominating shortly after it finishes.
if (forced) {
myForcedFocusRequestsAlarm.addRequest(new SetLastEffectiveRunnable(), 250);
}
}
}).notify(focusTimeout);
}
else {
rejectCommand(command, result);
}
}
});
}
// Clears the FOCUS activity once no transfer is in progress.
private void maybeRemoveFocusActivity() {
if (isFocusTransferReady()) {
myActivityMonitor.removeActivity(FOCUS);
}
}
/**
 * Returns true when the command must not run now: it expired, an active forced
 * request dominates it, or the application is inactive (then the command is
 * parked until activation). The callback is resolved accordingly.
 */
private boolean checkForRejectOrByPass(@NotNull FocusCommand cmd, final boolean forced, @NotNull ActionCallback result) {
if (cmd.isExpired()) {
rejectCommand(cmd, result);
return true;
}
final FocusCommand lastRequest = getLastEffectiveForcedRequest();
if (!forced && !isUnforcedRequestAllowed()) {
// Re-requesting the dominating command itself counts as already satisfied.
if (cmd.equals(lastRequest)) {
resetCommand(cmd, false);
result.setDone();
}
else {
rejectCommand(cmd, result);
}
return true;
}
if (lastRequest != null && lastRequest.dominatesOver(cmd)) {
rejectCommand(cmd, result);
return true;
}
if (!Registry.is("focus.fix.lost.cursor")) {
boolean doNotExecuteBecauseAppIsInactive =
!myApp.isActive() && !canExecuteOnInactiveApplication(cmd) && Registry.is("actionSystem.suspendFocusTransferIfApplicationInactive");
if (doNotExecuteBecauseAppIsInactive) {
// Replace any previously parked command; only the newest survives.
if (myCallbackOnActivation != null) {
myCallbackOnActivation.setRejected();
if (myFocusCommandOnAppActivation != null) {
resetCommand(myFocusCommandOnAppActivation, true);
}
}
myFocusCommandOnAppActivation = cmd;
myCallbackOnActivation = result;
return true;
}
}
return false;
}
// Installs the command as current and registers it in the request queue.
private void setCommand(@NotNull final FocusCommand command) {
myRequestFocusCmd = command;
UIUtil.invokeAndWaitIfNeeded(new Runnable() {
@Override
public void run() {
if (!myFocusRequests.contains(command)) {
myFocusRequests.add(command);
}
}
});
}
// Tears a command down: clears the current slot (if it is this one), finishes
// its key-event processor, removes it from the queue, and optionally rejects
// its callback when it has not been resolved yet.
private void resetCommand(@NotNull final FocusCommand cmd, boolean reject) {
assertDispatchThread();
if (cmd == myRequestFocusCmd) {
myRequestFocusCmd = null;
}
final KeyEventProcessor processor = cmd.getProcessor();
if (processor != null) {
processor.finish(myKeyProcessorContext);
}
UIUtil.invokeAndWaitIfNeeded(new Runnable() {
@Override
public void run() {
myFocusRequests.remove(cmd);
}
});
if (reject) {
ActionCallback cb = cmd.getCallback();
if (cb != null && !cb.isProcessed()) {
cmd.getCallback().setRejected();
}
}
}
// Removes an unforced command from the queue without touching its callback.
private void resetUnforcedCommand(@NotNull final FocusCommand cmd) {
UIUtil.invokeAndWaitIfNeeded(new Runnable() {
@Override
public void run() {
myFocusRequests.remove(cmd);
}
});
}
private static boolean canExecuteOnInactiveApplication(@NotNull FocusCommand cmd) {
return cmd.canExecuteOnInactiveApp();
}
// Remembers the forced request weakly plus the modality level it ran in.
private void setLastEffectiveForcedRequest(@Nullable FocusCommand command) {
myLastForcedRequest = command == null ? null : new WeakReference<FocusCommand>(command);
myModalityStateForLastForcedRequest = getCurrentModalityCount();
}
// The last forced request, or null if it was collected or has expired.
@Nullable
private FocusCommand getLastEffectiveForcedRequest() {
final FocusCommand request = SoftReference.dereference(myLastForcedRequest);
return request != null && !request.isExpired() ? request : null;
}
// Unforced requests are allowed once the modality level changed since the
// last forced request (e.g. a dialog opened/closed).
boolean isUnforcedRequestAllowed() {
if (getLastEffectiveForcedRequest() == null) return true;
return myModalityStateForLastForcedRequest != getCurrentModalityCount();
}
// Application-level singleton, registered under the IdeFocusManager interface.
public static FocusManagerImpl getInstance() {
return (FocusManagerImpl)ApplicationManager.getApplication().getComponent(IdeFocusManager.class);
}
@Override
public void dispose() {
myForcedFocusRequestsAlarm.cancelAllRequests();
myFocusedComponentAlarm.cancelAllRequests();
}
// Context handed to FocusCommand key-event processors: exposes the typeahead
// queue and lets a processor push events back for replay after focus settles.
private class KeyProcessorContext implements KeyEventProcessor.Context {
@Override
@NotNull
public List<KeyEvent> getQueue() {
return myToDispatchOnDone;
}
@Override
public void dispatch(@NotNull final List<KeyEvent> events) {
doWhenFocusSettlesDown(new Runnable() {
@Override
public void run() {
myToDispatchOnDone.addAll(events);
restartIdleAlarm();
}
});
}
}
@Override
public void doWhenFocusSettlesDown(@NotNull ExpirableRunnable runnable) {
doWhenFocusSettlesDown((Runnable)runnable);
}
/**
 * Runs the runnable once focus has settled: immediately when possible,
 * otherwise queued for the next idle flush. Re-adding an already queued
 * runnable moves it to the back of the queue.
 */
@Override
public void doWhenFocusSettlesDown(@NotNull final Runnable runnable) {
UIUtil.invokeLaterIfNeeded(new Runnable() {
@Override
public void run() {
// Queued requests added during a flush are picked up by that flush's loop.
if (isFlushingIdleRequests()) {
myIdleRequests.add(runnable);
return;
}
// Inside runOnOwnContext(): execute directly against the provided context.
if (myRunContext != null) {
flushRequest(runnable);
return;
}
final boolean needsRestart = isIdleQueueEmpty();
if (myIdleRequests.contains(runnable)) {
// Move to the back: remove + re-add in the LinkedHashSet.
myIdleRequests.remove(runnable);
myIdleRequests.add(runnable);
} else {
myIdleRequests.add(runnable);
}
if (canFlushIdleRequests()) {
flushIdleRequests();
}
else {
// Only re-arm when the queue was empty; otherwise an alarm is pending.
if (needsRestart) {
restartIdleAlarm();
}
}
}
});
}
// (Re)arms the idle alarm; a no-op while the application is inactive.
private void restartIdleAlarm() {
if (!ApplicationManager.getApplication().isActive()) return;
myIdleAlarm.cancelAllRequests();
myIdleAlarm.addRequest(new IdleRunnable(), Registry.intValue("actionSystem.focusIdleTimeout"));
}
/**
 * Replays queued typeahead key events to the current focus owner and then runs
 * the queued idle requests. Re-entrancy is tracked per modality level so a
 * nested flush (triggered by a replayed event) does not double-run requests.
 */
private void flushIdleRequests() {
int currentModalityCount = getCurrentModalityCount();
try {
incFlushingRequests(1, currentModalityCount);
// Typeahead disabled: drop captured events instead of replaying them.
if (!isTypeaheadEnabled()) {
myToDispatchOnDone.clear();
myTypeAheadRequestors.clear();
}
if (!myToDispatchOnDone.isEmpty() && myTypeAheadRequestors.isEmpty()) {
final KeyEvent[] events = myToDispatchOnDone.toArray(new KeyEvent[myToDispatchOnDone.size()]);
IdeEventQueue.getInstance().getKeyEventDispatcher().resetState();
for (int eachIndex = 0; eachIndex < events.length; eachIndex++) {
// Stop replaying the moment a dispatched event starts a new focus transfer.
if (!isFocusTransferReady()) {
break;
}
KeyEvent each = events[eachIndex];
Component owner = KeyboardFocusManager.getCurrentKeyboardFocusManager().getFocusOwner();
if (owner == null) {
owner = JOptionPane.getRootFrame();
}
boolean metaKey =
each.getKeyCode() == KeyEvent.VK_ALT ||
each.getKeyCode() == KeyEvent.VK_CONTROL ||
each.getKeyCode() == KeyEvent.VK_SHIFT ||
each.getKeyCode() == KeyEvent.VK_META;
boolean toDispatch = false;
// Only replay a RELEASED/TYPED event when its matching PRESSED was also
// captured; the matched PRESSED slot is nulled so it pairs only once.
if (!metaKey && (each.getID() == KeyEvent.KEY_RELEASED || each.getID() == KeyEvent.KEY_TYPED)) {
for (int i = 0; i < eachIndex; i++) {
final KeyEvent prev = events[i];
if (prev == null) continue;
if (prev.getID() == KeyEvent.KEY_PRESSED) {
if (prev.getKeyCode() == each.getKeyCode() || prev.getKeyChar() == each.getKeyChar()) {
toDispatch = true;
events[i] = null;
break;
}
}
}
}
else {
toDispatch = true;
}
myToDispatchOnDone.remove(each);
if (!toDispatch) {
continue;
}
// Retarget the event at the current focus owner before replaying it.
KeyEvent keyEvent = new KeyEvent(owner, each.getID(), each.getWhen(), each.getModifiersEx(), each.getKeyCode(), each.getKeyChar(),
each.getKeyLocation());
if (owner != null && SwingUtilities.getWindowAncestor(owner) != null) {
IdeEventQueue.getInstance().dispatchEvent(keyEvent);
}
else {
myQueue._dispatchEvent(keyEvent, true);
}
}
if (myToDispatchOnDone.isEmpty() && myTypeAheadRequestors.isEmpty()) {
myActivityMonitor.removeActivity(TYPEAHEAD);
}
}
if (!isFocusBeingTransferred()) {
boolean focusOk = getFocusOwner() != null;
// No focus owner: try fixing sticky focus once and delay this flush a round.
if (!focusOk && !myFlushWasDelayedToFixFocus) {
IdeEventQueue.getInstance().fixStickyFocusedComponents(null);
myFlushWasDelayedToFixFocus = true;
}
else if (!focusOk) {
myFlushWasDelayedToFixFocus = false;
}
if (canFlushIdleRequests() && getFlushingIdleRequests() <= 1 && (focusOk || !myFlushWasDelayedToFixFocus)) {
myFlushWasDelayedToFixFocus = false;
flushNow();
}
}
}
finally {
incFlushingRequests(-1, currentModalityCount);
if (!isIdleQueueEmpty()) {
restartIdleAlarm();
}
maybeRemoveFocusActivity();
}
}
// Consumes the pending focus revalidator (it is one-shot) and runs it unless
// it already expired. Returns whether a revalidation actually ran.
private boolean processFocusRevalidation() {
    final ExpirableRunnable pending = myFocusRevalidator;
    myFocusRevalidator = null;
    if (pending == null || pending.isExpired()) {
        return false;
    }
    pending.run();
    return true;
}
// Runs all queued idle requests. The queue is snapshotted and cleared first so
// requests may safely re-enqueue work; if a request starts a focus transfer,
// the unflushed tail is pushed back into the queue for a later round.
private void flushNow() {
    final Runnable[] pending = myIdleRequests.toArray(new Runnable[myIdleRequests.size()]);
    myIdleRequests.clear();
    int index = 0;
    while (index < pending.length) {
        flushRequest(pending[index]);
        index++;
        if (isFocusBeingTransferred()) {
            myIdleRequests.addAll(Arrays.asList(pending).subList(index, pending.length));
            break;
        }
    }
    maybeRemoveFocusActivity();
}
// Runs a single queued request; null entries and expired Expirables are skipped.
private static void flushRequest(Runnable each) {
    if (each == null) {
        return;
    }
    final boolean expired = each instanceof Expirable && ((Expirable)each).isExpired();
    if (!expired) {
        each.run();
    }
}
// True when no focus transfer is in progress: no live commands queued and the
// event queue has no pending focus events.
public boolean isFocusTransferReady() {
assertDispatchThread();
// Inside runOnOwnContext() focus state is overridden, so always ready.
if (myRunContext != null) return true;
invalidateFocusRequestsQueue();
if (!myFocusRequests.isEmpty()) return false;
if (myQueue == null) return true;
return !myQueue.isSuspendMode() && !myQueue.hasFocusEventsPending();
}
// Drops expired commands from the queue, rejecting their callbacks; re-arms
// the idle alarm when the queue drains as a result.
private void invalidateFocusRequestsQueue() {
assertDispatchThread();
UIUtil.invokeAndWaitIfNeeded(new Runnable() {
@Override
public void run() {
if (myFocusRequests.isEmpty()) return;
FocusCommand[] requests = myFocusRequests.toArray(new FocusCommand[myFocusRequests.size()]);
boolean wasChanged = false;
for (FocusCommand each : requests) {
if (each.isExpired()) {
resetCommand(each, true);
wasChanged = true;
}
}
if (wasChanged && myFocusRequests.isEmpty()) {
restartIdleAlarm();
}
}
});
}
// "Idle" means: no captured key events awaiting replay and no queued requests.
private boolean isIdleQueueEmpty() {
return isPendingKeyEventsRedispatched() && myIdleRequests.isEmpty();
}
private boolean isPendingKeyEventsRedispatched() {
return myToDispatchOnDone.isEmpty();
}
/**
 * Typeahead hook: while a focus transfer is pending, captures key events
 * (returning true = consumed) so they can be replayed at the new focus owner.
 * A command's own KeyEventProcessor gets first pick at each event.
 */
@Override
public boolean dispatch(@NotNull KeyEvent e) {
if (!isTypeaheadEnabled()) return false;
// Never capture events we are currently replaying ourselves.
if (isFlushingIdleRequests()) return false;
assertDispatchThread();
if (!isFocusTransferReady() || !isPendingKeyEventsRedispatched() || !myTypeAheadRequestors.isEmpty()) {
for (FocusCommand each : myFocusRequests) {
final KeyEventProcessor processor = each.getProcessor();
if (processor != null) {
// non-null result: the processor decided; true = captured, false = pass through.
final Boolean result = processor.dispatch(e, myKeyProcessorContext);
if (result != null) {
if (result.booleanValue()) {
myActivityMonitor.addActivity(TYPEAHEAD, ModalityState.any());
return true;
}
return false;
}
}
}
myToDispatchOnDone.add(e);
myActivityMonitor.addActivity(TYPEAHEAD, ModalityState.any());
restartIdleAlarm();
return true;
}
return false;
}
@Override
public void setTypeaheadEnabled(boolean enabled) {
myTypeaheadEnabled = enabled;
}
// Effective only when both the registry flag and the local toggle allow it.
private boolean isTypeaheadEnabled() {
return Registry.is("actionSystem.fixLostTyping") && myTypeaheadEnabled;
}
/**
 * Holds back typeahead replay until the given callback is processed (or a
 * timeout elapses). Optionally logs start/done/rejected with timestamps when
 * type-ahead logging is enabled.
 */
@Override
public void typeAheadUntil(@NotNull ActionCallback callback) {
if (!isTypeaheadEnabled()) return;
final long currentTime = System.currentTimeMillis();
final ActionCallback done;
if (!Registry.is("type.ahead.logging.enabled")) {
done = callback;
}
else {
// Identify the requestor by the caller two frames up the stack.
final String id = new Exception().getStackTrace()[2].getClassName();
//LOG.setLevel(Level.ALL);
// NOTE(review): pattern "HH:ss:SSS" has no minutes field -- likely meant
// "HH:mm:ss:SSS"; left unchanged since it only affects log output.
final SimpleDateFormat dateFormat = new SimpleDateFormat("dd MMM yyyy HH:ss:SSS", Locale.US);
LOG.info(dateFormat.format(System.currentTimeMillis()) + "\tStarted: " + id);
done = new ActionCallback();
callback.doWhenDone(new Runnable() {
@Override
public void run() {
done.setDone();
LOG.info(dateFormat.format(System.currentTimeMillis()) + "\tDone: " + id);
}
});
callback.doWhenRejected(new Runnable() {
@Override
public void run() {
done.setRejected();
LOG.info(dateFormat.format(System.currentTimeMillis()) + "\tRejected: " + id);
}
});
}
assertDispatchThread();
myTypeAheadRequestors.add(done);
// Guard against a requestor that never completes: time out and release replay.
done.notify(new TimedOutCallback(Registry.intValue("actionSystem.commandProcessingTimeout"),
"Typeahead request blocked",
new Exception() {
@Override
public String getMessage() {
return "Time: " + (System.currentTimeMillis() - currentTime);
}
},
true).doWhenProcessed(new Runnable() {
@Override
public void run() {
if (myTypeAheadRequestors.remove(done)) {
restartIdleAlarm();
}
}
}));
}
// Flush re-entrancy is tracked per modality level (counter map keyed by the
// number of visible modal windows).
private boolean isFlushingIdleRequests() {
return getFlushingIdleRequests() > 0;
}
private int getFlushingIdleRequests() {
int currentModalityCount = getCurrentModalityCount();
return myModalityCount2FlushCount.get(currentModalityCount);
}
private void incFlushingRequests(int delta, final int currentModalityCount) {
if (myModalityCount2FlushCount.containsKey(currentModalityCount)) {
myModalityCount2FlushCount.adjustValue(currentModalityCount, delta);
}
else {
myModalityCount2FlushCount.put(currentModalityCount, delta);
}
}
// Counts currently showing "modal" windows: modal Dialogs, JDialogs hosting a
// modal-context popup, and JWindows hosting one. Also prunes flush counters
// recorded at deeper (now closed) modality levels.
private int getCurrentModalityCount() {
int modalityCount = 0;
Window[] windows = Window.getWindows();
for (Window each : windows) {
if (!each.isShowing()) continue;
if (each instanceof Dialog) {
Dialog eachDialog = (Dialog)each;
if (eachDialog.isModal()) {
modalityCount++;
}
// Non-modal JDialog may still host a modal-context popup in its root pane.
else if (each instanceof JDialog) {
if (isModalContextPopup(((JDialog)each).getRootPane())) {
modalityCount++;
}
}
}
else if (each instanceof JWindow) {
JRootPane rootPane = ((JWindow)each).getRootPane();
if (isModalContextPopup(rootPane)) {
modalityCount++;
}
}
}
final int finalModalityCount = modalityCount;
// Drop counters for modality levels deeper than what is currently showing.
myModalityCount2FlushCount.retainEntries(new TIntIntProcedure() {
@Override
public boolean execute(int eachModalityCount, int flushCount) {
return eachModalityCount <= finalModalityCount;
}
});
return modalityCount;
}
// A root pane hosts a modal-context popup when its JBPopup client property
// exists and reports a modal context.
private static boolean isModalContextPopup(@NotNull JRootPane rootPane) {
    final JBPopup attachedPopup = (JBPopup)rootPane.getClientProperty(JBPopup.KEY);
    if (attachedPopup == null) {
        return false;
    }
    return attachedPopup.isModalContext();
}
// Returns an Expirable pinned to the current command timestamp; it expires as
// soon as a (forced, when requested) invalidating command bumps the counter.
@NotNull
@Override
public Expirable getTimestamp(final boolean trackOnlyForcedCommands) {
assertDispatchThread();
return new Expirable() {
long myOwnStamp = trackOnlyForcedCommands ? myForcedCmdTimestamp : myCmdTimestamp;
@Override
public boolean isExpired() {
return myOwnStamp < (trackOnlyForcedCommands ? myForcedCmdTimestamp : myCmdTimestamp);
}
};
}
// Hands out a requestor that stays valid only until the next invalidating
// forced command; tracked so canFlushIdleRequests can account for it.
@NotNull
@Override
public FocusRequestor getFurtherRequestor() {
assertDispatchThread();
FurtherRequestor requestor = new FurtherRequestor(this, getTimestamp(true));
myValidFurtherRequestors.add(requestor);
revalidateFurtherRequestors();
return requestor;
}
// Disposes and forgets requestors whose timestamp has expired.
private void revalidateFurtherRequestors() {
Iterator<FurtherRequestor> requestorIterator = myValidFurtherRequestors.iterator();
while (requestorIterator.hasNext()) {
FurtherRequestor each = requestorIterator.next();
if (each.isExpired()) {
requestorIterator.remove();
Disposer.dispose(each);
}
}
}
// Schedules a one-shot focus revalidator; it runs from the idle alarm when
// focus cannot settle on its own (see IdleRunnable). Replaces any previous one.
@Override
public void revalidateFocus(@NotNull final ExpirableRunnable runnable) {
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
myFocusRevalidator = runnable;
restartIdleAlarm();
}
});
}
/**
 * Best-effort focus owner: the component remembered at deactivation when the
 * app is inactive, the context component inside runOnOwnContext(), otherwise
 * the AWT focus owner — with fallbacks to the permanent owner or active window
 * when the result would be null or a meaningless owner.
 */
@Override
public Component getFocusOwner() {
assertDispatchThread();
Component result = null;
if (!ApplicationManager.getApplication().isActive()) {
result = myLastFocusedAtDeactivation.get(getLastFocusedFrame());
}
else if (myRunContext != null) {
result = (Component)myRunContext.getData(PlatformDataKeys.CONTEXT_COMPONENT.getName());
}
if (result == null) {
// During a transfer the AWT owner is transient; report null instead.
result = isFocusBeingTransferred() ? null : KeyboardFocusManager.getCurrentKeyboardFocusManager().getFocusOwner();
}
final boolean meaninglessOwner = UIUtil.isMeaninglessFocusOwner(result);
if (result == null && !isFocusBeingTransferred() || meaninglessOwner) {
final Component permOwner = KeyboardFocusManager.getCurrentKeyboardFocusManager().getPermanentFocusOwner();
if (permOwner != null) {
result = permOwner;
}
if (UIUtil.isMeaninglessFocusOwner(result)) {
result = KeyboardFocusManager.getCurrentKeyboardFocusManager().getActiveWindow();
}
}
return result;
}
// Runs the runnable with focus lookups redirected to the given context;
// myRunContext is always cleared, even if the runnable throws.
@Override
public void runOnOwnContext(@NotNull DataContext context, @NotNull Runnable runnable) {
assertDispatchThread();
myRunContext = context;
try {
runnable.run();
}
finally {
myRunContext = null;
}
}
@Override
public Component getLastFocusedFor(IdeFrame frame) {
assertDispatchThread();
return myLastFocused.get(frame);
}
public void setLastFocusedAtDeactivation(@NotNull IdeFrame frame, @NotNull Component c) {
myLastFocusedAtDeactivation.put(frame, c);
}
// Brings the component's window to front once focus settles (deiconifying a
// JFrame if needed); no-op when the component has no showing window.
@Override
public void toFront(JComponent c) {
assertDispatchThread();
if (c == null) return;
final Window window = UIUtil.getParentOfType(Window.class, c);
if (window != null && window.isShowing()) {
doWhenFocusSettlesDown(new Runnable() {
@Override
public void run() {
// Only act while the application is active; otherwise we would steal focus.
if (ApplicationManager.getApplication().isActive()) {
if (window instanceof JFrame && ((JFrame)window).getState() == Frame.ICONIFIED) {
((JFrame)window).setState(Frame.NORMAL);
} else {
window.toFront();
}
}
}
});
}
}
// A delegating FocusRequestor that is only honored while its Expirable
// timestamp is valid and it has not been disposed; expired requests are
// rejected without touching the manager.
private static class FurtherRequestor implements FocusRequestor {
private final IdeFocusManager myManager;
private final Expirable myExpirable;
// Creation stack, kept only in debug mode to trace leaked requestors.
private Throwable myAllocation;
private boolean myDisposed;
private FurtherRequestor(@NotNull IdeFocusManager manager, @NotNull Expirable expirable) {
myManager = manager;
myExpirable = expirable;
if (Registry.is("ide.debugMode")) {
myAllocation = new Exception();
}
}
@NotNull
@Override
public ActionCallback requestFocus(@NotNull Component c, boolean forced) {
final ActionCallback result = isExpired() ? ActionCallback.REJECTED : myManager.requestFocus(c, forced);
// Single-use: dispose once the request is processed either way.
result.doWhenProcessed(new Runnable() {
@Override
public void run() {
Disposer.dispose(FurtherRequestor.this);
}
});
return result;
}
private boolean isExpired() {
return myExpirable.isExpired() || myDisposed;
}
@NotNull
@Override
public ActionCallback requestFocus(@NotNull FocusCommand command, boolean forced) {
return isExpired() ? ActionCallback.REJECTED : myManager.requestFocus(command, forced);
}
@Override
public void dispose() {
myDisposed = true;
}
}
// Small alarm helper: schedules EdtRunnables on the shared timer and can
// expire all outstanding ones at once.
// NOTE(review): requests are only removed by cancelAllRequests(), not when a
// runnable fires — the set can retain completed runnables between cancels;
// verify whether that accumulation is intended.
class EdtAlarm {
private final Set<EdtRunnable> myRequests = new HashSet<EdtRunnable>();
public void cancelAllRequests() {
for (EdtRunnable each : myRequests) {
each.expire();
}
myRequests.clear();
}
public void addRequest(@NotNull EdtRunnable runnable, int delay) {
myRequests.add(runnable);
myTimer.setUp(runnable, delay);
}
}
// Timeout handler for a stuck focus command: just reject it.
private void forceFinishFocusSettleDown(@NotNull FocusCommand cmd, @NotNull ActionCallback cmdCallback) {
rejectCommand(cmd, cmdCallback);
}
// Fully tears a command down and rejects its callback.
private void rejectCommand(@NotNull FocusCommand cmd, @NotNull ActionCallback callback) {
resetCommand(cmd, true);
resetUnforcedCommand(cmd);
callback.setRejected();
}
/**
 * Reacts to application activation/deactivation: on activation replays a focus
 * command that was parked while the app was inactive, or restores focus to the
 * last focused component; on delayed deactivation snapshots the focus owner.
 */
private class AppListener extends ApplicationActivationListener.Adapter {
    @Override
    public void applicationActivated(final IdeFrame ideFrame) {
        // Take ownership of the parked command/callback first so a reentrant
        // activation cannot replay the same command twice.
        final FocusCommand cmd = myFocusCommandOnAppActivation;
        ActionCallback callback = myCallbackOnActivation;
        myFocusCommandOnAppActivation = null;
        myCallbackOnActivation = null;
        if (cmd != null) {
            requestFocus(cmd, true).notify(callback);
        } else {
            focusLastFocusedComponent(ideFrame);
        }
    }
    @Override
    public void delayedApplicationDeactivated(IdeFrame ideFrame) {
        // Remember what was focused in this frame so activation can restore it.
        final Component owner = KeyboardFocusManager.getCurrentKeyboardFocusManager().getFocusOwner();
        Component parent = UIUtil.findUltimateParent(owner);
        if (parent == ideFrame) {
            myLastFocusedAtDeactivation.put(ideFrame, owner);
        }
    }
    private void focusLastFocusedComponent(IdeFrame ideFrame) {
        final KeyboardFocusManager mgr = KeyboardFocusManager.getCurrentKeyboardFocusManager();
        if (mgr.getFocusOwner() == null) {
            Component c = getComponent(myLastFocusedAtDeactivation, ideFrame);
            if (c == null || !c.isShowing()) {
                // FIX: fall back to the frame's overall last-focused component.
                // The original re-read myLastFocusedAtDeactivation here, making
                // the fallback a no-op.
                c = getComponent(myLastFocused, ideFrame);
            }
            final boolean mouseEventAhead = IdeEventQueue.isMouseEventAhead(null);
            if (c != null && c.isShowing() && !mouseEventAhead) {
                // Suppress the default-component fallback so focus lands exactly
                // on the remembered component; restore the policy afterwards.
                final LayoutFocusTraversalPolicyExt policy = LayoutFocusTraversalPolicyExt.findWindowPolicy(c);
                if (policy != null) {
                    policy.setNoDefaultComponent(true, FocusManagerImpl.this);
                }
                requestFocus(c, false).doWhenProcessed(new Runnable() {
                    @Override
                    public void run() {
                        if (policy != null) {
                            policy.setNoDefaultComponent(false, FocusManagerImpl.this);
                        }
                    }
                });
            }
        }
        // The deactivation snapshot is single-use.
        myLastFocusedAtDeactivation.remove(ideFrame);
    }
}
/** Null-safe lookup of the component remembered for {@code frame}. */
@Nullable
private static Component getComponent(@NotNull Map<IdeFrame, Component> map, IdeFrame frame) {
  return map.get(frame);
}

/** Delegates to the IDE focus traversal policy to pick the preferred focus target inside {@code comp}. */
@Override
public JComponent getFocusTargetFor(@NotNull JComponent comp) {
  return IdeFocusTraversalPolicy.getPreferredFocusedComponent(comp);
}

/**
 * Returns the current focus owner when it is {@code comp} itself, a Swing
 * descendant of it, or when one of {@code comp}'s child popups is focused;
 * {@code null} otherwise.
 */
@Override
public Component getFocusedDescendantFor(Component comp) {
  final Component focused = getFocusOwner();
  if (focused == null) return null;
  if (focused == comp || SwingUtilities.isDescendingFrom(focused, comp)) return focused;
  List<JBPopup> popups = FocusTrackback.getChildPopups(comp);
  for (JBPopup each : popups) {
    if (each.isFocused()) return focused;
  }
  return null;
}

/** A transfer is considered in progress exactly while focus is not "ready". */
@Override
public boolean isFocusBeingTransferred() {
  return !isFocusTransferReady();
}
/**
 * Requests focus for a "default" component: the component last focused in the
 * most recent IDE frame if it is still showing, otherwise the preferred focus
 * target of the first active top-level window.
 *
 * @param forced whether the resulting focus command is forced
 * @return the callback of the issued focus request, or {@link ActionCallback#DONE}
 *         when no suitable component could be found
 */
@NotNull
@Override
public ActionCallback requestDefaultFocus(boolean forced) {
  Component toFocus = null;
  if (myLastFocusedFrame != null) {
    toFocus = myLastFocused.get(myLastFocusedFrame);
    if (toFocus == null || !toFocus.isShowing()) {
      toFocus = getFocusTargetFor(myLastFocusedFrame.getComponent());
    }
  }
  else {
    // No known frame: fall back to the root pane of the first active window.
    Window[] windows = Window.getWindows();
    for (Window each : windows) {
      if (each.isActive()) {
        if (each instanceof JFrame) {
          toFocus = getFocusTargetFor(((JFrame)each).getRootPane());
          break;
        } else if (each instanceof JDialog) {
          toFocus = getFocusTargetFor(((JDialog)each).getRootPane());
          break;
        } else if (each instanceof JWindow) {
          toFocus = getFocusTargetFor(((JWindow)each).getRootPane());
          break;
        }
      }
    }
  }
  if (toFocus != null) {
    // Best-effort request: do not invalidate other requestors.
    return requestFocus(new FocusCommand.ByComponent(toFocus, new Exception()).setToInvalidateRequestors(false), forced);
  }
  return ActionCallback.DONE;
}

/** Focus transfer is allowed while the app is active, unless suspended via registry flags. */
@Override
public boolean isFocusTransferEnabled() {
  if (Registry.is("focus.fix.lost.cursor")) return true;
  return myApp.isActive() || !Registry.is("actionSystem.suspendFocusTransferIfApplicationInactive");
}

/** Asserts EDT access, but only when the corresponding registry flag is enabled. */
private static void assertDispatchThread() {
  if (Registry.is("actionSystem.assertFocusAccessFromEdt")) {
    ApplicationManager.getApplication().assertIsDispatchThread();
  }
}
/** EDT task that clears the last effective forced focus request. */
private class SetLastEffectiveRunnable extends EdtRunnable {
  @Override
  public void runEdt() {
    setLastEffectiveForcedRequest(null);
  }
}
}
| |
/**
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* Copyright 2012-2015 the original author or authors.
*/
package org.assertj.core.internal;
import static org.assertj.core.error.ShouldBeBetween.shouldBeBetween;
import static org.assertj.core.error.ShouldBeEqual.shouldBeEqual;
import static org.assertj.core.error.ShouldBeGreater.shouldBeGreater;
import static org.assertj.core.error.ShouldBeGreaterOrEqual.shouldBeGreaterOrEqual;
import static org.assertj.core.error.ShouldBeLess.shouldBeLess;
import static org.assertj.core.error.ShouldBeLessOrEqual.shouldBeLessOrEqual;
import static org.assertj.core.error.ShouldNotBeEqual.shouldNotBeEqual;
import java.util.Comparator;
import org.assertj.core.api.AssertionInfo;
import org.assertj.core.util.VisibleForTesting;
/**
 * Reusable assertions for <code>{@link Comparable}</code>s.
 *
 * @author Alex Ruiz
 * @author Joel Costigliola
 */
public class Comparables {

  private static final Comparables INSTANCE = new Comparables();

  /**
   * Returns the singleton instance of this class based on {@link StandardComparisonStrategy}.
   *
   * @return the singleton instance of this class based on {@link StandardComparisonStrategy}.
   */
  public static Comparables instance() {
    return INSTANCE;
  }

  // Replaceable for tests via setFailures()/resetFailures().
  @VisibleForTesting
  Failures failures = Failures.instance();

  // Strategy driving equality/ordering checks, except where a method's javadoc
  // states that it relies directly on Comparable#compareTo.
  final ComparisonStrategy comparisonStrategy;

  @VisibleForTesting
  Comparables() {
    this(StandardComparisonStrategy.instance());
  }

  public Comparables(ComparisonStrategy comparisonStrategy) {
    this.comparisonStrategy = comparisonStrategy;
  }

  /**
   * Returns the comparator backing the comparison strategy, or {@code null} when the strategy is not
   * comparator-based.
   */
  @VisibleForTesting
  public Comparator<?> getComparator() {
    if (comparisonStrategy instanceof ComparatorBasedComparisonStrategy) {
      return ((ComparatorBasedComparisonStrategy) comparisonStrategy).getComparator();
    }
    return null;
  }

  @VisibleForTesting
  void setFailures(Failures failures) {
    this.failures = failures;
  }

  @VisibleForTesting
  void resetFailures() {
    this.failures = Failures.instance();
  }

  /**
   * Asserts that two T instances are equal.
   *
   * @param info contains information about the assertion.
   * @param actual the actual value.
   * @param expected the expected value.
   * @throws AssertionError if the actual value is {@code null}.
   * @throws AssertionError if the actual value is not equal to the expected one. This method will throw a
   *           {@code org.junit.ComparisonFailure} instead if JUnit is in the classpath and the expected and actual
   *           values are not equal.
   */
  public <T> void assertEqual(AssertionInfo info, T actual, T expected) {
    assertNotNull(info, actual);
    if (areEqual(actual, expected))
      return;
    throw failures.failure(info, shouldBeEqual(actual, expected, comparisonStrategy, info.representation()));
  }

  /** Equality as defined by the configured {@link ComparisonStrategy}. */
  protected <T> boolean areEqual(T actual, T expected) {
    return comparisonStrategy.areEqual(actual, expected);
  }

  /**
   * Asserts that two T instances are not equal.
   *
   * @param info contains information about the assertion.
   * @param actual the actual value.
   * @param other the value to compare the actual value to.
   * @throws AssertionError if the actual value is {@code null}.
   * @throws AssertionError if the actual value is equal to the other one.
   */
  public <T> void assertNotEqual(AssertionInfo info, T actual, T other) {
    assertNotNull(info, actual);
    if (!areEqual(actual, other))
      return;
    throw failures.failure(info, shouldNotBeEqual(actual, other, comparisonStrategy));
  }

  /**
   * Asserts that two <code>{@link Comparable}</code>s are equal by invoking
   * <code>{@link Comparable#compareTo(Object)}</code>.<br>
   * Note that it does not rely on the custom {@link #comparisonStrategy} if one has been set.
   *
   * @param <T> used to guarantee that two objects of the same type are being compared against each other.
   * @param info contains information about the assertion.
   * @param actual the actual value.
   * @param expected the expected value.
   * @throws AssertionError if the actual value is {@code null}.
   * @throws AssertionError if the actual value is not equal to the expected one. This method will throw a
   *           {@code org.junit.ComparisonFailure} instead if JUnit is in the classpath and the expected and actual
   *           values are not equal.
   */
  public <T extends Comparable<? super T>> void assertEqualByComparison(AssertionInfo info, T actual, T expected) {
    assertNotNull(info, actual);
    // we don't delegate to comparisonStrategy, as this assertion makes it clear it relies on Comparable
    if (actual.compareTo(expected) == 0)
      return;
    throw failures.failure(info, shouldBeEqual(actual, expected, info.representation()));
  }

  /**
   * Asserts that two <code>{@link Comparable}</code>s are not equal by invoking
   * <code>{@link Comparable#compareTo(Object)}</code> .<br>
   * Note that it does not rely on the custom {@link #comparisonStrategy} if one has been set.
   *
   * @param <T> used to guarantee that two objects of the same type are being compared against each other.
   * @param info contains information about the assertion.
   * @param actual the actual value.
   * @param other the value to compare the actual value to.
   * @throws AssertionError if the actual value is {@code null}.
   * @throws AssertionError if the actual value is equal to the other one.
   */
  public <T extends Comparable<? super T>> void assertNotEqualByComparison(AssertionInfo info, T actual, T other) {
    assertNotNull(info, actual);
    // we don't delegate to comparisonStrategy, as this assertion makes it clear it relies on Comparable
    if (actual.compareTo(other) != 0)
      return;
    throw failures.failure(info, shouldNotBeEqual(actual, other));
  }

  /**
   * Asserts that the actual value is less than the other one.
   *
   * @param <T> used to guarantee that two objects of the same type are being compared against each other.
   * @param info contains information about the assertion.
   * @param actual the actual value.
   * @param other the value to compare the actual value to.
   * @throws AssertionError if the actual value is {@code null}.
   * @throws AssertionError if the actual value is not less than the other one: this assertion will fail if the actual
   *           value is equal to or greater than the other value.
   */
  public <T extends Comparable<? super T>> void assertLessThan(AssertionInfo info, T actual, T other) {
    assertNotNull(info, actual);
    if (isLessThan(actual, other))
      return;
    throw failures.failure(info, shouldBeLess(actual, other, comparisonStrategy));
  }

  /**
   * Asserts that the actual value is less than or equal to the other one.
   *
   * @param <T> used to guarantee that two objects of the same type are being compared against each other.
   * @param info contains information about the assertion.
   * @param actual the actual value.
   * @param other the value to compare the actual value to.
   * @throws AssertionError if the actual value is {@code null}.
   * @throws AssertionError if the actual value is greater than the other one.
   */
  public <T extends Comparable<? super T>> void assertLessThanOrEqualTo(AssertionInfo info, T actual, T other) {
    assertNotNull(info, actual);
    // "not greater than" is exactly "less than or equal to".
    if (!isGreaterThan(actual, other))
      return;
    throw failures.failure(info, shouldBeLessOrEqual(actual, other, comparisonStrategy));
  }

  /**
   * Asserts that the actual value is greater than the other one.
   *
   * @param <T> used to guarantee that two objects of the same type are being compared against each other.
   * @param info contains information about the assertion.
   * @param actual the actual value.
   * @param other the value to compare the actual value to.
   * @throws AssertionError if the actual value is {@code null}.
   * @throws AssertionError if the actual value is not greater than the other one: this assertion will fail if the
   *           actual value is equal to or less than the other value.
   */
  public <T extends Comparable<? super T>> void assertGreaterThan(AssertionInfo info, T actual, T other) {
    assertNotNull(info, actual);
    if (isGreaterThan(actual, other))
      return;
    throw failures.failure(info, shouldBeGreater(actual, other, comparisonStrategy));
  }

  /**
   * Delegates to {@link ComparisonStrategy#isGreaterThan(Object, Object)}.
   */
  private boolean isGreaterThan(Object actual, Object other) {
    return comparisonStrategy.isGreaterThan(actual, other);
  }

  /**
   * Asserts that the actual value is greater than or equal to the other one.
   *
   * @param <T> used to guarantee that two objects of the same type are being compared against each other.
   * @param info contains information about the assertion.
   * @param actual the actual value.
   * @param other the value to compare the actual value to.
   * @throws AssertionError if the actual value is {@code null}.
   * @throws AssertionError if the actual value is less than the other one.
   */
  public <T extends Comparable<? super T>> void assertGreaterThanOrEqualTo(AssertionInfo info, T actual, T other) {
    assertNotNull(info, actual);
    // "not less than" is exactly "greater than or equal to".
    if (!isLessThan(actual, other))
      return;
    throw failures.failure(info, shouldBeGreaterOrEqual(actual, other, comparisonStrategy));
  }

  /** Delegates to {@link ComparisonStrategy#isLessThan(Object, Object)}. */
  private boolean isLessThan(Object actual, Object other) {
    return comparisonStrategy.isLessThan(actual, other);
  }

  /** Fails (via {@link Objects}) when {@code actual} is {@code null}. */
  protected static <T> void assertNotNull(AssertionInfo info, T actual) {
    Objects.instance().assertNotNull(info, actual);
  }

  /**
   * Asserts that the actual value is between start and end, inclusive or not.
   *
   * @param <T> used to guarantee that two objects of the same type are being compared against each other.
   * @param info contains information about the assertion.
   * @param actual the actual value.
   * @param start the start value.
   * @param end the end value.
   * @param inclusiveStart if start is inclusive (fails if actual == start and inclusiveStart is false).
   * @param inclusiveEnd if end is inclusive (fails if actual == end and inclusiveEnd is false).
   * @throws AssertionError if the actual value is {@code null}.
   * @throws AssertionError if the actual value is not between start and end.
   * @throws NullPointerException if start value is {@code null}.
   * @throws NullPointerException if end value is {@code null}.
   */
  public <T extends Comparable<? super T>> void assertIsBetween(AssertionInfo info, T actual, T start, T end,
                                                                boolean inclusiveStart, boolean inclusiveEnd) {
    assertNotNull(info, actual);
    startParameterIsNotNull(start);
    endParameterIsNotNull(end);
    // inclusive bound: start <= actual (i.e. not start > actual); exclusive bound: start < actual.
    boolean checkLowerBoundaryRange = inclusiveStart ? !isGreaterThan(start, actual)
        : isLessThan(start, actual);
    boolean checkUpperBoundaryRange = inclusiveEnd ? !isGreaterThan(actual, end)
        : isLessThan(actual, end);
    if (checkLowerBoundaryRange && checkUpperBoundaryRange)
      return;
    throw failures.failure(info, shouldBeBetween(actual, start, end, inclusiveStart, inclusiveEnd, comparisonStrategy));
  }

  /**
   * used to check that the start of range to compare actual number to is not null, in that case throws a
   * {@link NullPointerException} with an explicit message
   *
   * @param start the start number to check
   * @throws NullPointerException with an explicit message if the given start value is null
   */
  private static void startParameterIsNotNull(Object start) {
    if (start == null)
      throw new NullPointerException("The start range to compare actual with should not be null");
  }

  /**
   * used to check that the end of range to compare actual number to is not null, in that case throws a
   * {@link NullPointerException} with an explicit message
   *
   * @param end the end number to check
   * @throws NullPointerException with an explicit message if the given end value is null
   */
  private static void endParameterIsNotNull(Object end) {
    if (end == null)
      throw new NullPointerException("The end range to compare actual with should not be null");
  }
}
| |
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.compiler.integrationtests;
import org.drools.core.InitialFact;
import org.drools.core.base.ClassObjectType;
import org.drools.core.common.BaseNode;
import org.drools.core.common.NetworkNode;
import org.drools.core.common.RuleBasePartitionId;
import org.drools.core.impl.InternalKnowledgeBase;
import org.drools.core.reteoo.BetaNode;
import org.drools.core.reteoo.CompositePartitionAwareObjectSinkAdapter;
import org.drools.core.reteoo.EntryPointNode;
import org.drools.core.reteoo.LeftTupleSource;
import org.drools.core.reteoo.ObjectSink;
import org.drools.core.reteoo.ObjectSinkPropagator;
import org.drools.core.reteoo.ObjectSource;
import org.drools.core.reteoo.ObjectTypeNode;
import org.drools.core.reteoo.Rete;
import org.drools.core.reteoo.Sink;
import org.drools.core.reteoo.TerminalNode;
import org.junit.Test;
import org.kie.api.io.ResourceType;
import org.kie.internal.conf.MultithreadEvaluationOption;
import org.kie.internal.utils.KieHelper;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertSame;
/**
 * Verifies that when multithreaded (partitioned) evaluation is enabled, every node of
 * the generated Rete network sits in a consistent partition: entry-point and object-type
 * nodes stay in the main partition, each node agrees with its input sources, and an
 * ObjectTypeNode's partitioned sink propagators only contain sinks mapped to them.
 */
public class NodesPartitioningTest {

  @Test
  public void test2Partitions() {
    // Rules are deliberately interleaved so partition assignment does not simply
    // follow declaration order.
    String drl = ruleA(1) + ruleB(2) + ruleC(2) + ruleD(1) +
                 ruleD(2) + ruleC(1) + ruleA(2) + ruleB(1);
    checkDrl( drl );
  }

  @Test
  public void testPartitioningWithSharedNodes() {
    StringBuilder sb = new StringBuilder( 400 );
    for (int i = 1; i < 4; i++) {
      sb.append( getRule( i ) );
    }
    for (int i = 1; i < 4; i++) {
      sb.append( getNotRule( i ) );
    }
    checkDrl( sb.toString() );
  }

  /** Builds a kbase with multithreaded evaluation enabled and checks the whole network. */
  private void checkDrl(String drl) {
    InternalKnowledgeBase kbase = (InternalKnowledgeBase) new KieHelper().addContent( drl, ResourceType.DRL )
                                                                        .build( MultithreadEvaluationOption.YES );
    Rete rete = kbase.getRete();
    for (EntryPointNode entryPointNode : rete.getEntryPointNodes().values()) {
      traverse( entryPointNode );
    }
  }

  /** Depth-first traversal of the network, checking every reachable node. */
  private void traverse(BaseNode node) {
    checkNode(node);
    Sink[] sinks = node.getSinks();
    if (sinks != null) {
      for (Sink sink : sinks) {
        if (sink instanceof BaseNode) {
          traverse((BaseNode)sink);
        }
      }
    }
  }

  /** Asserts that {@code node}'s partition is consistent with its kind and its inputs. */
  private void checkNode(NetworkNode node) {
    if (node instanceof EntryPointNode) {
      assertSame( RuleBasePartitionId.MAIN_PARTITION, node.getPartitionId() );
    } else if (node instanceof ObjectTypeNode) {
      assertSame( RuleBasePartitionId.MAIN_PARTITION, node.getPartitionId() );
      checkPartitionedSinks((ObjectTypeNode) node);
    } else if (node instanceof ObjectSource) {
      // An alpha node must live in the same partition as its (non-OTN) parent.
      ObjectSource source = ( (ObjectSource) node ).getParentObjectSource();
      if ( !(source instanceof ObjectTypeNode) ) {
        assertSame( source.getPartitionId(), node.getPartitionId() );
      }
    } else if (node instanceof BetaNode) {
      // A beta node must agree with both of its inputs (OTN right inputs excluded).
      ObjectSource rightInput = ( (BetaNode) node ).getRightInput();
      if ( !(rightInput instanceof ObjectTypeNode) ) {
        assertSame( rightInput.getPartitionId(), node.getPartitionId() );
      }
      LeftTupleSource leftInput = ( (BetaNode) node ).getLeftTupleSource();
      assertSame( leftInput.getPartitionId(), node.getPartitionId() );
    } else if (node instanceof TerminalNode) {
      LeftTupleSource leftInput = ( (TerminalNode) node ).getLeftTupleSource();
      assertSame( leftInput.getPartitionId(), node.getPartitionId() );
    }
  }

  /** Each partitioned propagator must only hold sinks whose partition id maps onto it. */
  private void checkPartitionedSinks(ObjectTypeNode otn) {
    // The InitialFact OTN is never partitioned; skip it.
    if ( InitialFact.class.isAssignableFrom( ( (ClassObjectType) otn.getObjectType() ).getClassType() ) ) {
      return;
    }
    ObjectSinkPropagator sinkPropagator = otn.getObjectSinkPropagator();
    ObjectSinkPropagator[] propagators = sinkPropagator instanceof CompositePartitionAwareObjectSinkAdapter ?
                                         ((CompositePartitionAwareObjectSinkAdapter) sinkPropagator).getPartitionedPropagators() :
                                         new ObjectSinkPropagator[] { sinkPropagator };
    for (int i = 0; i < propagators.length; i++) {
      for (ObjectSink sink : propagators[i].getSinks()) {
        // Fixed assertion-message typo: "expcted" -> "expected".
        assertEquals( sink + " on " + sink.getPartitionId() + " is expected to be on propagator " + i,
                      i, sink.getPartitionId().getId() % propagators.length );
      }
    }
  }

  // ---- DRL snippets used to shape the network -------------------------------

  private String ruleA(int i) {
    return "rule Ra" + i + " when\n" +
           "    $i : Integer( this == " + i + " )\n" +
           "    $s : String( length == $i )\n" +
           "    Integer( this == $s.length )\n" +
           "then end\n";
  }

  private String ruleB(int i) {
    return "rule Rb" + i + " when\n" +
           "    $i : Integer( this == " + i + " )\n" +
           "    $s : String( this == $i.toString )\n" +
           "    Integer( this == $s.length )\n" +
           "then end\n";
  }

  private String ruleC(int i) {
    return "rule Rc" + i + " when\n" +
           "    $i : Integer( this == " + i + " )\n" +
           "    $s : String( length == $i )\n" +
           "    Integer( this == $i+1 )\n" +
           "then end\n";
  }

  private String ruleD(int i) {
    return "rule Rd" + i + " when\n" +
           "    $i : Integer( this == " + i + " )\n" +
           "    $s : String( length == $i )\n" +
           "then end\n";
  }

  private String getRule(int i) {
    return "rule R" + i + " when\n" +
           "    $i : Integer( this == " + i + " )" +
           "    String( this == $i.toString )\n" +
           "then end\n";
  }

  private String getNotRule(int i) {
    return "rule Rnot" + i + " when\n" +
           "    String( this == \"" + i + "\" )\n" +
           "    not Integer( this == " + i + " )" +
           "then end\n";
  }

  /** Simple fact type used by {@link #testChangePartitionOfAlphaSourceOfAlpha()}. */
  public static class Account {
    private final int number;
    private final int uuid;
    private final Customer owner;

    public Account( int number, int uuid, Customer owner ) {
      this.number = number;
      this.uuid = uuid;
      this.owner = owner;
    }

    public int getNumber() {
      return number;
    }

    public int getUuid() {
      return uuid;
    }

    public Customer getOwner() {
      return owner;
    }
  }

  /** Simple fact type used by {@link #testChangePartitionOfAlphaSourceOfAlpha()}. */
  public static class Customer {
    private final int uuid;

    public Customer( int uuid ) {
      this.uuid = uuid;
    }

    public int getUuid() {
      return uuid;
    }
  }

  @Test
  public void testChangePartitionOfAlphaSourceOfAlpha() {
    // DROOLS-1487
    String drl =
        "import " + Account.class.getCanonicalName() + ";\n" +
        "import " + Customer.class.getCanonicalName() + ";\n" +
        "rule \"customerDoesNotHaveSpecifiedAccount_2\"\n" +
        "when\n" +
        "    $account : Account (number == 1, uuid == \"customerDoesNotHaveSpecifiedAccount\")\n" +
        "    Customer (uuid == \"customerDoesNotHaveSpecifiedAccount\")\n" +
        "then\n" +
        "end\n" +
        "\n" +
        "rule \"customerDoesNotHaveSpecifiedAccount_1\"\n" +
        "when\n" +
        "    $account : Account (number == 2, uuid == \"customerDoesNotHaveSpecifiedAccount\")\n" +
        "    Customer (uuid == \"customerDoesNotHaveSpecifiedAccount\")\n" +
        "then\n" +
        "end";
    checkDrl( drl );
  }
}
| |
package ca.uhn.fhir.util;
/*
* #%L
* HAPI FHIR - Core Library
* %%
* Copyright (C) 2014 - 2017 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
/*
* ====================================================================
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
import java.lang.ref.SoftReference;
import java.text.ParsePosition;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.TimeZone;
/**
 * A utility class for parsing and formatting HTTP dates as used in cookies and
 * other headers. This class handles dates as defined by RFC 2616 section
 * 3.3.1 as well as some other common non-standard formats.
 *
 * <p>All parsing and formatting is done in GMT with US locale, as required for
 * HTTP date headers.
 *
 * @since 4.3
 */
public final class DateUtils {

    /**
     * Date format pattern used to parse HTTP date headers in RFC 1123 format.
     */
    public static final String PATTERN_RFC1123 = "EEE, dd MMM yyyy HH:mm:ss zzz";

    /**
     * Date format pattern used to parse HTTP date headers in RFC 1036 format.
     */
    public static final String PATTERN_RFC1036 = "EEE, dd-MMM-yy HH:mm:ss zzz";

    /**
     * Date format pattern used to parse HTTP date headers in ANSI C
     * {@code asctime()} format.
     */
    public static final String PATTERN_ASCTIME = "EEE MMM d HH:mm:ss yyyy";

    // Patterns tried in declaration order when the caller supplies none.
    private static final String[] DEFAULT_PATTERNS = new String[] {
        PATTERN_RFC1123,
        PATTERN_RFC1036,
        PATTERN_ASCTIME
    };

    private static final Date DEFAULT_TWO_DIGIT_YEAR_START;

    public static final TimeZone GMT = TimeZone.getTimeZone("GMT");

    static {
        // Two-digit years are resolved into [2000-01-01, 2100-01-01) by default.
        final Calendar calendar = Calendar.getInstance();
        calendar.setTimeZone(GMT);
        calendar.set(2000, Calendar.JANUARY, 1, 0, 0, 0);
        calendar.set(Calendar.MILLISECOND, 0);
        DEFAULT_TWO_DIGIT_YEAR_START = calendar.getTime();
    }

    /**
     * Parses a date value using the default HTTP date formats
     * (RFC 1123, RFC 1036, ANSI C asctime).
     *
     * @param dateValue the date value to parse
     *
     * @return the parsed date or null if input could not be parsed
     */
    public static Date parseDate(final String dateValue) {
        return parseDate(dateValue, null, null);
    }

    /**
     * Parses the date value using the given date formats.
     *
     * @param dateValue the date value to parse
     * @param dateFormats the date formats to use; may be {@code null} to use the defaults
     *
     * @return the parsed date or null if input could not be parsed
     */
    public static Date parseDate(final String dateValue, final String[] dateFormats) {
        return parseDate(dateValue, dateFormats, null);
    }

    /**
     * Parses the date value using the given date formats.
     *
     * @param dateValue the date value to parse
     * @param dateFormats the date formats to use; may be {@code null} to use the defaults
     * @param startDate During parsing, two digit years will be placed in the range
     *            {@code startDate} to {@code startDate + 100 years}. This value may
     *            be {@code null}. When {@code null} is given as a parameter, year
     *            {@code 2000} will be used.
     *
     * @return the parsed date or null if input could not be parsed
     * @throws IllegalArgumentException if {@code dateValue} is {@code null}
     */
    public static Date parseDate(
            final String dateValue,
            final String[] dateFormats,
            final Date startDate) {
        notNull(dateValue, "Date value");
        final String[] localDateFormats = dateFormats != null ? dateFormats : DEFAULT_PATTERNS;
        final Date localStartDate = startDate != null ? startDate : DEFAULT_TWO_DIGIT_YEAR_START;
        String v = dateValue;
        // trim single quotes around date if present
        // see issue #5279
        if (v.length() > 1 && v.startsWith("'") && v.endsWith("'")) {
            v = v.substring(1, v.length() - 1);
        }
        for (final String dateFormat : localDateFormats) {
            final SimpleDateFormat dateParser = DateFormatHolder.formatFor(dateFormat);
            dateParser.set2DigitYearStart(localStartDate);
            final ParsePosition pos = new ParsePosition(0);
            final Date result = dateParser.parse(v, pos);
            // The position advances only on a successful parse; the first
            // matching format wins.
            if (pos.getIndex() != 0) {
                return result;
            }
        }
        return null;
    }

    /**
     * Formats the given date according to the RFC 1123 pattern.
     *
     * @param date The date to format.
     * @return An RFC 1123 formatted date string.
     *
     * @see #PATTERN_RFC1123
     */
    public static String formatDate(final Date date) {
        return formatDate(date, PATTERN_RFC1123);
    }

    /**
     * Formats the given date according to the specified pattern. The pattern
     * must conform to that used by the {@link SimpleDateFormat simple date
     * format} class.
     *
     * @param date The date to format.
     * @param pattern The pattern to use for formatting the date.
     * @return A formatted date string.
     *
     * @throws IllegalArgumentException If the given date pattern is invalid.
     *
     * @see SimpleDateFormat
     */
    public static String formatDate(final Date date, final String pattern) {
        notNull(date, "Date");
        notNull(pattern, "Pattern");
        final SimpleDateFormat formatter = DateFormatHolder.formatFor(pattern);
        return formatter.format(date);
    }

    /**
     * Returns {@code argument} unchanged, or throws if it is {@code null}.
     *
     * @param argument the value to check
     * @param name the argument name used in the exception message
     * @throws IllegalArgumentException if {@code argument} is {@code null}
     */
    public static <T> T notNull(final T argument, final String name) {
        if (argument == null) {
            throw new IllegalArgumentException(name + " may not be null");
        }
        return argument;
    }

    /**
     * Clears thread-local variable containing {@link java.text.DateFormat} cache.
     *
     * @since 4.3
     */
    public static void clearThreadLocal() {
        DateFormatHolder.clearThreadLocal();
    }

    /** This class should not be instantiated. */
    private DateUtils() {
    }

    /**
     * A factory for {@link SimpleDateFormat}s. The instances are stored in a
     * threadlocal way because SimpleDateFormat is not threadsafe as noted in
     * {@link SimpleDateFormat its javadoc}.
     */
    final static class DateFormatHolder {

        private static final ThreadLocal<SoftReference<Map<String, SimpleDateFormat>>>
            THREADLOCAL_FORMATS = new ThreadLocal<SoftReference<Map<String, SimpleDateFormat>>>() {
                @Override
                protected SoftReference<Map<String, SimpleDateFormat>> initialValue() {
                    return new SoftReference<Map<String, SimpleDateFormat>>(
                            new HashMap<String, SimpleDateFormat>());
                }
            };

        /**
         * creates a {@link SimpleDateFormat} for the requested format string.
         *
         * @param pattern
         *            a non-{@code null} format String according to
         *            {@link SimpleDateFormat}. The format is not checked against
         *            {@code null} since all paths go through
         *            {@link DateUtils}.
         * @return the requested format. This simple dateformat should not be used
         *         to {@link SimpleDateFormat#applyPattern(String) apply} to a
         *         different pattern.
         */
        public static SimpleDateFormat formatFor(final String pattern) {
            final SoftReference<Map<String, SimpleDateFormat>> ref = THREADLOCAL_FORMATS.get();
            Map<String, SimpleDateFormat> formats = ref.get();
            if (formats == null) {
                // The soft reference was cleared under memory pressure; start a
                // fresh cache for this thread.
                formats = new HashMap<String, SimpleDateFormat>();
                THREADLOCAL_FORMATS.set(
                        new SoftReference<Map<String, SimpleDateFormat>>(formats));
            }
            SimpleDateFormat format = formats.get(pattern);
            if (format == null) {
                format = new SimpleDateFormat(pattern, Locale.US);
                // Reuse the shared GMT constant instead of re-resolving the zone,
                // keeping this consistent with the class-level default.
                format.setTimeZone(GMT);
                formats.put(pattern, format);
            }
            return format;
        }

        public static void clearThreadLocal() {
            THREADLOCAL_FORMATS.remove();
        }
    }
}
| |
/*
* Copyright (C) 2013 Square, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.lizy.okhttp;
import com.lizy.okhttp.internal.http.HttpMethod;
import java.net.URL;
import java.util.List;
/**
* An HTTP request. Instances of this class are immutable if their {@link #body} is null or itself
* immutable.
*/
public final class Request {
private final HttpUrl url;
private final String method;
private final Headers headers;
private final RequestBody body;
private final Object tag;
// private volatile CacheControl cacheControl; // Lazily initialized.
// Copies the builder's state. The tag defaults to the request itself, so a
// request can always be used as its own identity tag when none was supplied.
private Request(Builder builder) {
  this.url = builder.url;
  this.method = builder.method;
  this.headers = builder.headers.build();
  this.body = builder.body;
  this.tag = builder.tag != null ? builder.tag : this;
}
/** Returns this request's target URL. */
public HttpUrl url() {
  return url;
}

/** Returns the HTTP method of this request (defaults to {@code "GET"} in the builder). */
public String method() {
  return method;
}

/** Returns all headers of this request. */
public Headers headers() {
  return headers;
}

/** Returns the value of the header named {@code name} (delegates to {@link Headers#get}). */
public String header(String name) {
  return headers.get(name);
}

/** Returns all values of the header named {@code name} (delegates to {@link Headers#values}). */
public List<String> headers(String name) {
  return headers.values(name);
}

/** Returns the request body; may be null. */
public RequestBody body() {
  return body;
}

/** Returns the tag attached to this request; defaults to the request itself when unset. */
public Object tag() {
  return tag;
}

/** Returns a builder pre-populated with this request's state. */
public Builder newBuilder() {
  return new Builder(this);
}
/**
* Returns the cache control directives for this response. This is never null, even if this
* response contains no {@code Cache-Control} header.
*/
// public CacheControl cacheControl() {
// CacheControl result = cacheControl;
// return result != null ? result : (cacheControl = CacheControl.parse(headers));
// }
/** True when this request's URL is an HTTPS URL (delegates to {@link HttpUrl#isHttps}). */
public boolean isHttps() {
  return url.isHttps();
}
/** Debug representation; the tag is shown as null when it is the request's own default. */
@Override public String toString() {
  Object visibleTag = (tag != this) ? tag : null;
  StringBuilder description = new StringBuilder("Request{method=");
  description.append(method)
             .append(", url=")
             .append(url)
             .append(", tag=")
             .append(visibleTag)
             .append('}');
  return description.toString();
}
public static class Builder {
private HttpUrl url;
private String method;
private Headers.Builder headers;
private RequestBody body;
private Object tag;
/** Starts an empty builder: method defaults to {@code "GET"}, no headers, body, or tag. */
public Builder() {
  this.method = "GET";
  this.headers = new Headers.Builder();
}

/** Copy constructor backing {@link Request#newBuilder()}. */
private Builder(Request request) {
  this.url = request.url;
  this.method = request.method;
  this.body = request.body;
  this.tag = request.tag;
  this.headers = request.headers.newBuilder();
}

/** Sets the URL target of this request; rejects {@code null}. */
public Builder url(HttpUrl url) {
  if (url == null) throw new NullPointerException("url == null");
  this.url = url;
  return this;
}
/**
 * Sets the URL target of this request.
 *
 * @throws IllegalArgumentException if {@code url} is not a valid HTTP or HTTPS URL. Avoid this
 *     exception by calling {@link HttpUrl#parse}; it returns null for invalid URLs.
 */
public Builder url(String url) {
  if (url == null) throw new NullPointerException("url == null");

  // Silently replace websocket URLs with HTTP URLs (scheme match is case-insensitive).
  if (url.regionMatches(true, 0, "ws:", 0, 3)) {
    url = "http:" + url.substring(3);
  } else if (url.regionMatches(true, 0, "wss:", 0, 4)) {
    url = "https:" + url.substring(4);
  }

  HttpUrl parsed = HttpUrl.parse(url);
  if (parsed == null) throw new IllegalArgumentException("unexpected url: " + url);
  return url(parsed);
}
/**
* Sets the URL target of this request.
*
* @throws IllegalArgumentException if the scheme of {@code url} is not {@code http} or {@code
* https}.
*/
public Builder url(URL url) {
if (url == null) throw new NullPointerException("url == null");
HttpUrl parsed = HttpUrl.get(url);
if (parsed == null) throw new IllegalArgumentException("unexpected url: " + url);
return url(parsed);
}
/**
* Sets the header named {@code name} to {@code value}. If this request already has any headers
* with that name, they are all replaced.
*/
public Builder header(String name, String value) {
headers.set(name, value);
return this;
}
/**
* Adds a header with {@code name} and {@code value}. Prefer this method for multiply-valued
* headers like "Cookie".
*
* <p>Note that for some headers including {@code Content-Length} and {@code Content-Encoding},
* OkHttp may replace {@code value} with a header derived from the request body.
*/
public Builder addHeader(String name, String value) {
headers.add(name, value);
return this;
}
public Builder removeHeader(String name) {
headers.removeAll(name);
return this;
}
/** Removes all headers on this builder and adds {@code headers}. */
public Builder headers(Headers headers) {
this.headers = headers.newBuilder();
return this;
}
/**
* Sets this request's {@code Cache-Control} header, replacing any cache control headers already
* present. If {@code cacheControl} doesn't define any directives, this clears this request's
* cache-control headers.
*/
// public Builder cacheControl(CacheControl cacheControl) {
// String value = cacheControl.toString();
// if (value.isEmpty()) return removeHeader("Cache-Control");
// return header("Cache-Control", value);
// }
public Builder get() {
return method("GET", null);
}
public Builder head() {
return method("HEAD", null);
}
public Builder post(RequestBody body) {
return method("POST", body);
}
public Builder delete(RequestBody body) {
return method("DELETE", body);
}
public Builder delete() {
return delete(RequestBody.create(null, new byte[0]));
}
public Builder put(RequestBody body) {
return method("PUT", body);
}
public Builder patch(RequestBody body) {
return method("PATCH", body);
}
public Builder method(String method, RequestBody body) {
if (method == null) throw new NullPointerException("method == null");
if (method.length() == 0) throw new IllegalArgumentException("method.length() == 0");
if (body != null && !HttpMethod.permitsRequestBody(method)) {
throw new IllegalArgumentException("method " + method + " must not have a request body.");
}
if (body == null && HttpMethod.requiresRequestBody(method)) {
throw new IllegalArgumentException("method " + method + " must have a request body.");
}
this.method = method;
this.body = body;
return this;
}
/**
* Attaches {@code tag} to the request. It can be used later to cancel the request. If the tag
* is unspecified or null, the request is canceled by using the request itself as the tag.
*/
public Builder tag(Object tag) {
this.tag = tag;
return this;
}
public Request build() {
if (url == null) throw new IllegalStateException("url == null");
return new Request(this);
}
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.jms.reply;
import java.math.BigInteger;
import java.util.Random;
import javax.jms.Destination;
import javax.jms.JMSException;
import javax.jms.Message;
import javax.jms.Session;
import org.apache.camel.AsyncCallback;
import org.apache.camel.CamelContext;
import org.apache.camel.Exchange;
import org.apache.camel.component.jms.DefaultSpringErrorHandler;
import org.apache.camel.component.jms.ReplyToType;
import org.springframework.jms.listener.AbstractMessageListenerContainer;
import org.springframework.jms.listener.DefaultMessageListenerContainer;
import org.springframework.jms.support.destination.DestinationResolver;
/**
* A {@link ReplyManager} when using persistent queues.
*
* @version
*/
public class PersistentQueueReplyManager extends ReplyManagerSupport {

    // Random selector value, generated once per manager, used when a fixed
    // reply-to selector name is configured on the endpoint.
    private String replyToSelectorValue;
    // Builds dynamic JMS message selectors from the outstanding correlation ids
    // (used for shared reply queues without a fixed selector).
    private MessageSelectorCreator dynamicMessageSelector;

    public PersistentQueueReplyManager(CamelContext camelContext) {
        super(camelContext);
    }

    /**
     * Registers an expected reply under {@code correlationId} and returns that id.
     * The handler is kept in the correlation map until a reply arrives or the
     * given timeout elapses.
     */
    public String registerReply(ReplyManager replyManager, Exchange exchange, AsyncCallback callback,
                                String originalCorrelationId, String correlationId, long requestTimeout) {
        // add to correlation map
        PersistentQueueReplyHandler handler = new PersistentQueueReplyHandler(replyManager, exchange, callback,
                originalCorrelationId, correlationId, requestTimeout);
        correlation.put(correlationId, handler, requestTimeout);
        return correlationId;
    }

    /**
     * Re-keys a provisional correlation id to the expected correlation id while
     * keeping the same handler. No-op when the provisional id is no longer present.
     */
    public void updateCorrelationId(String correlationId, String newCorrelationId, long requestTimeout) {
        log.trace("Updated provisional correlationId [{}] to expected correlationId [{}]", correlationId, newCorrelationId);
        ReplyHandler handler = correlation.remove(correlationId);
        if (handler == null) {
            // should not happen that we can't find the handler
            return;
        }
        correlation.put(newCorrelationId, handler, requestTimeout);
    }

    /**
     * Dispatches a received reply message to its registered handler (removing the
     * correlation entry afterwards). Replies with unknown correlation ids are
     * logged at WARN and ignored.
     */
    protected void handleReplyMessage(String correlationID, Message message) {
        ReplyHandler handler = correlation.get(correlationID);
        if (handler == null && endpoint.isUseMessageIDAsCorrelationID()) {
            handler = waitForProvisionCorrelationToBeUpdated(correlationID, message);
        }
        if (handler != null) {
            try {
                handler.onReply(correlationID, message);
            } finally {
                correlation.remove(correlationID);
            }
        } else {
            // we could not correlate the received reply message to a matching request and therefore
            // we cannot continue routing the unknown message
            // log a warn and then ignore the message
            log.warn("Reply received for unknown correlationID [{}]. The message will be ignored: {}", correlationID, message);
        }
    }

    /**
     * Propagates the fixed reply-to selector (when one is configured) onto both the
     * Camel message header and the outgoing JMS message property, so the shared
     * reply listener can match the reply back to this manager.
     */
    public void setReplyToSelectorHeader(org.apache.camel.Message camelMessage, Message jmsMessage) throws JMSException {
        String replyToSelectorName = endpoint.getReplyToDestinationSelectorName();
        if (replyToSelectorName != null && replyToSelectorValue != null) {
            camelMessage.setHeader(replyToSelectorName, replyToSelectorValue);
            jmsMessage.setStringProperty(replyToSelectorName, replyToSelectorValue);
        }
    }

    /**
     * Delegating {@link DestinationResolver} that resolves the reply destination
     * once, caches it, and publishes it via {@code setReplyTo(Destination)}.
     */
    private final class DestinationResolverDelegate implements DestinationResolver {
        private final DestinationResolver delegate;
        private Destination destination;

        public DestinationResolverDelegate(DestinationResolver delegate) {
            this.delegate = delegate;
        }

        public Destination resolveDestinationName(Session session, String destinationName,
                                                  boolean pubSubDomain) throws JMSException {
            synchronized (PersistentQueueReplyManager.this) {
                // resolve the reply to destination
                if (destination == null) {
                    destination = delegate.resolveDestinationName(session, destinationName, pubSubDomain);
                    setReplyTo(destination);
                }
            }
            return destination;
        }
    }

    /**
     * Creates and configures the Spring {@link DefaultMessageListenerContainer}
     * consuming replies from the persistent queue, honoring the endpoint's
     * reply-to type (shared/exclusive), selector, cache level, error handling and
     * timeout settings. Single-threaded and non-transacted by design.
     */
    protected AbstractMessageListenerContainer createListenerContainer() throws Exception {
        DefaultMessageListenerContainer answer;
        ReplyToType type = endpoint.getConfiguration().getReplyToType();
        if (type == null) {
            // use shared by default for persistent reply queues
            type = ReplyToType.Shared;
        }
        if (ReplyToType.Shared == type) {
            // shared reply to queues support either a fixed or dynamic JMS message selector
            String replyToSelectorName = endpoint.getReplyToDestinationSelectorName();
            if (replyToSelectorName != null) {
                // create a random selector value we will use for the persistent reply queue
                replyToSelectorValue = "ID:" + new BigInteger(24 * 8, new Random()).toString(16);
                String fixedMessageSelector = replyToSelectorName + "='" + replyToSelectorValue + "'";
                answer = new SharedPersistentQueueMessageListenerContainer(fixedMessageSelector);
                // must use cache level consumer for fixed message selector
                answer.setCacheLevel(DefaultMessageListenerContainer.CACHE_CONSUMER);
                // parameterized logging so arguments are only rendered when DEBUG is enabled
                log.debug("Using shared queue: {} with fixed message selector [{}] as reply listener: {}",
                        new Object[]{endpoint.getReplyTo(), fixedMessageSelector, answer});
            } else {
                // use a dynamic message selector which will select the message we want to receive as reply
                dynamicMessageSelector = new MessageSelectorCreator(correlation);
                answer = new SharedPersistentQueueMessageListenerContainer(dynamicMessageSelector);
                // must use cache level session for dynamic message selector,
                // as otherwise the dynamic message selector will not be updated on-the-fly
                answer.setCacheLevel(DefaultMessageListenerContainer.CACHE_SESSION);
                log.debug("Using shared queue: {} with dynamic message selector as reply listener: {}", endpoint.getReplyTo(), answer);
            }
        } else if (ReplyToType.Exclusive == type) {
            answer = new ExclusivePersistentQueueMessageListenerContainer();
            // must use cache level consumer for exclusive as there is no message selector
            answer.setCacheLevel(DefaultMessageListenerContainer.CACHE_CONSUMER);
            log.debug("Using exclusive queue:{} as reply listener: {}", endpoint.getReplyTo(), answer);
        } else {
            throw new IllegalArgumentException("ReplyToType " + type + " is not supported for persistent reply queues");
        }
        String replyToCacheLevelName = endpoint.getConfiguration().getReplyToCacheLevelName();
        if (replyToCacheLevelName != null) {
            answer.setCacheLevelName(replyToCacheLevelName);
            log.debug("Setting the replyCacheLevel to be {}", replyToCacheLevelName);
        }
        DestinationResolver resolver = endpoint.getDestinationResolver();
        if (resolver == null) {
            resolver = answer.getDestinationResolver();
        }
        answer.setDestinationResolver(new DestinationResolverDelegate(resolver));
        answer.setDestinationName(endpoint.getReplyTo());
        answer.setAutoStartup(true);
        answer.setMessageListener(this);
        answer.setPubSubDomain(false);
        answer.setSubscriptionDurable(false);
        answer.setConcurrentConsumers(1);
        answer.setMaxConcurrentConsumers(1);
        answer.setConnectionFactory(endpoint.getConnectionFactory());
        String clientId = endpoint.getClientId();
        if (clientId != null) {
            clientId += ".CamelReplyManager";
            answer.setClientId(clientId);
        }
        // we cannot do request-reply over JMS with transaction
        answer.setSessionTransacted(false);
        // other optional properties
        if (endpoint.getExceptionListener() != null) {
            answer.setExceptionListener(endpoint.getExceptionListener());
        }
        if (endpoint.getErrorHandler() != null) {
            answer.setErrorHandler(endpoint.getErrorHandler());
        } else {
            answer.setErrorHandler(new DefaultSpringErrorHandler(PersistentQueueReplyManager.class, endpoint.getErrorHandlerLoggingLevel(), endpoint.isErrorHandlerLogStackTrace()));
        }
        if (endpoint.getReceiveTimeout() >= 0) {
            answer.setReceiveTimeout(endpoint.getReceiveTimeout());
        }
        if (endpoint.getRecoveryInterval() >= 0) {
            answer.setRecoveryInterval(endpoint.getRecoveryInterval());
        }
        // do not use a task executor for reply as we are always a single threaded task
        // setup a bean name which is used by Spring JMS as the thread name
        String name = "PersistentQueueReplyManager[" + answer.getDestinationName() + "]";
        name = endpoint.getCamelContext().getExecutorServiceManager().resolveThreadName(name);
        answer.setBeanName(name);
        return answer;
    }
}
| |
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.api.generator.engine.ast;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThrows;
import org.junit.Test;
/**
 * Unit tests for {@code ThrowExpr}: builder validation of the exception type,
 * message expression, cause expression, and throw-expression, including the
 * mutual-exclusion rules between them.
 */
public class ThrowExprTest {
// Building with just an exception type succeeds.
@Test
public void createThrowExpr_basic() {
TypeNode npeType = TypeNode.withExceptionClazz(NullPointerException.class);
ThrowExpr.builder().setType(npeType).build();
// No exception thrown, we're good.
}
// When a throw-expr is given, the ThrowExpr's type comes from that expression;
// an explicitly-set type is ignored.
@Test
public void createThrowExpr_basicExpr() {
TypeNode npeType = TypeNode.withExceptionClazz(NullPointerException.class);
VariableExpr throwVarExpr =
VariableExpr.builder()
.setVariable(
Variable.builder()
.setName("e")
.setType(TypeNode.withExceptionClazz(RuntimeException.class))
.build())
.build();
ThrowExpr throwExpr = ThrowExpr.builder().setThrowExpr(throwVarExpr).build();
assertEquals(throwVarExpr.variable().type(), throwExpr.type());
// Setting the type doesn't matter.
throwExpr = ThrowExpr.builder().setThrowExpr(throwVarExpr).setType(npeType).build();
assertEquals(throwVarExpr.variable().type(), throwExpr.type());
}
// A plain string message is accepted.
@Test
public void createThrowExpr_basicWithStringMessage() {
TypeNode npeType = TypeNode.withExceptionClazz(NullPointerException.class);
ThrowExpr.builder().setType(npeType).setMessageExpr("Some message").build();
// No exception thrown, we're good.
}
// A String-typed message expression is accepted.
@Test
public void createThrowExpr_messageExpr() {
TypeNode npeType = TypeNode.withExceptionClazz(NullPointerException.class);
Expr messageExpr =
MethodInvocationExpr.builder()
.setMethodName("foobar")
.setReturnType(TypeNode.STRING)
.build();
ThrowExpr.builder().setType(npeType).setMessageExpr(messageExpr).build();
// No exception thrown, we're good.
}
// Non-exception types are rejected at build time.
@Test
public void createThrowExpr_badExceptionType() {
TypeNode nonExceptionType = TypeNode.STRING;
assertThrows(
IllegalStateException.class, () -> ThrowExpr.builder().setType(nonExceptionType).build());
}
// A message expression that is not String-typed (here: int) is rejected.
@Test
public void createThrowExpr_badMessageExpr() {
TypeNode npeType = TypeNode.withExceptionClazz(NullPointerException.class);
Expr messageExpr =
MethodInvocationExpr.builder().setMethodName("foobar").setReturnType(TypeNode.INT).build();
assertThrows(
IllegalStateException.class,
() -> ThrowExpr.builder().setType(npeType).setMessageExpr(messageExpr).build());
}
// A Throwable-typed cause expression is accepted.
@Test
public void createThrowExpr_causeExpr() {
TypeNode npeType =
TypeNode.withReference(ConcreteReference.withClazz(NullPointerException.class));
ThrowExpr.builder()
.setType(npeType)
.setCauseExpr(
NewObjectExpr.builder()
.setType(TypeNode.withReference(ConcreteReference.withClazz(Throwable.class)))
.build())
.build();
// Successfully created a ThrowExpr.
}
// A cause whose type is a subtype of Throwable is accepted too.
@Test
public void createThrowExpr_causeExpr_throwableSubtype() {
TypeNode npeType =
TypeNode.withReference(ConcreteReference.withClazz(NullPointerException.class));
ThrowExpr.builder()
.setType(npeType)
.setCauseExpr(
NewObjectExpr.builder()
.setType(TypeNode.withExceptionClazz(IllegalStateException.class))
.build())
.build();
// Successfully created a ThrowExpr.
}
// NOTE(review): method name is misleading — the cause here is a NON-Throwable
// type (String), and the builder is expected to reject it.
@Test
public void createThrowExpr_causeExpr_onThrowableSubtype() {
TypeNode npeType =
TypeNode.withReference(ConcreteReference.withClazz(NullPointerException.class));
assertThrows(
IllegalStateException.class,
() ->
ThrowExpr.builder()
.setType(npeType)
.setCauseExpr(NewObjectExpr.builder().setType(TypeNode.STRING).build())
.build());
}
// Message and cause may be combined with an explicit exception type.
@Test
public void createThrowExpr_messageAndCauseExpr() {
TypeNode npeType =
TypeNode.withReference(ConcreteReference.withClazz(NullPointerException.class));
Expr messageExpr =
MethodInvocationExpr.builder()
.setMethodName("foobar")
.setReturnType(TypeNode.STRING)
.build();
ThrowExpr.builder()
.setType(npeType)
.setMessageExpr(messageExpr)
.setCauseExpr(
NewObjectExpr.builder()
.setType(TypeNode.withReference(ConcreteReference.withClazz(Throwable.class)))
.build())
.build();
// Successfully created a ThrowExpr.
}
// Throwing a variable *declaration* (isDecl=true) is rejected.
@Test
public void createThrowExpr_cannotThrowVariableDeclaration() {
VariableExpr throwVarExpr =
VariableExpr.builder()
.setVariable(
Variable.builder()
.setName("e")
.setType(TypeNode.withExceptionClazz(RuntimeException.class))
.build())
.build();
assertThrows(
IllegalStateException.class,
() ->
ThrowExpr.builder()
.setThrowExpr(throwVarExpr.toBuilder().setIsDecl(true).build())
.build());
}
// A throw-expr whose type is not an exception type is rejected.
@Test
public void createThrowExpr_cannotThrowNonExceptionTypedExpr() {
VariableExpr throwVarExpr =
VariableExpr.builder()
.setVariable(Variable.builder().setName("str").setType(TypeNode.STRING).build())
.build();
assertThrows(
IllegalStateException.class, () -> ThrowExpr.builder().setThrowExpr(throwVarExpr).build());
}
// throwExpr and messageExpr are mutually exclusive.
@Test
public void createThrowExpr_cannotHaveThrowVariableAndMessageExprPresent() {
Expr messageExpr =
MethodInvocationExpr.builder()
.setMethodName("foobar")
.setReturnType(TypeNode.STRING)
.build();
VariableExpr throwVarExpr =
VariableExpr.builder()
.setVariable(
Variable.builder()
.setName("e")
.setType(TypeNode.withExceptionClazz(RuntimeException.class))
.build())
.build();
assertThrows(
IllegalStateException.class,
() -> ThrowExpr.builder().setThrowExpr(throwVarExpr).setMessageExpr(messageExpr).build());
}
// throwExpr and causeExpr are mutually exclusive.
@Test
public void createThrowExpr_cannotHaveThrowVariableAndCauseExprPresent() {
VariableExpr throwVarExpr =
VariableExpr.builder()
.setVariable(
Variable.builder()
.setName("e")
.setType(TypeNode.withExceptionClazz(RuntimeException.class))
.build())
.build();
assertThrows(
IllegalStateException.class,
() ->
ThrowExpr.builder()
.setThrowExpr(throwVarExpr)
.setCauseExpr(
NewObjectExpr.builder()
.setType(
TypeNode.withReference(ConcreteReference.withClazz(Throwable.class)))
.build())
.build());
}
}
| |
package lj_3d.gearloadinglayout.gearViews;
import android.content.Context;
import android.content.res.Resources;
import android.content.res.TypedArray;
import android.graphics.Color;
import android.util.AttributeSet;
import android.view.View;
import lj_3d.gearloadinglayout.R;
import lj_3d.gearloadinglayout.enums.ShowMode;
import lj_3d.gearloadinglayout.enums.Style;
import lj_3d.gearloadinglayout.utils.DeviceScreenHelper;
import lj_3d.gearloadinglayout.utils.FastBlur;
/**
* Created by LJ on 23.03.2016.
*/
/**
 * Loading layout composed of three {@link GearView}s; the second (middle) gear
 * is driven with the opposite boolean flag to the outer two — presumably so the
 * teeth appear to mesh (TODO confirm against GearView.startSpinning semantics).
 * All setters return {@code this} for fluent chaining.
 */
public class ThreeGearsLayout extends GearLoadingLayout {
public static final String IDENTIFIER = "ThreeGearsLayout";
// The three gear views inflated from R.layout.layout_three_gears.
private GearView mFirstGearView;
private GearView mSecondGearView;
private GearView mThirdGearView;
public ThreeGearsLayout(Context context) {
this(context, null);
}
public ThreeGearsLayout(Context context, AttributeSet attrs) {
this(context, attrs, 0);
}
// NOTE(review): this constructor calls the overridable methods initUI() (via
// addChildView) and parseAttributes(); subclass overrides would run before the
// subclass's own fields are initialized.
public ThreeGearsLayout(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
addChildView();
parseAttributes(attrs);
}
// Inflates the three-gear layout, binds its views, and attaches it as a child.
private void addChildView() {
final View childView = inflate(getContext(), R.layout.layout_three_gears, null);
initUI(childView);
addView(childView);
}
// Binds the three GearView children from the inflated layout.
protected void initUI(View rootView) {
super.initUI(rootView);
mFirstGearView = (GearView) rootView.findViewById(R.id.gear_view);
mSecondGearView = (GearView) rootView.findViewById(R.id.gear_view_second);
mThirdGearView = (GearView) rootView.findViewById(R.id.gear_view_third);
}
// Starts all three gears; the middle one receives the opposite direction flag.
public void start() {
mFirstGearView.startSpinning(false);
mSecondGearView.startSpinning(true);
mThirdGearView.startSpinning(false);
}
// Starts the gears with all direction flags inverted when reverseMode is true.
public void start(boolean reverseMode) {
if (reverseMode) {
mFirstGearView.startSpinning(true);
mSecondGearView.startSpinning(false);
mThirdGearView.startSpinning(true);
} else {
start();
}
}
// Stops all three gears.
public void stop() {
mFirstGearView.stopSpinning();
mSecondGearView.stopSpinning();
mThirdGearView.stopSpinning();
}
// Rotates all three gears by a fixed offset; the middle gear gets the opposite flag.
public void rotateByValue(float rotateOffset) {
mFirstGearView.rotateByValue(rotateOffset, false);
mSecondGearView.rotateByValue(rotateOffset, true);
mThirdGearView.rotateByValue(rotateOffset, false);
}
// Applies the same spin duration to all three gears.
public ThreeGearsLayout setDuration(final int duration) {
mFirstGearView.setDuration(duration);
mSecondGearView.setDuration(duration);
mThirdGearView.setDuration(duration);
return this;
}
// --- Per-gear outer color setters ---
public ThreeGearsLayout setFirstGearColor(int color) {
mFirstGearView.setColor(color);
return this;
}
public ThreeGearsLayout setSecondGearColor(int color) {
mSecondGearView.setColor(color);
return this;
}
public ThreeGearsLayout setThirdGearColor(int color) {
mThirdGearView.setColor(color);
return this;
}
// --- Per-gear inner color setters (public overloads leave the cut-center disabled) ---
public ThreeGearsLayout setFirstGearInnerColor(int color) {
setFirstGearInnerColor(color, false);
return this;
}
public ThreeGearsLayout setSecondGearInnerColor(int color) {
setSecondGearInnerColor(color, false);
return this;
}
public ThreeGearsLayout setThirdGearInnerColor(int color) {
setThirdGearInnerColor(color, false);
return this;
}
// --- Covariant-return wrappers over GearLoadingLayout setters (keep fluent type) ---
public ThreeGearsLayout setShadowColor(int color) {
super.setShadowColor(color);
return this;
}
public ThreeGearsLayout setShadowWidth(int width) {
super.setShadowWidth(width);
return this;
}
// Private variants also toggle the gear's cut-out center.
private ThreeGearsLayout setFirstGearInnerColor(int color, boolean enableCuttedCenter) {
mFirstGearView.setInnerColor(color);
mFirstGearView.enableCuttedCenter(enableCuttedCenter);
return this;
}
private ThreeGearsLayout setSecondGearInnerColor(int color, boolean enableCuttedCenter) {
mSecondGearView.setInnerColor(color);
mSecondGearView.enableCuttedCenter(enableCuttedCenter);
return this;
}
private ThreeGearsLayout setThirdGearInnerColor(int color, boolean enableCuttedCenter) {
mThirdGearView.setInnerColor(color);
mThirdGearView.enableCuttedCenter(enableCuttedCenter);
return this;
}
// SNACK_BAR style shrinks the dialog to the wrapper height; otherwise full device height.
// (mDialogHeight / mResources are inherited — TODO confirm in GearLoadingLayout.)
public ThreeGearsLayout setStyle(final Style style) {
super.setStyle(style);
mDialogHeight = style == Style.SNACK_BAR ? mResources.getDimensionPixelSize(R.dimen.three_gear_layout_wrapper_height) : DeviceScreenHelper.mDeviceHeight;
return this;
}
public ThreeGearsLayout setDialogBackgroundColor(int color) {
super.setDialogBackgroundColor(color);
return this;
}
public ThreeGearsLayout setDialogBackgroundAlpha(float alpha) {
super.setDialogBackgroundAlpha(alpha);
return this;
}
public ThreeGearsLayout setMainBackgroundColor(int color) {
super.setMainBackgroundColor(color);
return this;
}
public ThreeGearsLayout setMainBackgroundAlpha(float alpha) {
super.setMainBackgroundAlpha(alpha);
return this;
}
public ThreeGearsLayout enableCutLayout(boolean enable) {
super.enableCutLayout(enable);
return this;
}
public ThreeGearsLayout setCutRadius(int radius) {
super.setCutRadius(radius);
return this;
}
public ThreeGearsLayout setCutLayoutColor(int color) {
super.setCutLayoutColor(color);
return this;
}
public ThreeGearsLayout setCutLayoutAlpha(float alpha) {
super.setCutLayoutAlpha(alpha);
return this;
}
public ThreeGearsLayout setShowMode(ShowMode showMode) {
super.setShowMode(showMode);
return this;
}
public ThreeGearsLayout setShowDialogDuration(int showDialogDuration) {
super.setShowDialogDuration(showDialogDuration);
return this;
}
// Convenience overload: blur with default radius 0 and scale factor 0.
public ThreeGearsLayout blurBackground(boolean enable) {
blurBackground(enable, 0, 0);
return this;
}
public ThreeGearsLayout blurBackground(boolean enable, int radius, float scaleFactor) {
super.blurBackground(enable, radius, scaleFactor);
return this;
}
public ThreeGearsLayout setCancelable(boolean cancelable) {
super.setCancelable(cancelable);
return this;
}
// Reads gear colors, inner colors and cut-center flags from the XML attribute
// set (defaults: GRAY gears, WHITE inner, cut center enabled), then recycles
// the TypedArray and requests a relayout.
protected void parseAttributes(AttributeSet attrs) {
super.parseAttributes(attrs);
TypedArray a = getContext().obtainStyledAttributes(attrs, R.styleable.GearLoadingLayout);
setFirstGearColor(a.getColor(R.styleable.GearLoadingLayout_firstGearColor, Color.GRAY));
setSecondGearColor(a.getColor(R.styleable.GearLoadingLayout_secondGearColor, Color.GRAY));
setThirdGearColor(a.getColor(R.styleable.GearLoadingLayout_thirdGearColor, Color.GRAY));
setFirstGearInnerColor(a.getColor(R.styleable.GearLoadingLayout_firstInnerGearColor, Color.WHITE), a.getBoolean(R.styleable.GearLoadingLayout_firstGearCuttedCenter, true));
setSecondGearInnerColor(a.getColor(R.styleable.GearLoadingLayout_secondInnerGearColor, Color.WHITE), a.getBoolean(R.styleable.GearLoadingLayout_secondGearCuttedCenter, true));
setThirdGearInnerColor(a.getColor(R.styleable.GearLoadingLayout_thirdInnerGearColor, Color.WHITE), a.getBoolean(R.styleable.GearLoadingLayout_thirdGearCuttedCenter, true));
a.recycle();
requestLayout();
}
}
| |
/*L
* Copyright SAIC, Ellumen and RSNA (CTP)
*
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/national-biomedical-image-archive/LICENSE.txt for details.
*/
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>QA_Tool_View_Images_TestCase</title>
</head>
<body>
<table cellpadding="1" cellspacing="1" border="1">
<thead>
<tr><td rowspan="1" colspan="3">QA_Tool_View_Images_TestCase</td></tr>
</thead><tbody>
<tr>
<td>open</td>
<td>/ncia/</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>link=CLICKING HERE</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>MAINbody:loginForm:_id127</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>link=QA Tool</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>SUBmenu:treeForm:lazyAjaxTree:0:2:t2</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>//a[contains(text(),'Collection: IDRI')]</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>//a[contains(text(),'Patient: 1.3.6.1.4.1.9328.50.3.0001')]</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>//a[contains(text(),'Study: 1.3.6.1.4.1.9328.50.3.3')]</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>//a[contains(text(),'Series: 1.3.6.1.4.1.9328.50.3.4')]</td>
<td></td>
</tr>
<tr>
<td>click</td>
<td>MAINbody:mainForm:imageTable:0:_id76</td>
<td></td>
</tr>
<tr>
<td>waitForPopUp</td>
<td>jpeg_window</td>
<td>30000</td>
</tr>
<tr>
<td>click</td>
<td>MAINbody:mainForm:imageTable:0:_id78</td>
<td></td>
</tr>
<tr>
<td>waitForPopUp</td>
<td>dicom_window</td>
<td>30000</td>
</tr>
<tr>
<td>click</td>
<td>MAINbody:mainForm:imageTable:0:_id80</td>
<td></td>
</tr>
<tr>
<td>waitForPopUp</td>
<td>history_window</td>
<td>30000</td>
</tr>
<tr>
<td>clickAndWait</td>
<td>//a[contains(text(),'Patient: 1.3.6.1.4.1.9328.50.3.0015')]</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>//a[contains(text(),'Study: 1.3.6.1.4.1.9328.50.3.46')]</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>link=Show Images</td>
<td></td>
</tr>
<tr>
<td>click</td>
<td>MAINbody:mainForm:imageTable:0:_id76</td>
<td></td>
</tr>
<tr>
<td>waitForPopUp</td>
<td>jpeg_window</td>
<td>30000</td>
</tr>
<tr>
<td>click</td>
<td>MAINbody:mainForm:imageTable:0:_id78</td>
<td></td>
</tr>
<tr>
<td>waitForPopUp</td>
<td>dicom_window</td>
<td>30000</td>
</tr>
<tr>
<td>click</td>
<td>MAINbody:mainForm:imageTable:0:_id80</td>
<td></td>
</tr>
<tr>
<td>waitForPopUp</td>
<td>history_window</td>
<td>30000</td>
</tr>
<tr>
<td>clickAndWait</td>
<td>SUBmenu:treeForm:lazyAjaxTree:0:2:0:t2</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>//a[contains(text(),'Collection: ISPY')]</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>//a[contains(text(),'Patient: 35')]</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>link=Show Series</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>link=Show Images</td>
<td></td>
</tr>
<tr>
<td>click</td>
<td>MAINbody:mainForm:imageTable:0:_id76</td>
<td></td>
</tr>
<tr>
<td>waitForPopUp</td>
<td>jpeg_window</td>
<td>30000</td>
</tr>
<tr>
<td>click</td>
<td>MAINbody:mainForm:imageTable:0:_id78</td>
<td></td>
</tr>
<tr>
<td>waitForPopUp</td>
<td>dicom_window</td>
<td>30000</td>
</tr>
<tr>
<td>click</td>
<td>MAINbody:mainForm:imageTable:0:_id80</td>
<td></td>
</tr>
<tr>
<td>waitForPopUp</td>
<td>history_window</td>
<td>30000</td>
</tr>
<tr>
<td>clickAndWait</td>
<td>link=Next</td>
<td></td>
</tr>
<tr>
<td>click</td>
<td>MAINbody:mainForm:imageTable:26:_id76</td>
<td></td>
</tr>
<tr>
<td>waitForPopUp</td>
<td>jpeg_window</td>
<td>30000</td>
</tr>
<tr>
<td>click</td>
<td>MAINbody:mainForm:imageTable:26:_id78</td>
<td></td>
</tr>
<tr>
<td>waitForPopUp</td>
<td>dicom_window</td>
<td>30000</td>
</tr>
<tr>
<td>click</td>
<td>MAINbody:mainForm:imageTable:26:_id80</td>
<td></td>
</tr>
<tr>
<td>waitForPopUp</td>
<td>history_window</td>
<td>30000</td>
</tr>
<tr>
<td>clickAndWait</td>
<td>link=>></td>
<td></td>
</tr>
<tr>
<td>click</td>
<td>MAINbody:mainForm:imageTable:38:_id76</td>
<td></td>
</tr>
<tr>
<td>waitForPopUp</td>
<td>jpeg_window</td>
<td>30000</td>
</tr>
<tr>
<td>click</td>
<td>MAINbody:mainForm:imageTable:41:_id78</td>
<td></td>
</tr>
<tr>
<td>waitForPopUp</td>
<td>dicom_window</td>
<td>30000</td>
</tr>
<tr>
<td>click</td>
<td>MAINbody:mainForm:imageTable:41:_id80</td>
<td></td>
</tr>
<tr>
<td>waitForPopUp</td>
<td>history_window</td>
<td>30000</td>
</tr>
<tr>
<td>clickAndWait</td>
<td>SUBmenu:treeForm:lazyAjaxTree:0:2:1:t2</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>//a[contains(text(),'Collection: LIDC')]</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>//a[contains(text(),'Patient: 1.3.6.1.4.1.9328.50.3.0023')]</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>link=Show Series</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>link=Show Images</td>
<td></td>
</tr>
<tr>
<td>click</td>
<td>MAINbody:mainForm:imageTable:4:_id76</td>
<td></td>
</tr>
<tr>
<td>waitForPopUp</td>
<td>jpeg_window</td>
<td>30000</td>
</tr>
<tr>
<td>click</td>
<td>MAINbody:mainForm:imageTable:4:_id78</td>
<td></td>
</tr>
<tr>
<td>waitForPopUp</td>
<td>dicom_window</td>
<td>30000</td>
</tr>
<tr>
<td>click</td>
<td>MAINbody:mainForm:imageTable:4:_id80</td>
<td></td>
</tr>
<tr>
<td>waitForPopUp</td>
<td>history_window</td>
<td>30000</td>
</tr>
<tr>
<td>selectAndWait</td>
<td>MAINbody:mainForm:_id59</td>
<td>label=10</td>
</tr>
<tr>
<td>click</td>
<td>MAINbody:mainForm:imageTable:25:_id76</td>
<td></td>
</tr>
<tr>
<td>waitForPopUp</td>
<td>jpeg_window</td>
<td>30000</td>
</tr>
<tr>
<td>click</td>
<td>MAINbody:mainForm:imageTable:25:_id78</td>
<td></td>
</tr>
<tr>
<td>waitForPopUp</td>
<td>dicom_window</td>
<td>30000</td>
</tr>
<tr>
<td>click</td>
<td>MAINbody:mainForm:imageTable:25:_id80</td>
<td></td>
</tr>
<tr>
<td>waitForPopUp</td>
<td>history_window</td>
<td>30000</td>
</tr>
<tr>
<td>clickAndWait</td>
<td>//a[contains(text(),'Patient: 1.3.6.1.4.1.9328.50.3.0106')]</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>link=Show Series</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>link=Show Images</td>
<td></td>
</tr>
<tr>
<td>selectAndWait</td>
<td>MAINbody:mainForm:_id62</td>
<td>label=6</td>
</tr>
<tr>
<td>click</td>
<td>MAINbody:mainForm:imageTable:19:_id76</td>
<td></td>
</tr>
<tr>
<td>waitForPopUp</td>
<td>jpeg_window</td>
<td>30000</td>
</tr>
<tr>
<td>click</td>
<td>MAINbody:mainForm:imageTable:19:_id78</td>
<td></td>
</tr>
<tr>
<td>waitForPopUp</td>
<td>dicom_window</td>
<td>30000</td>
</tr>
<tr>
<td>click</td>
<td>MAINbody:mainForm:imageTable:13:_id80</td>
<td></td>
</tr>
<tr>
<td>waitForPopUp</td>
<td>history_window</td>
<td>30000</td>
</tr>
<tr>
<td>clickAndWait</td>
<td>SUBmenu:treeForm:lazyAjaxTree:0:2:2:t2</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>//a[contains(text(),'Collection: RIDER')]</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>//a[contains(text(),'Patient: 1.3.6.1.4.1.9328.50.1.0010')]</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>//a[@onclick="document.forms['MAINbody:mainForm']['MAINbody:mainForm:_idcl'].value='MAINbody:mainForm:studyView:resultTable:2:_id102'; document.forms['MAINbody:mainForm'].submit(); return false;"]</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>//a[@onclick="document.forms['MAINbody:mainForm']['MAINbody:mainForm:_idcl'].value='MAINbody:mainForm:seriesView:resultTable:1:_id123'; document.forms['MAINbody:mainForm'].submit(); return false;"]</td>
<td></td>
</tr>
<tr>
<td>click</td>
<td>MAINbody:mainForm:imageTable:1:_id76</td>
<td></td>
</tr>
<tr>
<td>waitForPopUp</td>
<td>jpeg_window</td>
<td>30000</td>
</tr>
<tr>
<td>click</td>
<td>MAINbody:mainForm:imageTable:1:_id78</td>
<td></td>
</tr>
<tr>
<td>waitForPopUp</td>
<td>dicom_window</td>
<td>30000</td>
</tr>
<tr>
<td>click</td>
<td>MAINbody:mainForm:imageTable:1:_id80</td>
<td></td>
</tr>
<tr>
<td>waitForPopUp</td>
<td>history_window</td>
<td>30000</td>
</tr>
<tr>
<td>clickAndWait</td>
<td>SUBmenu:treeForm:lazyAjaxTree:0:2:3:t2</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>//a[contains(text(),'Collection: Virtual Colonoscopy')]</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>link=Show Series</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>link=Show Images</td>
<td></td>
</tr>
<tr>
<td>click</td>
<td>MAINbody:mainForm:imageTable:14:_id76</td>
<td></td>
</tr>
<tr>
<td>waitForPopUp</td>
<td>jpeg_window</td>
<td>30000</td>
</tr>
<tr>
<td>click</td>
<td>MAINbody:mainForm:imageTable:14:_id78</td>
<td></td>
</tr>
<tr>
<td>waitForPopUp</td>
<td>dicom_window</td>
<td>30000</td>
</tr>
<tr>
<td>click</td>
<td>MAINbody:mainForm:imageTable:14:_id80</td>
<td></td>
</tr>
<tr>
<td>waitForPopUp</td>
<td>history_window</td>
<td>30000</td>
</tr>
<tr>
<td>clickAndWait</td>
<td>SUBmenu:treeForm:lazyAjaxTree:0:2:4:t2</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>SUBmenu:treeForm:lazyAjaxTree:0:2:t2</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>SUBmenu:treeForm:lazyAjaxTree:0:1:t2</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>//a[contains(text(),'Collection: LIDC')]</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>link=Show Studies</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>link=Show Series</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>link=Show Images</td>
<td></td>
</tr>
<tr>
<td>click</td>
<td>MAINbody:mainForm:imageTable:8:_id76</td>
<td></td>
</tr>
<tr>
<td>waitForPopUp</td>
<td>jpeg_window</td>
<td>30000</td>
</tr>
<tr>
<td>click</td>
<td>MAINbody:mainForm:imageTable:8:_id78</td>
<td></td>
</tr>
<tr>
<td>waitForPopUp</td>
<td>dicom_window</td>
<td>30000</td>
</tr>
<tr>
<td>click</td>
<td>MAINbody:mainForm:imageTable:8:_id80</td>
<td></td>
</tr>
<tr>
<td>waitForPopUp</td>
<td>history_window</td>
<td>30000</td>
</tr>
<tr>
<td>clickAndWait</td>
<td>//a[contains(text(),'Collection: RIDER')]</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>//a[contains(text(),'Patient: 1.3.6.1.4.1.9328.50.2.0001')]</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>link=Show Series</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>link=Show Images</td>
<td></td>
</tr>
<tr>
<td>click</td>
<td>MAINbody:mainForm:imageTable:9:_id76</td>
<td></td>
</tr>
<tr>
<td>waitForPopUp</td>
<td>jpeg_window</td>
<td>30000</td>
</tr>
<tr>
<td>click</td>
<td>MAINbody:mainForm:imageTable:9:_id78</td>
<td></td>
</tr>
<tr>
<td>waitForPopUp</td>
<td>dicom_window</td>
<td>30000</td>
</tr>
<tr>
<td>click</td>
<td>MAINbody:mainForm:imageTable:9:_id80</td>
<td></td>
</tr>
<tr>
<td>waitForPopUp</td>
<td>history_window</td>
<td>30000</td>
</tr>
<tr>
<td>clickAndWait</td>
<td>SUBmenu:treeForm:lazyAjaxTree:0:1:t2</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>SUBmenu:treeForm:lazyAjaxTree:0:0:t2</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>//a[contains(text(),'Collection: IDRI')]</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>//a[contains(text(),'Patient: 1.3.6.1.4.1.9328.50.10.0005')]</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>//a[@onclick="document.forms['MAINbody:mainForm']['MAINbody:mainForm:_idcl'].value='MAINbody:mainForm:studyView:resultTable:5:_id102'; document.forms['MAINbody:mainForm'].submit(); return false;"]</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>//a[@onclick="document.forms['MAINbody:mainForm']['MAINbody:mainForm:_idcl'].value='MAINbody:mainForm:seriesView:resultTable:4:_id123'; document.forms['MAINbody:mainForm'].submit(); return false;"]</td>
<td></td>
</tr>
<tr>
<td>click</td>
<td>MAINbody:mainForm:imageTable:9:_id76</td>
<td></td>
</tr>
<tr>
<td>waitForPopUp</td>
<td>jpeg_window</td>
<td>30000</td>
</tr>
<tr>
<td>click</td>
<td>MAINbody:mainForm:imageTable:9:_id78</td>
<td></td>
</tr>
<tr>
<td>waitForPopUp</td>
<td>dicom_window</td>
<td>30000</td>
</tr>
<tr>
<td>click</td>
<td>MAINbody:mainForm:imageTable:9:_id80</td>
<td></td>
</tr>
<tr>
<td>waitForPopUp</td>
<td>history_window</td>
<td>30000</td>
</tr>
<tr>
<td>clickAndWait</td>
<td>SUBmenu:treeForm:lazyAjaxTree:0:0:t2</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>SUBmenu:treeForm:lazyAjaxTree:0:0:t2</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>SUBmenu:treeForm:lazyAjaxTree:0:0:0:t2</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>//a[contains(text(),'Collection: RIDER')]</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>//a[@onclick="document.forms['MAINbody:mainForm']['MAINbody:mainForm:_idcl'].value='MAINbody:mainForm:patientView:resultTable:6:_id81'; document.forms['MAINbody:mainForm'].submit(); return false;"]</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>//a[@onclick="document.forms['MAINbody:mainForm']['MAINbody:mainForm:_idcl'].value='MAINbody:mainForm:studyView:resultTable:6:_id102'; document.forms['MAINbody:mainForm'].submit(); return false;"]</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>link=Show Images</td>
<td></td>
</tr>
<tr>
<td>click</td>
<td>MAINbody:mainForm:imageTable:1:_id76</td>
<td></td>
</tr>
<tr>
<td>waitForPopUp</td>
<td>jpeg_window</td>
<td>30000</td>
</tr>
<tr>
<td>click</td>
<td>MAINbody:mainForm:imageTable:1:_id78</td>
<td></td>
</tr>
<tr>
<td>waitForPopUp</td>
<td>dicom_window</td>
<td>30000</td>
</tr>
<tr>
<td>click</td>
<td>MAINbody:mainForm:imageTable:1:_id80</td>
<td></td>
</tr>
<tr>
<td>waitForPopUp</td>
<td>history_window</td>
<td>30000</td>
</tr>
<tr>
<td>clickAndWait</td>
<td>SUBmenu:treeForm:lazyAjaxTree:0:0:1:t2</td>
<td></td>
</tr>
<tr>
<td>clickAndWait</td>
<td>link=LOGOUT</td>
<td></td>
</tr>
</tbody></table>
</body>
</html>
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.api.common.eventtime;
import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.java.ClosureCleaner;
import org.apache.flink.metrics.CharacterFilter;
import org.apache.flink.metrics.Counter;
import org.apache.flink.metrics.Gauge;
import org.apache.flink.metrics.Histogram;
import org.apache.flink.metrics.Meter;
import org.apache.flink.metrics.MetricGroup;
import org.junit.Test;
import java.io.Serializable;
import java.util.Map;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
/**
 * Tests for the {@link WatermarkStrategies} builder.
 *
 * <p>Each test builds a {@link WatermarkStrategy} with a differently-supplied
 * {@link TimestampAssigner} (default, lambda, supplier, anonymous inner class,
 * concrete class) and verifies that (a) the strategy survives recursive closure
 * cleaning and (b) the assigner it creates extracts the expected timestamp.
 */
public class WatermarkStrategiesTest {

    @Test
    public void testDefaultTimeStampAssigner() {
        WatermarkStrategy<Object> wmStrategy = WatermarkStrategies
            .forMonotonousTimestamps()
            .build();

        // ensure that the closure can be cleaned through the WatermarkStrategies
        ClosureCleaner.clean(wmStrategy, ExecutionConfig.ClosureCleanerLevel.RECURSIVE, true);

        // no assigner was configured, so the strategy must fall back to RecordTimestampAssigner
        assertThat(wmStrategy.createTimestampAssigner(assignerContext()), instanceOf(RecordTimestampAssigner.class));
    }

    @Test
    public void testLambdaTimestampAssigner() {
        WatermarkStrategy<Object> wmStrategy = WatermarkStrategies
            .forMonotonousTimestamps()
            .withTimestampAssigner((event, timestamp) -> 42L)
            .build();

        // ensure that the closure can be cleaned through the WatermarkStrategies
        ClosureCleaner.clean(wmStrategy, ExecutionConfig.ClosureCleanerLevel.RECURSIVE, true);

        TimestampAssigner<Object> timestampAssigner = wmStrategy.createTimestampAssigner(assignerContext());
        // the lambda ignores both arguments and always yields 42
        assertThat(timestampAssigner.extractTimestamp(null, 13L), is(42L));
    }

    @Test
    public void testLambdaTimestampAssignerSupplier() {
        WatermarkStrategy<Object> wmStrategy = WatermarkStrategies
            .forMonotonousTimestamps()
            .withTimestampAssigner(TimestampAssignerSupplier.of((event, timestamp) -> 42L))
            .build();

        // ensure that the closure can be cleaned through the WatermarkStrategies
        ClosureCleaner.clean(wmStrategy, ExecutionConfig.ClosureCleanerLevel.RECURSIVE, true);

        TimestampAssigner<Object> timestampAssigner = wmStrategy.createTimestampAssigner(assignerContext());
        assertThat(timestampAssigner.extractTimestamp(null, 13L), is(42L));
    }

    @Test
    public void testAnonymousInnerTimestampAssigner() {
        WatermarkStrategy<Object> wmStrategy = WatermarkStrategies
            .forMonotonousTimestamps()
            .withTimestampAssigner(new SerializableTimestampAssigner<Object>() {
                @Override
                public long extractTimestamp(Object element, long recordTimestamp) {
                    return 42;
                }
            })
            .build();

        // ensure that the closure can be cleaned through the WatermarkStrategies
        ClosureCleaner.clean(wmStrategy, ExecutionConfig.ClosureCleanerLevel.RECURSIVE, true);

        TimestampAssigner<Object> timestampAssigner = wmStrategy.createTimestampAssigner(assignerContext());
        assertThat(timestampAssigner.extractTimestamp(null, 13L), is(42L));
    }

    @Test
    public void testClassTimestampAssigner() {
        WatermarkStrategy<Object> wmStrategy = WatermarkStrategies
            .forMonotonousTimestamps()
            .withTimestampAssigner((ctx) -> new TestTimestampAssigner())
            .build();

        // ensure that the closure can be cleaned through the WatermarkStrategies
        ClosureCleaner.clean(wmStrategy, ExecutionConfig.ClosureCleanerLevel.RECURSIVE, true);

        TimestampAssigner<Object> timestampAssigner = wmStrategy.createTimestampAssigner(assignerContext());
        assertThat(timestampAssigner.extractTimestamp(null, 13L), is(42L));
    }

    @Test
    public void testClassTimestampAssignerUsingSupplier() {
        WatermarkStrategy<Object> wmStrategy = WatermarkStrategies
            .forMonotonousTimestamps()
            .withTimestampAssigner((context) -> new TestTimestampAssigner())
            .build();

        // ensure that the closure can be cleaned through the WatermarkStrategies
        ClosureCleaner.clean(wmStrategy, ExecutionConfig.ClosureCleanerLevel.RECURSIVE, true);

        TimestampAssigner<Object> timestampAssigner = wmStrategy.createTimestampAssigner(assignerContext());
        assertThat(timestampAssigner.extractTimestamp(null, 13L), is(42L));
    }

    /** Serializable assigner that ignores its inputs and always returns 42. */
    static class TestTimestampAssigner implements TimestampAssigner<Object>, Serializable {

        @Override
        public long extractTimestamp(Object element, long recordTimestamp) {
            return 42L;
        }
    }

    /** Creates a context whose only purpose is to hand out a no-op metric group. */
    static TimestampAssignerSupplier.Context assignerContext() {
        return DummyMetricGroup::new;
    }

    /**
     * A dummy {@link MetricGroup} to be used when a group is required as an argument but not actually used.
     *
     * <p>All factory and look-up methods return {@code null} (or an empty array for the
     * scope components), so this group must never be used for real metric reporting.
     */
    public static class DummyMetricGroup implements MetricGroup {

        @Override
        public Counter counter(int name) {
            return null;
        }

        @Override
        public Counter counter(String name) {
            return null;
        }

        @Override
        public <C extends Counter> C counter(int name, C counter) {
            return null;
        }

        @Override
        public <C extends Counter> C counter(String name, C counter) {
            return null;
        }

        @Override
        public <T, G extends Gauge<T>> G gauge(int name, G gauge) {
            return null;
        }

        @Override
        public <T, G extends Gauge<T>> G gauge(String name, G gauge) {
            return null;
        }

        @Override
        public <H extends Histogram> H histogram(String name, H histogram) {
            return null;
        }

        @Override
        public <H extends Histogram> H histogram(int name, H histogram) {
            return null;
        }

        @Override
        public <M extends Meter> M meter(String name, M meter) {
            return null;
        }

        @Override
        public <M extends Meter> M meter(int name, M meter) {
            return null;
        }

        @Override
        public MetricGroup addGroup(int name) {
            return null;
        }

        @Override
        public MetricGroup addGroup(String name) {
            return null;
        }

        @Override
        public MetricGroup addGroup(String key, String value) {
            return null;
        }

        @Override
        public String[] getScopeComponents() {
            return new String[0];
        }

        @Override
        public Map<String, String> getAllVariables() {
            return null;
        }

        @Override
        public String getMetricIdentifier(String metricName) {
            return null;
        }

        @Override
        public String getMetricIdentifier(
                String metricName, CharacterFilter filter) {
            return null;
        }
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cluster.metadata;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;
import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_HIDDEN_SETTING;
/**
 * An index abstraction is a reference to one or more concrete indices.
 * An index abstraction has a unique name and encapsulates all the {@link IndexMetadata} instances it is pointing to.
 * Also depending on type it may refer to a single or many concrete indices and may or may not have a write index.
 */
public interface IndexAbstraction {

    /**
     * @return the type of the index abstraction
     */
    Type getType();

    /**
     * @return the name of the index abstraction
     */
    String getName();

    /**
     * @return All {@link IndexMetadata} of all concrete indices this index abstraction is referring to.
     */
    List<IndexMetadata> getIndices();

    /**
     * A write index is a dedicated concrete index, that accepts all the new documents that belong to an index abstraction.
     * <p>
     * A write index may also be a regular concrete index of a index abstraction and may therefore also be returned
     * by {@link #getIndices()}. An index abstraction may also not have a dedicated write index.
     *
     * @return the write index of this index abstraction or
     * <code>null</code> if this index abstraction doesn't have a write index.
     */
    @Nullable
    IndexMetadata getWriteIndex();

    /**
     * @return the data stream to which this index belongs or <code>null</code> if this is not a concrete index or
     * if it is a concrete index that does not belong to a data stream.
     */
    @Nullable DataStream getParentDataStream();

    /**
     * @return whether this index abstraction is hidden or not
     */
    boolean isHidden();

    /**
     * @return whether this index abstraction should be treated as a system index or not
     */
    boolean isSystem();

    /**
     * An index abstraction type.
     */
    enum Type {

        /**
         * An index abstraction that refers to a single concrete index.
         * This concrete index is also the write index.
         */
        CONCRETE_INDEX("concrete index"),

        /**
         * An index abstraction that refers to an alias.
         * An alias typically refers to many concrete indices and
         * may have a write index.
         */
        ALIAS("alias"),

        /**
         * An index abstraction that refers to a data stream.
         * A data stream typically has multiple backing indices, the latest of which
         * is the target for index requests.
         */
        DATA_STREAM("data_stream");

        private final String displayName;

        Type(String displayName) {
            this.displayName = displayName;
        }

        public String getDisplayName() {
            return displayName;
        }
    }

    /**
     * Represents a concrete index and encapsulates its {@link IndexMetadata}
     */
    class Index implements IndexAbstraction {

        private final IndexMetadata concreteIndex;
        // the data stream this index backs, or null for a standalone index
        private final DataStream dataStream;

        public Index(IndexMetadata indexMetadata, DataStream dataStream) {
            this.concreteIndex = indexMetadata;
            this.dataStream = dataStream;
        }

        public Index(IndexMetadata indexMetadata) {
            this(indexMetadata, null);
        }

        @Override
        public String getName() {
            return concreteIndex.getIndex().getName();
        }

        @Override
        public Type getType() {
            return Type.CONCRETE_INDEX;
        }

        @Override
        public List<IndexMetadata> getIndices() {
            return List.of(concreteIndex);
        }

        @Override
        public IndexMetadata getWriteIndex() {
            // a concrete index is always its own write index
            return concreteIndex;
        }

        @Override
        public DataStream getParentDataStream() {
            return dataStream;
        }

        @Override
        public boolean isHidden() {
            return INDEX_HIDDEN_SETTING.get(concreteIndex.getSettings());
        }

        @Override
        public boolean isSystem() {
            return concreteIndex.isSystem();
        }
    }

    /**
     * Represents an alias and groups all {@link IndexMetadata} instances sharing the same alias name together.
     */
    class Alias implements IndexAbstraction {

        private final String aliasName;
        private final List<IndexMetadata> referenceIndexMetadatas;
        private final IndexMetadata writeIndex;
        private final boolean isHidden;

        /**
         * @param aliasMetadata metadata of the alias itself (name, hidden flag)
         * @param indices       all concrete indices carrying this alias
         * @throws IllegalStateException if more than one index declares itself the write index,
         *                               or if hidden/system properties are inconsistent across indices
         */
        public Alias(AliasMetadata aliasMetadata, List<IndexMetadata> indices) {
            this.aliasName = aliasMetadata.getAlias();
            this.referenceIndexMetadatas = indices;

            List<IndexMetadata> writeIndices = indices.stream()
                .filter(idxMeta -> Boolean.TRUE.equals(idxMeta.getAliases().get(aliasName).writeIndex()))
                .collect(Collectors.toList());

            // an alias pointing to a single index with an unset write-index flag implicitly
            // treats that index as the write index
            if (writeIndices.isEmpty() && referenceIndexMetadatas.size() == 1
                && referenceIndexMetadatas.get(0).getAliases().get(aliasName).writeIndex() == null) {
                writeIndices.add(referenceIndexMetadatas.get(0));
            }

            if (writeIndices.isEmpty()) {
                this.writeIndex = null;
            } else if (writeIndices.size() == 1) {
                this.writeIndex = writeIndices.get(0);
            } else {
                List<String> writeIndicesStrings = writeIndices.stream()
                    .map(i -> i.getIndex().getName()).collect(Collectors.toList());
                throw new IllegalStateException("alias [" + aliasName + "] has more than one write index [" +
                    Strings.collectionToCommaDelimitedString(writeIndicesStrings) + "]");
            }

            this.isHidden = aliasMetadata.isHidden() == null ? false : aliasMetadata.isHidden();
            validateAliasProperties();
        }

        @Override
        public Type getType() {
            return Type.ALIAS;
        }

        @Override
        public String getName() {
            return aliasName;
        }

        @Override
        public List<IndexMetadata> getIndices() {
            return referenceIndexMetadatas;
        }

        @Nullable
        @Override
        public IndexMetadata getWriteIndex() {
            return writeIndex;
        }

        @Override
        public DataStream getParentDataStream() {
            // aliases may not be part of a data stream
            return null;
        }

        @Override
        public boolean isHidden() {
            return isHidden;
        }

        @Override
        public boolean isSystem() {
            return referenceIndexMetadatas.stream().allMatch(IndexMetadata::isSystem);
        }

        /**
         * Validates that all indices referenced by this alias agree on the alias's
         * hidden status and (for new-version system indices) on the system status.
         */
        private void validateAliasProperties() {
            // Validate hidden status
            final Map<Boolean, List<IndexMetadata>> groupedByHiddenStatus = referenceIndexMetadatas.stream()
                .collect(Collectors.groupingBy(idxMeta -> Boolean.TRUE.equals(idxMeta.getAliases().get(aliasName).isHidden())));
            if (isNonEmpty(groupedByHiddenStatus.get(true)) && isNonEmpty(groupedByHiddenStatus.get(false))) {
                List<String> hiddenOn = groupedByHiddenStatus.get(true).stream()
                    .map(idx -> idx.getIndex().getName()).collect(Collectors.toList());
                List<String> nonHiddenOn = groupedByHiddenStatus.get(false).stream()
                    .map(idx -> idx.getIndex().getName()).collect(Collectors.toList());
                throw new IllegalStateException("alias [" + aliasName + "] has is_hidden set to true on indices [" +
                    Strings.collectionToCommaDelimitedString(hiddenOn) + "] but does not have is_hidden set to true on indices [" +
                    Strings.collectionToCommaDelimitedString(nonHiddenOn) + "]; alias must have the same is_hidden setting " +
                    "on all indices");
            }

            // Validate system status
            final Map<Boolean, List<IndexMetadata>> groupedBySystemStatus = referenceIndexMetadatas.stream()
                .collect(Collectors.groupingBy(IndexMetadata::isSystem));
            // If the alias has either all system or all non-system, then no more validation is required
            if (isNonEmpty(groupedBySystemStatus.get(false)) && isNonEmpty(groupedBySystemStatus.get(true))) {
                final List<String> newVersionSystemIndices = groupedBySystemStatus.get(true).stream()
                    .filter(i -> i.getCreationVersion().onOrAfter(IndexNameExpressionResolver.SYSTEM_INDEX_ENFORCEMENT_VERSION))
                    .map(i -> i.getIndex().getName())
                    .sorted() // reliable error message for testing
                    .collect(Collectors.toList());
                if (newVersionSystemIndices.isEmpty() == false) {
                    final List<String> nonSystemIndices = groupedBySystemStatus.get(false).stream()
                        .map(i -> i.getIndex().getName())
                        .sorted() // reliable error message for testing
                        .collect(Collectors.toList());
                    throw new IllegalStateException("alias [" + aliasName + "] refers to both system indices " + newVersionSystemIndices +
                        " and non-system indices: " + nonSystemIndices + ", but aliases must refer to either system or" +
                        " non-system indices, not both");
                }
            }
        }

        // true when the list is neither null nor empty
        private boolean isNonEmpty(List<IndexMetadata> idxMetas) {
            return (Objects.isNull(idxMetas) || idxMetas.isEmpty()) == false;
        }
    }

    /**
     * Represents a data stream and groups the {@link IndexMetadata} instances of its backing
     * indices together; the last backing index is the write index.
     */
    class DataStream implements IndexAbstraction {

        private final org.elasticsearch.cluster.metadata.DataStream dataStream;
        private final List<IndexMetadata> dataStreamIndices;
        private final IndexMetadata writeIndex;

        public DataStream(org.elasticsearch.cluster.metadata.DataStream dataStream, List<IndexMetadata> dataStreamIndices) {
            this.dataStream = dataStream;
            this.dataStreamIndices = List.copyOf(dataStreamIndices);
            // the latest backing index receives all new documents
            this.writeIndex = dataStreamIndices.get(dataStreamIndices.size() - 1);
        }

        @Override
        public String getName() {
            return dataStream.getName();
        }

        @Override
        public Type getType() {
            return Type.DATA_STREAM;
        }

        @Override
        public List<IndexMetadata> getIndices() {
            return dataStreamIndices;
        }

        @Override
        public IndexMetadata getWriteIndex() {
            return writeIndex;
        }

        @Override
        public DataStream getParentDataStream() {
            // a data stream cannot have a parent data stream
            return null;
        }

        @Override
        public boolean isHidden() {
            return dataStream.isHidden();
        }

        @Override
        public boolean isSystem() {
            // No such thing as system data streams (yet)
            return false;
        }

        public org.elasticsearch.cluster.metadata.DataStream getDataStream() {
            return dataStream;
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.portfolio.interestratechart.domain;
import static org.apache.fineract.portfolio.interestratechart.InterestRateChartSlabApiConstants.amountRangeFromParamName;
import static org.apache.fineract.portfolio.interestratechart.InterestRateChartSlabApiConstants.amountRangeToParamName;
import static org.apache.fineract.portfolio.interestratechart.InterestRateChartSlabApiConstants.annualInterestRateParamName;
import static org.apache.fineract.portfolio.interestratechart.InterestRateChartSlabApiConstants.descriptionParamName;
import static org.apache.fineract.portfolio.interestratechart.InterestRateChartSlabApiConstants.fromPeriodParamName;
import static org.apache.fineract.portfolio.interestratechart.InterestRateChartSlabApiConstants.periodTypeParamName;
import static org.apache.fineract.portfolio.interestratechart.InterestRateChartSlabApiConstants.toPeriodParamName;
import java.math.BigDecimal;
import java.util.Locale;
import java.util.Map;
import javax.persistence.Column;
import javax.persistence.Embeddable;
import org.apache.fineract.infrastructure.core.api.JsonCommand;
import org.apache.fineract.infrastructure.core.data.DataValidatorBuilder;
import org.apache.fineract.portfolio.savings.SavingsPeriodFrequencyType;
import org.joda.time.Days;
import org.joda.time.LocalDate;
import org.joda.time.Months;
import org.joda.time.Weeks;
import org.joda.time.Years;
@Embeddable
public class InterestRateChartSlabFields {
    // Optional free-form description of this slab.
    @Column(name = "description", nullable = true)
    private String description;

    // Period frequency stored as SavingsPeriodFrequencyType#getValue() (see private constructor).
    @Column(name = "period_type_enum", nullable = false)
    private Integer periodType;

    // Lower bound of the deposit-period range this slab covers.
    @Column(name = "from_period")
    private Integer fromPeriod;

    // Upper bound of the deposit-period range; null means open-ended.
    @Column(name = "to_period")
    private Integer toPeriod;

    // Lower bound of the deposit-amount range this slab covers.
    @Column(name = "amount_range_from", scale = 6, precision = 19)
    private BigDecimal amountRangeFrom;
    /** @return the lower bound of the deposit-amount range, or {@code null} if unset. */
    public BigDecimal getAmountRangeFrom() {
        return this.amountRangeFrom;
    }

    /** @return the upper bound of the deposit-amount range, or {@code null} if open-ended. */
    public BigDecimal getAmountRangeTo() {
        return this.amountRangeTo;
    }
    // Upper bound of the deposit-amount range; null means open-ended.
    @Column(name = "amount_range_to", scale = 6, precision = 19)
    private BigDecimal amountRangeTo;

    // Annual interest rate applied to deposits that fall within this slab.
    @Column(name = "annual_interest_rate", scale = 6, precision = 19, nullable = false)
    private BigDecimal annualInterestRate;

    // ISO currency code the amount range is expressed in.
    @Column(name = "currency_code", nullable = false)
    private String currencyCode;
    protected InterestRateChartSlabFields() {
        // no-arg constructor required for @Embeddable JPA types
    }
    /**
     * Static factory that builds a new slab value object from its constituent parts.
     *
     * @param periodFrequencyType period unit; an invalid or null type is stored as a null period type
     * @return a new, fully-populated {@link InterestRateChartSlabFields}
     */
    public static InterestRateChartSlabFields createNew(final String description, final SavingsPeriodFrequencyType periodFrequencyType,
            final Integer fromPeriod, final Integer toPeriod, final BigDecimal amountRangeFrom, final BigDecimal amountRangeTo,
            final BigDecimal annualInterestRate, final String currencyCode) {
        return new InterestRateChartSlabFields(description, periodFrequencyType, fromPeriod, toPeriod, amountRangeFrom, amountRangeTo,
                annualInterestRate, currencyCode);
    }
    /**
     * Populates all fields; invoked only through {@link #createNew}.
     * An invalid or null {@code periodFrequencyType} is stored as a null period type.
     */
    private InterestRateChartSlabFields(final String description, final SavingsPeriodFrequencyType periodFrequencyType,
            final Integer fromPeriod, final Integer toPeriod, final BigDecimal amountRangeFrom, final BigDecimal amountRangeTo,
            final BigDecimal annualInterestRate, final String currencyCode) {
        this.description = description;
        // store the enum's integer value; invalid/null frequency types are persisted as null
        this.periodType = (periodFrequencyType == null || periodFrequencyType.isInvalid()) ? null : periodFrequencyType.getValue();
        this.fromPeriod = fromPeriod;
        this.toPeriod = toPeriod;
        this.amountRangeFrom = amountRangeFrom;
        this.amountRangeTo = amountRangeTo;
        this.annualInterestRate = annualInterestRate;
        this.currencyCode = currencyCode;
    }
    /**
     * Applies any changed parameters from the incoming command to this slab,
     * recording each applied change in {@code actualChanges}, then re-validates
     * the slab's platform rules.
     *
     * @param command       source of the new parameter values
     * @param actualChanges out-parameter collecting param-name -> new-value entries for every field that changed
     * @param baseDataValidator collects validation errors raised by the platform rules
     * @param locale        locale used to parse numeric parameter values
     */
    public void update(final JsonCommand command, final Map<String, Object> actualChanges, final DataValidatorBuilder baseDataValidator,
            final Locale locale) {

        if (command.isChangeInStringParameterNamed(descriptionParamName, this.description)) {
            final String newValue = command.stringValueOfParameterNamed(descriptionParamName);
            actualChanges.put(descriptionParamName, newValue);
            this.description = newValue;
        }

        if (command.isChangeInIntegerParameterNamed(periodTypeParamName, this.periodType, locale)) {
            final Integer newValue = command.integerValueOfParameterNamed(periodTypeParamName, locale);
            actualChanges.put(periodTypeParamName, newValue);
            this.periodType = newValue;
        }

        if (command.isChangeInIntegerParameterNamed(fromPeriodParamName, this.fromPeriod, locale)) {
            final Integer newValue = command.integerValueOfParameterNamed(fromPeriodParamName, locale);
            actualChanges.put(fromPeriodParamName, newValue);
            this.fromPeriod = newValue;
        }

        if (command.isChangeInIntegerParameterNamed(toPeriodParamName, this.toPeriod, locale)) {
            final Integer newValue = command.integerValueOfParameterNamed(toPeriodParamName, locale);
            actualChanges.put(toPeriodParamName, newValue);
            this.toPeriod = newValue;
        }

        if (command.isChangeInBigDecimalParameterNamed(amountRangeFromParamName, this.amountRangeFrom, locale)) {
            final BigDecimal newValue = command.bigDecimalValueOfParameterNamed(amountRangeFromParamName, locale);
            actualChanges.put(amountRangeFromParamName, newValue);
            this.amountRangeFrom = newValue;
        }

        if (command.isChangeInBigDecimalParameterNamed(amountRangeToParamName, this.amountRangeTo, locale)) {
            final BigDecimal newValue = command.bigDecimalValueOfParameterNamed(amountRangeToParamName, locale);
            actualChanges.put(amountRangeToParamName, newValue);
            this.amountRangeTo = newValue;
        }

        if (command.isChangeInBigDecimalParameterNamed(annualInterestRateParamName, this.annualInterestRate, locale)) {
            final BigDecimal newValue = command.bigDecimalValueOfParameterNamed(annualInterestRateParamName, locale);
            actualChanges.put(annualInterestRateParamName, newValue);
            this.annualInterestRate = newValue;
        }

        // re-check range consistency after all updates have been applied
        validateChartSlabPlatformRules(command, baseDataValidator, locale);
    }
    /**
     * Validates that the slab's period and amount ranges are not inverted,
     * adding an error to the validator for each violated rule.
     *
     * @param chartSlabsCommand used only to re-read the offending values for the error report
     */
    public void validateChartSlabPlatformRules(final JsonCommand chartSlabsCommand, final DataValidatorBuilder baseDataValidator,
            Locale locale) {
        if (isFromPeriodGreaterThanToPeriod()) {
            final Integer fromPeriod = chartSlabsCommand.integerValueOfParameterNamed(fromPeriodParamName, locale);
            baseDataValidator.parameter(fromPeriodParamName).value(fromPeriod).failWithCode("from.period.is.greater.than.to.period");
        }

        if (isAmountRangeFromGreaterThanTo()) {
            final BigDecimal amountRangeFrom = chartSlabsCommand.bigDecimalValueOfParameterNamed(amountRangeFromParamName, locale);
            baseDataValidator.parameter(amountRangeFromParamName).value(amountRangeFrom)
                    .failWithCode("amount.range.from.is.greater.than.amount.range.to");
        }
    }
public boolean isFromPeriodGreaterThanToPeriod() {
boolean isGreater = false;
if (this.toPeriod != null && this.fromPeriod.compareTo(this.toPeriod) > 1) {
isGreater = true;
}
return isGreater;
}
public boolean isAmountRangeFromGreaterThanTo() {
boolean isGreater = false;
if (this.amountRangeFrom != null && this.amountRangeTo != null && this.amountRangeFrom.compareTo(this.amountRangeTo) > 1) {
isGreater = true;
}
return isGreater;
}
    /** @return the stored period frequency value (see SavingsPeriodFrequencyType), possibly null. */
    public Integer periodType() {
        return this.periodType;
    }

    /** @return the lower bound of the period range, possibly null. */
    public Integer fromPeriod() {
        return this.fromPeriod;
    }

    /** @return the upper bound of the period range, or null if open-ended. */
    public Integer toPeriod() {
        return this.toPeriod;
    }
    /**
     * Determines whether there is a gap between this slab and the next slab {@code that}.
     *
     * <p>The check depends on the chart's primary grouping: when grouped by amount,
     * slabs sharing the same amount range must have contiguous period ranges
     * (next fromPeriod == this toPeriod + 1, see {@code isNotProperPeriodStart(Integer)}),
     * and a new amount group must itself start properly; when grouped by period the
     * roles of amount and period are swapped.
     *
     * @param that the slab immediately following this one in chart order
     * @param isPrimaryGroupingByAmount true when slabs are grouped by amount first, then period
     * @return true if a gap (non-contiguous range) is detected between the two slabs
     */
    public boolean isRateChartHasGap(final InterestRateChartSlabFields that, final boolean isPrimaryGroupingByAmount) {
        boolean isPeriodSame = isPeriodsSame(that);
        boolean isAmountSame = isAmountSame(that);
        // does either slab define a period / amount dimension at all?
        boolean hasPeriods = this.fromPeriod != null || that.fromPeriod != null;
        boolean hasAmounts = this.amountRangeFrom != null || that.amountRangeFrom != null;
        if (isPrimaryGroupingByAmount) {
            if (isAmountSame) {
                if (hasPeriods) {
                    // open-ended period followed by another slab in the same amount group is a gap
                    if (this.toPeriod == null) { return true; }
                    return isNotProperPeriodStart(that.fromPeriod);
                }
            } else {
                // new amount group: amount must continue from this slab's upper bound
                // and the group's period range must start properly
                return isNotProperAmountStart(that.amountRangeFrom) || isNotProperPeriodStart(that);
            }
        } else {
            if (isPeriodSame) {
                if (hasAmounts) {
                    // open-ended amount followed by another slab in the same period group is a gap
                    if (this.amountRangeTo == null) { return true; }
                    return isNotProperAmountStart(that.amountRangeFrom);
                }
            } else {
                // new period group: period must continue from this slab's upper bound
                // and the group's amount range must start properly
                return isNotProperPeriodStart(that.fromPeriod) || isNotProperAmountStart(that);
            }
        }
        return false;
    }
public boolean isValidChart(boolean isPrimaryGroupingByAmount) {
return (!isPrimaryGroupingByAmount && this.fromPeriod != null) || (isPrimaryGroupingByAmount && this.amountRangeFrom != null);
}
    /**
     * @return true if this slab cannot be the first slab of a chart, i.e. its
     *         period range does not start at 0/1 or its amount range does not start at 0/1
     */
    public boolean isNotProperChartStart() {
        return isNotProperPeriodStart(this) || isNotProperAmountStart(this);
    }
public static boolean isNotProperAmountStart(final InterestRateChartSlabFields interestRateChartSlabFields) {
return interestRateChartSlabFields.amountRangeFrom != null
&& (interestRateChartSlabFields.amountRangeFrom.compareTo(BigDecimal.ONE) != 0 && interestRateChartSlabFields.amountRangeFrom
.compareTo(BigDecimal.ZERO) != 0);
}
    /**
     * Checks whether {@code amount} fails to continue this slab's amount range,
     * i.e. it is not exactly {@code this.amountRangeTo + 1}. An open-ended
     * (null) upper bound on this slab also counts as an improper continuation.
     */
    private boolean isNotProperAmountStart(final BigDecimal amount) {
        return this.amountRangeTo == null || (amount != null && amount.compareTo(this.amountRangeTo.add(BigDecimal.ONE)) != 0);
    }
    /**
     * Checks whether {@code period} fails to continue this slab's period range,
     * i.e. it is not exactly {@code this.toPeriod + 1}. An open-ended (null)
     * upper bound on this slab also counts as an improper continuation.
     */
    private boolean isNotProperPeriodStart(final Integer period) {
        return this.toPeriod == null || (period != null && period.compareTo(this.toPeriod + 1) != 0);
    }
/**
 * True when the slab defines a period lower bound that is neither 0 nor 1,
 * i.e. it is not a proper start for a new period group. A null lower bound
 * is acceptable (returns false).
 */
public static boolean isNotProperPeriodStart(InterestRateChartSlabFields interestRateChartSlabFields) {
    final Integer from = interestRateChartSlabFields.fromPeriod;
    if (from == null) {
        return false;
    }
    return !(from.equals(0) || from.equals(1));
}
/**
 * True when this (last) slab is not open-ended in both dimensions, i.e. a proper
 * chart end requires both toPeriod and amountRangeTo to be null.
 * NOTE(review): method name has a typo ("Priod"); kept as-is because it is part
 * of the public API — renaming would break callers.
 */
public boolean isNotProperPriodEnd() {
    return !(this.toPeriod == null && this.amountRangeTo == null);
}
/**
 * Detects whether this slab overlaps {@code that}. Within the same primary group
 * (same amount range when grouping by amount, same period range otherwise) the
 * secondary dimension must not overlap; across different primary groups the
 * primary dimension must not overlap.
 */
public boolean isRateChartOverlapping(final InterestRateChartSlabFields that, final boolean isPrimaryGroupingByAmount) {
    final boolean periodsOverlap = isPeriodOverlapping(that);
    final boolean amountsOverlap = isAmountOverlapping(that);
    if (isPrimaryGroupingByAmount) {
        // same amount group -> periods may not overlap; different groups -> amounts may not overlap
        return isAmountSame(that) ? periodsOverlap : amountsOverlap;
    }
    // same period group -> amounts may not overlap; different groups -> periods may not overlap
    return isPeriodsSame(that) ? amountsOverlap : periodsOverlap;
}
/**
 * True when the period ranges of the two slabs intersect. Equal bounds count as
 * overlapping, and an open-ended range on this slab (toPeriod == null) overlaps
 * everything.
 * NOTE(review): the final comparisons auto-unbox fromPeriod/toPeriod and would
 * NPE if a slab has a null fromPeriod here — presumably callers only reach this
 * for period-bearing slabs; confirm against validation flow.
 */
private boolean isPeriodOverlapping(final InterestRateChartSlabFields that) {
    if (isIntegerSame(that.toPeriod, this.toPeriod)) {
        return true;
    } else if (isIntegerSame(that.fromPeriod, this.fromPeriod)) {
        return true;
    } else if (this.toPeriod == null) {
        return true;
    } else if (that.toPeriod == null) { return that.fromPeriod <= this.toPeriod; }
    // closed ranges: standard interval-intersection test
    return this.fromPeriod <= that.toPeriod && that.fromPeriod <= this.toPeriod;
}
/**
 * True when the amount ranges of the two slabs intersect. Equal bounds count as
 * overlapping, and an open-ended range on this slab (amountRangeTo == null)
 * overlaps everything. Mirrors {@link #isPeriodOverlapping}.
 * NOTE(review): the final compareTo calls would NPE on a null amountRangeFrom —
 * presumably callers only reach this for amount-bearing slabs; confirm.
 */
private boolean isAmountOverlapping(final InterestRateChartSlabFields that) {
    if (isBigDecimalSame(that.amountRangeFrom, this.amountRangeFrom)) {
        return true;
    } else if (isBigDecimalSame(that.amountRangeTo, this.amountRangeTo)) {
        return true;
    } else if (this.amountRangeTo == null) {
        return true;
    } else if (that.amountRangeTo == null) { return that.amountRangeFrom.compareTo(this.amountRangeTo) < 1; }
    // closed ranges: standard interval-intersection test (compareTo < 1 means <=)
    return this.amountRangeFrom.compareTo(that.amountRangeTo) < 1 && that.amountRangeFrom.compareTo(this.amountRangeTo) < 1;
}
/** True when both slabs have null-safely equal amount bounds (numeric equality, scale-insensitive). */
public boolean isAmountSame(final InterestRateChartSlabFields that) {
    return isBigDecimalSame(this.amountRangeFrom, that.amountRangeFrom) && isBigDecimalSame(this.amountRangeTo, that.amountRangeTo);
}
/** True when both slabs have null-safely equal period bounds. */
public boolean isPeriodsSame(final InterestRateChartSlabFields that) {
    return isIntegerSame(this.fromPeriod, that.fromPeriod) && isIntegerSame(this.toPeriod, that.toPeriod);
}
/** Null-safe equality for Integers: two nulls are equal, a null never equals a non-null. */
public boolean isIntegerSame(final Integer obj1, final Integer obj2) {
    if (obj1 == null && obj2 == null) {
        return true;
    }
    if (obj1 == null || obj2 == null) {
        return false;
    }
    return obj1.equals(obj2);
}
/**
 * Null-safe numeric equality for BigDecimals: two nulls are equal, a null never
 * equals a non-null. Uses compareTo so values of different scale (e.g. 1.0 and
 * 1.00) compare equal.
 */
public boolean isBigDecimalSame(final BigDecimal obj1, final BigDecimal obj2) {
    if (obj1 == null && obj2 == null) {
        return true;
    }
    if (obj1 == null || obj2 == null) {
        return false;
    }
    return obj1.compareTo(obj2) == 0;
}
/** True when the deposit period implied by the two dates falls within this slab's period range. */
public boolean isBetweenPeriod(final LocalDate periodStartDate, final LocalDate periodEndDate) {
    return isPeriodBetween(depositPeriod(periodStartDate, periodEndDate));
}
/** True when this slab defines an amount lower bound (and therefore an amount range). */
public boolean isAmountRangeProvided() {
    // direct null check; the original `cond ? false : true` ternary was redundant
    return this.amountRangeFrom != null;
}
/** Returns the annual interest rate configured for this slab. */
public BigDecimal annualInterestRate() {
    return this.annualInterestRate;
}
/**
 * Computes the number of whole periods between the two dates, measured in the
 * unit given by this slab's {@link #periodType()} (days/weeks/months/years via
 * Joda-Time). Returns 0 for an INVALID frequency type.
 */
public Integer depositPeriod(final LocalDate periodStartDate, final LocalDate periodEndDate) {
    final SavingsPeriodFrequencyType frequency = SavingsPeriodFrequencyType.fromInt(periodType());
    switch (frequency) {
        case DAYS:
            return Days.daysBetween(periodStartDate, periodEndDate).getDays();
        case WEEKS:
            return Weeks.weeksBetween(periodStartDate, periodEndDate).getWeeks();
        case MONTHS:
            return Months.monthsBetween(periodStartDate, periodEndDate).getMonths();
        case YEARS:
            return Years.yearsBetween(periodStartDate, periodEndDate).getYears();
        case INVALID:
        default:
            // default value, matching the original's 0-initialised accumulator
            return 0;
    }
}
/**
 * True when {@code depositAmount} falls within this slab's amount range.
 * A slab without a lower bound matches every amount; a slab without an upper
 * bound matches everything at or above the lower bound.
 */
public boolean isAmountBetween(final BigDecimal depositAmount) {
    if (amountRangeFrom == null) {
        return true;
    }
    if (depositAmount.compareTo(amountRangeFrom) < 0) {
        return false;
    }
    return amountRangeTo == null || depositAmount.compareTo(amountRangeTo) <= 0;
}
/**
 * True when {@code periods} falls within this slab's period range.
 * A slab without a lower bound matches every period count; a slab without an
 * upper bound matches everything at or above the lower bound.
 */
public boolean isPeriodBetween(final Integer periods) {
    if (fromPeriod == null) {
        return true;
    }
    if (periods.compareTo(fromPeriod) < 0) {
        return false;
    }
    return toPeriod == null || periods.compareTo(toPeriod) <= 0;
}
}
| |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ui;
import com.intellij.ide.BrowserUtil;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.markup.EffectType;
import com.intellij.openapi.editor.markup.TextAttributes;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.PlatformColors;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.Nullable;
import javax.accessibility.AccessibleAction;
import javax.accessibility.AccessibleContext;
import javax.accessibility.AccessibleRole;
import javax.swing.*;
import javax.swing.event.HyperlinkEvent;
import javax.swing.event.HyperlinkListener;
import javax.swing.text.MutableAttributeSet;
import javax.swing.text.html.HTML;
import javax.swing.text.html.HTMLEditorKit;
import javax.swing.text.html.parser.ParserDelegator;
import java.awt.*;
import java.awt.event.KeyEvent;
import java.awt.event.MouseEvent;
import java.io.IOException;
import java.io.StringReader;
import java.util.List;
/**
* @author Eugene Belyaev
*/
public class HyperlinkLabel extends HighlightableComponent {
  // Attributes used for <b> runs in setHtmlText(); foreground falls back to the
  // plain label foreground when the themed text foreground is unavailable.
  private static final TextAttributes BOLD_ATTRIBUTES = new TextAttributes(new JBColor(() -> {
    final Color foreground1 = UIUtil.getLabelTextForeground();
    return foreground1 == null ? UIUtil.getLabelForeground() : foreground1;
  }), null, null, null, Font.BOLD);
  private static final Logger LOG = Logger.getInstance(HyperlinkLabel.class.getName());
  // Optional override for the label font size; null means the default label font.
  private UIUtil.FontSize myFontSize;
  private HighlightedText myHighlightedText;
  // Lock-free list: listeners may be added/removed while an event is being fired.
  private final List<HyperlinkListener> myListeners = ContainerUtil.createLockFreeCopyOnWriteList();
  // When true, clicking the icon (if any) also counts as clicking the link.
  private boolean myUseIconAsLink;
  // Attributes that mark the clickable region; identity-compared in isOnLink().
  private final TextAttributes myAnchorAttributes;
  private HyperlinkListener myHyperlinkListener;
  // Mouse state consumed by Win10TextAttributes to derive hover/pressed colors.
  private boolean myMouseHover;
  private boolean myMousePressed;
  public HyperlinkLabel() {
    this("");
  }
  public HyperlinkLabel(String text) {
    this(text, UIUtil.getLabelBackground());
  }
  public HyperlinkLabel(String text, Color background) {
    this(text, PlatformColors.BLUE, background, PlatformColors.BLUE);
  }
  public HyperlinkLabel(String text, final Color textForegroundColor, final Color textBackgroundColor, final Color textEffectColor) {
    // Win10 L&F derives link colors dynamically from UIManager; other L&Fs use fixed colors.
    myAnchorAttributes = UIUtil.isUnderWin10LookAndFeel() ?
                         new Win10TextAttributes(textBackgroundColor) :
                         new TextAttributes(textForegroundColor, textBackgroundColor, textEffectColor, EffectType.LINE_UNDERSCORE, Font.PLAIN);
    enforceBackgroundOutsideText(textBackgroundColor);
    setHyperlinkText(text);
    enableEvents(AWTEvent.MOUSE_EVENT_MASK | AWTEvent.MOUSE_MOTION_EVENT_MASK);
    setOpaque(false);
  }
  @Override
  public void addNotify() {
    super.addNotify();
    adjustSize();
  }
  /** Sets the label font size; takes effect on the next text change or updateUI(). */
  public void setFontSize(@Nullable UIUtil.FontSize fontSize) {
    myFontSize = fontSize;
  }
  public void setHyperlinkText(String text) {
    setHyperlinkText("", text, "");
  }
  /** Renders beforeLinkText + linkText (clickable) + afterLinkText; the icon becomes clickable only when there is no prefix text. */
  public void setHyperlinkText(String beforeLinkText, String linkText, String afterLinkText) {
    myUseIconAsLink = beforeLinkText.isEmpty();
    prepareText(beforeLinkText, linkText, afterLinkText);
  }
  public void setUseIconAsLink(boolean useIconAsLink) {
    myUseIconAsLink = useIconAsLink;
  }
  // Pins the minimum size to the preferred size; Croppable overrides this to allow cropping.
  protected void adjustSize() {
    final Dimension preferredSize = getPreferredSize();
    setMinimumSize(preferredSize);
  }
  @Override
  protected void processComponentKeyEvent(KeyEvent event) {
    // Keyboard activation: plain Space triggers the link, like a button.
    if (event.getModifiers() == 0 && event.getKeyCode() == KeyEvent.VK_SPACE) {
      event.consume();
      fireHyperlinkEvent();
    }
  }
  @Override
  protected void processMouseEvent(MouseEvent e) {
    if (e.getID() == MouseEvent.MOUSE_ENTERED && isOnLink(e.getX())) {
      myMouseHover = true;
      repaint();
    } else if (e.getID() == MouseEvent.MOUSE_EXITED) {
      // Reset all transient visual state when the pointer leaves the component.
      setCursor(Cursor.getDefaultCursor());
      myMouseHover = false;
      myMousePressed = false;
      repaint();
    } else if (UIUtil.isActionClick(e, MouseEvent.MOUSE_PRESSED) && isOnLink(e.getX())) {
      // Fires on press (not release), matching platform hyperlink behavior in the IDE.
      fireHyperlinkEvent();
      myMousePressed = true;
      repaint();
    } else if (e.getID() == MouseEvent.MOUSE_RELEASED) {
      myMousePressed = false;
      repaint();
    }
    super.processMouseEvent(e);
  }
  @Override
  protected void processMouseMotionEvent(MouseEvent e) {
    if (e.getID() == MouseEvent.MOUSE_MOVED) {
      // Track hover state and swap the cursor; repaint only when the state actually changed.
      boolean onLink = isOnLink(e.getX());
      boolean needRepaint = myMouseHover != onLink;
      myMouseHover = onLink;
      setCursor(myMouseHover ? Cursor.getPredefinedCursor(Cursor.HAND_CURSOR) : Cursor.getDefaultCursor());
      if (needRepaint) {
        repaint();
      }
    }
    super.processMouseMotionEvent(e);
  }
  // A point is "on the link" when it is over the icon (if the icon acts as a link)
  // or over a highlighted region carrying the anchor attributes (identity compare).
  private boolean isOnLink(int x) {
    if (myUseIconAsLink && myIcon != null && x < myIcon.getIconWidth()) {
      return true;
    }
    final HighlightedRegion region = findRegionByX(x);
    return region != null && region.textAttributes == myAnchorAttributes;
  }
  private void prepareText(String beforeLinkText, String linkText, String afterLinkText) {
    applyFont();
    myHighlightedText = new HighlightedText();
    myHighlightedText.appendText(beforeLinkText, null);
    // Only the middle fragment gets the anchor attributes, i.e. becomes clickable.
    myHighlightedText.appendText(linkText, myAnchorAttributes);
    myHighlightedText.appendText(afterLinkText, null);
    myHighlightedText.applyToComponent(this);
    updateOnTextChange();
  }
  @Override
  public void setText(String text) {
    // Plain (non-link) text: disable icon-as-link so nothing is clickable.
    applyFont();
    myUseIconAsLink = false;
    super.setText(text);
    updateOnTextChange();
  }
  /** Makes the link open {@code url} in the browser; passing null removes the previous target. */
  public void setHyperlinkTarget(@Nullable final String url) {
    if (myHyperlinkListener != null) {
      removeHyperlinkListener(myHyperlinkListener);
    }
    if (url != null) {
      myHyperlinkListener = e -> BrowserUtil.browse(url);
      addHyperlinkListener(myHyperlinkListener);
    }
  }
  public void addHyperlinkListener(HyperlinkListener listener) {
    myListeners.add(listener);
  }
  public void removeHyperlinkListener(HyperlinkListener listener) {
    myListeners.remove(listener);
  }
  public String getText() {
    return myHighlightedText.getText();
  }
  // Notifies all listeners with an ACTIVATED event; URL/description are not carried here.
  protected void fireHyperlinkEvent() {
    HyperlinkEvent e = new HyperlinkEvent(this, HyperlinkEvent.EventType.ACTIVATED, null, null);
    for (HyperlinkListener listener : myListeners) {
      listener.hyperlinkUpdate(e);
    }
  }
  /** Programmatically activates the link (also used by the accessibility action). */
  public void doClick() {
    fireHyperlinkEvent();
  }
  /**
   * Renders a limited HTML subset: text, &lt;b&gt; (bold) and &lt;a&gt; (link) tags.
   * Any other tags are ignored; attributes reset on every closing tag.
   */
  public void setHtmlText(String text) {
    HTMLEditorKit.Parser parse = new ParserDelegator();
    final HighlightedText highlightedText = new HighlightedText();
    try {
      parse.parse(new StringReader(text), new HTMLEditorKit.ParserCallback() {
        private TextAttributes currentAttributes;
        @Override
        public void handleText(char[] data, int pos) {
          highlightedText.appendText(data, currentAttributes);
        }
        @Override
        public void handleStartTag(HTML.Tag t, MutableAttributeSet a, int pos) {
          if (t == HTML.Tag.B) {
            currentAttributes = BOLD_ATTRIBUTES;
          }
          else if (t == HTML.Tag.A) {
            currentAttributes = myAnchorAttributes;
          }
        }
        @Override
        public void handleEndTag(HTML.Tag t, int pos) {
          currentAttributes = null;
        }
      }, false);
    }
    catch (IOException e) {
      LOG.error(e);
    }
    highlightedText.applyToComponent(this);
    updateOnTextChange();
  }
  // Revalidates the parent (if any) so layout reflects the new text, then re-pins size.
  private void updateOnTextChange() {
    final JComponent parent = (JComponent)getParent();
    if (parent != null) {
      parent.revalidate();
      parent.repaint();
    }
    adjustSize();
  }
  /** Variant whose minimum size is not pinned to the preferred size, so it can be cropped by layout. */
  public static class Croppable extends HyperlinkLabel {
    @Override
    protected void adjustSize() {
      // ignore, keep minimum size default
    }
  }
  @Override
  public void updateUI() {
    super.updateUI();
    // Re-apply the (possibly custom-sized) label font after a L&F switch.
    applyFont();
  }
  private void applyFont() {
    setFont(myFontSize == null ? UIUtil.getLabelFont() : UIUtil.getLabelFont(myFontSize));
  }
  @Override
  public AccessibleContext getAccessibleContext() {
    if (accessibleContext == null) {
      accessibleContext = new AccessibleHyperlinkLabel();
    }
    return accessibleContext;
  }
  /**
   * Hyperlink accessibility: "HYPERLINK" role and expose a "click" action.
   * @see AbstractButton.AccessibleAbstractButton
   */
  protected class AccessibleHyperlinkLabel extends AccessibleHighlightable implements AccessibleAction {
    @Override
    public AccessibleRole getAccessibleRole() {
      return AccessibleRole.HYPERLINK;
    }
    @Override
    public AccessibleAction getAccessibleAction() {
      return this;
    }
    @Override
    public int getAccessibleActionCount() {
      // Exactly one action: "click".
      return 1;
    }
    @Override
    public String getAccessibleActionDescription(int i) {
      if (i == 0) {
        return UIManager.getString("AbstractButton.clickText");
      }
      return null;
    }
    @Override
    public boolean doAccessibleAction(int i) {
      if (i == 0) {
        doClick();
        return true;
      } else {
        return false;
      }
    }
  }
  // Win10 look-and-feel link attributes: colors are resolved lazily from UIManager on
  // every paint so they track the enabled/hover/pressed state of the enclosing label.
  private class Win10TextAttributes extends TextAttributes {
    private Win10TextAttributes(Color textBackgroundColor) {
      super(null, textBackgroundColor, null, null, Font.PLAIN);
    }
    @Override public Color getForegroundColor() {
      return !isEnabled() ? UIManager.getColor("Label.disabledForeground") :
             myMousePressed ? UIManager.getColor("link.pressed.foreground") :
             myMouseHover ? UIManager.getColor("link.hover.foreground") :
             UIManager.getColor("link.foreground");
    }
    @Override public Color getEffectColor() {
      return getForegroundColor();
    }
    @Override public EffectType getEffectType() {
      // Underline when disabled, hovered or pressed; no effect otherwise.
      return !isEnabled() || myMouseHover || myMousePressed ? EffectType.LINE_UNDERSCORE : null;
    }
    // This attributes object is intentionally immutable from the outside.
    @Override public void setForegroundColor(Color color) {
      throw new UnsupportedOperationException();
    }
    @Override public void setEffectColor(Color color) {
      throw new UnsupportedOperationException();
    }
    @Override public void setEffectType(EffectType effectType) {
      throw new UnsupportedOperationException();
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.update.processor;
import java.io.IOException;
import java.io.StringWriter;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javax.xml.xpath.XPathExpressionException;
import org.apache.solr.client.solrj.util.ClientUtils;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.IOUtils;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.core.SolrCore;
import org.apache.solr.request.LocalSolrQueryRequest;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.request.SolrRequestHandler;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.servlet.DirectSolrConnection;
import org.apache.solr.update.AddUpdateCommand;
import org.apache.solr.util.BaseTestHarness;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.xml.sax.SAXException;
public class TolerantUpdateProcessorTest extends UpdateProcessorTestBase {
  /** List of valid + invalid documents; even ids are valid, odd ids carry a non-numeric "weight". */
  private static List<SolrInputDocument> docs = null;
  /** IDs of the invalid documents in <code>docs</code> */
  private static String[] badIds = null;
  @BeforeClass
  public static void beforeClass() throws Exception {
    initCore("solrconfig-update-processor-chains.xml", "schema12.xml");
  }
  @AfterClass
  public static void tearDownClass() {
    docs = null;
    badIds = null;
  }
  @Override
  public void setUp() throws Exception {
    super.setUp();
    // expected exception messages
    ignoreException("Error adding field");
    ignoreException("Document is missing mandatory uniqueKey field");
    if (docs == null) {
      // Build 10 valid / 10 invalid docs interleaved: even ids good, odd ids bad.
      docs = new ArrayList<>(20);
      badIds = new String[10];
      for (int i = 0; i < 10; i++) {
        // a valid document
        docs.add(doc(field("id", String.valueOf(2 * i)), field("weight", i)));
        // ... and an invalid one
        docs.add(doc(field("id", String.valueOf(2 * i + 1)), field("weight", "b")));
        badIds[i] = String.valueOf(2 * i + 1);
      }
    }
  }
  @Override
  public void tearDown() throws Exception {
    resetExceptionIgnores();
    // Wipe the index between tests and verify it is actually empty.
    assertU(delQ("*:*"));
    assertU(commit());
    assertQ(req("q", "*:*"), "//result[@numFound='0']");
    super.tearDown();
  }
  /**
   * future proof TolerantUpdateProcessor against new default method impls being added to
   * UpdateProcessor to ensure that every method involved in a processor chain life cycle is
   * overridden with exception catching/tracking.
   */
  @Test
  public void testReflection() {
    for (Method method : TolerantUpdateProcessor.class.getMethods()) {
      if (method.getDeclaringClass().equals(Object.class) || method.getName().equals("close")) {
        continue;
      }
      assertEquals(
          "base class(es) has changed, TolerantUpdateProcessor needs updated to ensure it "
              + "overrides all solr update lifcycle methods with exception tracking: "
              + method.toString(),
          TolerantUpdateProcessor.class,
          method.getDeclaringClass());
    }
  }
  /** Valid docs index cleanly through both tolerant chains. */
  @Test
  public void testValidAdds() throws IOException {
    SolrInputDocument validDoc = doc(field("id", "1"), field("text", "the quick brown fox"));
    add("tolerant-chain-max-errors-10", null, validDoc);
    validDoc = doc(field("id", "2"), field("text", "the quick brown fox"));
    add("tolerant-chain-max-errors-not-set", null, validDoc);
    assertU(commit());
    assertQ(req("q", "*:*"), "//result[@numFound='2']");
    assertQ(req("q", "id:1"), "//result[@numFound='1']");
    assertQ(req("q", "id:2"), "//result[@numFound='1']");
  }
  /** Batches with bad docs fail on a non-tolerant chain but succeed (with tracked errors) on a tolerant one. */
  @Test
  public void testInvalidAdds() throws IOException {
    SolrInputDocument invalidDoc1 = doc(field("text", "the quick brown fox")); // no id
    // This doc should fail without being tolerant
    Exception e = expectThrows(Exception.class, () -> add("not-tolerant", null, invalidDoc1));
    assertTrue(e.getMessage().contains("Document is missing mandatory uniqueKey field"));
    assertAddsSucceedWithErrors(
        "tolerant-chain-max-errors-10",
        Arrays.asList(new SolrInputDocument[] {invalidDoc1}),
        null,
        "(unknown)");
    // a valid doc
    SolrInputDocument validDoc1 = doc(field("id", "1"), field("text", "the quick brown fox"));
    // This batch should fail without being tolerant
    e =
        expectThrows(
            Exception.class,
            () ->
                add(
                    "not-tolerant",
                    null,
                    Arrays.asList(new SolrInputDocument[] {invalidDoc1, validDoc1})));
    assertTrue(e.getMessage().contains("Document is missing mandatory uniqueKey field"));
    assertU(commit());
    assertQ(req("q", "id:1"), "//result[@numFound='0']");
    assertAddsSucceedWithErrors(
        "tolerant-chain-max-errors-10",
        Arrays.asList(new SolrInputDocument[] {invalidDoc1, validDoc1}),
        null,
        "(unknown)");
    assertU(commit());
    // verify that the good document made it in.
    assertQ(req("q", "id:1"), "//result[@numFound='1']");
    SolrInputDocument invalidDoc2 = doc(field("id", "2"), field("weight", "aaa"));
    SolrInputDocument validDoc2 = doc(field("id", "3"), field("weight", "3"));
    // This batch should fail without being tolerant
    e =
        expectThrows(
            Exception.class,
            () ->
                add(
                    "not-tolerant",
                    null,
                    Arrays.asList(new SolrInputDocument[] {invalidDoc2, validDoc2})));
    assertTrue(e.getMessage().contains("Error adding field"));
    assertU(commit());
    assertQ(req("q", "id:3"), "//result[@numFound='0']");
    assertAddsSucceedWithErrors(
        "tolerant-chain-max-errors-10",
        Arrays.asList(new SolrInputDocument[] {invalidDoc2, validDoc2}),
        null,
        "2");
    assertU(commit());
    // The valid document was indexed
    assertQ(req("q", "id:3"), "//result[@numFound='1']");
    // The invalid document was NOT indexed
    assertQ(req("q", "id:2"), "//result[@numFound='0']");
  }
  @Test
  public void testMaxErrorsDefault() throws IOException {
    // by default the TolerantUpdateProcessor accepts all errors, so this batch should succeed with
    // 10 errors.
    assertAddsSucceedWithErrors("tolerant-chain-max-errors-not-set", docs, null, badIds);
    assertU(commit());
    assertQ(req("q", "*:*"), "//result[@numFound='10']");
  }
  /** An explicit maxErrors request param equal to the number of failures is still tolerated. */
  @Test
  public void testMaxErrorsSucceed() throws IOException {
    ModifiableSolrParams requestParams = new ModifiableSolrParams();
    requestParams.add("maxErrors", "10");
    // still OK
    assertAddsSucceedWithErrors("tolerant-chain-max-errors-not-set", docs, requestParams, badIds);
    assertU(commit());
    assertQ(req("q", "*:*"), "//result[@numFound='10']");
  }
  @Test
  public void testMaxErrorsThrowsException() throws IOException {
    ModifiableSolrParams requestParams = new ModifiableSolrParams();
    requestParams.add("maxErrors", "5");
    SolrException e =
        expectThrows(
            SolrException.class,
            () ->
                assertAddsSucceedWithErrors(
                    "tolerant-chain-max-errors-not-set", docs, requestParams, badIds));
    assertTrue(
        e.getMessage(),
        e.getMessage()
            .contains(
                "ERROR: [doc=1] Error adding field 'weight'='b' msg=For input string: \"b\""));
    // the first good documents made it to the index
    assertU(commit());
    assertQ(req("q", "*:*"), "//result[@numFound='6']");
  }
  @Test
  public void testMaxErrorsInfinite() throws IOException {
    ModifiableSolrParams requestParams = new ModifiableSolrParams();
    requestParams.add("maxErrors", "-1");
    // FIX: pass requestParams (previously null was passed, so maxErrors=-1 was never exercised)
    assertAddsSucceedWithErrors("tolerant-chain-max-errors-not-set", docs, requestParams, badIds);
    assertU(commit());
    assertQ(req("q", "*:*"), "//result[@numFound='10']");
  }
  @Test
  public void testMaxErrors0() throws IOException {
    // make the TolerantUpdateProcessor intolerant
    List<SolrInputDocument> smallBatch = docs.subList(0, 2);
    ModifiableSolrParams requestParams = new ModifiableSolrParams();
    requestParams.add("maxErrors", "0");
    SolrException e =
        expectThrows(
            SolrException.class,
            () ->
                assertAddsSucceedWithErrors(
                    "tolerant-chain-max-errors-10", smallBatch, requestParams, "1"));
    assertTrue(
        e.getMessage()
            .contains(
                "ERROR: [doc=1] Error adding field 'weight'='b' msg=For input string: \"b\""));
    // the first good documents made it to the index
    assertU(commit());
    assertQ(req("q", "*:*"), "//result[@numFound='1']");
  }
  /** A delete-by-query on an undefined field is reported as a DELQ error, not a request failure. */
  @Test
  public void testInvalidDelete() throws XPathExpressionException, SAXException {
    ignoreException("undefined field invalidfield");
    String response =
        update("tolerant-chain-max-errors-10", adoc("id", "1", "text", "the quick brown fox"));
    assertNull(
        BaseTestHarness.validateXPath(
            response,
            "//int[@name='status']=0",
            "//arr[@name='errors']",
            "count(//arr[@name='errors']/lst)=0"));
    response = update("tolerant-chain-max-errors-10", delQ("invalidfield:1"));
    assertNull(
        BaseTestHarness.validateXPath(
            response,
            "//int[@name='status']=0",
            "count(//arr[@name='errors']/lst)=1",
            "//arr[@name='errors']/lst/str[@name='type']/text()='DELQ'",
            "//arr[@name='errors']/lst/str[@name='id']/text()='invalidfield:1'",
            "//arr[@name='errors']/lst/str[@name='message']/text()='undefined field invalidfield'"));
  }
  @Test
  public void testValidDelete() throws XPathExpressionException, SAXException {
    ignoreException("undefined field invalidfield");
    String response =
        update("tolerant-chain-max-errors-10", adoc("id", "1", "text", "the quick brown fox"));
    assertNull(
        BaseTestHarness.validateXPath(
            response,
            "//int[@name='status']=0",
            "//arr[@name='errors']",
            "count(//arr[@name='errors']/lst)=0"));
    assertU(commit());
    assertQ(req("q", "*:*"), "//result[@numFound='1']");
    response = update("tolerant-chain-max-errors-10", delQ("id:1"));
    assertNull(
        BaseTestHarness.validateXPath(
            response,
            "//int[@name='status']=0",
            "//arr[@name='errors']",
            "count(//arr[@name='errors']/lst)=0"));
    assertU(commit());
    assertQ(req("q", "*:*"), "//result[@numFound='0']");
  }
  /** Spot-checks the XML response shape: status, maxErrors and the per-document errors array. */
  @Test
  public void testResponse() throws SAXException, XPathExpressionException, IOException {
    String response =
        update("tolerant-chain-max-errors-10", adoc("id", "1", "text", "the quick brown fox"));
    assertNull(
        BaseTestHarness.validateXPath(
            response,
            "//int[@name='status']=0",
            "//arr[@name='errors']",
            "count(//arr[@name='errors']/lst)=0"));
    response = update("tolerant-chain-max-errors-10", adoc("text", "the quick brown fox"));
    assertNull(
        BaseTestHarness.validateXPath(
            response,
            "//int[@name='status']=0",
            "//int[@name='maxErrors']/text()='10'",
            "count(//arr[@name='errors']/lst)=1",
            "//arr[@name='errors']/lst/str[@name='id']/text()='(unknown)'",
            "//arr[@name='errors']/lst/str[@name='message']/text()='Document is missing mandatory uniqueKey field: id'"));
    // FIX: removed a dead duplicate update() call whose response was immediately overwritten.
    StringWriter builder = new StringWriter();
    builder.append("<add>");
    for (SolrInputDocument doc : docs) {
      ClientUtils.writeXML(doc, builder);
    }
    builder.append("</add>");
    response = update("tolerant-chain-max-errors-10", builder.toString());
    assertNull(
        BaseTestHarness.validateXPath(
            response,
            "//int[@name='status']=0",
            "//int[@name='maxErrors']/text()='10'",
            "count(//arr[@name='errors']/lst)=10",
            "not(//arr[@name='errors']/lst/str[@name='id']/text()='0')",
            "//arr[@name='errors']/lst/str[@name='id']/text()='1'",
            "not(//arr[@name='errors']/lst/str[@name='id']/text()='2')",
            "//arr[@name='errors']/lst/str[@name='id']/text()='3'",
            "not(//arr[@name='errors']/lst/str[@name='id']/text()='4')",
            "//arr[@name='errors']/lst/str[@name='id']/text()='5'",
            "not(//arr[@name='errors']/lst/str[@name='id']/text()='6')",
            "//arr[@name='errors']/lst/str[@name='id']/text()='7'",
            "not(//arr[@name='errors']/lst/str[@name='id']/text()='8')",
            "//arr[@name='errors']/lst/str[@name='id']/text()='9'",
            "not(//arr[@name='errors']/lst/str[@name='id']/text()='10')",
            "//arr[@name='errors']/lst/str[@name='id']/text()='11'",
            "not(//arr[@name='errors']/lst/str[@name='id']/text()='12')",
            "//arr[@name='errors']/lst/str[@name='id']/text()='13'",
            "not(//arr[@name='errors']/lst/str[@name='id']/text()='14')",
            "//arr[@name='errors']/lst/str[@name='id']/text()='15'",
            "not(//arr[@name='errors']/lst/str[@name='id']/text()='16')",
            "//arr[@name='errors']/lst/str[@name='id']/text()='17'",
            "not(//arr[@name='errors']/lst/str[@name='id']/text()='18')",
            "//arr[@name='errors']/lst/str[@name='id']/text()='19'"));
    // spot check response when effective maxErrors is unlimited
    response = update("tolerant-chain-max-errors-not-set", builder.toString());
    assertNull(BaseTestHarness.validateXPath(response, "//int[@name='maxErrors']/text()='-1'"));
  }
  /**
   * Sends raw update XML through the given chain via a direct connection.
   *
   * @return the raw XML response
   * @throws SolrException rethrown as-is; any other failure is wrapped as BAD_REQUEST
   */
  public String update(String chain, String xml) {
    DirectSolrConnection connection = new DirectSolrConnection(h.getCore());
    SolrRequestHandler handler = h.getCore().getRequestHandler("/update");
    ModifiableSolrParams params = new ModifiableSolrParams();
    params.add("update.chain", chain);
    try {
      return connection.request(handler, params, xml);
    } catch (SolrException e) {
      throw e;
    } catch (Exception e) {
      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
    }
  }
  /**
   * Runs the batch through {@code chain} and asserts the request succeeds while reporting
   * ADD errors for exactly {@code idsShouldFail} (order-insensitive).
   */
  private void assertAddsSucceedWithErrors(
      String chain,
      final Collection<SolrInputDocument> docs,
      SolrParams requestParams,
      String... idsShouldFail)
      throws IOException {
    SolrQueryResponse response = add(chain, requestParams, docs);
    @SuppressWarnings("unchecked")
    List<SimpleOrderedMap<String>> errors =
        (List<SimpleOrderedMap<String>>) response.getResponseHeader().get("errors");
    assertNotNull(errors);
    assertEquals("number of errors", idsShouldFail.length, errors.size());
    Set<String> addErrorIdsExpected = new HashSet<>(Arrays.asList(idsShouldFail));
    for (SimpleOrderedMap<String> err : errors) {
      assertEquals("this method only expects 'add' errors", "ADD", err.get("type"));
      String id = err.get("id");
      assertNotNull("null err id", id);
      assertTrue("unexpected id", addErrorIdsExpected.contains(id));
    }
  }
  /** Convenience overload: run a single document through the chain. */
  protected SolrQueryResponse add(
      final String chain, SolrParams requestParams, final SolrInputDocument doc)
      throws IOException {
    return add(chain, requestParams, Arrays.asList(new SolrInputDocument[] {doc}));
  }
  /**
   * Runs the documents through the named processor chain on a local request and returns the
   * response; the processor and request are always closed, even on failure.
   */
  protected SolrQueryResponse add(
      final String chain, SolrParams requestParams, final Collection<SolrInputDocument> docs)
      throws IOException {
    SolrCore core = h.getCore();
    UpdateRequestProcessorChain pc = core.getUpdateProcessingChain(chain);
    assertNotNull("No Chain named: " + chain, pc);
    SolrQueryResponse rsp = new SolrQueryResponse();
    rsp.add("responseHeader", new SimpleOrderedMap<Object>());
    if (requestParams == null) {
      requestParams = new ModifiableSolrParams();
    }
    SolrQueryRequest req = new LocalSolrQueryRequest(core, requestParams);
    UpdateRequestProcessor processor = null;
    try {
      processor = pc.createProcessor(req, rsp);
      for (SolrInputDocument doc : docs) {
        AddUpdateCommand cmd = new AddUpdateCommand(req);
        cmd.solrDoc = doc;
        processor.processAdd(cmd);
      }
      processor.finish();
    } finally {
      IOUtils.closeQuietly(processor);
      req.close();
    }
    return rsp;
  }
}
| |
/*
Copyright 2016 Goldman Sachs.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
// Portions copyright Hiroshi Ito. Licensed under Apache 2.0 license
package com.gs.fw.common.mithra.finder;
import com.gs.fw.common.mithra.MithraList;
import com.gs.fw.common.mithra.MithraObjectPortal;
import com.gs.fw.common.mithra.attribute.Attribute;
import com.gs.fw.common.mithra.finder.orderby.OrderBy;
import com.gs.fw.common.mithra.querycache.CachedQuery;
import com.gs.fw.common.mithra.querycache.QueryCache;
import com.gs.fw.common.mithra.tempobject.TupleTempContext;
import com.gs.fw.common.mithra.util.ListFactory;
import org.eclipse.collections.impl.list.mutable.FastList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
public class SimpleToManyDeepFetchStrategy extends SingleLinkDeepFetchStrategy
{
/** Creates a strategy for the given relationship mapper with an optional result ordering. */
public SimpleToManyDeepFetchStrategy(Mapper mapper, OrderBy orderBy)
{
    super(mapper, orderBy);
}
/** As above, with an alternate mapper used for operation construction. */
public SimpleToManyDeepFetchStrategy(Mapper mapper, OrderBy orderBy, Mapper alternateMapper)
{
    super(mapper, orderBy, alternateMapper);
}
/** As above, additionally fixing this strategy's position in the deep-fetch chain. */
public SimpleToManyDeepFetchStrategy(Mapper mapper, OrderBy orderBy, Mapper alternateMapper, int chainPosition)
{
    super(mapper, orderBy, alternateMapper, chainPosition);
}
/**
 * Resolves the to-many relationship for every parent in {@code node}.
 * First tries to satisfy the fetch entirely from the query cache (unless
 * {@code bypassCache}); otherwise falls back to a server round trip.
 *
 * @return the list of cached queries produced by the fetch, or the cached empty
 *         result when there are no parents
 */
public List deepFetch(DeepFetchNode node, boolean bypassCache, boolean forceImplicitJoin)
{
    List immediateParentList = getImmediateParentList(node);
    if (immediateParentList.size() == 0)
    {
        return cacheEmptyResult(node);
    }
    // one operation per parent, each mapped to an (initially empty) result list
    HashMap<Operation, List> opToListMap = populateOpToListMapWithEmptyList(immediateParentList);
    MithraList complexList = (MithraList) this.mapOpToList(node);
    complexList.setForceImplicitJoin(forceImplicitJoin);
    if (!bypassCache)
    {
        // null result means the cache could not fully answer; fall through to the server
        List result = deepFetchToManyInMemory(opToListMap, immediateParentList, complexList.getOperation(), node);
        if (result != null)
        {
            return result;
        }
    }
    return deepFetchToManyFromServer(opToListMap, bypassCache, immediateParentList, complexList, node);
}
/**
 * Distributes an already-fetched result list across the per-parent operations,
 * records the resolved list on the node, and caches the per-operation results.
 * roughSize is a per-parent list presize hint (average children per parent + 1).
 */
private List cacheResultsForToMany(HashMap<Operation, List> opToListMap, List immediateParentList, List list, DeepFetchNode node)
{
    int roughSize = (list.size() / immediateParentList.size()) + 1;
    int doNotCacheCount = associateResultsWithOps(list, opToListMap, roughSize, null);
    node.setResolvedList(list, chainPosition);
    return cacheResults(opToListMap, doNotCacheCount);
}
protected List deepFetchToManyInMemory(HashMap<Operation, List> opToListMap, List immediateParentList, Operation op, DeepFetchNode node)
{
MithraObjectPortal portal = op.getResultObjectPortal();
if (portal.isCacheDisabled()) return null;
QueryCache queryCache = portal.getQueryCache();
CachedQuery cachedResult = queryCache.findByEquality(op);
if (cachedResult != null)
{
return cacheResultsForToMany(opToListMap, immediateParentList, cachedResult.getResult(), node);
}
Iterator<Operation> it = opToListMap.keySet().iterator();
FastList queries = null;
FastList resolvedList = null;
while(it.hasNext())
{
Operation oneOp = it.next();
CachedQuery cachedQuery = queryCache.findByEquality(oneOp);
if (cachedQuery == null) return null;
if (queries == null) queries = new FastList(opToListMap.size());
if (resolvedList == null) resolvedList = new FastList(opToListMap.size());
resolvedList.addAll(cachedQuery.getResult());
queries.add(cachedQuery);
}
if (this.orderBy != null)
{
resolvedList.sortThis(this.orderBy);
}
node.setResolvedList(resolvedList, this.chainPosition);
CachedQuery complexCachedQuery = new CachedQuery(op, this.orderBy);
complexCachedQuery.setResult(resolvedList);
cacheComplexQuery(complexCachedQuery, true);
queries.add(complexCachedQuery);
return queries;
}
@Override
public DeepFetchResult deepFetchFirstLinkInMemory(DeepFetchNode node)
{
List immediateParentList = getImmediateParentList(node);
if (immediateParentList.size() == 0)
{
node.setResolvedList(ListFactory.EMPTY_LIST, chainPosition);
return DeepFetchResult.nothingToDo();
}
DeepFetchResult deepFetchResult = new DeepFetchResult(immediateParentList);
HashMap<Operation, List> opToListMap = populateOpToListMapWithEmptyList(immediateParentList);
MithraObjectPortal portal = this.getMapper().getFromPortal();
if (portal.isCacheDisabled())
{
return deepFetchResult;
}
QueryCache queryCache = portal.getQueryCache();
Iterator<Operation> it = opToListMap.keySet().iterator();
FastList queries = null;
FastList resolvedList = null;
CachedQuery firstCachedQuery = null;
while(it.hasNext())
{
Operation oneOp = it.next();
CachedQuery cachedQuery = queryCache.findByEquality(oneOp);
if (cachedQuery == null)
{
List list = portal.zFindInMemoryWithoutAnalysis(oneOp, true);
if (list != null)
{
if (orderBy != null && list.size() > 1)
{
Collections.sort(list, orderBy);
}
cachedQuery = new CachedQuery(oneOp, orderBy, firstCachedQuery);
if (firstCachedQuery == null) firstCachedQuery = cachedQuery;
cachedQuery.setResult(list);
cachedQuery.cacheQuery(true);
}
else
{
return deepFetchResult;
}
}
if (queries == null) queries = new FastList(opToListMap.size());
if (resolvedList == null) resolvedList = new FastList(opToListMap.size());
resolvedList.addAll(cachedQuery.getResult());
queries.add(cachedQuery);
}
node.setResolvedList(resolvedList, this.chainPosition);
deepFetchResult.setResult(queries);
deepFetchResult.setPercentComplete(100);
return deepFetchResult;
}
@Override
public List deepFetchAdhocUsingTempContext(DeepFetchNode node, TupleTempContext tempContext, Object parentPrototype, List immediateParentList)
{
MithraList complexList = this.createListForAdHocDeepFetch(tempContext, parentPrototype);
return deepFetchWithComplexList(node, immediateParentList, complexList);
}
private List deepFetchWithComplexList(DeepFetchNode node, List immediateParentList, MithraList complexList)
{
complexList.forceResolve();
associateResultsWithAlternateMapper(complexList.getOperation(), complexList);
if (immediateParentList == null)
{
immediateParentList = this.getImmediateParentList(node);
}
HashMap<Operation, List> opToListMap = populateOpToListMapWithEmptyList(immediateParentList);
return cacheResultsForToMany(opToListMap, immediateParentList, complexList, node);
}
protected List deepFetchToManyFromServer(HashMap<Operation, List> opToListMap, boolean bypassCache,
List immediateParentList, MithraList complexList, DeepFetchNode node)
{
MithraList list = complexList;
Operation simplifiedJoinOp = node.getSimplifiedJoinOp(this.mapper, immediateParentList);
if (simplifiedJoinOp != null)
{
list = findMany(simplifiedJoinOp);
}
list.setBypassCache(bypassCache);
list.forceResolve();
if (list != complexList)
{
associateSimplifiedResult(complexList.getOperation(), list);
}
associateResultsWithAlternateMapper(complexList.getOperation(), list);
return cacheResultsForToMany(opToListMap, immediateParentList, list, node);
}
@Override
public List finishAdhocDeepFetch(DeepFetchNode node, DeepFetchResult resultSoFar)
{
if (resultSoFar.getPercentComplete() != 100)
{
throw new RuntimeException("Should not get here");
}
return ListFactory.EMPTY_LIST;
}
@Override
public List deepFetchAdhocUsingInClause(DeepFetchNode node, Attribute singleAttribute, List parentList)
{
Operation op = node.getSimplifiedJoinOp(this.getMapper(), parentList);
if (op == null) return null;
MithraList complexList = op.getResultObjectPortal().getFinder().findMany(op);
return deepFetchWithComplexList(node, parentList, complexList);
}
}
| |
/*
* Copyright Terracotta, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ehcache.config.builders;
import org.ehcache.Cache;
import org.ehcache.config.Builder;
import org.ehcache.core.Ehcache;
import org.ehcache.core.EhcacheWithLoaderWriter;
import org.ehcache.core.InternalCache;
import org.ehcache.core.PersistentUserManagedEhcache;
import org.ehcache.UserManagedCache;
import org.ehcache.core.config.BaseCacheConfiguration;
import org.ehcache.config.CacheConfiguration;
import org.ehcache.config.EvictionAdvisor;
import org.ehcache.config.ResourcePools;
import org.ehcache.config.ResourceType;
import org.ehcache.core.internal.store.StoreConfigurationImpl;
import org.ehcache.core.spi.store.heap.SizeOfEngine;
import org.ehcache.impl.events.CacheEventDispatcherImpl;
import org.ehcache.core.internal.store.StoreSupport;
import org.ehcache.event.CacheEventListener;
import org.ehcache.core.events.CacheEventListenerConfiguration;
import org.ehcache.core.events.CacheEventListenerProvider;
import org.ehcache.impl.config.copy.DefaultCopierConfiguration;
import org.ehcache.impl.config.serializer.DefaultSerializerConfiguration;
import org.ehcache.impl.config.store.heap.DefaultSizeOfEngineProviderConfiguration;
import org.ehcache.config.units.EntryUnit;
import org.ehcache.config.units.MemoryUnit;
import org.ehcache.core.events.CacheEventDispatcher;
import org.ehcache.impl.internal.events.DisabledCacheEventNotificationService;
import org.ehcache.CachePersistenceException;
import org.ehcache.expiry.Expirations;
import org.ehcache.expiry.Expiry;
import org.ehcache.impl.copy.SerializingCopier;
import org.ehcache.impl.internal.spi.event.DefaultCacheEventListenerProvider;
import org.ehcache.core.spi.LifeCycled;
import org.ehcache.core.spi.LifeCycledAdapter;
import org.ehcache.core.internal.service.ServiceLocator;
import org.ehcache.core.spi.store.Store;
import org.ehcache.spi.persistence.PersistableResourceService;
import org.ehcache.spi.service.ServiceProvider;
import org.ehcache.spi.copy.Copier;
import org.ehcache.spi.loaderwriter.CacheLoaderWriter;
import org.ehcache.spi.serialization.SerializationProvider;
import org.ehcache.spi.serialization.Serializer;
import org.ehcache.spi.serialization.UnsupportedTypeException;
import org.ehcache.core.spi.service.LocalPersistenceService;
import org.ehcache.core.spi.store.heap.SizeOfEngineProvider;
import org.ehcache.spi.service.Service;
import org.ehcache.spi.service.ServiceConfiguration;
import org.ehcache.spi.service.ServiceCreationConfiguration;
import org.ehcache.spi.service.ServiceDependencies;
import org.ehcache.core.internal.util.ClassLoading;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.atomic.AtomicLong;
import static org.ehcache.config.ResourceType.Core.DISK;
import static org.ehcache.config.ResourceType.Core.OFFHEAP;
import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder;
import static org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration.DEFAULT_MAX_OBJECT_SIZE;
import static org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration.DEFAULT_OBJECT_GRAPH_SIZE;
import static org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration.DEFAULT_UNIT;
import static org.ehcache.core.internal.service.ServiceLocator.findSingletonAmongst;
/**
* The {@code UserManagedCacheBuilder} enables building {@link UserManagedCache}s using a fluent style.
* <P>
* {@link UserManagedCache}s are {@link Cache}s that are not linked to a {@link org.ehcache.CacheManager}.
* </P>
* <P>
* As with all Ehcache builders, all instances are immutable and calling any method on the builder will return a new
* instance without modifying the one on which the method was called.
* This enables the sharing of builder instances without any risk of seeing them modified by code elsewhere.
* </P>
*
* @param <K> the cache key type
* @param <V> the cache value type
* @param <T> the specific {@code UserManagedCache} type
*/
public class UserManagedCacheBuilder<K, V, T extends UserManagedCache<K, V>> implements Builder<T> {
  // Annotation carrier only: lets the ServiceLocator discover the builder's hard service
  // dependencies (Store.Provider) via loadDependenciesOf(ServiceDeps.class). Never instantiated.
  @ServiceDependencies(Store.Provider.class)
  private static class ServiceDeps {
    private ServiceDeps() {
      throw new UnsupportedOperationException("This is an annotation placeholder, not to be instantiated");
    }
  }
  private static final Logger LOGGER = LoggerFactory.getLogger(UserManagedCacheBuilder.class);
  // Monotonic counter used to derive unique logger names for caches built without an id.
  private static final AtomicLong instanceId = new AtomicLong(0L);
  private final Class<K> keyType;
  private final Class<V> valueType;
  // Optional identifier; required for persistent (disk-backed) caches.
  private String id;
  private final Set<Service> services = new HashSet<Service>();
  private final Set<ServiceCreationConfiguration<?>> serviceCreationConfigurations = new HashSet<ServiceCreationConfiguration<?>>();
  private Expiry<? super K, ? super V> expiry = Expirations.noExpiration();
  private ClassLoader classLoader = ClassLoading.getDefaultClassLoader();
  private EvictionAdvisor<? super K, ? super V> evictionAdvisor;
  private CacheLoaderWriter<? super K, V> cacheLoaderWriter;
  // Disabled by default; replaced when executors or an explicit dispatcher are configured.
  private CacheEventDispatcher<K, V> eventDispatcher = new DisabledCacheEventNotificationService<K, V>();
  // Default resource pools: unbounded heap by entry count.
  private ResourcePools resourcePools = newResourcePoolsBuilder().heap(Long.MAX_VALUE, EntryUnit.ENTRIES).build();
  private Copier<K> keyCopier;
  private boolean useKeySerializingCopier;
  private Copier<V> valueCopier;
  private boolean useValueSerializingCopier;
  private Serializer<K> keySerializer;
  private Serializer<V> valueSerializer;
  private int dispatcherConcurrency = 4;
  private List<CacheEventListenerConfiguration> eventListenerConfigurations = new ArrayList<CacheEventListenerConfiguration>();
  private ExecutorService unOrderedExecutor;
  private ExecutorService orderedExecutor;
  // Size-of engine knobs for byte-sized heap tiers; see withSizeOfMaxObjectGraph / withSizeOfMaxObjectSize.
  private long objectGraphSize = DEFAULT_OBJECT_GRAPH_SIZE;
  private long maxObjectSize = DEFAULT_MAX_OBJECT_SIZE;
  private MemoryUnit sizeOfUnit = DEFAULT_UNIT;
UserManagedCacheBuilder(final Class<K> keyType, final Class<V> valueType) {
this.keyType = keyType;
this.valueType = valueType;
}
private UserManagedCacheBuilder(UserManagedCacheBuilder<K, V, T> toCopy) {
this.keyType = toCopy.keyType;
this.valueType = toCopy.valueType;
this.id = toCopy.id;
this.services.addAll(toCopy.services);
this.serviceCreationConfigurations.addAll(toCopy.serviceCreationConfigurations);
this.expiry = toCopy.expiry;
this.classLoader = toCopy.classLoader;
this.evictionAdvisor = toCopy.evictionAdvisor;
this.cacheLoaderWriter = toCopy.cacheLoaderWriter;
this.eventDispatcher = toCopy.eventDispatcher;
this.resourcePools = toCopy.resourcePools;
this.keyCopier = toCopy.keyCopier;
this.valueCopier = toCopy.valueCopier;
this.keySerializer = toCopy.keySerializer;
this.valueSerializer = toCopy.valueSerializer;
this.useKeySerializingCopier = toCopy.useKeySerializingCopier;
this.useValueSerializingCopier = toCopy.useValueSerializingCopier;
this.eventListenerConfigurations = toCopy.eventListenerConfigurations;
this.unOrderedExecutor = toCopy.unOrderedExecutor;
this.orderedExecutor = toCopy.orderedExecutor;
this.objectGraphSize = toCopy.objectGraphSize;
this.maxObjectSize = toCopy.maxObjectSize;
this.sizeOfUnit = toCopy.sizeOfUnit;
}
T build(ServiceLocator serviceLocator) throws IllegalStateException {
validateListenerConfig();
try {
for (ServiceCreationConfiguration<?> serviceCreationConfig : serviceCreationConfigurations) {
Service service = serviceLocator.getOrCreateServiceFor(serviceCreationConfig);
if (service == null) {
throw new IllegalArgumentException("Couldn't resolve Service " + serviceCreationConfig.getServiceType().getName());
}
}
serviceLocator.loadDependenciesOf(ServiceDeps.class);
serviceLocator.startAllServices();
} catch (Exception e) {
throw new IllegalStateException("UserManagedCacheBuilder failed to build.", e);
}
List<ServiceConfiguration<?>> serviceConfigsList = new ArrayList<ServiceConfiguration<?>>();
if (keyCopier != null) {
serviceConfigsList.add(new DefaultCopierConfiguration<K>(keyCopier, DefaultCopierConfiguration.Type.KEY));
} else if (useKeySerializingCopier) {
serviceConfigsList.add(new DefaultCopierConfiguration<K>((Class) SerializingCopier.class, DefaultCopierConfiguration.Type.KEY));
}
if (valueCopier != null) {
serviceConfigsList.add(new DefaultCopierConfiguration<V>(valueCopier, DefaultCopierConfiguration.Type.VALUE));
} else if (useValueSerializingCopier) {
serviceConfigsList.add(new DefaultCopierConfiguration<K>((Class) SerializingCopier.class, DefaultCopierConfiguration.Type.VALUE));
}
CacheConfiguration<K, V> cacheConfig = new BaseCacheConfiguration<K, V>(keyType, valueType, evictionAdvisor,
classLoader, expiry, resourcePools);
List<LifeCycled> lifeCycledList = new ArrayList<LifeCycled>();
Set<ResourceType<?>> resources = resourcePools.getResourceTypeSet();
boolean persistent = resources.contains(DISK);
if (persistent) {
if (id == null) {
throw new IllegalStateException("Persistent user managed caches must have an id set");
}
final LocalPersistenceService persistenceService = serviceLocator.getService(LocalPersistenceService.class);
if (!resourcePools.getPoolForResource(ResourceType.Core.DISK).isPersistent()) {
try {
persistenceService.destroy(id);
} catch (CachePersistenceException cpex) {
throw new RuntimeException("Unable to clean-up persistence space for non-restartable cache " + id, cpex);
}
}
try {
final PersistableResourceService.PersistenceSpaceIdentifier<?> identifier = persistenceService.getPersistenceSpaceIdentifier(id, cacheConfig);
lifeCycledList.add(new LifeCycledAdapter() {
@Override
public void close() throws Exception {
persistenceService.releasePersistenceSpaceIdentifier(identifier);
}
});
serviceConfigsList.add(identifier);
} catch (CachePersistenceException cpex) {
throw new RuntimeException("Unable to create persistence space for cache " + id, cpex);
}
}
Serializer<K> keySerializer = this.keySerializer;
Serializer<V> valueSerializer = this.valueSerializer;
if (keySerializer != null) {
serviceConfigsList.add(new DefaultSerializerConfiguration<K>(this.keySerializer, DefaultSerializerConfiguration.Type.KEY));
}
if (valueSerializer != null) {
serviceConfigsList.add(new DefaultSerializerConfiguration<V>(this.valueSerializer, DefaultSerializerConfiguration.Type.VALUE));
}
ServiceConfiguration<?>[] serviceConfigs = serviceConfigsList.toArray(new ServiceConfiguration<?>[0]);
final SerializationProvider serialization = serviceLocator.getService(SerializationProvider.class);
if (serialization != null) {
try {
if (keySerializer == null) {
final Serializer<K> keySer = serialization.createKeySerializer(keyType, classLoader, serviceConfigs);
lifeCycledList.add(
new LifeCycledAdapter() {
@Override
public void close() throws Exception {
serialization.releaseSerializer(keySer);
}
}
);
keySerializer = keySer;
}
if (valueSerializer == null) {
final Serializer<V> valueSer = serialization.createValueSerializer(valueType, classLoader, serviceConfigs);
lifeCycledList.add(
new LifeCycledAdapter() {
@Override
public void close() throws Exception {
serialization.releaseSerializer(valueSer);
}
}
);
valueSerializer = valueSer;
}
} catch (UnsupportedTypeException e) {
if (resources.contains(OFFHEAP) || resources.contains(DISK)) {
throw new RuntimeException(e);
} else {
LOGGER.debug("Could not create serializers for user managed cache {}", id, e);
}
}
}
final Store.Provider storeProvider = StoreSupport.selectStoreProvider(serviceLocator, resources, serviceConfigsList);
Store.Configuration<K, V> storeConfig = new StoreConfigurationImpl<K, V>(keyType, valueType, evictionAdvisor, classLoader,
expiry, resourcePools, dispatcherConcurrency, keySerializer, valueSerializer);
final Store<K, V> store = storeProvider.createStore(storeConfig, serviceConfigs);
lifeCycledList.add(new LifeCycled() {
@Override
public void init() throws Exception {
storeProvider.initStore(store);
}
@Override
public void close() throws Exception {
storeProvider.releaseStore(store);
}
});
if (this.eventDispatcher instanceof DisabledCacheEventNotificationService && (orderedExecutor != null & unOrderedExecutor != null)) {
this.eventDispatcher = new CacheEventDispatcherImpl<K, V>(unOrderedExecutor, orderedExecutor);
}
eventDispatcher.setStoreEventSource(store.getStoreEventSource());
if (persistent) {
LocalPersistenceService persistenceService = serviceLocator
.getService(LocalPersistenceService.class);
if (persistenceService == null) {
throw new IllegalStateException("No LocalPersistenceService could be found - did you configure one?");
}
PersistentUserManagedEhcache<K, V> cache = new PersistentUserManagedEhcache<K, V>(cacheConfig, store, persistenceService, cacheLoaderWriter, eventDispatcher, id);
registerListeners(cache, serviceLocator, lifeCycledList);
for (LifeCycled lifeCycled : lifeCycledList) {
cache.addHook(lifeCycled);
}
return cast(cache);
} else {
final InternalCache<K, V> cache;
if (cacheLoaderWriter == null) {
cache = new Ehcache<K, V>(cacheConfig, store, eventDispatcher, getLoggerFor(Ehcache.class));
} else {
cache = new EhcacheWithLoaderWriter<K, V>(cacheConfig, store, cacheLoaderWriter, eventDispatcher, getLoggerFor(EhcacheWithLoaderWriter.class));
}
registerListeners(cache, serviceLocator, lifeCycledList);
for (LifeCycled lifeCycled : lifeCycledList) {
(cache).addHook(lifeCycled);
}
return cast(cache);
}
}
private Logger getLoggerFor(Class clazz) {
String loggerName;
if (id != null) {
loggerName = clazz.getName() + "-" + id;
} else {
loggerName = clazz.getName() + "-UserManaged" + instanceId.incrementAndGet();
}
return LoggerFactory.getLogger(loggerName);
}
private void validateListenerConfig() {
if (!eventListenerConfigurations.isEmpty() && eventDispatcher instanceof DisabledCacheEventNotificationService) {
if (orderedExecutor == null && unOrderedExecutor == null) {
throw new IllegalArgumentException("Listeners will not work unless Executors or EventDispatcher is configured.");
}
}
}
private void registerListeners(Cache<K, V> cache, ServiceProvider<Service> serviceProvider, List<LifeCycled> lifeCycledList) {
if (!eventListenerConfigurations.isEmpty()) {
final CacheEventListenerProvider listenerProvider;
CacheEventListenerProvider provider;
if ((provider = serviceProvider.getService(CacheEventListenerProvider.class)) != null) {
listenerProvider = provider;
} else {
listenerProvider = new DefaultCacheEventListenerProvider();
}
for (CacheEventListenerConfiguration config : eventListenerConfigurations) {
final CacheEventListener<K, V> listener = listenerProvider.createEventListener(id, config);
if (listener != null) {
cache.getRuntimeConfiguration().registerCacheEventListener(listener, config.orderingMode(), config.firingMode(), config.fireOn());
lifeCycledList.add(new LifeCycled() {
@Override
public void init() throws Exception {
}
@Override
public void close() throws Exception {
listenerProvider.releaseEventListener(listener);
}
});
}
}
}
eventDispatcher.setListenerSource(cache);
}
  // Narrows the built cache to the builder's declared subtype T. Presumably safe because T is
  // only refined via with(UserManagedCacheConfiguration), which controls the concrete cache
  // type that build() produces — confirm against the UserManagedCacheConfiguration contract.
  @SuppressWarnings("unchecked")
  T cast(UserManagedCache<K, V> cache) {
    return (T)cache;
  }
/**
* Builds the {@link UserManagedCache}, initializing it if requested.
*
* @param init whether to initialize or not the cache before returning
* @return a user managed cache
* @throws IllegalStateException if the user managed cache cannot be built
*/
public final T build(final boolean init) throws IllegalStateException {
final T build = build(new ServiceLocator(services.toArray(new Service[services.size()])));
if (init) {
build.init();
}
return build;
}
  /**
   * Builds an uninitialized {@link UserManagedCache}.
   *
   * @return an uninitialized user managed cache
   */
  @Override
  public T build() {
    return build(false);
  }
  /**
   * Specifies the returned {@link UserManagedCache} subtype through a specific
   * {@link UserManagedCacheConfiguration}, which may also add configurations to the
   * returned builder.
   *
   * @param cfg the {@code UserManagedCacheConfiguration} to use
   * @param <N> the subtype of {@code UserManagedCache}
   * @return a new builder ready to build a more specific subtype of user managed cache
   *
   * @see org.ehcache.PersistentUserManagedCache
   * @see org.ehcache.impl.config.persistence.UserManagedPersistenceContext
   */
  public final <N extends T> UserManagedCacheBuilder<K, V, N> with(UserManagedCacheConfiguration<K, V, N> cfg) {
    return cfg.builder(this);
  }
/**
* Adds an identifier to the returned builder.
* <P>
* The identifier will be used in services and logging the way a cache alias would be inside a {@code CacheManager}
* </P>
* @param identifier the identifier
* @return a new builder with the added identifier
*/
public final UserManagedCacheBuilder<K, V, T> identifier(String identifier) {
UserManagedCacheBuilder<K, V, T> otherBuilder = new UserManagedCacheBuilder<K, V, T>(this);
otherBuilder.id = identifier;
return otherBuilder;
}
/**
* Adds a {@link ClassLoader}, to load non Ehcache types, to the returned builder.
*
* @param classLoader the class loader to use
* @return a new builder with the added class loader
*/
public final UserManagedCacheBuilder<K, V, T> withClassLoader(ClassLoader classLoader) {
if (classLoader == null) {
throw new NullPointerException("Null classloader");
}
UserManagedCacheBuilder<K, V, T> otherBuilder = new UserManagedCacheBuilder<K, V, T>(this);
otherBuilder.classLoader = classLoader;
return otherBuilder;
}
/**
* Adds {@link Expiry} configuration to the returned builder.
*
* @param expiry the expiry to use
* @return a new builer with the added expiry
*/
public final UserManagedCacheBuilder<K, V, T> withExpiry(Expiry<? super K, ? super V> expiry) {
if (expiry == null) {
throw new NullPointerException("Null expiry");
}
UserManagedCacheBuilder<K, V, T> otherBuilder = new UserManagedCacheBuilder<K, V, T>(this);
otherBuilder.expiry = expiry;
return otherBuilder;
}
/**
* Adds an {@link CacheEventDispatcher} to the returned builder.
* <P>
* This is one way of providing a mandatory part of supporting event listeners in {@link UserManagedCache}
* </P>
*
* @param eventDispatcher the event dispatcher to use
* @return a new builder with the configured event dispatcher
*
* @see #withEventExecutors(ExecutorService, ExecutorService)
* @see #withEventListeners(CacheEventListenerConfiguration...)
* @see #withEventListeners(CacheEventListenerConfigurationBuilder)
*/
public final UserManagedCacheBuilder<K, V, T> withEventDispatcher(CacheEventDispatcher<K, V> eventDispatcher) {
UserManagedCacheBuilder<K, V, T> otherBuilder = new UserManagedCacheBuilder<K, V, T>(this);
otherBuilder.orderedExecutor = null;
otherBuilder.unOrderedExecutor = null;
otherBuilder.eventDispatcher = eventDispatcher;
return otherBuilder;
}
/**
* Adds the default {@link CacheEventDispatcher} using the provided {@link ExecutorService} to the returned builder.
* <P>
* This is one way of providing a mandatory part of supporting event listeners in {@link UserManagedCache}
* </P>
*
* @param orderedExecutor the ordered event executor service
* @param unOrderedExecutor the unordered event executor service
* @return a new builder with the configured event dispatcher
*
* @see #withEventDispatcher(CacheEventDispatcher)
* @see #withEventListeners(CacheEventListenerConfiguration...)
* @see #withEventListeners(CacheEventListenerConfigurationBuilder)
*/
public final UserManagedCacheBuilder<K, V, T> withEventExecutors(ExecutorService orderedExecutor, ExecutorService unOrderedExecutor) {
UserManagedCacheBuilder<K, V, T> otherBuilder = new UserManagedCacheBuilder<K, V, T>(this);
otherBuilder.eventDispatcher = new DisabledCacheEventNotificationService<K, V>();
otherBuilder.orderedExecutor = orderedExecutor;
otherBuilder.unOrderedExecutor = unOrderedExecutor;
return otherBuilder;
}
  /**
   * Convenience method to add a {@link CacheEventListenerConfiguration} built from the provided
   * {@link CacheEventListenerConfigurationBuilder} to the returned builder.
   *
   * @param cacheEventListenerConfiguration the builder to get the configuration from
   * @return a new builder with the added event listener configuration
   *
   * @see #withEventDispatcher(CacheEventDispatcher)
   * @see #withEventExecutors(ExecutorService, ExecutorService)
   * @see #withEventListeners(CacheEventListenerConfiguration...)
   */
  public final UserManagedCacheBuilder<K, V, T> withEventListeners(CacheEventListenerConfigurationBuilder cacheEventListenerConfiguration) {
    return withEventListeners(cacheEventListenerConfiguration.build());
  }
/**
* Adds one or more {@link CacheEventListenerConfiguration} to the returned builder.
*
* @param cacheEventListenerConfigurations the cache event listener configurations
* @return a new builders with the added event listener configurations
*
* @see #withEventDispatcher(CacheEventDispatcher)
* @see #withEventExecutors(ExecutorService, ExecutorService)
* @see #withEventListeners(CacheEventListenerConfigurationBuilder)
*/
public final UserManagedCacheBuilder<K, V, T> withEventListeners(CacheEventListenerConfiguration... cacheEventListenerConfigurations) {
UserManagedCacheBuilder<K, V, T> otherBuilder = new UserManagedCacheBuilder<K, V, T>(this);
otherBuilder.eventListenerConfigurations.addAll(Arrays.asList(cacheEventListenerConfigurations));
return otherBuilder;
}
/**
* Adds a {@link ResourcePools} configuration to the returned builder.
*
* @param resourcePools the resource pools to use
* @return a new builder with the configured resource pools
*
* @see #withResourcePools(ResourcePoolsBuilder)
*/
public final UserManagedCacheBuilder<K, V, T> withResourcePools(ResourcePools resourcePools) {
UserManagedCacheBuilder<K, V, T> otherBuilder = new UserManagedCacheBuilder<K, V, T>(this);
otherBuilder.resourcePools = resourcePools;
return otherBuilder;
}
  /**
   * Convenience method to add a {@link ResourcePools} configuration built from the provided
   * {@link ResourcePoolsBuilder} to the returned builder.
   *
   * @param resourcePoolsBuilder the builder to get the resource pools from
   * @return a new builder with the configured resource pools
   *
   * @see #withResourcePools(ResourcePools)
   */
  public final UserManagedCacheBuilder<K, V, T> withResourcePools(ResourcePoolsBuilder resourcePoolsBuilder) {
    return withResourcePools(resourcePoolsBuilder.build());
  }
/**
* Adds a configuration for dispatcher concurrency in event processing.
*
* @param dispatcherConcurrency the dispatcher concurrency level
* @return a new builder with the added configuration
*/
public final UserManagedCacheBuilder<K, V, T> withDispatcherConcurrency(int dispatcherConcurrency) {
this.dispatcherConcurrency = dispatcherConcurrency;
return this;
}
/**
* Adds an {@link EvictionAdvisor} to the returned builder.
*
* @param evictionAdvisor the eviction advisor to use
* @return a new builder with the added eviction advisor
*/
public UserManagedCacheBuilder<K, V, T> withEvictionAdvisor(EvictionAdvisor<K, V> evictionAdvisor) {
if (evictionAdvisor == null) {
throw new NullPointerException("Null eviction advisor");
}
UserManagedCacheBuilder<K, V, T> otherBuilder = new UserManagedCacheBuilder<K, V, T>(this);
otherBuilder.evictionAdvisor = evictionAdvisor;
return otherBuilder;
}
/**
* Adds a {@link CacheLoaderWriter} to the returned builder.
*
* @param loaderWriter the cache loader writer to use
* @return a new builder with the added cache loader writer
*/
public UserManagedCacheBuilder<K, V, T> withLoaderWriter(CacheLoaderWriter<K, V> loaderWriter) {
if (loaderWriter == null) {
throw new NullPointerException("Null loaderWriter");
}
UserManagedCacheBuilder<K, V, T> otherBuilder = new UserManagedCacheBuilder<K, V, T>(this);
otherBuilder.cacheLoaderWriter = loaderWriter;
return otherBuilder;
}
/**
* Adds a configuration for {@link Copier key copying} using the key {@link Serializer} to the returned builder.
*
* @return a new builder with the added configuration
*
* @see #withKeyCopier(Copier)
* @see #withKeySerializer(Serializer)
*/
public UserManagedCacheBuilder<K, V, T> withKeySerializingCopier() {
UserManagedCacheBuilder<K, V, T> otherBuilder = new UserManagedCacheBuilder<K, V, T>(this);
otherBuilder.keyCopier = null;
otherBuilder.useKeySerializingCopier = true;
return otherBuilder;
}
/**
* Adds a configuration for {@link Copier value copying} using the key {@link Serializer} to the returned builder.
*
* @return a new builder with the added configuration
*
* @see #withValueCopier(Copier)
* @see #withValueSerializer(Serializer)
*/
public UserManagedCacheBuilder<K, V, T> withValueSerializingCopier() {
UserManagedCacheBuilder<K, V, T> otherBuilder = new UserManagedCacheBuilder<K, V, T>(this);
otherBuilder.valueCopier = null;
otherBuilder.useValueSerializingCopier = true;
return otherBuilder;
}
/**
* Adds a configuration for key {@link Copier} to the returned builder.
*
* @param keyCopier the key copier to use
* @return a new builder with the added key copier configuration
*
* @see #withKeySerializingCopier()
*/
public UserManagedCacheBuilder<K, V, T> withKeyCopier(Copier<K> keyCopier) {
if (keyCopier == null) {
throw new NullPointerException("Null key copier");
}
UserManagedCacheBuilder<K, V, T> otherBuilder = new UserManagedCacheBuilder<K, V, T>(this);
otherBuilder.keyCopier = keyCopier;
otherBuilder.useKeySerializingCopier = false;
return otherBuilder;
}
/**
* Adds a configuration for value {@link Copier} to the returned builder.
*
* @param valueCopier the value copier to use
* @return a new builder with the added value copier configuration
*
* @see #withValueSerializingCopier()
*/
public UserManagedCacheBuilder<K, V, T> withValueCopier(Copier<V> valueCopier) {
if (valueCopier == null) {
throw new NullPointerException("Null value copier");
}
UserManagedCacheBuilder<K, V, T> otherBuilder = new UserManagedCacheBuilder<K, V, T>(this);
otherBuilder.valueCopier = valueCopier;
otherBuilder.useValueSerializingCopier = false;
return otherBuilder;
}
/**
* Adds a configuration for key {@link Serializer} to the returned builder.
*
* @param keySerializer the key serializer to use
* @return a new builder with the added key serializer configuration
*/
public UserManagedCacheBuilder<K, V, T> withKeySerializer(Serializer<K> keySerializer) {
if (keySerializer == null) {
throw new NullPointerException("Null key serializer");
}
UserManagedCacheBuilder<K, V, T> otherBuilder = new UserManagedCacheBuilder<K, V, T>(this);
otherBuilder.keySerializer = keySerializer;
return otherBuilder;
}
/**
 * Adds a configuration for value {@link Serializer} to the returned builder.
 *
 * @param valueSerializer the value serializer to use
 * @return a new builder with the added value serializer configuration
 */
public UserManagedCacheBuilder<K, V, T> withValueSerializer(Serializer<V> valueSerializer) {
  if (valueSerializer == null) {
    throw new NullPointerException("Null value serializer");
  }
  // Builders are immutable: mutate a copy and hand it back.
  UserManagedCacheBuilder<K, V, T> copy = new UserManagedCacheBuilder<K, V, T>(this);
  copy.valueSerializer = valueSerializer;
  return copy;
}
/**
 * Adds or updates the {@link DefaultSizeOfEngineProviderConfiguration} with the specified object graph maximum size to the configured
 * builder.
 * <p>
 * {@link SizeOfEngine} is what enables the heap tier to be sized in {@link MemoryUnit}.
 *
 * @param size the maximum graph size
 * @return a new builder with the added / updated configuration
 */
public UserManagedCacheBuilder<K, V, T> withSizeOfMaxObjectGraph(long size) {
UserManagedCacheBuilder<K, V, T> otherBuilder = new UserManagedCacheBuilder<K, V, T>(this);
// Drop any previously configured size-of engine so the rebuilt configuration below fully replaces it.
removeAnySizeOfEngine(otherBuilder);
otherBuilder.objectGraphSize = size;
// Re-add a configuration combining the new graph size with the builder's current object size / unit.
otherBuilder.serviceCreationConfigurations.add(new DefaultSizeOfEngineProviderConfiguration(otherBuilder.maxObjectSize, otherBuilder.sizeOfUnit, otherBuilder.objectGraphSize));
return otherBuilder;
}
/**
 * Adds or updates the {@link DefaultSizeOfEngineProviderConfiguration} with the specified maximum mapping size to the configured
 * builder.
 * <p>
 * {@link SizeOfEngine} is what enables the heap tier to be sized in {@link MemoryUnit}.
 *
 * @param size the maximum mapping size
 * @param unit the memory unit
 * @return a new builder with the added / updated configuration
 */
public UserManagedCacheBuilder<K, V, T> withSizeOfMaxObjectSize(long size, MemoryUnit unit) {
UserManagedCacheBuilder<K, V, T> otherBuilder = new UserManagedCacheBuilder<K, V, T>(this);
// Drop any previously configured size-of engine so the rebuilt configuration below fully replaces it.
removeAnySizeOfEngine(otherBuilder);
otherBuilder.maxObjectSize = size;
otherBuilder.sizeOfUnit = unit;
// Re-add a configuration combining the new object size / unit with the builder's current graph size.
otherBuilder.serviceCreationConfigurations.add(new DefaultSizeOfEngineProviderConfiguration(otherBuilder.maxObjectSize, otherBuilder.sizeOfUnit, otherBuilder.objectGraphSize));
return otherBuilder;
}
/**
 * Creates a new {@code UserManagedCacheBuilder}.
 *
 * @param keyType the cache key type
 * @param valueType the cache value type
 * @param <K> the key type
 * @param <V> the value type
 * @return the new builder
 */
public static <K, V> UserManagedCacheBuilder<K, V, UserManagedCache<K, V>> newUserManagedCacheBuilder(Class<K> keyType, Class<V> valueType) {
  UserManagedCacheBuilder<K, V, UserManagedCache<K, V>> builder =
      new UserManagedCacheBuilder<K, V, UserManagedCache<K, V>>(keyType, valueType);
  return builder;
}
/**
 * Adds a {@link Service} to be made available to the returned builder.
 * <P>
 * Note that while {@link Service}s will be started upon {@link UserManagedCache} construction, no other lifecycle
 * operations will be performed on them. It is the responsibility of the developer to properly stop
 * {@code Service}s once they are no longer required.
 * </P>
 *
 * @param service the service to add
 * @return a new builder with the added service
 *
 * @see #using(ServiceCreationConfiguration)
 */
public UserManagedCacheBuilder<K, V, T> using(Service service) {
  UserManagedCacheBuilder<K, V, T> copy = new UserManagedCacheBuilder<K, V, T>(this);
  // A user-supplied size-of engine replaces any size-of configuration made so far.
  boolean suppliesSizeOfEngine = service instanceof SizeOfEngineProvider;
  if (suppliesSizeOfEngine) {
    removeAnySizeOfEngine(copy);
  }
  copy.services.add(service);
  return copy;
}
/**
 * Adds a {@link ServiceCreationConfiguration}, to trigger a service loading and its configuration, to the returned
 * builder.
 * <P>
 * Note that while {@link Service}s will be started upon {@link UserManagedCache} construction, no other lifecycle
 * operations will be performed on them. It is the responsibility of the developer to properly stop
 * {@code Service}s once they are no longer required. Which means that this method should not be used to get
 * services that require a stop.
 * </P>
 *
 * @param serviceConfiguration the service creation configuration to add
 * @return a new builder with the added service creation configuration
 *
 * @see #using(Service)
 */
public UserManagedCacheBuilder<K, V, T> using(ServiceCreationConfiguration<?> serviceConfiguration) {
  UserManagedCacheBuilder<K, V, T> copy = new UserManagedCacheBuilder<K, V, T>(this);
  // A user-supplied size-of configuration replaces any size-of configuration made so far.
  boolean suppliesSizeOfConfig = serviceConfiguration instanceof DefaultSizeOfEngineProviderConfiguration;
  if (suppliesSizeOfConfig) {
    removeAnySizeOfEngine(copy);
  }
  copy.serviceCreationConfigurations.add(serviceConfiguration);
  return copy;
}
/**
 * Strips any size-of engine service and its creation configuration from the given builder,
 * so that a subsequently added size-of setting fully replaces the previous one.
 *
 * @param builder the builder to clean up (mutated in place)
 */
private static void removeAnySizeOfEngine(UserManagedCacheBuilder<?, ?, ?> builder) {
  // Wildcard parameterization instead of a raw type: the removed entries do not depend on
  // the builder's type parameters, and this avoids unchecked-operation warnings at call sites.
  builder.services.remove(findSingletonAmongst(SizeOfEngineProvider.class, builder.services));
  builder.serviceCreationConfigurations.remove(findSingletonAmongst(DefaultSizeOfEngineProviderConfiguration.class, builder.serviceCreationConfigurations));
}
}
| |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.identitylink.service.impl.persistence.entity.data.impl;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.flowable.common.engine.api.scope.ScopeTypes;
import org.flowable.common.engine.impl.cfg.IdGenerator;
import org.flowable.common.engine.impl.db.AbstractDataManager;
import org.flowable.common.engine.impl.db.DbSqlSession;
import org.flowable.common.engine.impl.persistence.cache.CachedEntityMatcher;
import org.flowable.identitylink.api.history.HistoricIdentityLink;
import org.flowable.identitylink.service.IdentityLinkServiceConfiguration;
import org.flowable.identitylink.service.impl.persistence.entity.IdentityLinkEntity;
import org.flowable.identitylink.service.impl.persistence.entity.IdentityLinkEntityImpl;
import org.flowable.identitylink.service.impl.persistence.entity.data.IdentityLinkDataManager;
import org.flowable.identitylink.service.impl.persistence.entity.data.impl.cachematcher.IdentityLinksByProcessInstanceMatcher;
import org.flowable.identitylink.service.impl.persistence.entity.data.impl.cachematcher.IdentityLinksByProcessInstanceUserGroupAndTypeMatcher;
import org.flowable.identitylink.service.impl.persistence.entity.data.impl.cachematcher.IdentityLinksByScopeIdAndTypeMatcher;
import org.flowable.identitylink.service.impl.persistence.entity.data.impl.cachematcher.IdentityLinksByScopeIdScopeTypeUserGroupAndTypeMatcher;
import org.flowable.identitylink.service.impl.persistence.entity.data.impl.cachematcher.IdentityLinksBySubScopeIdAndTypeMatcher;
import org.flowable.identitylink.service.impl.persistence.entity.data.impl.cachematcher.IdentityLinksByTaskIdMatcher;
/**
 * MyBatis-backed {@link IdentityLinkDataManager}.
 *
 * <p>Lookups and deletes first consult the command-context entity cache when the owning
 * entity (task, execution or case instance) was inserted in the same command execution:
 * in that case no matching rows can exist in the database yet, so the query is answered
 * (or the delete performed) purely from the cache. Otherwise the mapped SQL statements
 * are executed.
 *
 * @author Joram Barrez
 */
public class MybatisIdentityLinkDataManager extends AbstractDataManager<IdentityLinkEntity> implements IdentityLinkDataManager {
// Matchers used to resolve identity links from the session cache instead of the database.
protected CachedEntityMatcher<IdentityLinkEntity> identityLinksByTaskIdMatcher = new IdentityLinksByTaskIdMatcher();
protected CachedEntityMatcher<IdentityLinkEntity> identityLinkByProcessInstanceMatcher = new IdentityLinksByProcessInstanceMatcher();
protected CachedEntityMatcher<IdentityLinkEntity> identityLinksByScopeIdAndTypeMatcher = new IdentityLinksByScopeIdAndTypeMatcher();
protected CachedEntityMatcher<IdentityLinkEntity> identityLinksBySubScopeIdAndTypeMatcher = new IdentityLinksBySubScopeIdAndTypeMatcher();
protected CachedEntityMatcher<IdentityLinkEntity> identityLinksByProcessInstanceUserGroupAndTypeMatcher = new IdentityLinksByProcessInstanceUserGroupAndTypeMatcher();
protected CachedEntityMatcher<IdentityLinkEntity> identityLinksByScopeIdScopeTypeUserGroupAndTypeMatcher = new IdentityLinksByScopeIdScopeTypeUserGroupAndTypeMatcher();
protected IdentityLinkServiceConfiguration identityLinkServiceConfiguration;
public MybatisIdentityLinkDataManager(IdentityLinkServiceConfiguration identityLinkServiceConfiguration) {
this.identityLinkServiceConfiguration = identityLinkServiceConfiguration;
}
@Override
public Class<? extends IdentityLinkEntity> getManagedEntityClass() {
return IdentityLinkEntityImpl.class;
}
@Override
public IdentityLinkEntity create() {
return new IdentityLinkEntityImpl();
}
@Override
public IdentityLinkEntity createIdentityLinkFromHistoricIdentityLink(HistoricIdentityLink historicIdentityLink) {
return new IdentityLinkEntityImpl(historicIdentityLink);
}
@Override
public List<IdentityLinkEntity> findIdentityLinksByTaskId(String taskId) {
DbSqlSession dbSqlSession = getDbSqlSession();
// If the task was inserted in this same command execution, its links can only live in the cache.
if (isEntityInserted(dbSqlSession, "task", taskId)) {
return getListFromCache(identityLinksByTaskIdMatcher, taskId);
}
return getList("selectIdentityLinksByTaskId", taskId, identityLinksByTaskIdMatcher, true);
}
@Override
public List<IdentityLinkEntity> findIdentityLinksByProcessInstanceId(String processInstanceId) {
DbSqlSession dbSqlSession = getDbSqlSession();
// If the process instance has been inserted in the same command execution as this query, there can't be any in the database
if (isEntityInserted(dbSqlSession, "execution", processInstanceId)) {
return getListFromCache(identityLinkByProcessInstanceMatcher, processInstanceId);
}
return getList("selectIdentityLinksByProcessInstance", processInstanceId, identityLinkByProcessInstanceMatcher, true);
}
@Override
public List<IdentityLinkEntity> findIdentityLinksByScopeIdAndType(String scopeId, String scopeType) {
Map<String, String> parameters = new HashMap<>();
parameters.put("scopeId", scopeId);
parameters.put("scopeType", scopeType);
return getList("selectIdentityLinksByScopeIdAndType", parameters, identityLinksByScopeIdAndTypeMatcher, true);
}
@Override
public List<IdentityLinkEntity> findIdentityLinksBySubScopeIdAndType(String subScopeId, String scopeType) {
Map<String, String> parameters = new HashMap<>();
parameters.put("subScopeId", subScopeId);
parameters.put("scopeType", scopeType);
return getList("selectIdentityLinksBySubScopeIdAndType", parameters, identityLinksBySubScopeIdAndTypeMatcher, true);
}
@Override
@SuppressWarnings("unchecked")
public List<IdentityLinkEntity> findIdentityLinksByScopeDefinitionIdAndType(String scopeDefinitionId, String scopeType) {
Map<String, String> parameters = new HashMap<>();
parameters.put("scopeDefinitionId", scopeDefinitionId);
parameters.put("scopeType", scopeType);
// Direct selectList: definition-scoped links are never created in the same command, so no cache matcher is needed.
return getDbSqlSession().selectList("selectIdentityLinksByScopeDefinitionAndType", parameters);
}
@Override
@SuppressWarnings("unchecked")
public List<IdentityLinkEntity> findIdentityLinksByProcessDefinitionId(String processDefinitionId) {
return getDbSqlSession().selectList("selectIdentityLinksByProcessDefinition", processDefinitionId);
}
@Override
@SuppressWarnings("unchecked")
public List<IdentityLinkEntity> findIdentityLinkByTaskUserGroupAndType(String taskId, String userId, String groupId, String type) {
Map<String, String> parameters = new HashMap<>();
parameters.put("taskId", taskId);
parameters.put("userId", userId);
parameters.put("groupId", groupId);
parameters.put("type", type);
return getDbSqlSession().selectList("selectIdentityLinkByTaskUserGroupAndType", parameters);
}
@Override
@SuppressWarnings("unchecked")
public List<IdentityLinkEntity> findIdentityLinkByProcessInstanceUserGroupAndType(String processInstanceId, String userId, String groupId, String type) {
Map<String, String> parameters = new HashMap<>();
parameters.put("processInstanceId", processInstanceId);
parameters.put("userId", userId);
parameters.put("groupId", groupId);
parameters.put("type", type);
return getList("selectIdentityLinkByProcessInstanceUserGroupAndType", parameters, identityLinksByProcessInstanceUserGroupAndTypeMatcher);
}
@Override
@SuppressWarnings("unchecked")
public List<IdentityLinkEntity> findIdentityLinkByProcessDefinitionUserAndGroup(String processDefinitionId, String userId, String groupId) {
Map<String, String> parameters = new HashMap<>();
parameters.put("processDefinitionId", processDefinitionId);
parameters.put("userId", userId);
parameters.put("groupId", groupId);
return getDbSqlSession().selectList("selectIdentityLinkByProcessDefinitionUserAndGroup", parameters);
}
@Override
@SuppressWarnings("unchecked")
public List<IdentityLinkEntity> findIdentityLinkByScopeIdScopeTypeUserGroupAndType(String scopeId, String scopeType, String userId, String groupId, String type) {
Map<String, String> parameters = new HashMap<>();
parameters.put("scopeId", scopeId);
parameters.put("scopeType", scopeType);
parameters.put("userId", userId);
parameters.put("groupId", groupId);
parameters.put("type", type);
return getList("selectIdentityLinkByScopeIdScopeTypeUserGroupAndType", parameters, identityLinksByScopeIdScopeTypeUserGroupAndTypeMatcher);
}
@Override
@SuppressWarnings("unchecked")
public List<IdentityLinkEntity> findIdentityLinkByScopeDefinitionScopeTypeUserAndGroup(String scopeDefinitionId, String scopeType, String userId, String groupId) {
Map<String, String> parameters = new HashMap<>();
parameters.put("scopeDefinitionId", scopeDefinitionId);
parameters.put("scopeType", scopeType);
parameters.put("userId", userId);
parameters.put("groupId", groupId);
return getDbSqlSession().selectList("selectIdentityLinkByScopeDefinitionScopeTypeUserAndGroup", parameters);
}
@Override
public void deleteIdentityLinksByTaskId(String taskId) {
DbSqlSession dbSqlSession = getDbSqlSession();
// Task inserted in this command: links exist only in the cache, so delete them there.
if (isEntityInserted(dbSqlSession, "task", taskId)) {
deleteCachedEntities(dbSqlSession, identityLinksByTaskIdMatcher, taskId);
} else {
bulkDelete("deleteIdentityLinksByTaskId", identityLinksByTaskIdMatcher, taskId);
}
}
@Override
public void deleteIdentityLinksByProcDef(String processDefId) {
getDbSqlSession().delete("deleteIdentityLinksByProcDef", processDefId, IdentityLinkEntityImpl.class);
}
@Override
public void deleteIdentityLinksByProcessInstanceId(String processInstanceId) {
DbSqlSession dbSqlSession = getDbSqlSession();
// Process instance inserted in this command: links exist only in the cache.
if (isEntityInserted(dbSqlSession, "execution", processInstanceId)) {
deleteCachedEntities(dbSqlSession, identityLinkByProcessInstanceMatcher, processInstanceId);
} else {
bulkDelete("deleteIdentityLinksByProcessInstanceId", identityLinkByProcessInstanceMatcher, processInstanceId);
}
}
@Override
public void deleteIdentityLinksByScopeIdAndScopeType(String scopeId, String scopeType) {
DbSqlSession dbSqlSession = getDbSqlSession();
Map<String, String> parameters = new HashMap<>();
parameters.put("scopeId", scopeId);
parameters.put("scopeType", scopeType);
// Only CMMN case instances get the cache shortcut here; other scope types always go to the database.
if (ScopeTypes.CMMN.equals(scopeType) && isEntityInserted(dbSqlSession, "caseInstance", scopeId)) {
deleteCachedEntities(dbSqlSession, identityLinksByScopeIdAndTypeMatcher, parameters);
} else {
bulkDelete("deleteIdentityLinksByScopeIdAndScopeType", identityLinksByScopeIdAndTypeMatcher, parameters);
}
}
@Override
public void deleteIdentityLinksByScopeDefinitionIdAndScopeType(String scopeDefinitionId, String scopeType) {
Map<String, String> parameters = new HashMap<>();
parameters.put("scopeDefinitionId", scopeDefinitionId);
parameters.put("scopeType", scopeType);
getDbSqlSession().delete("deleteIdentityLinksByScopeDefinitionIdAndScopeType", parameters, IdentityLinkEntityImpl.class);
}
@Override
protected IdGenerator getIdGenerator() {
return identityLinkServiceConfiguration.getIdGenerator();
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zookeeper.test;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.ZKTestCase;
import org.apache.zookeeper.ZooDefs;
import org.apache.zookeeper.test.ClientBase.CountdownWatcher;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Tests that session upgrade works from local to global sessions.
* Expected behavior is that if global-only sessions are unset,
* and no upgrade interval is specified, then sessions will be
* created locally to the host. They will be upgraded to global
* sessions iff an operation is done on that session which requires
* persistence, i.e. creating an ephemeral node.
*/
public class SessionUpgradeTest extends ZKTestCase {
protected static final Logger LOG = LoggerFactory.getLogger(SessionUpgradeTest.class);
public static final int CONNECTION_TIMEOUT = ClientBase.CONNECTION_TIMEOUT;
// Quorum under test; local sessions and upgrading are switched on in setUp().
private final QuorumBase qb = new QuorumBase();
@Before
public void setUp() throws Exception {
LOG.info("STARTING quorum " + getClass().getName());
// Sessions start host-local and are only upgraded to global sessions when an
// operation requiring persistence (e.g. creating an ephemeral node) is performed.
qb.localSessionsEnabled = true;
qb.localSessionsUpgradingEnabled = true;
qb.setUp();
ClientBase.waitForServerUp(qb.hostPort, 10000);
}
@After
public void tearDown() throws Exception {
LOG.info("STOPPING quorum " + getClass().getName());
qb.tearDown();
}
@Test
public void testLocalSessionsWithoutEphemeralOnFollower() throws Exception {
testLocalSessionsWithoutEphemeral(false);
}
@Test
public void testLocalSessionsWithoutEphemeralOnLeader() throws Exception {
testLocalSessionsWithoutEphemeral(true);
}
// Verifies that a session which only did persistent operations stays local:
// its id is rejected by other servers, survives a quick reconnect to the same
// server, and is invalid after an explicit close.
private void testLocalSessionsWithoutEphemeral(boolean testLeader)
throws Exception {
String nodePrefix = "/testLocalSessions-"
+ (testLeader ? "leaderTest-" : "followerTest-");
int leaderIdx = qb.getLeaderIndex();
Assert.assertFalse("No leader in quorum?", leaderIdx == -1);
// NOTE(review): assumes a 5-server ensemble (QuorumBase default) — indices wrap mod 5.
int followerIdx = (leaderIdx + 1) % 5;
int otherFollowerIdx = (leaderIdx + 2) % 5;
int testPeerIdx = testLeader ? leaderIdx : followerIdx;
String hostPorts[] = qb.hostPort.split(",");
CountdownWatcher watcher = new CountdownWatcher();
DisconnectableZooKeeper zk = new DisconnectableZooKeeper(
hostPorts[testPeerIdx], CONNECTION_TIMEOUT, watcher);
watcher.waitForConnected(CONNECTION_TIMEOUT);
// Try creating some data.
for (int i = 0; i < 5; i++) {
zk.create(nodePrefix + i, new byte[0],
ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
}
long localSessionId = zk.getSessionId();
byte[] localSessionPwd = zk.getSessionPasswd().clone();
// Try connecting with the same session id on a different
// server. This should fail since it is a local session.
try {
watcher.reset();
DisconnectableZooKeeper zknew = new DisconnectableZooKeeper(
hostPorts[otherFollowerIdx], CONNECTION_TIMEOUT, watcher,
localSessionId, localSessionPwd);
zknew.create(nodePrefix + "5", new byte[0],
ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
Assert.fail("Connection on the same session ID should fail.");
} catch (KeeperException.SessionExpiredException e) {
// expected: the session is unknown to that server
} catch (KeeperException.ConnectionLossException e) {
// expected: the server may instead drop the connection
}
// If we're testing a follower, also check the session id on the
// leader. This should also fail
if (!testLeader) {
try {
watcher.reset();
DisconnectableZooKeeper zknew = new DisconnectableZooKeeper(
hostPorts[leaderIdx], CONNECTION_TIMEOUT,
watcher, localSessionId, localSessionPwd);
zknew.create(nodePrefix + "5", new byte[0],
ZooDefs.Ids.OPEN_ACL_UNSAFE,
CreateMode.PERSISTENT);
Assert.fail("Connection on the same session ID should fail.");
} catch (KeeperException.SessionExpiredException e) {
// expected: the session is unknown to the leader
} catch (KeeperException.ConnectionLossException e) {
// expected: the leader may instead drop the connection
}
}
// However, we should be able to disconnect and reconnect to the same
// server with the same session id (as long as we do it quickly
// before expiration).
zk.disconnect();
watcher.reset();
zk = new DisconnectableZooKeeper(
hostPorts[testPeerIdx], CONNECTION_TIMEOUT, watcher,
localSessionId, localSessionPwd);
watcher.waitForConnected(CONNECTION_TIMEOUT);
zk.create(nodePrefix + "6", new byte[0],
ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
// If we explicitly close the session, then the session id should no
// longer be valid.
zk.close();
try {
watcher.reset();
zk = new DisconnectableZooKeeper(
hostPorts[testPeerIdx], CONNECTION_TIMEOUT, watcher,
localSessionId, localSessionPwd);
zk.create(nodePrefix + "7", new byte[0],
ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
Assert.fail("Reconnecting to a closed session ID should fail.");
} catch (KeeperException.SessionExpiredException e) {
// expected: the session was explicitly closed
}
}
@Test
public void testUpgradeWithEphemeralOnFollower() throws Exception {
testUpgradeWithEphemeral(false);
}
@Test
public void testUpgradeWithEphemeralOnLeader() throws Exception {
testUpgradeWithEphemeral(true);
}
// Verifies that creating ephemeral nodes upgrades a local session to a global
// one: the session becomes usable from any server until it is closed.
private void testUpgradeWithEphemeral(boolean testLeader)
throws Exception {
String nodePrefix = "/testUpgrade-"
+ (testLeader ? "leaderTest-" : "followerTest-");
int leaderIdx = qb.getLeaderIndex();
Assert.assertFalse("No leader in quorum?", leaderIdx == -1);
// NOTE(review): assumes a 5-server ensemble (QuorumBase default) — indices wrap mod 5.
int followerIdx = (leaderIdx + 1) % 5;
int otherFollowerIdx = (leaderIdx + 2) % 5;
int testPeerIdx = testLeader ? leaderIdx : followerIdx;
String hostPorts[] = qb.hostPort.split(",");
CountdownWatcher watcher = new CountdownWatcher();
DisconnectableZooKeeper zk = new DisconnectableZooKeeper(
hostPorts[testPeerIdx], CONNECTION_TIMEOUT, watcher);
watcher.waitForConnected(CONNECTION_TIMEOUT);
// Create some ephemeral nodes. This should force the session to
// be propagated to the other servers in the ensemble.
for (int i = 0; i < 5; i++) {
zk.create(nodePrefix + i, new byte[0],
ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL);
}
// We should be able to reconnect with the same session id on a
// different server, since it has been propagated.
long localSessionId = zk.getSessionId();
byte[] localSessionPwd = zk.getSessionPasswd().clone();
zk.disconnect();
watcher.reset();
zk = new DisconnectableZooKeeper(
hostPorts[otherFollowerIdx], CONNECTION_TIMEOUT, watcher,
localSessionId, localSessionPwd);
watcher.waitForConnected(CONNECTION_TIMEOUT);
// The created ephemeral nodes are still around.
for (int i = 0; i < 5; i++) {
Assert.assertNotNull(zk.exists(nodePrefix + i, null));
}
// When we explicitly close the session, we should not be able to
// reconnect with the same session id
zk.close();
try {
watcher.reset();
zk = new DisconnectableZooKeeper(
hostPorts[otherFollowerIdx], CONNECTION_TIMEOUT, watcher,
localSessionId, localSessionPwd);
zk.exists(nodePrefix + "0", null);
Assert.fail("Reconnecting to a closed session ID should fail.");
} catch (KeeperException.SessionExpiredException e) {
// expected: the session was explicitly closed
}
watcher.reset();
// And the ephemeral nodes will be gone since the session died.
zk = new DisconnectableZooKeeper(
hostPorts[testPeerIdx], CONNECTION_TIMEOUT, watcher);
watcher.waitForConnected(CONNECTION_TIMEOUT);
for (int i = 0; i < 5; i++) {
Assert.assertNull(zk.exists(nodePrefix + i, null));
}
}
}
| |
/*
* Copyright 2014, gRPC Authors All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.grpc.internal;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.instrumentation.stats.Stats;
import com.google.instrumentation.stats.StatsContextFactory;
import io.grpc.BindableService;
import io.grpc.CompressorRegistry;
import io.grpc.Context;
import io.grpc.DecompressorRegistry;
import io.grpc.HandlerRegistry;
import io.grpc.Internal;
import io.grpc.InternalNotifyOnServerBuild;
import io.grpc.Server;
import io.grpc.ServerBuilder;
import io.grpc.ServerInterceptor;
import io.grpc.ServerMethodDefinition;
import io.grpc.ServerServiceDefinition;
import io.grpc.ServerStreamTracer;
import io.grpc.ServerTransportFilter;
import io.opencensus.trace.Tracing;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.Executor;
import javax.annotation.Nullable;
/**
* The base class for server builders.
*
* @param <T> The concrete type for this builder.
*/
public abstract class AbstractServerImplBuilder<T extends AbstractServerImplBuilder<T>>
    extends ServerBuilder<T> {

  /** Hidden by concrete subclasses; invoking it on the abstract base is a programming error. */
  public static ServerBuilder<?> forPort(int port) {
    throw new UnsupportedOperationException("Subclass failed to hide static factory");
  }

  private static final ObjectPool<? extends Executor> DEFAULT_EXECUTOR_POOL =
      SharedResourcePool.forResource(GrpcUtil.SHARED_CHANNEL_EXECUTOR);

  // Fallback registry used when the user supplies none: resolves no services or methods.
  private static final HandlerRegistry DEFAULT_FALLBACK_REGISTRY = new HandlerRegistry() {
    @Override
    public List<ServerServiceDefinition> getServices() {
      return Collections.emptyList();
    }

    @Override
    public ServerMethodDefinition<?, ?> lookupMethod(String methodName,
        @Nullable String authority) {
      return null;
    }
  };

  private static final DecompressorRegistry DEFAULT_DECOMPRESSOR_REGISTRY =
      DecompressorRegistry.getDefaultInstance();
  private static final CompressorRegistry DEFAULT_COMPRESSOR_REGISTRY =
      CompressorRegistry.getDefaultInstance();

  final InternalHandlerRegistry.Builder registryBuilder =
      new InternalHandlerRegistry.Builder();
  final List<ServerTransportFilter> transportFilters =
      new ArrayList<ServerTransportFilter>();
  final List<ServerInterceptor> interceptors = new ArrayList<ServerInterceptor>();
  private final List<InternalNotifyOnServerBuild> notifyOnBuildList =
      new ArrayList<InternalNotifyOnServerBuild>();
  private final List<ServerStreamTracer.Factory> streamTracerFactories =
      new ArrayList<ServerStreamTracer.Factory>();
  HandlerRegistry fallbackRegistry = DEFAULT_FALLBACK_REGISTRY;
  ObjectPool<? extends Executor> executorPool = DEFAULT_EXECUTOR_POOL;
  DecompressorRegistry decompressorRegistry = DEFAULT_DECOMPRESSOR_REGISTRY;
  CompressorRegistry compressorRegistry = DEFAULT_COMPRESSOR_REGISTRY;

  @Nullable
  private StatsContextFactory statsFactory;
  private boolean statsEnabled = true;
  private boolean recordStats = true;
  private boolean tracingEnabled = true;

  @Override
  public final T directExecutor() {
    return executor(MoreExecutors.directExecutor());
  }

  @Override
  public final T executor(@Nullable Executor executor) {
    // null restores the default shared executor pool.
    if (executor != null) {
      this.executorPool = new FixedObjectPool<Executor>(executor);
    } else {
      this.executorPool = DEFAULT_EXECUTOR_POOL;
    }
    return thisT();
  }

  @Override
  public final T addService(ServerServiceDefinition service) {
    registryBuilder.addService(service);
    return thisT();
  }

  @Override
  public final T addService(BindableService bindableService) {
    // Services that want the built server handed back are remembered and notified in build().
    if (bindableService instanceof InternalNotifyOnServerBuild) {
      notifyOnBuildList.add((InternalNotifyOnServerBuild) bindableService);
    }
    return addService(bindableService.bindService());
  }

  @Override
  public final T addTransportFilter(ServerTransportFilter filter) {
    transportFilters.add(checkNotNull(filter, "filter"));
    return thisT();
  }

  @Override
  public final T intercept(ServerInterceptor interceptor) {
    // Fail fast on null, consistent with addTransportFilter/addStreamTracerFactory and the
    // ServerBuilder contract; previously a null slipped in and only failed at call time.
    interceptors.add(checkNotNull(interceptor, "interceptor"));
    return thisT();
  }

  @Override
  public final T addStreamTracerFactory(ServerStreamTracer.Factory factory) {
    streamTracerFactories.add(checkNotNull(factory, "factory"));
    return thisT();
  }

  @Override
  public final T fallbackHandlerRegistry(HandlerRegistry registry) {
    // null restores the default (empty) fallback registry.
    if (registry != null) {
      this.fallbackRegistry = registry;
    } else {
      this.fallbackRegistry = DEFAULT_FALLBACK_REGISTRY;
    }
    return thisT();
  }

  @Override
  public final T decompressorRegistry(DecompressorRegistry registry) {
    if (registry != null) {
      decompressorRegistry = registry;
    } else {
      decompressorRegistry = DEFAULT_DECOMPRESSOR_REGISTRY;
    }
    return thisT();
  }

  @Override
  public final T compressorRegistry(CompressorRegistry registry) {
    if (registry != null) {
      compressorRegistry = registry;
    } else {
      compressorRegistry = DEFAULT_COMPRESSOR_REGISTRY;
    }
    return thisT();
  }

  /**
   * Override the default stats implementation.
   */
  @VisibleForTesting
  protected T statsContextFactory(StatsContextFactory statsFactory) {
    this.statsFactory = statsFactory;
    return thisT();
  }

  /**
   * Disable or enable stats features. Enabled by default.
   */
  protected void setStatsEnabled(boolean value) {
    statsEnabled = value;
  }

  /**
   * Disable or enable stats recording. Effective only if {@link #setStatsEnabled} is set to true.
   * Enabled by default.
   */
  protected void setRecordStats(boolean value) {
    recordStats = value;
  }

  /**
   * Disable or enable tracing features. Enabled by default.
   */
  protected void setTracingEnabled(boolean value) {
    tracingEnabled = value;
  }

  @Override
  public Server build() {
    ServerImpl server = new ServerImpl(
        this,
        buildTransportServer(Collections.unmodifiableList(getTracerFactories())),
        Context.ROOT);
    // Let services that asked for it observe the fully built server.
    for (InternalNotifyOnServerBuild notifyTarget : notifyOnBuildList) {
      notifyTarget.notifyOnBuild(server);
    }
    return server;
  }

  /**
   * Assembles the effective tracer factories: census stats (if enabled and a factory is
   * available), census tracing (if enabled), then user-supplied factories, in that order.
   */
  @VisibleForTesting
  final List<ServerStreamTracer.Factory> getTracerFactories() {
    ArrayList<ServerStreamTracer.Factory> tracerFactories =
        new ArrayList<ServerStreamTracer.Factory>();
    if (statsEnabled) {
      StatsContextFactory statsFactory =
          this.statsFactory != null ? this.statsFactory : Stats.getStatsContextFactory();
      if (statsFactory != null) {
        CensusStatsModule censusStats =
            new CensusStatsModule(statsFactory, GrpcUtil.STOPWATCH_SUPPLIER, true, recordStats);
        tracerFactories.add(censusStats.getServerTracerFactory());
      }
    }
    if (tracingEnabled) {
      CensusTracingModule censusTracing =
          new CensusTracingModule(Tracing.getTracer(),
              Tracing.getPropagationComponent().getBinaryFormat());
      tracerFactories.add(censusTracing.getServerTracerFactory());
    }
    tracerFactories.addAll(streamTracerFactories);
    return tracerFactories;
  }

  /**
   * Children of AbstractServerBuilder should override this method to provide transport specific
   * information for the server. This method is mean for Transport implementors and should not be
   * used by normal users.
   *
   * @param streamTracerFactories an immutable list of stream tracer factories
   */
  @Internal
  protected abstract io.grpc.internal.InternalServer buildTransportServer(
      List<ServerStreamTracer.Factory> streamTracerFactories);

  private T thisT() {
    @SuppressWarnings("unchecked")
    T thisT = (T) this;
    return thisT;
  }
}
| |
package org.testng.reporters;
import java.lang.reflect.Method;
import java.util.List;
import org.testng.ITestContext;
import org.testng.ITestNGMethod;
import org.testng.ITestResult;
import org.testng.TestListenerAdapter;
import org.testng.internal.Utils;
/**
* Reporter printing out detailed messages about what TestNG
* is going to run and what is the status of what has been just run.
*
* To see messages from this reporter, either run Ant in verbose mode ('ant -v')
* or set verbose level to 5 or higher
*
* @author Lukas Jungmann
* @since 6.4
*/
public class VerboseReporter extends TestListenerAdapter {
/**
 * Default prefix for messages printed out by this reporter.
 */
public static final String LISTENER_PREFIX = "[VerboseTestNG] ";
// Name of the suite currently running; set in onStart and cleared in onFinish.
private String suiteName;
// Prefix prepended to every message this reporter prints.
private final String prefix;
/**
 * Log states of this reporter, each carrying the matching {@link ITestResult} status constant.
 */
private enum Status {
  SUCCESS(ITestResult.SUCCESS), FAILURE(ITestResult.FAILURE), SKIP(ITestResult.SKIP),
  SUCCESS_PERCENTAGE_FAILURE(ITestResult.SUCCESS_PERCENTAGE_FAILURE), STARTED(ITestResult.STARTED);

  // Assigned once in the constructor and never changed, so declared final.
  private final int status;

  private Status(int i) {
    status = i;
  }
}
/**
 * Creates a reporter that prefixes its messages with {@link #LISTENER_PREFIX}.
 */
public VerboseReporter() {
this(LISTENER_PREFIX);
}
/**
 * Create VerboseReporter with custom prefix
 *
 * @param prefix prefix for messages printed out by this reporter
 */
public VerboseReporter(String prefix) {
this.prefix = prefix;
}
@Override
public void beforeConfiguration(ITestResult tr) {
super.beforeConfiguration(tr);
logTestResult(Status.STARTED, tr, true);
}
@Override
public void onConfigurationFailure(ITestResult tr) {
super.onConfigurationFailure(tr);
logTestResult(Status.FAILURE, tr, true);
}
@Override
public void onConfigurationSkip(ITestResult tr) {
super.onConfigurationSkip(tr);
logTestResult(Status.SKIP, tr, true);
}
@Override
public void onConfigurationSuccess(ITestResult tr) {
super.onConfigurationSuccess(tr);
logTestResult(Status.SUCCESS, tr, true);
}
@Override
public void onTestStart(ITestResult tr) {
logTestResult(Status.STARTED, tr, false);
}
@Override
public void onTestFailure(ITestResult tr) {
super.onTestFailure(tr);
logTestResult(Status.FAILURE, tr, false);
}
@Override
public void onTestFailedButWithinSuccessPercentage(ITestResult tr) {
super.onTestFailedButWithinSuccessPercentage(tr);
logTestResult(Status.SUCCESS_PERCENTAGE_FAILURE, tr, false);
}
@Override
public void onTestSkipped(ITestResult tr) {
super.onTestSkipped(tr);
logTestResult(Status.SKIP, tr, false);
}
@Override
public void onTestSuccess(ITestResult tr) {
super.onTestSuccess(tr);
logTestResult(Status.SUCCESS, tr, false);
}
@Override
public void onStart(ITestContext ctx) {
suiteName = ctx.getName();//ctx.getSuite().getXmlSuite().getFileName();
log("RUNNING: Suite: \"" + suiteName + "\" containing \"" + ctx.getAllTestMethods().length + "\" Tests (config: " + ctx.getSuite().getXmlSuite().getFileName() + ")");
}
@Override
public void onFinish(ITestContext context) {
logResults();
suiteName = null;
}
private ITestNGMethod[] resultsToMethods(List<ITestResult> results) {
ITestNGMethod[] result = new ITestNGMethod[results.size()];
int i = 0;
for (ITestResult tr : results) {
result[i++] = tr.getMethod();
}
return result;
}
/**
* Print out test summary
*/
private void logResults() {
//
// Log test summary
//
ITestNGMethod[] ft = resultsToMethods(getFailedTests());
StringBuilder sb = new StringBuilder("\n===============================================\n");
sb.append(" ").append(suiteName).append("\n");
sb.append(" Tests run: ").append(Utils.calculateInvokedMethodCount(getAllTestMethods()));
sb.append(", Failures: ").append(Utils.calculateInvokedMethodCount(ft));
sb.append(", Skips: ").append(Utils.calculateInvokedMethodCount(resultsToMethods(getSkippedTests())));
int confFailures = getConfigurationFailures().size();
int confSkips = getConfigurationSkips().size();
if (confFailures > 0 || confSkips > 0) {
sb.append("\n").append(" Configuration Failures: ").append(confFailures);
sb.append(", Skips: ").append(confSkips);
}
sb.append("\n===============================================");
log(sb.toString());
}
/**
* Log meaningful message for passed in arguments.
* Message itself is of form:
* $status: "$suiteName" - $methodDeclaration ($actualArguments) finished in $x ms ($run of $totalRuns)
*
* @param st status of passed in itr
* @param itr test result to be described
* @param isConfMethod is itr describing configuration method
*/
private void logTestResult(Status st, ITestResult itr, boolean isConfMethod) {
StringBuilder sb = new StringBuilder();
StringBuilder succRate = null;
String stackTrace = "";
switch (st) {
case STARTED:
sb.append("INVOKING");
break;
case SKIP:
sb.append("SKIPPED");
stackTrace = itr.getThrowable() != null
? Utils.stackTrace(itr.getThrowable(), false)[0] : "";
break;
case FAILURE:
sb.append("FAILED");
stackTrace = itr.getThrowable() != null
? Utils.stackTrace(itr.getThrowable(), false)[0] : "";
break;
case SUCCESS:
sb.append("PASSED");
break;
case SUCCESS_PERCENTAGE_FAILURE:
sb.append("PASSED with failures");
break;
default:
//not happen
throw new RuntimeException("Unsupported test status:" + itr.getStatus());
}
if (isConfMethod) {
sb.append(" CONFIGURATION: ");
} else {
sb.append(": ");
}
ITestNGMethod tm = itr.getMethod();
int identLevel = sb.length();
sb.append(getMethodDeclaration(tm));
Object[] params = itr.getParameters();
Class[] paramTypes = tm.getMethod().getParameterTypes();
if (null != params && params.length > 0) {
// The error might be a data provider parameter mismatch, so make
// a special case here
if (params.length != paramTypes.length) {
sb.append("Wrong number of arguments were passed by the Data Provider: found ");
sb.append(params.length);
sb.append(" but expected ");
sb.append(paramTypes.length);
} else {
sb.append("(value(s): ");
for (int i = 0; i < params.length; i++) {
if (i > 0) {
sb.append(", ");
}
sb.append(Utils.toString(params[i], paramTypes[i]));
}
sb.append(")");
}
}
if (Status.STARTED != st) {
sb.append(" finished in ");
sb.append(itr.getEndMillis() - itr.getStartMillis());
sb.append(" ms");
if (!Utils.isStringEmpty(tm.getDescription())) {
sb.append("\n");
for (int i = 0; i < identLevel; i++) {
sb.append(" ");
}
sb.append(tm.getDescription());
}
if (tm.getInvocationCount() > 1) {
sb.append(" (");
sb.append(tm.getCurrentInvocationCount());
sb.append(" of ");
sb.append(tm.getInvocationCount());
sb.append(")");
}
if (!Utils.isStringEmpty(stackTrace)) {
sb.append("\n").append(stackTrace.substring(0, stackTrace.lastIndexOf(System.getProperty("line.separator"))));
}
} else {
if (!isConfMethod && tm.getInvocationCount() > 1) {
sb.append(" success: ");
sb.append(tm.getSuccessPercentage());
sb.append("%");
}
}
log(sb.toString());
}
protected void log(String message) {
//prefix all output lines
System.out.println(message.replaceAll("(?m)^", prefix));
}
/**
*
* @param method method to be described
* @return FQN of a class + method declaration for a method passed in
* ie. test.triangle.CheckCount.testCheckCount(java.lang.String)
*/
private String getMethodDeclaration(ITestNGMethod method) {
//see Utils.detailedMethodName
//perhaps should rather adopt the original method instead
Method m = method.getMethod();
StringBuilder buf = new StringBuilder();
buf.append("\"");
if (suiteName != null) {
buf.append(suiteName);
} else {
buf.append("UNKNOWN");
}
buf.append("\"");
buf.append(" - ");
if (method.isBeforeSuiteConfiguration()) {
buf.append("@BeforeSuite ");
} else if (method.isBeforeTestConfiguration()) {
buf.append("@BeforeTest ");
} else if (method.isBeforeClassConfiguration()) {
buf.append("@BeforeClass ");
} else if (method.isBeforeGroupsConfiguration()) {
buf.append("@BeforeGroups ");
} else if (method.isBeforeMethodConfiguration()) {
buf.append("@BeforeMethod ");
} else if (method.isAfterMethodConfiguration()) {
buf.append("@AfterMethod ");
} else if (method.isAfterGroupsConfiguration()) {
buf.append("@AfterGroups ");
} else if (method.isAfterClassConfiguration()) {
buf.append("@AfterClass ");
} else if (method.isAfterTestConfiguration()) {
buf.append("@AfterTest ");
} else if (method.isAfterSuiteConfiguration()) {
buf.append("@AfterSuite ");
}
buf.append(m.getDeclaringClass().getName());
buf.append(".");
buf.append(m.getName());
buf.append("(");
int i = 0;
for (Class<?> p : m.getParameterTypes()) {
if (i++ > 0) {
buf.append(", ");
}
buf.append(p.getName());
}
buf.append(")");
return buf.toString();
}
@Override
public String toString() {
return "VerboseReporter{" + "suiteName=" + suiteName + '}';
}
}
| |
package com.elusivehawk.util.math;
import java.nio.DoubleBuffer;
import com.elusivehawk.util.parse.json.IJsonSerializer;
import com.elusivehawk.util.storage.BufferHelper;
/**
*
*
*
* @author Elusivehawk
*/
public class DoubleArithmetic extends Arithmetic implements IJsonSerializer
{
    // Backing storage; the array length permanently fixes size().
    private final double[] data;

    /** Creates a zero-filled object of the given size. */
    public DoubleArithmetic(int size)
    {
        this(new double[size]);
    }

    /** Wraps the given array directly (no defensive copy — callers share mutations). */
    @SuppressWarnings("unqualified-field-access")
    public DoubleArithmetic(double... fs)
    {
        data = fs;
    }

    /** Creates an object holding all remaining doubles of the buffer. */
    public DoubleArithmetic(DoubleBuffer buf)
    {
        this(buf.remaining(), buf);
    }

    /**
     * Creates an object of the given size filled from the buffer.
     * FIX: the original ignored {@code buf} entirely, leaving the data zeroed;
     * values are now read, mirroring the copy constructor below. Assumes
     * {@code buf} has at least {@code size} remaining values — guaranteed by
     * the DoubleBuffer-only constructor above.
     */
    public DoubleArithmetic(int size, DoubleBuffer buf)
    {
        this(size);
        set(buf);
        setIsDirty(false);
    }

    /** Copy constructor; the new object starts clean (not dirty). */
    public DoubleArithmetic(DoubleArithmetic arith)
    {
        this(arith.size());
        set(arith);
        setIsDirty(false);
    }

    @Override
    public final int size()
    {
        return this.data.length;
    }

    @Override
    public String toJson(int tabs)
    {
        // The tabs argument is unused; output is a flat JSON array of the
        // components, which is exactly what toString() produces.
        return this.toString();
    }

    /** @return the components as "[a, b, c]". */
    @Override
    public String toString()
    {
        StringBuilder ret = new StringBuilder();
        ret.append("[");
        for (int c = 0; c < this.size(); c++)
        {
            if (c > 0)
            {
                ret.append(", ");
            }
            ret.append(this.get(c));
        }
        ret.append("]");
        return ret.toString();
    }

    public double get(int index)
    {
        return this.data[index];
    }

    public DoubleArithmetic add(int index, double value)
    {
        return this.add(index, value, this);
    }

    public DoubleArithmetic add(int index, double value, DoubleArithmetic dest)
    {
        return dest.set(index, this.get(index) + value);
    }

    public DoubleArithmetic add(DoubleArithmetic arith)
    {
        return this.add(arith, this);
    }

    /** Component-wise addition, written into {@code dest}; returns {@code dest}. */
    public DoubleArithmetic add(DoubleArithmetic arith, DoubleArithmetic dest)
    {
        for (int c = 0; c < this.size(); c++)
        {
            this.add(c, arith.get(c), dest);
        }
        return dest;
    }

    public DoubleArithmetic addAll(double f)
    {
        for (int c = 0; c < this.size(); c++)
        {
            this.add(c, f);
        }
        return this;
    }

    public DoubleArithmetic div(int index, double value)
    {
        return this.div(index, value, this);
    }

    public DoubleArithmetic div(int index, double value, DoubleArithmetic dest)
    {
        return dest.set(index, this.get(index) / value);
    }

    public DoubleArithmetic div(DoubleArithmetic arith)
    {
        return this.div(arith, this);
    }

    /** Component-wise division, written into {@code dest}; returns {@code dest}. */
    public DoubleArithmetic div(DoubleArithmetic arith, DoubleArithmetic dest)
    {
        for (int c = 0; c < this.size(); c++)
        {
            this.div(c, arith.get(c), dest);
        }
        return dest;
    }

    public DoubleArithmetic divAll(double f)
    {
        for (int c = 0; c < this.size(); c++)
        {
            this.div(c, f);
        }
        return this;
    }

    public DoubleArithmetic mul(int index, double value)
    {
        return this.mul(index, value, this);
    }

    public DoubleArithmetic mul(int index, double value, DoubleArithmetic dest)
    {
        return dest.set(index, this.get(index) * value);
    }

    public DoubleArithmetic mul(DoubleArithmetic arith)
    {
        return this.mul(arith, this);
    }

    /** Component-wise multiplication, written into {@code dest}; returns {@code dest}. */
    public DoubleArithmetic mul(DoubleArithmetic arith, DoubleArithmetic dest)
    {
        for (int c = 0; c < this.size(); c++)
        {
            this.mul(c, arith.get(c), dest);
        }
        return dest;
    }

    public DoubleArithmetic mulAll(double f)
    {
        for (int c = 0; c < this.size(); c++)
        {
            this.mul(c, f);
        }
        return this;
    }

    public DoubleArithmetic sub(int index, double value)
    {
        return this.sub(index, value, this);
    }

    public DoubleArithmetic sub(int index, double value, DoubleArithmetic dest)
    {
        // FIX: the original added the value instead of subtracting it,
        // making sub() behave identically to add().
        return dest.set(index, this.get(index) - value);
    }

    public DoubleArithmetic sub(DoubleArithmetic arith)
    {
        return this.sub(arith, this);
    }

    /** Component-wise subtraction, written into {@code dest}; returns {@code dest}. */
    public DoubleArithmetic sub(DoubleArithmetic arith, DoubleArithmetic dest)
    {
        for (int c = 0; c < this.size(); c++)
        {
            this.sub(c, arith.get(c), dest);
        }
        return dest;
    }

    public DoubleArithmetic subAll(double f)
    {
        for (int c = 0; c < this.size(); c++)
        {
            this.sub(c, f);
        }
        return this;
    }

    /**
     * Sets one component, marking the object dirty only on an actual change.
     * Must not be called on an immutable instance.
     */
    public DoubleArithmetic set(int index, double value)
    {
        assert !this.isImmutable();
        if (this.data[index] != value)
        {
            if (this.isSync())
            {
                synchronized (this)
                {
                    this.data[index] = value;
                }
            }
            else
            {
                this.data[index] = value;
            }
            this.setIsDirty(true);
        }
        return this;
    }

    public DoubleArithmetic set(double... fs)
    {
        for (int c = 0; c < this.size(); c++)
        {
            this.set(c, fs[c]);
        }
        return this;
    }

    public DoubleArithmetic set(DoubleArithmetic arith)
    {
        for (int c = 0; c < this.size(); c++)
        {
            this.set(c, arith.get(c));
        }
        return this;
    }

    public DoubleArithmetic set(DoubleBuffer buf)
    {
        for (int c = 0; c < this.size(); c++)
        {
            this.set(c, buf.get());
        }
        return this;
    }

    public DoubleArithmetic setAll(double value)
    {
        for (int c = 0; c < this.size(); c++)
        {
            this.set(c, value);
        }
        return this;
    }

    public DoubleArithmetic cos()
    {
        return this.cos(this);
    }

    public DoubleArithmetic cos(DoubleArithmetic dest)
    {
        for (int c = 0; c < this.size(); c++)
        {
            dest.set(c, Math.cos(this.get(c)));
        }
        // FIX: return dest for consistency with add/div/mul/sub
        // (the original returned this even when writing into dest).
        return dest;
    }

    public DoubleArithmetic sin()
    {
        // FIX: the original delegated to cos(this), computing the cosine.
        return this.sin(this);
    }

    public DoubleArithmetic sin(DoubleArithmetic dest)
    {
        for (int c = 0; c < this.size(); c++)
        {
            dest.set(c, Math.sin(this.get(c)));
        }
        // FIX: return dest for consistency with add/div/mul/sub.
        return dest;
    }

    /** Writes all components into the buffer; returns the buffer for chaining. */
    public DoubleBuffer put(DoubleBuffer buf)
    {
        buf.put(this.data);
        return buf;
    }

    public DoubleBuffer asBuffer()
    {
        return BufferHelper.makeDoubleBuffer(this.data);
    }
}
| |
package com.whenbus.whenbus;
/**
* Created by harsha on 1/4/17.
*/
import android.app.ProgressDialog;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.content.Intent;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
import android.widget.Toast;
import java.io.FileOutputStream;
import butterknife.ButterKnife;
import butterknife.InjectView;
import okhttp3.HttpUrl;
import okhttp3.MediaType;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.Response;
public class LoginActivity extends AppCompatActivity {
private static final String TAG = "LoginActivity";
private static final int REQUEST_SIGNUP = 0;
@InjectView(R.id.input_email) EditText _emailText;
@InjectView(R.id.input_password) EditText _passwordText;
@InjectView(R.id.btn_login) Button _loginButton;
@InjectView(R.id.link_signup) TextView _signupLink;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_login);
ButterKnife.inject(this);
_loginButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
login();
}
});
_signupLink.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
// Start the Signup activity
Intent intent = new Intent(getApplicationContext(), SignupActivity.class);
startActivityForResult(intent, REQUEST_SIGNUP);
}
});
}
public void login() {
Log.d(TAG, "Login");
if (!validate()) {
onLoginFailed();
return;
}
_loginButton.setEnabled(false);
final ProgressDialog progressDialog = new ProgressDialog(LoginActivity.this);
progressDialog.setProgressStyle(ProgressDialog.STYLE_SPINNER);
progressDialog.setIndeterminate(true);
progressDialog.setMessage("Authenticating...");
progressDialog.show();
String email = _emailText.getText().toString();
String password = _passwordText.getText().toString();
String [] send = new String[2];
send[0] = email;
send[1] = password;
// TODO: Implement your own authentication logic here.
LoginPostTask loginPostTask = new LoginPostTask();
loginPostTask.execute(send);
new android.os.Handler().postDelayed(
new Runnable() {
public void run() {
// On complete call either onLoginSuccess or onLoginFailed
onLoginSuccess();
// onLoginFailed();
progressDialog.dismiss();
}
}, 3000);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if (requestCode == REQUEST_SIGNUP) {
if (resultCode == RESULT_OK) {
// TODO: Implement successful signup logic here
// By default we just finish the Activity and log them in automatically
this.finish();
}
}
}
@Override
public void onBackPressed() {
// disable going back to the MainActivity
moveTaskToBack(true);
}
public void onLoginSuccess() {
_loginButton.setEnabled(true);
finish();
}
public void onLoginFailed() {
Toast.makeText(getBaseContext(), "Login failed", Toast.LENGTH_LONG).show();
_loginButton.setEnabled(true);
}
public boolean validate() {
boolean valid = true;
String email = _emailText.getText().toString();
String password = _passwordText.getText().toString();
if (email.isEmpty() || !android.util.Patterns.EMAIL_ADDRESS.matcher(email).matches()) {
_emailText.setError("enter a valid email address");
valid = false;
} else {
_emailText.setError(null);
}
if (password.isEmpty() || password.length() < 4 || password.length() > 10) {
_passwordText.setError("between 4 and 10 alphanumeric characters");
valid = false;
} else {
_passwordText.setError(null);
}
return valid;
}
public class LoginPostTask extends AsyncTask<String, Integer, Boolean> {
// @Override
// protected void onPreExecute() {
// super.onPreExecute();
//
//// progressDialog = ProgressDialog.show(Confirmation.this, "Please wait...", "Retrieving data ...", true);
//
// }
@Override
protected Boolean doInBackground(String... something) {
Boolean result = true, temp = false;
String postBody = "";
FileOutputStream outputStream;
// if(Double.parseDouble(data11)<3) {
// result = false;
// return result;
// }
MediaType FORM_DATA_TYPE = MediaType.parse("application/x-www-form-urlencoded; charset=utf-8");
// try {
// postBody = "email" + "=" + URLEncoder.encode(something[0], "UTF-8")+
// "&password"+ "=" +URLEncoder.encode(something[1], "UTF-8")
// ;
//
// Log.w("1", "passed");
// } catch (UnsupportedEncodingException ex) {
// Log.w("1", "failed");
// result = false;
// }
OkHttpClient client = new OkHttpClient();
// RequestBody body = RequestBody.create(FORM_DATA_TYPE, postBody);
HttpUrl.Builder urlBuilder = HttpUrl.parse("http://192.168.100.6:8000/login").newBuilder();
urlBuilder.addQueryParameter("email", something[0]);
urlBuilder.addQueryParameter("password", something[1]);
String url = urlBuilder.build().toString();
Request request = new Request.Builder()
.url(url)
// .post(postBody)
.build();
//Send the request
try {
Response response = client.newCall(request).execute();
String responseData = response.body().string();
Log.w("received", responseData);
}
catch (Exception e){
e.printStackTrace();
}
return result;
}
@Override
protected void onProgressUpdate(Integer... value) {
super.onProgressUpdate(value);
}
@Override
protected void onPostExecute(Boolean result){
//Print Success or failure message accordingly
// Toast.makeText(this,result?"Message successfully sent!":"There was some error in sending message. Please try again after some time.",Toast.LENGTH_LONG).show();
}
}
}
| |
/**
*
*/
package org.opensharingtoolkit.daoplayer.logging;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.Locale;
import java.util.TimeZone;
import org.json.JSONException;
import org.json.JSONStringer;
import android.app.IntentService;
import android.content.Context;
import android.content.Intent;
import android.os.IBinder;
import android.telephony.TelephonyManager;
import android.util.Log;
/**
* @author pszcmg
*
*/
public class LoggingService extends IntentService {
private static final int LEVEL_DEBUG = 2;
private static final long DAY_DURATION = 24*60*60*1000L;
private static final long FILE_POLL_DELAY = 1000;
// hope it is a real singleton?!
private static File mLogDir;
private static File mLogFile;
private static long mLogFileDayDate;
private static long mLogFileLength;
private static long mLogFileTotal;
private static long LOG_FILE_MAX_LENGTH = 10000000; // about 10MB
private static BufferedOutputStream mOutput;
private static String mPendingError;
public static String LOG_VERSION = "2.0";
public LoggingService() {
super("OSTLogging");
}
public static String TAG = "logging";
/** (currently) no remote API - just use Intents
*/
@Override
public IBinder onBind(Intent arg0) {
// no bind interface, at least for now
return null;
}
/** Service create. Almost a no-op.
* File creation deferred to intent handling thread in case storage is
* initially unavailable (this shouldn't block like that).
*/
@Override
public void onCreate() {
Log.i(TAG,"Start LoggingService");
super.onCreate();
}
/** service destroy - flush and tidy.
*/
@Override
public void onDestroy() {
Log.i(TAG,"Destroy LoggingService");
if (mOutput!=null) {
try {
mOutput.flush();
}
catch (Exception e) {
Log.e(TAG,"Error flushing log file on close: "+e);
try {
mOutput.close();
}
catch (Exception e2) {/*ignore*/}
mOutput = null;
mLogFile = null;
mPendingError = e.toString();
}
}
super.onDestroy();
}
/** IntentService intent handler, run in worker thread, serialised.
* May block awaiting storage.
*/
@Override
protected void onHandleIntent(Intent intent) {
// No intent action, no intent filter - only intended to be called explicitly.
long now = System.currentTimeMillis();
long time = intent.getLongExtra("time", now);
String component = intent.getStringExtra("component");
String event = intent.getStringExtra("event");
String info = intent.getStringExtra("info");
int level = intent.getIntExtra("level", LEVEL_DEBUG);
boolean startNewFile = intent.getBooleanExtra("newFile", false);
// Note: info is raw JSON
try {
JSONStringer js = new JSONStringer();
js.object();
js.key("datetime");
js.value(rfcdf.format(time));
//js.key("time");
//js.value(time);
js.key("component");
js.value(component);
js.key("event");
js.value(event);
js.key("level");
js.value(level);
js.endObject();
String json = js.toString();
if (info!=null) {
StringBuilder sb = new StringBuilder();
sb.append(json.substring(0, json.length()-1));
sb.append(",\"info\":");
sb.append(info);
sb.append("}");
json = sb.toString();
}
tryWriteEntry(json, startNewFile);
} catch (JSONException e) {
Log.e(TAG,"Marshalling: "+e+" for "+time+" "+level+" "+component+" "+event+" "+info);
}
//Log.d(TAG,"Log: "+time+" "+level+" "+component+" "+event+" "+info);
}
/** ROOT-locale equivalent */
private SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd'T'HHmmssSSS'Z'", new Locale("","",""));
private Calendar day = Calendar.getInstance(TimeZone.getTimeZone("UTC"), new Locale("","",""));
private SimpleDateFormat rfcdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'", new Locale("","",""));
private void tryWriteEntry(String line, boolean startNewFile) {
boolean waitingForStorage = true;
done: while (true) {
// ready to go?
Date now = new Date();
if (mOutput!=null) {
// try writing
try {
if (startNewFile || mLogFileLength >= LOG_FILE_MAX_LENGTH || now.getTime()>mLogFileDayDate+DAY_DURATION) {
// rotate
Log.i(TAG,"Rotating log file at "+mLogFileLength+" bytes and "+(now.getTime()-mLogFileDayDate)+" seconds");
mOutput.write((byte)'\n');
String msg = "{\"time\":"+now.getTime()+",\"component\":\"logger\",\"event\":\"log.rotate\",\"level\":4}";
mOutput.write(msg.getBytes("UTF-8"));
mOutput.write((byte)'\n');
mOutput.flush();
mOutput.close();
mOutput = null;
mLogFile = null;
} else {
byte bs[] = line.getBytes("UTF-8");
mOutput.write(bs);
mOutput.write((byte)'\n');
// TODO delay flush?
mOutput.flush();
// OK
int count = bs.length+1;
mLogFileLength += count;
mLogFileTotal += count;
break done;
}
} catch (Exception e) {
Log.w(TAG,"Error writing entry: "+e);
// ok, tear it down and try again...
mPendingError = e.toString();
try {
if (mOutput!=null)
mOutput.close();
} catch (Exception e2) { /* ignore */ }
mOutput = null;
mLogFile = null;
}
}
// need to create a new log file
// Log dir OK?
if (mLogDir!=null && mLogDir.exists()) {
// create new Log file
startNewFile = false;
String filename = sdf.format(now)+".log";
mLogFile = new File(mLogDir, filename);
Log.i(TAG,"Create new log file "+mLogFile);
try {
mOutput = new BufferedOutputStream(new FileOutputStream(mLogFile));
// write header
JSONStringer js = new JSONStringer();
js.object();
js.key("time");
js.value(now.getTime());
js.key("datetime");
js.value(rfcdf.format(now));
js.key("level");
js.value(4);
js.key("component");
js.value("logger");
js.key("event");
js.value("log.start");
js.key("info");
js.object();
if (mPendingError!=null) {
js.key("pendingError");
js.value(mPendingError);
}
marshallDeviceInfo(js);
js.endObject();
js.endObject();
String json = js.toString();
byte bs[] = json.getBytes("UTF-8");
mOutput.write(bs);
mOutput.write((byte)'\n');
mOutput.flush();
// day date...
day.setTime(now);
day.set(Calendar.HOUR_OF_DAY, 0);
day.set(Calendar.MINUTE, 0);
day.set(Calendar.SECOND, 0);
day.set(Calendar.MILLISECOND, 0);
mLogFileDayDate = day.getTimeInMillis();
mLogFileLength = bs.length;
mLogFileTotal += bs.length;
mPendingError = null;
// carry on...
}
catch (Exception e) {
Log.e(TAG,"Could not create log file "+mLogFile+": "+e);
mLogFile = null;
//?mLogDir = null;
}
} else {
// doesn't exist? - may be unplugged
//mLogDir = getExternalFilesDir(null);
mLogDir = getExternalCacheDir();
if (mLogDir==null) {
if (!waitingForStorage) {
Log.w(TAG, "getLocalFilePrefix with external storage not available");
waitingForStorage = true;
}
try {
Thread.sleep(FILE_POLL_DELAY);
}
catch (InterruptedException e) {
Log.w(TAG,"Interrupted waiting for external files");
}
}
else if (waitingForStorage) {
waitingForStorage = false;
Log.i(TAG, "getLocalFilePrefix succeeded: "+mLogDir);
}
}
}
}
/** get device identifiers, etc. for log file header
*
* @param js Stringer to write into
* @throws JSONException
*/
private void marshallDeviceInfo(JSONStringer js) {
try {
js.key("logVersion");
js.value(LOG_VERSION);
js.key("packageName");
js.value(getPackageName());
String appVersionName = getPackageManager().getPackageInfo(getPackageName(), 0).versionName;
js.key("appVersionName");
js.value(appVersionName);
int appVersionCode = getPackageManager().getPackageInfo(getPackageName(), 0).versionCode;
js.key("appVersionCode");
js.value(appVersionCode);
} catch (Exception e) {
Log.e(TAG,"Error getting appVersion", e);
}
try {
final TelephonyManager tm = (TelephonyManager) getBaseContext().getSystemService(Context.TELEPHONY_SERVICE);
final String deviceId = tm.getDeviceId();
if (deviceId!=null) {
js.key("deviceId");
js.value(deviceId);
}
final String subscriberId = tm.getSubscriberId();
if (subscriberId!=null) {
js.key("subscriberId");
js.value(subscriberId);
}
} catch (Exception e) {
Log.e(TAG,"Error getting deviceId/subscriberId", e);
}
try {
final String androidId = android.provider.Settings.Secure.getString(getContentResolver(), android.provider.Settings.Secure.ANDROID_ID);
if (androidId!=null) {
js.key("androidId");
js.value(androidId);
}
} catch (Exception e) {
Log.e(TAG,"Error getting androidId", e);
}
try {
js.key("BOARD");
js.value(android.os.Build.BOARD);
js.key("DEVICE");
js.value(android.os.Build.DEVICE);
js.key("DISPLAY");
js.value(android.os.Build.DISPLAY);
/* API 9 ?!
if (android.os.Build.SERIAL!=null) {
js.key("SERIAL");
js.value(android.os.Build.SERIAL);
}
*/
js.key("BRAND");
js.value(android.os.Build.BRAND);
js.key("MODEL");
js.value(android.os.Build.MODEL);
js.key("PRODUCT");
js.value(android.os.Build.PRODUCT);
js.key("HARDWARE");
js.value(android.os.Build.HARDWARE);
js.key("RELEASE");
js.value(android.os.Build.VERSION.RELEASE);
js.key("SDK");
js.value(android.os.Build.VERSION.SDK_INT);
}
catch (Exception e) {
Log.e(TAG,"Error getting androidId", e);
}
}
}
| |
/*******************************************************************************
* Copyright 2015 EMBL - European Bioinformatics Institute
*
* Licensed under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the
* License.
*******************************************************************************/
package uk.ac.ebi.phenotype.web.controller;
import org.apache.commons.lang3.StringUtils;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.mousephenotype.cda.common.Constants;
import org.mousephenotype.cda.dto.LifeStage;
import org.mousephenotype.cda.enumerations.EmbryoViability;
import org.mousephenotype.cda.enumerations.ObservationType;
import org.mousephenotype.cda.enumerations.SexType;
import org.mousephenotype.cda.enumerations.ZygosityType;
import org.mousephenotype.cda.solr.service.*;
import org.mousephenotype.cda.solr.service.dto.*;
import org.mousephenotype.cda.solr.service.exception.SpecificExperimentException;
import org.mousephenotype.cda.solr.web.dto.EmbryoViability_DTO;
import org.mousephenotype.cda.solr.web.dto.ViabilityDTO;
import org.mousephenotype.cda.utilities.LifeStageMapper;
import org.mousephenotype.cda.web.ChartType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.configurationprocessor.json.JSONException;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import uk.ac.ebi.phenotype.chart.*;
import uk.ac.ebi.phenotype.error.GenomicFeatureNotFoundException;
import uk.ac.ebi.phenotype.error.ParameterNotFoundException;
import javax.annotation.Resource;
import javax.inject.Inject;
import javax.inject.Named;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.validation.constraints.NotNull;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static org.springframework.web.bind.annotation.ValueConstants.DEFAULT_NONE;
@Controller
public class ChartsController {
private final Logger log = LoggerFactory.getLogger(this.getClass());
private final CategoricalChartAndTableProvider categoricalChartAndTableProvider;
private final TimeSeriesChartAndTableProvider timeSeriesChartAndTableProvider;
private final UnidimensionalChartAndTableProvider continousChartAndTableProvider;
private final ScatterChartAndTableProvider scatterChartAndTableProvider;
private final AbrChartAndTableProvider abrChartAndTableProvider;
private final ViabilityChartAndDataProvider viabilityChartAndDataProvider;
private final ExperimentService experimentService;
private final StatisticalResultService srService;
private final GeneService geneService;
private final ImageService imageService;
private final ImpressService impressService;
private final GenotypePhenotypeService gpService;
@Resource(name = "globalConfiguration")
private Map<String, String> config;
@Value("${solr_url}")
public String SOLR_URL;
@Inject
public ChartsController(
@NotNull CategoricalChartAndTableProvider categoricalChartAndTableProvider,
@NotNull TimeSeriesChartAndTableProvider timeSeriesChartAndTableProvider,
@NotNull UnidimensionalChartAndTableProvider continousChartAndTableProvider,
@NotNull ScatterChartAndTableProvider scatterChartAndTableProvider,
@NotNull AbrChartAndTableProvider abrChartAndTableProvider,
@NotNull ViabilityChartAndDataProvider viabilityChartAndDataProvider,
@NotNull ExperimentService experimentService,
@NotNull GeneService geneService,
@NotNull ImpressService impressService,
@NotNull ImageService imageService,
@NotNull @Named("statistical-result-service") StatisticalResultService srService,
@NotNull @Named("genotype-phenotype-service") GenotypePhenotypeService gpService
) {
this.categoricalChartAndTableProvider = categoricalChartAndTableProvider;
this.timeSeriesChartAndTableProvider = timeSeriesChartAndTableProvider;
this.continousChartAndTableProvider = continousChartAndTableProvider;
this.scatterChartAndTableProvider = scatterChartAndTableProvider;
this.abrChartAndTableProvider = abrChartAndTableProvider;
this.viabilityChartAndDataProvider = viabilityChartAndDataProvider;
this.experimentService = experimentService;
this.srService = srService;
this.geneService = geneService;
this.impressService = impressService;
this.imageService=imageService;
this.gpService = gpService;
}
/**
 * Handles a bare "/stats" request (i.e. one missing an accession ID) by
 * redirecting to the search page, which defaults to listing all genes.
 *
 * @return Spring redirect view name pointing at the search page
 */
@RequestMapping("/stats")
public String rootForward() {
    return "redirect:/search";
}
/**
 * Generates a skeleton "stats" page containing one div per requested chart;
 * each div is subsequently filled by a jQuery ajax request to the
 * {@code /chart} endpoint.
 *
 * @param accessionsParams  gene MGI accession ids (only the first is used for redirects)
 * @param parameterIds      IMPReSS parameter stable ids to chart
 * @param gender            optional sexes to restrict charts to
 * @param zygosity          optional zygosities to restrict charts to
 * @param phenotypingCenter optional phenotyping centres
 * @param strategies        currently unused by this handler
 * @param pageTitle         optional title forwarded to the view
 * @return the "stats" view name, a redirect for fertility parameters, or an
 *         empty view name if chart creation fails
 */
@RequestMapping("/charts")
public String charts(@RequestParam(required = false, value = "accession") String[] accessionsParams,
                     @RequestParam(required = false, value = "parameter_stable_id") String[] parameterIds,
                     @RequestParam(required = false, value = "gender") String[] gender,
                     @RequestParam(required = false, value = "zygosity") String[] zygosity,
                     @RequestParam(required = false, value = "phenotyping_center") String[] phenotypingCenter,
                     @RequestParam(required = false, value = "strategy") String[] strategies,
                     @RequestParam(required = false, value = "strain") String[] strains,
                     @RequestParam(required = false, value = "metadata_group") String[] metadataGroup,
                     @RequestParam(required = false, value = "chart_type") ChartType chartType,
                     @RequestParam(required = false, value = "pipeline_stable_id") String[] pipelineStableIds,
                     @RequestParam(required = false, value = "procedure_stable_id") String[] procedureStableIds,
                     @RequestParam(required = false, value = "allele_accession_id") String[] alleleAccession,
                     @RequestParam(required = false, value = "pageTitle") String pageTitle,
                     @RequestParam(required = false, value = "pageLinkBack") String pageLinkBack,
                     HttpServletRequest request, HttpServletResponse response,
                     Model model) {
    try {
        // Fertility parameters have no chart data: bounce straight back to the gene page.
        if ((accessionsParams != null) && (accessionsParams.length > 0) && (parameterIds != null) && (parameterIds.length > 0)) {
            for (String parameterStableId : parameterIds) {
                if (parameterStableId.contains("_FER_")) {
                    log.warn("We don't have data for fertility so we can't display charts");
                    String url = "http:" + request.getAttribute("mappedHostname").toString() + request.getAttribute("baseUrl").toString() + "/genes/" + accessionsParams[0];
                    return "redirect:" + url;
                }
            }
        }
        // Allow javascript requests from other domains - note the Spring way of doing this
        // did not work here, hence the explicit header.
        response.addHeader("Access-Control-Allow-Origin", "*");
        model.addAttribute("pageTitle", pageTitle);
        return createCharts(accessionsParams, pipelineStableIds, procedureStableIds, parameterIds, gender, phenotypingCenter, strains, metadataGroup, zygosity, model, chartType, alleleAccession);
    } catch (Exception e) {
        // Log with the full stack trace (was e.printStackTrace()) and fall
        // through to an empty view name, preserving the original fallback.
        log.error("Failed to create charts page", e);
    }
    return "";
}
/**
 * Renders a single chart page/fragment for one experiment. Loads the
 * experiment for the requested gene/parameter/centre/zygosity combination,
 * builds chart data for the requested chart type, and populates the model
 * with the chart, specimen counts, metadata and any associated significant
 * phenotype terms.
 *
 * @param experimentNumber  suffix used to generate unique chart element ids on the page
 * @param accession         gene MGI accession ids; only accession[0] is read
 * @param parameterStableId IMPReSS parameter stable id; required
 * @return the "chart" view name, or a redirect to the gene page for fertility parameters
 * @throws ParameterNotFoundException when parameterStableId is null or empty
 */
@RequestMapping("/chart")
public String chart(@RequestParam(required = true, value = "experimentNumber", defaultValue = "1") String experimentNumber,
                    @RequestParam(required = false, value = "accession") String[] accession,
                    @RequestParam(required = false, value = "strain_accession_id") String strain,
                    @RequestParam(required = false, value = "allele_accession_id") String alleleAccession,
                    @RequestParam(required = false, value = "metadata_group", defaultValue = DEFAULT_NONE) String metadataGroup,
                    @RequestParam(required = false, value = "pipeline_stable_id") String pipelineStableId,
                    @RequestParam(required = false, value = "procedure_stable_id") String procedureStableId,
                    @RequestParam(required = false, value = "parameter_stable_id") String parameterStableId,
                    @RequestParam(required = false, value = "gender") String[] gender,
                    @RequestParam(required = false, value = "zygosity") String[] zygosity,
                    @RequestParam(required = false, value = "phenotyping_center") String phenotypingCenter,
                    @RequestParam(required = false, value = "strategy") String[] strategies,
                    @RequestParam(required = false, value = "chart_type") ChartType chartType,
                    @RequestParam(required = false, value = "chart_only", defaultValue = "false") boolean chartOnly,
                    @RequestParam(required = false, value = "standAlone") boolean standAlone,
                    @RequestParam(required = false, value = "fromFile") boolean fromFile,
                    Model model)
        throws IOException, URISyntaxException, SolrServerException, SpecificExperimentException, ParameterNotFoundException {

    // The parameter stable id is mandatory; everything below keys off it.
    if (StringUtils.isEmpty(parameterStableId)) {
        System.out.println("throwing parameter not found exception");
        throw new ParameterNotFoundException("Parameter " + parameterStableId + " can't be found.", parameterStableId);
    }

    // NOTE(review): this condition is always true here — empty/null ids were rejected above.
    if (!parameterStableId.equals("")) {
        boolean isDerivedBodyWeight = Constants.DERIVED_BODY_WEIGHT_PARAMETERS.contains(parameterStableId);
        model.addAttribute("isDerivedBodyWeight", isDerivedBodyWeight);
    }

    // Holders for whichever chart data set ends up being built below.
    UnidimensionalDataSet unidimensionalChartDataSet = null;
    ChartData seriesParameterChartData = null;
    CategoricalResultAndCharts categoricalResultAndChart = null;
    boolean statsError = false;

    // Fertility parameters have no chart data: redirect to the gene page.
    if (parameterStableId.startsWith("IMPC_FER_")) {
        String url = config.get("baseUrl") + "/genes/" + accession[0];
        return "redirect:" + url;
    }

    // Use the first phenotyping center passed in (ignore the others?)
    // should only now be one center at this stage for one graph/experiment
    // TODO put length check and exception here
    // List<String> phenotypingCenters = getParamsAsList(phenotypingCenter);
    String metadata = null;
    List<String> metadataList = null;
    String metaDataGroupString = null;
    if (metadataGroup != null && !metadataGroup.equals(DEFAULT_NONE)) {
        metaDataGroupString = metadataGroup;
    }
    List<String> zyList = getParamsAsList(zygosity);

    // Resolve the pipeline (when supplied) and expose it plus its IMPReSS URL.
    ImpressBaseDTO pipeline = null;
    if (pipelineStableId != null && !pipelineStableId.equals("")) {
        log.debug("pipe stable id=" + pipelineStableId);
        pipeline = impressService.getPipeline(pipelineStableId);
        model.addAttribute("pipeline", pipeline);
        model.addAttribute("pipelineUrl", impressService.getPipelineUrlByStableKey(pipeline.getStableKey()));
    }
    model.addAttribute("phenotypingCenter", phenotypingCenter);

    ExperimentDTO experiment;
    GeneDTO gene = geneService.getGeneById(accession[0]);
    model.addAttribute("gene", gene);

    long startTimeSolr = System.currentTimeMillis();
    // NOTE(review): assumes zygosity is non-null with at least one element — confirm callers
    // always pass it; also the SLF4J message has no "{}" placeholder, so the joined list is
    // not actually included in the log output.
    if (zygosity.length != 1) {
        log.warn("More than one zygosity specified", String.join(", ", zygosity));
    }
    // Load the single experiment matching this exact combination from Solr.
    experiment = experimentService.getSpecificExperimentDTO(
            pipelineStableId,
            procedureStableId,
            parameterStableId,
            alleleAccession,
            phenotypingCenter,
            zygosity[0],
            strain,
            metaDataGroupString
    );
    experiment = experimentService.setUrls(experiment, parameterStableId, pipelineStableId, gene.getMgiAccessionId(), Arrays.asList(zygosity), phenotypingCenter, strain, metadataGroup, alleleAccession, SOLR_URL);
    experiment.setMarkerAccession(gene.getMgiAccessionId());

    // Default to both sexes unless the experiment explicitly recorded sexes
    // (and none of them is "not_considered"); never leave the set empty.
    Set<SexType> sexes = new HashSet<>(Arrays.asList(SexType.male, SexType.female));
    if (experiment.getSexes() != null && ! experiment.getSexes().contains(SexType.not_considered)) {
        sexes = experiment.getSexes();
    }
    experiment.setSexes(sexes);
    if (experiment.getSexes().isEmpty()) {
        experiment.setSexes(Collections.singleton(SexType.not_considered));
    }
    long endTimeSolr = System.currentTimeMillis();
    long timeTakenSolr = endTimeSolr - startTimeSolr;
    System.out.println("solr time taken to get experiment=" + timeTakenSolr);

    ProcedureDTO proc;
    ParameterDTO parameter = null;
    if (experiment != null) {
        // Prefer the ids recorded on the experiment itself, falling back to the request params.
        String pipe = (experiment.getPipelineStableId() != null) ? experiment.getPipelineStableId() : pipelineStableId;
        String procStableId = (experiment.getProcedureStableId() != null) ? experiment.getProcedureStableId() : procedureStableId;
        proc = impressService.getProcedureByStableId(pipe, procStableId);
        String procedureUrl = "";
        String parameterUrl = "";
        // NOTE(review): pipeline may still be null here when pipeline_stable_id was blank,
        // which would NPE on pipeline.getStableKey() below — confirm callers always supply it.
        if (proc != null) {
            procedureUrl = impressService.getProcedureUrlByStableKeyAndPipelineStableKey(proc.getStableKey(), pipeline.getStableKey());
            model.addAttribute("procedureUrl", procedureUrl);
        }
        parameter = impressService.getParameterByPipelineProcedureParameterStableKey(pipeline.getStableKey(), proc.getStableKey(), parameterStableId);
        model.addAttribute("parameter", parameter);

        //3i procedures with at least some headline images associated
        if (parameter.getStableId().startsWith("MGP_BMI") || parameter.getStableId().startsWith("MGP_MLN") || parameter.getStableId().startsWith("MGP_IMM")) {
            addFlowCytometryImages(accession, model, parameter);
        }

        String xUnits = parameter.getUnitX();
        ObservationType observationTypeForParam = parameter.getObservationType();
        if (parameter.getStableKey() != null) {
            parameterUrl = impressService.getParameterUrlByProcedureAndParameterKey(proc.getStableKey(), parameter.getStableKey());
            model.addAttribute("parameterUrl", parameterUrl);
        }
        model.addAttribute("alleleSymbol", experiment.getAlleleSymbol());
        setTitlesForGraph(model, experiment.getGeneticBackgtround(), experiment.getAlleleSymbol());
        if (experiment.getMetadataGroup() != null) {
            metadata = experiment.getMetadataHtml();
            metadataList = experiment.getMetadata();
        }

        // Do not treat these procedures as standard charts
        List<String> notDefault = Arrays.asList("IMPC_EVL_", "IMPC_EVM_", "IMPC_EVO_", "IMPC_EVP_", "IMPC_VIA_");
        if (notDefault.stream().noneMatch(parameterStableId::startsWith)) {
            // Build the chart data appropriate to the requested chart type.
            if (chartType != null) {
                ScatterChartAndData scatterChartAndData;
                switch (chartType) {
                    case UNIDIMENSIONAL_SCATTER_PLOT:
                        scatterChartAndData = scatterChartAndTableProvider.doScatterData(experiment, null, null, parameter, experimentNumber);
                        model.addAttribute("scatterChartAndData", scatterChartAndData);
                        // Unidimensional scatter plots also carry summary statistics.
                        if (observationTypeForParam.equals(ObservationType.unidimensional)) {
                            List<UnidimensionalStatsObject> unidimenStatsObjects = scatterChartAndData.getUnidimensionalStatsObjects();
                            unidimensionalChartDataSet = new UnidimensionalDataSet();
                            unidimensionalChartDataSet.setStatsObjects(unidimenStatsObjects);
                            model.addAttribute("unidimensionalChartDataSet", unidimensionalChartDataSet);
                        }
                        break;
                    case UNIDIMENSIONAL_ABR_PLOT:
                        seriesParameterChartData = abrChartAndTableProvider.getAbrChartAndData(experiment, parameter, "abrChart" + experimentNumber, SOLR_URL);
                        model.addAttribute("abrChart", seriesParameterChartData.getChart());
                        break;
                    case UNIDIMENSIONAL_BOX_PLOT:
                        try {
                            unidimensionalChartDataSet = continousChartAndTableProvider.doUnidimensionalData(experiment, experimentNumber, parameter, xUnits);
                        } catch (JSONException e) {
                            e.printStackTrace();
                        }
                        model.addAttribute("unidimensionalChartDataSet", unidimensionalChartDataSet);
                        // Companion scatter plot scaled to the box plot's min/max.
                        scatterChartAndData = scatterChartAndTableProvider.doScatterData(experiment, unidimensionalChartDataSet.getMin(), unidimensionalChartDataSet.getMax(), parameter, experimentNumber);
                        model.addAttribute("scatterChartAndData", scatterChartAndData);
                        break;
                    case CATEGORICAL_STACKED_COLUMN:
                        categoricalResultAndChart = categoricalChartAndTableProvider.doCategoricalData(experiment, parameter, experimentNumber);
                        model.addAttribute("categoricalResultAndChart", categoricalResultAndChart);
                        break;
                    case TIME_SERIES_LINE:
                        seriesParameterChartData = timeSeriesChartAndTableProvider.doTimeSeriesData(experiment, parameter, experimentNumber);
                        model.addAttribute("timeSeriesChartsAndTable", seriesParameterChartData);
                        break;
                    default:
                        log.error("Unknown how to display graph for observation type: " + observationTypeForParam);
                        break;
                }
            } else {
                log.error("chart type is null");
            }
        }
    }

    // Adult viability: line-level data handled by dedicated viability queries.
    if (procedureStableId.equals("IMPC_VIA_001")) {
        if (parameterStableId.startsWith("IMPC_VIA_")) {
            // IMPC VIA 001
            // Its a viability outcome param which means its a line level query
            // so we don't use the normal experiment query in experiment service
            ViabilityDTO viability = experimentService.getSpecificViabilityVersion1ExperimentDTO(parameterStableId, pipelineStableId, accession[0], phenotypingCenter, strain, metaDataGroupString, alleleAccession);
            ViabilityDTO viabilityDTO = viabilityChartAndDataProvider.doViabilityData(viability, parameterStableId);
            model.addAttribute("viabilityDTO", viabilityDTO);
            //if viability data we want to have a message at the top which comes up on stats.jsp
            model.addAttribute("isViability", true);
        }
    } else if (procedureStableId.equals("IMPC_VIA_002")) {
        // IMPC VIA 002
        // Its a viability outcome param which means its a line level query
        // so we don't use the normal experiment query in experiment service
        ViabilityDTO viability = experimentService.getSpecificViabilityVersion2ExperimentDTO(parameterStableId, accession[0], phenotypingCenter, strain, metaDataGroupString, alleleAccession);
        ViabilityDTO viabilityDTO = viabilityChartAndDataProvider.doViabilityData(viability, parameterStableId);
        model.addAttribute("viabilityDTO", viabilityDTO);
        //if viability data we want to have a message at the top which comes up on stats.jsp
        model.addAttribute("isViability", true);
    }

    // Embryonic viability: one branch per embryonic stage (E9.5/E12.5/E14.5/E18.5).
    if (parameterStableId.startsWith("IMPC_EVL_")) {
        // Its an E9.5 embryonic viability outcome param which means its a line level query
        // so we don't use the normal experiment query in experiment service
        // Note: EmbryoViability.E9_5 specifies the set of related parameters passed to getSpecificEmbryoViability_ExperimentDTO
        EmbryoViability_DTO embryoViability = experimentService.getSpecificEmbryoViability_ExperimentDTO(parameterStableId, pipelineStableId, accession[0], phenotypingCenter, strain, metaDataGroupString, alleleAccession, EmbryoViability.E9_5);
        EmbryoViability_DTO embryoViability_DTO = viabilityChartAndDataProvider.doEmbryo_ViabilityData(parameter, embryoViability);
        model.addAttribute("embryoViabilityDTO", embryoViability_DTO);
    }
    if (parameterStableId.startsWith("IMPC_EVM_")) {
        // Its an E12.5 embryonic viability outcome param which means its a line level query
        // so we don't use the normal experiment query in experiment service
        // Note: EmbryoViability.E12_5 specifies the set of related parameters passed to getSpecificEmbryoViability_ExperimentDTO
        EmbryoViability_DTO embryoViability = experimentService.getSpecificEmbryoViability_ExperimentDTO(parameterStableId, pipelineStableId, accession[0], phenotypingCenter, strain, metaDataGroupString, alleleAccession, EmbryoViability.E12_5);
        EmbryoViability_DTO embryoViability_DTO = viabilityChartAndDataProvider.doEmbryo_ViabilityData(parameter, embryoViability);
        model.addAttribute("embryoViabilityDTO", embryoViability_DTO);
    }
    if (parameterStableId.startsWith("IMPC_EVO_")) {
        // Its an E14.5 embryonic viability outcome param which means its a line level query
        // so we don't use the normal experiment query in experiment service
        // Note: EmbryoViability.E14_5 specifies the set of related parameters passed to getSpecificEmbryoViability_ExperimentDTO
        EmbryoViability_DTO embryoViability = experimentService.getSpecificEmbryoViability_ExperimentDTO(parameterStableId, pipelineStableId, accession[0], phenotypingCenter, strain, metaDataGroupString, alleleAccession, EmbryoViability.E14_5);
        EmbryoViability_DTO embryoViability_DTO = viabilityChartAndDataProvider.doEmbryo_ViabilityData(parameter, embryoViability);
        model.addAttribute("embryoViabilityDTO", embryoViability_DTO);
    }
    if (parameterStableId.startsWith("IMPC_EVP_")) {
        // Its an E18.5 embryonic viability outcome param which means its a line level query
        // so we don't use the normal experiment query in experiment service
        // Note: EmbryoViability.E18_5 specifies the set of related parameters passed to getSpecificEmbryoViability_ExperimentDTO
        EmbryoViability_DTO embryoViability = experimentService.getSpecificEmbryoViability_ExperimentDTO(parameterStableId, pipelineStableId, accession[0], phenotypingCenter, strain, metaDataGroupString, alleleAccession, EmbryoViability.E18_5);
        EmbryoViability_DTO embryoViability_DTO = viabilityChartAndDataProvider.doEmbryo_ViabilityData(parameter, embryoViability);
        model.addAttribute("embryoViabilityDTO", embryoViability_DTO);
    }

    // Common model attributes for the chart view.
    model.addAttribute("pipeline", pipeline);
    model.addAttribute("phenotypingCenter", phenotypingCenter);
    model.addAttribute("experimentNumber", experimentNumber);
    model.addAttribute("statsError", statsError);
    if (experiment != null) {
        model.addAttribute("gpUrl", experiment.getGenotypePhenotypeUrl());
        model.addAttribute("srUrl", experiment.getStatisticalResultUrl());
        model.addAttribute("phenStatDataUrl", experiment.getDataPhenStatFormatUrl());
    }
    model.addAttribute("chartOnly", chartOnly);

    // Metadata
    // Turn "key=value" strings into a sorted map, concatenating values for duplicate keys.
    Map<String, String> metadataMap = null;
    if (metadataList != null) {
        metadataMap = metadataList
                .stream()
                .map(x -> Arrays.asList((x.split("="))))
                .filter(x -> x.size() == 2)
                .collect(Collectors.toMap(
                        k -> k.get(0),
                        v -> v.get(1),
                        (v1, v2) -> v1.concat(", ".concat(v2)),
                        TreeMap::new
                ));
    }
    model.addAttribute("metadata", metadata);
    model.addAttribute("metadataMap", metadataMap);

    // Specimen counts per sex/group, derived from whichever chart data set was built.
    Integer numberFemaleMutantMice = 0;
    Integer numberMaleMutantMice = 0;
    Integer numberFemaleControlMice = 0;
    Integer numberMaleControlMice = 0;
    if (unidimensionalChartDataSet != null) {
        // Count each specimen only once, no matter how many time's it's been measured
        final Set<ObservationDTO> mutants = unidimensionalChartDataSet.getExperiment().getMutants();
        final Set<ObservationDTO> controls = unidimensionalChartDataSet.getExperiment().getControls();
        numberFemaleMutantMice = (int) mutants.stream().filter(x -> x.getSex().equals(SexType.female.getName())).map(ObservationDTOBase::getExternalSampleId).distinct().count();
        numberMaleMutantMice = (int) mutants.stream().filter(x -> x.getSex().equals(SexType.male.getName())).map(ObservationDTOBase::getExternalSampleId).distinct().count();
        numberFemaleControlMice = (int) controls.stream().filter(x -> x.getSex().equals(SexType.female.getName())).map(ObservationDTOBase::getExternalSampleId).distinct().count();
        numberMaleControlMice = (int) controls.stream().filter(x -> x.getSex().equals(SexType.male.getName())).map(ObservationDTOBase::getExternalSampleId).distinct().count();
    }
    if (categoricalResultAndChart != null) {
        /* final List<CategoricalResult> statsResults = categoricalResultAndChart.getStatsResults();
        for (CategoricalResult cr : statsResults) {
            numberFemaleControlMice = cr.getFemaleControls();
            numberFemaleMutantMice = cr.getFemaleMutants();
            numberMaleControlMice = cr.getMaleControls();
            numberMaleMutantMice = cr.getMaleMutants();
        }*/
        final Set<ObservationDTO> mutants = categoricalResultAndChart.getExperiment().getMutants();
        final Set<ObservationDTO> controls = categoricalResultAndChart.getExperiment().getControls();
        if (mutants != null) {
            numberFemaleMutantMice = (int) mutants.stream().filter(x -> x.getSex().equals(SexType.female.getName())).map(ObservationDTOBase::getExternalSampleId).distinct().count();
            numberMaleMutantMice = (int) mutants.stream().filter(x -> x.getSex().equals(SexType.male.getName())).map(ObservationDTOBase::getExternalSampleId).distinct().count();
        } else {
            numberFemaleMutantMice = 0;
            numberMaleMutantMice = 0;
        }
        if (controls != null) {
            numberFemaleControlMice = (int) controls.stream().filter(x -> x.getSex().equals(SexType.female.getName())).map(ObservationDTOBase::getExternalSampleId).distinct().count();
            numberMaleControlMice = (int) controls.stream().filter(x -> x.getSex().equals(SexType.male.getName())).map(ObservationDTOBase::getExternalSampleId).distinct().count();
        } else {
            numberFemaleControlMice = 0;
            numberMaleControlMice = 0;
        }
    }
    if (seriesParameterChartData != null) {
        // Count each specimen only once, no matter how many time's it's been measured
        final Set<ObservationDTO> controls = seriesParameterChartData.getExperiment().getControls();
        final Set<ObservationDTO> mutants = seriesParameterChartData.getExperiment().getMutants();
        numberFemaleMutantMice = (int) mutants.stream().filter(x -> x.getSex().equals(SexType.female.getName())).map(ObservationDTOBase::getExternalSampleId).distinct().count();
        numberMaleMutantMice = (int) mutants.stream().filter(x -> x.getSex().equals(SexType.male.getName())).map(ObservationDTOBase::getExternalSampleId).distinct().count();
        numberFemaleControlMice = (int) controls.stream().filter(x -> x.getSex().equals(SexType.female.getName())).map(ObservationDTOBase::getExternalSampleId).distinct().count();
        numberMaleControlMice = (int) controls.stream().filter(x -> x.getSex().equals(SexType.male.getName())).map(ObservationDTOBase::getExternalSampleId).distinct().count();
    }
    model.addAttribute("numberFemaleMutantMice", numberFemaleMutantMice);
    model.addAttribute("numberMaleMutantMice", numberMaleMutantMice);
    model.addAttribute("numberFemaleControlMice", numberFemaleControlMice);
    model.addAttribute("numberMaleControlMice", numberMaleControlMice);
    final int totalSamples = Stream.of(numberFemaleMutantMice, numberMaleMutantMice, numberFemaleControlMice, numberMaleControlMice).filter(Objects::nonNull).mapToInt(Integer::intValue).sum();
    model.addAttribute("numberMice", totalSamples);

    // Collect significant phenotype (MP) terms associated with this data.
    if (experiment != null) {
        List<GenotypePhenotypeDTO> gpList = new ArrayList<>();
        if (experiment.getParameterStableId() != null) {
            List<GenotypePhenotypeDTO> addGpList = gpService.getGenotypePhenotypeFor(
                    gene.getMgiAccessionId(),
                    experiment.getParameterStableId(),
                    experiment.getStrain(),
                    experiment.getAlleleAccession(),
                    experiment.getZygosities(),
                    experiment.getOrganisation(),
                    experiment.getSexes());
            gpList.addAll(addGpList);
        }
        // If we are displaying a chart for Embryo viability, check all possible associated terms
        // and add any significant results to the MP terms that are associated to this data
        if (Stream.of("IMPC_EVL_", "IMPC_EVM_", "IMPC_EVO_", "IMPC_EVP_", "IMPC_VIA_").anyMatch(parameterStableId::startsWith)) {
            EmbryoViability v = EmbryoViability.E9_5;
            if (parameterStableId.contains("EVM")) v = EmbryoViability.E12_5;
            if (parameterStableId.contains("EVO")) v = EmbryoViability.E14_5;
            if (parameterStableId.contains("EVP")) v = EmbryoViability.E18_5;
            for (String param : v.parameterList) {
                List<GenotypePhenotypeDTO> addGpList = gpService.getGenotypePhenotypeFor(
                        gene.getMgiAccessionId(),
                        param,
                        experiment.getStrain(),
                        experiment.getAlleleAccession(),
                        experiment.getZygosities(),
                        experiment.getOrganisation(),
                        experiment.getSexes());
                gpList.addAll(addGpList);
            }
        }
        // If we are displaying a chart for IPGTT, check all possible derived terms associated to IPG procedure
        // and add any significant results to the MP terms that are associated to this data
        if (parameterStableId.equalsIgnoreCase("IMPC_IPG_002_001")) {
            for (String param : Constants.IMPC_IPG_002_001) {
                List<GenotypePhenotypeDTO> addGpList = gpService.getGenotypePhenotypeFor(
                        gene.getMgiAccessionId(),
                        param,
                        experiment.getStrain(),
                        experiment.getAlleleAccession(),
                        experiment.getZygosities(),
                        experiment.getOrganisation(),
                        experiment.getSexes());
                gpList.addAll(addGpList);
            }
        }
        //for line level parameters such as viability
        if (org.mousephenotype.cda.common.Constants.viabilityParameters.contains(parameterStableId)) {
            for (String param : org.mousephenotype.cda.common.Constants.viabilityParameters) {
                List<GenotypePhenotypeDTO> addGpList = gpService.getGenotypePhenotypeFor(
                        gene.getMgiAccessionId(),
                        param,
                        experiment.getStrain(),
                        experiment.getAlleleAccession(),
                        experiment.getZygosities(),
                        experiment.getOrganisation(),
                        null); // don't filter by sex: for line-level parameters this causes issues with the associated-phenotype display on the chart
                gpList.addAll(addGpList);
            }
        }
        List<String> phenotypeTerms = gpList.stream().map(GenotypePhenotypeDTO::getMpTermName).distinct().collect(Collectors.toList());
        //for links to phenotype pages we need the MP_ID
        List<String> phenotypeIds = gpList.stream().map(GenotypePhenotypeDTO::getMpTermId).distinct().collect(Collectors.toList());
        model.addAttribute("phenotypes", phenotypeTerms);
        model.addAttribute("phenotypeIds", phenotypeIds);
    }

    // Life-stage flags for the view (postnatal vs embryonic presentation).
    LifeStage parameterLifeStage = LifeStageMapper.getLifeStage(parameterStableId);
    List<LifeStage> postnatalLifeStages = Arrays.asList(LifeStage.EARLY_ADULT, LifeStage.MIDDLE_AGED_ADULT, LifeStage.LATE_ADULT);
    Boolean isPostnatal = postnatalLifeStages.contains(parameterLifeStage);
    model.addAttribute("isPostnatal", isPostnatal);
    model.addAttribute("lifeStage", LifeStageMapper.getLifeStage(parameterStableId).getName());
    return "chart";
}
/**
 * Fetches 3i flow-cytometry headline images for the given gene/parameter and
 * adds control and mutant image lists (plus the larger of the two counts,
 * used for page layout) to the model.
 *
 * Example query:
 * .../impc_images/select?q=parameter_stable_id:MGP_IMM_233_001&fq=parameter_association_stable_id:MGP_IMM_086_001&fq=gene_symbol:Sik3
 */
private void addFlowCytometryImages(String[] accession, Model model, ParameterDTO parameter)
        throws SolrServerException, IOException {
    log.debug("flow cytomerty for 3i detected get headline images");

    QueryResponse imagesResponse = imageService.getHeadlineImages(accession[0], null, 1000, null, null, parameter.getStableId());
    log.debug("number of images found=" + imagesResponse.getResults().getNumFound());

    // Split the returned images into control and mutant groups.
    List<ImageDTO> controlImages = new ArrayList<>();
    List<ImageDTO> mutantImages = new ArrayList<>();
    for (ImageDTO image : imagesResponse.getBeans(ImageDTO.class)) {
        if (image.isControl()) {
            log.debug("control found");
            controlImages.add(image);
        }
        if (image.isMutant()) {
            log.debug("mutant found");
            mutantImages.add(image);
        }
    }

    int imageCountMax = Math.max(controlImages.size(), mutantImages.size());
    model.addAttribute("controlImages", controlImages);
    model.addAttribute("mutantImages", mutantImages);
    log.debug("imageCountMax=" + imageCountMax);
    model.addAttribute("imageCountMax", imageCountMax);
}
/**
 * Adds the allele symbol and genetic background strings used in the graph
 * titles to the model, substituting "unknown" for missing values.
 */
private void setTitlesForGraph(Model model, String geneticBackground, String alleleSymbol) {
    String symbol = (alleleSymbol == null) ? "unknown" : alleleSymbol;
    String background = (geneticBackground == null) ? "unknown" : geneticBackground;
    model.addAttribute("symbol", symbol);
    model.addAttribute("geneticBackgroundString", background);
}
/**
 * Builds the "stats" page skeleton: for each requested gene/parameter
 * combination, computes the set of chart URLs that the page will load via
 * ajax and adds them (plus the gene and parameter names) to the model.
 *
 * All array parameters may be null/empty; defaults are applied for sex
 * (both) and zygosity (hom/het/hemi). Early-adult viability charts are
 * moved to the front of the list.
 *
 * @return the "stats" view name
 * @throws GenomicFeatureNotFoundException when a requested gene id is unknown
 */
private String createCharts(String[] accessionsParams, String[] pipelineStableIdsArray, String[] procedureStableIdsArray, String[] parameterIds, String[] gender, String[] phenotypingCenter,
                            String[] strains, String[] metadataGroup, String[] zygosity, Model model, ChartType chartType, String[] alleleAccession)
        throws SolrServerException, IOException, GenomicFeatureNotFoundException, ParameterNotFoundException, URISyntaxException {

    GraphUtils graphUtils = new GraphUtils(experimentService, srService, impressService);
    List<String> geneIds = getParamsAsList(accessionsParams);
    List<String> paramIds = getParamsAsList(parameterIds);
    List<String> genderList = getParamsAsList(gender);
    List<String> phenotypingCentersList = getParamsAsList(phenotypingCenter);
    List<String> strainsList = getParamsAsList(strains);
    List<String> metadataGroups = getParamsAsList(metadataGroup);
    List<String> pipelineStableIds = getParamsAsList(pipelineStableIdsArray);
    List<String> procedureStableIds = getParamsAsList(procedureStableIdsArray);
    List<String> alleleAccessions = getParamsAsList(alleleAccession);

    // add sexes explicitly here so graphs urls are created separately
    if (genderList.isEmpty()) {
        genderList.add(SexType.male.name());
        genderList.add(SexType.female.name());
    }
    List<String> zyList = getParamsAsList(zygosity);
    if (zyList.isEmpty()) {
        zyList.add(ZygosityType.homozygote.name());
        zyList.add(ZygosityType.heterozygote.name());
        zyList.add(ZygosityType.hemizygote.name());
    }

    Set<String> allGraphUrlSet = new LinkedHashSet<>();
    String allParameters = "";

    // All ABR parameters are displayed on the same chart so we don't want to duplicate an identical chart for every ABR parameter
    List<String> abrParameters = new ArrayList<>(paramIds);
    abrParameters.retainAll(Constants.ABR_PARAMETERS);
    if (abrParameters.size() > 1) {
        for (int i = 1; i < abrParameters.size(); i++) { // remove all ABR params but the first one
            paramIds.remove(abrParameters.get(i));
        }
    }

    if (geneIds.isEmpty()) {
        // Was System.err.println; route through the logger instead.
        log.warn("There are no geneIds for this request....probably and error");
    }
    for (String geneId : geneIds) {
        GeneDTO gene = geneService.getGeneById(geneId);
        if (gene == null) {
            throw new GenomicFeatureNotFoundException("Gene " + geneId + " can't be found.", geneId);
        }
        log.debug(gene.toString());
        model.addAttribute("gene", gene);

        List<String> pNames = new ArrayList<>();
        for (String parameterId : paramIds) {
            ParameterDTO parameter = impressService.getParameterByStableId(parameterId);
            if (parameter == null) {
                // Was System.err.println; route through the logger instead.
                log.warn("no parameter returned skipping for parameterId=" + parameterId);
                continue;
            }
            pNames.add(StringUtils.capitalize(parameter.getName()) + " (" + parameter.getStableId() + ")");
            Set<String> graphUrlsForParam = graphUtils.getGraphUrls(
                    pipelineStableIds,
                    procedureStableIds,
                    parameter.getStableId(),
                    geneId,
                    alleleAccessions,
                    zyList,
                    strainsList,
                    phenotypingCentersList,
                    metadataGroups
            );
            allGraphUrlSet.addAll(graphUrlsForParam);
        } // end of parameterId iterations
        allParameters = StringUtils.join(pNames, ", ");
    } // end of gene iterations

    log.debug(allGraphUrlSet.size() + " chart links.");
    // We want early adult viability first if present, rather than embryo viability data.
    List<String> allUrls = putEarlyAdultViabilityFirst(allGraphUrlSet);
    model.addAttribute("allGraphUrlSet", allUrls);
    model.addAttribute("allParameters", allParameters);
    return "stats";
}
/**
 * Reorders the chart URLs so that the main early-adult viability chart
 * ("_VIA_001_001") appears first, ahead of any embryo viability charts.
 *
 * NOTE(review): if several URLs contain "_VIA_001_001", all of them are
 * removed but only the LAST one is re-inserted at the front — confirm this
 * is intended (in practice at most one such URL is expected).
 *
 * @param urlsSet ordered set of chart URLs
 * @return list of the URLs with the viability chart (if any) moved to index 0
 */
private List<String> putEarlyAdultViabilityFirst(Set<String> urlsSet) {
    List<String> urlsList = new ArrayList<>(urlsSet);
    String viaUrl = "";
    // Typed iterator (was raw); Strings are immutable so no defensive copy is needed.
    Iterator<String> urlsIt = urlsList.iterator();
    while (urlsIt.hasNext()) {
        String tempUrl = urlsIt.next();
        if (tempUrl.contains("_VIA_001_001")) {
            viaUrl = tempUrl;
            urlsIt.remove();
        }
    }
    if (!viaUrl.isEmpty()) {
        urlsList.add(0, viaUrl);
    }
    return urlsList;
}
/**
 * Null-safe conversion of a request-parameter array into a mutable List.
 * Values pass through a HashSet, so duplicates are dropped and the original
 * array order is not preserved (presumably intentional — confirm).
 *
 * @param parameterIds raw request parameter values; may be null
 * @return mutable list of the distinct values, empty when input is null
 */
private List<String> getParamsAsList(String[] parameterIds) {
    if (parameterIds == null) {
        return new ArrayList<>();
    }
    return new ArrayList<>(new HashSet<>(Arrays.asList(parameterIds)));
}
/**
 * Debug/reference page exposing the RGB colour palettes used by the chart
 * providers (male, female and high-difference palettes).
 *
 * @param model MVC model populated with the palettes
 * @return the "colors" view name
 */
@RequestMapping("/colors")
public String colors(Model model) {
    model.addAttribute("maleColors", ChartColors.maleRgb);
    model.addAttribute("femaleColors", ChartColors.femaleRgb);
    model.addAttribute("highDifferenceColors",ChartColors.highDifferenceColors);
    return "colors";
}
}
| |
/*
* Copyright (C) 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.android.sunshine.app;
import android.annotation.TargetApi;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Build;
import android.os.Bundle;
import android.preference.ListPreference;
import android.preference.Preference;
import android.preference.PreferenceActivity;
import android.preference.PreferenceManager;
import android.support.design.widget.Snackbar;
import android.text.TextUtils;
import android.view.View;
import android.widget.ImageView;
import com.bumptech.glide.util.Util;
import com.example.android.sunshine.app.data.WeatherContract;
import com.example.android.sunshine.app.sync.SunshineSyncAdapter;
import com.google.android.gms.location.places.Place;
import com.google.android.gms.location.places.ui.PlacePicker;
import com.google.android.gms.maps.model.LatLng;
/**
* A {@link PreferenceActivity} that presents a set of application settings.
* <p>
* See <a href="http://developer.android.com/design/patterns/settings.html">
* Android Design: Settings</a> for design guidelines and the <a
* href="http://developer.android.com/guide/topics/ui/settings.html">Settings
* API Guide</a> for more information on developing a Settings UI.
*/
public class SettingsActivity extends PreferenceActivity
        implements Preference.OnPreferenceChangeListener, SharedPreferences.OnSharedPreferenceChangeListener {

    /** Request code identifying results returned by the Google Place Picker. */
    protected static final int PLACE_PICKER_REQUEST = 9090;

    /**
     * "Powered by Google" attribution shown as the list footer while a picked
     * location is in use. Only created on Honeycomb and newer (setListFooter
     * is unavailable before then), so this may be null.
     */
    private ImageView mAttribution;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Add 'general' preferences, defined in the XML file
        addPreferencesFromResource(R.xml.pref_general);
        // For all preferences, attach an OnPreferenceChangeListener so the UI summary can be
        // updated when the preference changes.
        bindPreferenceSummaryToValue(findPreference(getString(R.string.pref_location_key)));
        bindPreferenceSummaryToValue(findPreference(getString(R.string.pref_units_key)));
        bindPreferenceSummaryToValue(findPreference(getString(R.string.pref_art_pack_key)));
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
            mAttribution = new ImageView(this);
            mAttribution.setImageResource(R.drawable.powered_by_google_light);
            // Only show the attribution while an explicit lat/long is stored.
            if (!Utility.isLocationLatLonAvailable(this)) {
                mAttribution.setVisibility(View.GONE);
            }
            setListFooter(mAttribution);
        }
    }

    // Registers a shared preference change listener that gets notified when preferences change
    @Override
    protected void onResume() {
        SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(this);
        sp.registerOnSharedPreferenceChangeListener(this);
        super.onResume();
    }

    // Unregisters a shared preference change listener
    @Override
    protected void onPause() {
        SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(this);
        sp.unregisterOnSharedPreferenceChangeListener(this);
        super.onPause();
    }

    /**
     * Attaches a listener so the summary is always updated with the preference value.
     * Also fires the listener once, to initialize the summary (so it shows up before the value
     * is changed.)
     */
    private void bindPreferenceSummaryToValue(Preference preference) {
        // Set the listener to watch for value changes.
        preference.setOnPreferenceChangeListener(this);
        // Set the preference summaries
        setPreferenceSummary(preference,
                PreferenceManager
                        .getDefaultSharedPreferences(preference.getContext())
                        .getString(preference.getKey(), ""));
    }

    /**
     * Updates a preference's summary to reflect the given value. List
     * preferences show the display label of the selected entry; the location
     * preference additionally reflects the last known location lookup status.
     *
     * @param preference the preference whose summary is updated
     * @param value      the new raw preference value
     */
    private void setPreferenceSummary(Preference preference, Object value) {
        String stringValue = value.toString();
        String key = preference.getKey();
        if (preference instanceof ListPreference) {
            // For list preferences, look up the correct display value in
            // the preference's 'entries' list (since they have separate labels/values).
            ListPreference listPreference = (ListPreference) preference;
            int prefIndex = listPreference.findIndexOfValue(stringValue);
            if (prefIndex >= 0) {
                preference.setSummary(listPreference.getEntries()[prefIndex]);
            }
        } else if (key.equals(getString(R.string.pref_location_key))) {
            @SunshineSyncAdapter.LocationStatus int status = Utility.getLocationStatus(this);
            switch (status) {
                case SunshineSyncAdapter.LOCATION_STATUS_OK:
                    preference.setSummary(stringValue);
                    break;
                case SunshineSyncAdapter.LOCATION_STATUS_UNKNOWN:
                    preference.setSummary(getString(R.string.pref_location_unknown_description, value.toString()));
                    break;
                case SunshineSyncAdapter.LOCATION_STATUS_INVALID:
                    preference.setSummary(getString(R.string.pref_location_error_description, value.toString()));
                    break;
                default:
                    // Note --- if the server is down we still assume the value
                    // is valid
                    preference.setSummary(stringValue);
            }
        } else {
            // For other preferences, set the summary to the value's simple string representation.
            preference.setSummary(stringValue);
        }
    }

    // This gets called before the preference is changed
    @Override
    public boolean onPreferenceChange(Preference preference, Object value) {
        setPreferenceSummary(preference, value);
        return true;
    }

    // This gets called after the preference is changed, which is important because we
    // start our synchronization here
    @Override
    public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) {
        if (key.equals(getString(R.string.pref_location_key))) {
            // we've changed the location
            // first clear locationStatus
            SharedPreferences.Editor editor = sharedPreferences.edit();
            editor.remove(getString(R.string.pref_location_latitude));
            editor.remove(getString(R.string.pref_location_longitude));
            // apply() persists asynchronously; commit() would block the main
            // thread on a disk write and its return value was unused anyway.
            editor.apply();
            if (mAttribution != null) {
                mAttribution.setVisibility(View.GONE);
            }
            Utility.resetLocationStatus(this);
            SunshineSyncAdapter.syncImmediately(this);
        } else if (key.equals(getString(R.string.pref_units_key))) {
            // units have changed. update lists of weather entries accordingly
            getContentResolver().notifyChange(WeatherContract.WeatherEntry.CONTENT_URI, null);
        } else if (key.equals(getString(R.string.pref_location_status_key))) {
            // our location status has changed. Update the summary accordingly
            Preference locationPreference = findPreference(getString(R.string.pref_location_key));
            bindPreferenceSummaryToValue(locationPreference);
        } else if (key.equals(getString(R.string.pref_art_pack_key))) {
            // art pack have changed. update lists of weather entries accordingly
            getContentResolver().notifyChange(WeatherContract.WeatherEntry.CONTENT_URI, null);
        }
    }

    @TargetApi(Build.VERSION_CODES.JELLY_BEAN)
    @Override
    public Intent getParentActivityIntent() {
        // Clear the activity stack above the parent so "up" does not grow the back stack.
        return super.getParentActivityIntent().addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        if (requestCode == PLACE_PICKER_REQUEST) {
            if (resultCode == RESULT_OK) {
                Place place = PlacePicker.getPlace(data, this);
                String address = place.getAddress().toString();
                LatLng latLong = place.getLatLng();
                // if the provided place doesn't have an address, we'll form a display-friendly
                // string from the latlng values
                if (TextUtils.isEmpty(address)) {
                    address = String.format("(%.2f, %.2f)", latLong.latitude, latLong.longitude);
                }
                SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this);
                SharedPreferences.Editor editor = sharedPreferences.edit();
                editor.putString(getString(R.string.pref_location_key), address);
                editor.putFloat(getString(R.string.pref_location_latitude), (float) latLong.latitude);
                editor.putFloat(getString(R.string.pref_location_longitude), (float) latLong.longitude);
                // apply() is asynchronous but subsequent reads still observe the
                // new values because the in-memory map is updated synchronously.
                editor.apply();
                Preference locationPreference = findPreference(getString(R.string.pref_location_key));
                setPreferenceSummary(locationPreference, address);
                if (mAttribution != null) {
                    mAttribution.setVisibility(View.VISIBLE);
                } else {
                    // Pre-Honeycomb there is no footer view, so surface the
                    // required attribution via a Snackbar instead.
                    View rootView = findViewById(android.R.id.content);
                    Snackbar.make(rootView, getString(R.string.attribution_text), Snackbar.LENGTH_LONG).show();
                }
                Utility.resetLocationStatus(this);
                SunshineSyncAdapter.syncImmediately(this);
            }
        } else {
            super.onActivityResult(requestCode, resultCode, data);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.log4j;
import org.apache.log4j.spi.LoggerRepository;
import org.apache.log4j.spi.LoggerFactory;
import org.apache.log4j.spi.RepositorySelector;
import org.apache.log4j.spi.DefaultRepositorySelector;
import org.apache.log4j.spi.RootLogger;
import org.apache.log4j.spi.NOPLoggerRepository;
import org.apache.log4j.helpers.Loader;
import org.apache.log4j.helpers.OptionConverter;
import org.apache.log4j.helpers.LogLog;
import java.net.URL;
import java.net.MalformedURLException;
import java.util.Enumeration;
import java.io.StringWriter;
import java.io.PrintWriter;
/**
* Use the <code>LogManager</code> class to retreive {@link Logger}
* instances or to operate on the current {@link
* LoggerRepository}. When the <code>LogManager</code> class is loaded
* into memory the default initalzation procedure is inititated. The
* default intialization procedure</a> is described in the <a
* href="../../../../manual.html#defaultInit">short log4j manual</a>.
*
* @author Ceki Gülcü */
public class LogManager {

    /**
     * @deprecated This variable is for internal use only. It will
     * become package protected in future versions.
     * */
    public static final String DEFAULT_CONFIGURATION_FILE = "log4j.properties";

    static final String DEFAULT_XML_CONFIGURATION_FILE = "log4j.xml";

    /**
     * @deprecated This variable is for internal use only. It will
     * become private in future versions.
     * */
    public static final String DEFAULT_CONFIGURATION_KEY = "log4j.configuration";

    /**
     * @deprecated This variable is for internal use only. It will
     * become private in future versions.
     * */
    public static final String CONFIGURATOR_CLASS_KEY = "log4j.configuratorClass";

    /**
     * @deprecated This variable is for internal use only. It will
     * become private in future versions.
     */
    public static final String DEFAULT_INIT_OVERRIDE_KEY =
            "log4j.defaultInitOverride";

    // Token that must be presented to replace the repository selector once one
    // has been installed; see setRepositorySelector(...).
    private static Object guard = null;

    // Determines which LoggerRepository all requests are routed to.
    private static RepositorySelector repositorySelector;

    static {
        // By default we use a DefaultRepositorySelector which always returns 'h'.
        Hierarchy h = new Hierarchy(new RootLogger((Level) Level.DEBUG));
        repositorySelector = new DefaultRepositorySelector(h);

        // Search for the properties file log4j.properties in the CLASSPATH.
        String override = OptionConverter.getSystemProperty(DEFAULT_INIT_OVERRIDE_KEY,
                null);

        // if there is no default init override, then get the resource
        // specified by the user or the default config file.
        if (override == null || "false".equalsIgnoreCase(override)) {
            String configurationOptionStr = OptionConverter.getSystemProperty(
                    DEFAULT_CONFIGURATION_KEY,
                    null);
            String configuratorClassName = OptionConverter.getSystemProperty(
                    CONFIGURATOR_CLASS_KEY,
                    null);

            URL url = null;

            // if the user has not specified the log4j.configuration
            // property, we search first for the file "log4j.xml" and then
            // "log4j.properties"
            if (configurationOptionStr == null) {
                url = Loader.getResource(DEFAULT_XML_CONFIGURATION_FILE);
                if (url == null) {
                    url = Loader.getResource(DEFAULT_CONFIGURATION_FILE);
                }
            } else {
                try {
                    url = new URL(configurationOptionStr);
                } catch (MalformedURLException ex) {
                    // so, resource is not a URL:
                    // attempt to get the resource from the class path
                    url = Loader.getResource(configurationOptionStr);
                }
            }

            // If we have a non-null url, then delegate the rest of the
            // configuration to the OptionConverter.selectAndConfigure
            // method.
            if (url != null) {
                LogLog.debug("Using URL [" + url + "] for automatic log4j configuration.");
                try {
                    // BUG FIX: this call had been commented out, leaving an empty
                    // try block, so automatic configuration silently never ran even
                    // though the debug message above claimed it would.
                    OptionConverter.selectAndConfigure(url, configuratorClassName,
                            LogManager.getLoggerRepository());
                } catch (NoClassDefFoundError e) {
                    LogLog.warn("Error during default initialization", e);
                }
            } else {
                LogLog.debug("Could not find resource: [" + configurationOptionStr + "].");
            }
        } else {
            // Message fixed: it was garbled and missing the space before "property".
            LogLog.debug("Default initialization of log4j is overridden by the "
                    + DEFAULT_INIT_OVERRIDE_KEY + " property.");
        }
    }

    /**
     Sets <code>LoggerFactory</code> but only if the correct
     <em>guard</em> is passed as parameter.

     <p>Initally the guard is null. If the guard is
     <code>null</code>, then invoking this method sets the logger
     factory and the guard. Following invocations will throw a {@link
     IllegalArgumentException}, unless the previously set
     <code>guard</code> is passed as the second parameter.

     <p>This allows a high-level component to set the {@link
     RepositorySelector} used by the <code>LogManager</code>.

     <p>For example, when tomcat starts it will be able to install its
     own repository selector. However, if and when Tomcat is embedded
     within JBoss, then JBoss will install its own repository selector
     and Tomcat will use the repository selector set by its container,
     JBoss. */
    public static void setRepositorySelector(RepositorySelector selector, Object guard)
            throws IllegalArgumentException {
        if ((LogManager.guard != null) && (LogManager.guard != guard)) {
            throw new IllegalArgumentException(
                    "Attempted to reset the LoggerFactory without possessing the guard.");
        }
        if (selector == null) {
            throw new IllegalArgumentException("RepositorySelector must be non-null.");
        }
        LogManager.guard = guard;
        LogManager.repositorySelector = selector;
    }

    /**
     * This method tests if called from a method that
     * is known to result in class members being abnormally
     * set to null but is assumed to be harmless since the
     * all classes are in the process of being unloaded.
     *
     * @param ex exception used to determine calling stack.
     * @return true if calling stack is recognized as likely safe.
     */
    private static boolean isLikelySafeScenario(final Exception ex) {
        StringWriter stringWriter = new StringWriter();
        ex.printStackTrace(new PrintWriter(stringWriter));
        String msg = stringWriter.toString();
        return msg.indexOf("org.apache.catalina.loader.WebappClassLoader.stop") != -1;
    }

    /**
     * Returns the current {@link LoggerRepository}. If the selector has been
     * nulled (observed when log4j is used after its classloader was stopped),
     * a no-op repository is installed and the condition is logged.
     */
    public static LoggerRepository getLoggerRepository() {
        if (repositorySelector == null) {
            repositorySelector = new DefaultRepositorySelector(new NOPLoggerRepository());
            guard = null;
            Exception ex = new IllegalStateException("Class invariant violation");
            String msg =
                    "log4j called after unloading, see http://logging.apache.org/log4j/1.2/faq.html#unload.";
            if (isLikelySafeScenario(ex)) {
                LogLog.debug(msg, ex);
            } else {
                LogLog.error(msg, ex);
            }
        }
        return repositorySelector.getLoggerRepository();
    }

    /**
     Retrieve the appropriate root logger.
     */
    public static Logger getRootLogger() {
        // Delegate the actual manufacturing of the logger to the logger repository.
        return getLoggerRepository().getRootLogger();
    }

    /**
     Retrieve the appropriate {@link Logger} instance.
     */
    public static Logger getLogger(final String name) {
        // Delegate the actual manufacturing of the logger to the logger repository.
        return getLoggerRepository().getLogger(name);
    }

    /**
     Retrieve the appropriate {@link Logger} instance.
     */
    public static Logger getLogger(final Class clazz) {
        // Delegate the actual manufacturing of the logger to the logger repository.
        return getLoggerRepository().getLogger(clazz.getName());
    }

    /**
     Retrieve the appropriate {@link Logger} instance.
     */
    public static Logger getLogger(final String name, final LoggerFactory factory) {
        // Delegate the actual manufacturing of the logger to the logger repository.
        return getLoggerRepository().getLogger(name, factory);
    }

    /** Returns the named logger if it already exists, per the repository. */
    public static Logger exists(final String name) {
        return getLoggerRepository().exists(name);
    }

    /** Returns an enumeration over the currently instantiated loggers. */
    public static Enumeration getCurrentLoggers() {
        return getLoggerRepository().getCurrentLoggers();
    }

    /** Shuts down the current logger repository. */
    public static void shutdown() {
        getLoggerRepository().shutdown();
    }

    /** Resets the configuration of the current logger repository. */
    public static void resetConfiguration() {
        getLoggerRepository().resetConfiguration();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.functions.aggfunctions;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.table.data.DecimalData;
import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.StringData;
import org.apache.flink.table.data.binary.BinaryStringData;
import org.apache.flink.table.functions.AggregateFunction;
import org.apache.flink.table.runtime.typeutils.DecimalDataTypeInfo;
import org.apache.flink.table.runtime.typeutils.RowDataTypeInfo;
import org.apache.flink.table.runtime.typeutils.StringDataTypeInfo;
import org.apache.flink.table.types.logical.BigIntType;
import org.apache.flink.table.types.logical.LogicalType;
import static org.apache.flink.table.runtime.types.TypeInfoLogicalTypeConverter.fromTypeInfoToLogicalType;
/**
* built-in FirstValue aggregate function.
*/
public abstract class FirstValueAggFunction<T> extends AggregateFunction<T, GenericRowData> {

    @Override
    public boolean isDeterministic() {
        // The result depends on arrival order (and, without an explicit order,
        // on System.currentTimeMillis()), so this function is not deterministic.
        return false;
    }

    @Override
    public GenericRowData createAccumulator() {
        // The accumulator schema:
        // firstValue: T
        // firstOrder: Long (Long.MAX_VALUE is the "no value seen yet" sentinel)
        GenericRowData acc = new GenericRowData(2);
        acc.setField(0, null);
        acc.setField(1, Long.MAX_VALUE);
        return acc;
    }

    /**
     * Accumulates a value without an explicit order. Only the very first
     * non-null value is retained: the sentinel order guards against any
     * later value overwriting it.
     *
     * @param acc   the accumulator created by {@link #createAccumulator()}
     * @param value the incoming value; nulls are ignored
     */
    public void accumulate(GenericRowData acc, Object value) {
        if (value != null && acc.getLong(1) == Long.MAX_VALUE) {
            acc.setField(0, value);
            acc.setField(1, System.currentTimeMillis());
        }
    }

    /**
     * Accumulates a value with an explicit order, retaining the non-null
     * value with the smallest order seen so far.
     *
     * @param acc   the accumulator created by {@link #createAccumulator()}
     * @param value the incoming value; nulls are ignored
     * @param order the ordering key associated with the value
     */
    public void accumulate(GenericRowData acc, Object value, Long order) {
        if (value != null && acc.getLong(1) > order) {
            acc.setField(0, value);
            acc.setField(1, order);
        }
    }

    /** Resets the accumulator back to its "no value seen" state. */
    public void resetAccumulator(GenericRowData acc) {
        acc.setField(0, null);
        acc.setField(1, Long.MAX_VALUE);
    }

    @Override
    @SuppressWarnings("unchecked")
    public T getValue(GenericRowData acc) {
        // Safe: accumulate(...) only ever stores values of type T in field 0.
        return (T) acc.getField(0);
    }

    @Override
    @SuppressWarnings({"unchecked", "rawtypes"})
    public TypeInformation<GenericRowData> getAccumulatorType() {
        LogicalType[] fieldTypes = new LogicalType[] {
                fromTypeInfoToLogicalType(getResultType()),
                new BigIntType()
        };
        String[] fieldNames = new String[] {
                "value",
                "time"
        };
        return (TypeInformation) new RowDataTypeInfo(fieldTypes, fieldNames);
    }

    /**
     * Built-in Byte FirstValue aggregate function.
     */
    public static class ByteFirstValueAggFunction extends FirstValueAggFunction<Byte> {
        @Override
        public TypeInformation<Byte> getResultType() {
            return Types.BYTE;
        }
    }

    /**
     * Built-in Short FirstValue aggregate function.
     */
    public static class ShortFirstValueAggFunction extends FirstValueAggFunction<Short> {
        @Override
        public TypeInformation<Short> getResultType() {
            return Types.SHORT;
        }
    }

    /**
     * Built-in Int FirstValue aggregate function.
     */
    public static class IntFirstValueAggFunction extends FirstValueAggFunction<Integer> {
        @Override
        public TypeInformation<Integer> getResultType() {
            return Types.INT;
        }
    }

    /**
     * Built-in Long FirstValue aggregate function.
     */
    public static class LongFirstValueAggFunction extends FirstValueAggFunction<Long> {
        @Override
        public TypeInformation<Long> getResultType() {
            return Types.LONG;
        }
    }

    /**
     * Built-in Float FirstValue aggregate function.
     */
    public static class FloatFirstValueAggFunction extends FirstValueAggFunction<Float> {
        @Override
        public TypeInformation<Float> getResultType() {
            return Types.FLOAT;
        }
    }

    /**
     * Built-in Double FirstValue aggregate function.
     */
    public static class DoubleFirstValueAggFunction extends FirstValueAggFunction<Double> {
        @Override
        public TypeInformation<Double> getResultType() {
            return Types.DOUBLE;
        }
    }

    /**
     * Built-in Boolean FirstValue aggregate function.
     */
    public static class BooleanFirstValueAggFunction extends FirstValueAggFunction<Boolean> {
        @Override
        public TypeInformation<Boolean> getResultType() {
            return Types.BOOLEAN;
        }
    }

    /**
     * Built-in DecimalData FirstValue aggregate function.
     */
    public static class DecimalFirstValueAggFunction extends FirstValueAggFunction<DecimalData> {

        // Carries the precision/scale of the decimal result; never reassigned,
        // so declared final.
        private final DecimalDataTypeInfo decimalTypeInfo;

        public DecimalFirstValueAggFunction(DecimalDataTypeInfo decimalTypeInfo) {
            this.decimalTypeInfo = decimalTypeInfo;
        }

        public void accumulate(GenericRowData acc, DecimalData value) {
            super.accumulate(acc, value);
        }

        public void accumulate(GenericRowData acc, DecimalData value, Long order) {
            super.accumulate(acc, value, order);
        }

        @Override
        public TypeInformation<DecimalData> getResultType() {
            return decimalTypeInfo;
        }
    }

    /**
     * Built-in String FirstValue aggregate function.
     */
    public static class StringFirstValueAggFunction extends FirstValueAggFunction<StringData> {
        @Override
        public TypeInformation<StringData> getResultType() {
            return StringDataTypeInfo.INSTANCE;
        }

        public void accumulate(GenericRowData acc, StringData value) {
            if (value != null) {
                // copy() detaches the stored string from the incoming buffer,
                // which is presumably reused by the runtime — TODO confirm.
                super.accumulate(acc, ((BinaryStringData) value).copy());
            }
        }

        public void accumulate(GenericRowData acc, StringData value, Long order) {
            // just ignore nulls values and orders
            if (value != null) {
                super.accumulate(acc, ((BinaryStringData) value).copy(), order);
            }
        }
    }
}
| |
/*
* Copyright 2014 - 2019 Michael Rapp
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package de.mrapp.android.preference.activity.animation;
import android.animation.ObjectAnimator;
import android.view.View;
import android.view.animation.AccelerateDecelerateInterpolator;
import androidx.annotation.NonNull;
import androidx.core.view.ViewCompat;
import androidx.recyclerview.widget.RecyclerView;
import de.mrapp.util.Condition;
import de.mrapp.util.datastructure.ListenerList;
/**
* A scroll listener, which allows to animate a view to become hidden or shown depending on the
* observed list view's scrolling direction.
*
* @author Michael Rapp
* @since 2.0.0
*/
public class HideViewOnScrollAnimation extends RecyclerView.OnScrollListener {
/**
* Contains all possible directions, which can be used to translate the animated view in order
* hide it.
*/
public enum Direction {
/**
* If the view should be translated upwards.
*/
UP,
/**
* If the view should be translated downwards.
*/
DOWN
}
/**
* The default duration of the animation, which is used to show or hide the view, in
* milliseconds.
*/
private static final long DEFAULT_ANIMATION_DURATION = 300L;
/**
* The view, which is animated by the listener.
*/
private final View animatedView;
/**
* The direction, which is used to translate the view in order to hide it.
*/
private final Direction direction;
/**
* The duration of the animation, which is used to show or hide the view, in milliseconds.
*/
private final long animationDuration;
/**
* True, if the observed recycler view was scrolling up, when the listener was called the last
* time.
*/
private Boolean scrollingUp;
/**
* True, if the animated view is currently hidden, false otherwise.
*/
private boolean hidden;
/**
* The initial position of the view, which is animated by the listener.
*/
private float initialPosition = -1.0f;
/**
* A set, which contains the listeners, which should be notified about the animation's internal
* state.
*/
private ListenerList<HideViewOnScrollAnimationListener> listeners;
/**
* Notifies all listeners, which have been registered to be notified about the animation's
* internal state, when the observed list view is scrolling downwards.
*
* @param animatedView
* The view, which is animated by the observed animation, as an instance of the class
* {@link View}
* @param scrollPosition
* The current scroll position of the list view's first item in pixels as an {@link
* Integer} value
*/
private void notifyOnScrollingDown(@NonNull final View animatedView, final int scrollPosition) {
for (HideViewOnScrollAnimationListener listener : listeners) {
listener.onScrollingDown(this, animatedView, scrollPosition);
}
}
/**
* Notifies all listeners, which have been registered to be notified about the animation's
* internal state, when the observed list view is scrolling upwards.
*
* @param animatedView
* The view, which is animated by the observed animation, as an instance of the class
* {@link View}
* @param scrollPosition
* The current scroll position of the list view's first item in pixels as an {@link
* Integer} value
*/
private void notifyOnScrollingUp(@NonNull final View animatedView, final int scrollPosition) {
for (HideViewOnScrollAnimationListener listener : listeners) {
listener.onScrollingUp(this, animatedView, scrollPosition);
}
}
/**
* The method, which is invoked, when the observed list view is scrolling upwards.
*/
private void onScrollingUp() {
if (hidden) {
hidden = false;
if (animatedView.getAnimation() == null) {
ObjectAnimator animator = createAnimator(false);
animator.start();
}
}
}
/**
* The method, which is invoked, when the observed list view is scrolling downwards.
*/
private void onScrollingDown() {
if (!hidden) {
hidden = true;
if (animatedView.getAnimation() == null) {
ObjectAnimator animator = createAnimator(true);
animator.start();
}
}
}
/**
* Creates and returns an animator, which allows to translate the animated view to become shown
* or hidden.
*
* @param hide
* True, if the view should become hidden, false otherwise
* @return The animator, which has been created, as an instance of the class {@link
* ObjectAnimator}
*/
private ObjectAnimator createAnimator(final boolean hide) {
if (initialPosition == -1.0f) {
initialPosition = animatedView.getY();
}
float targetPosition = hide ? initialPosition - animatedView.getHeight() : initialPosition;
if (direction == Direction.DOWN) {
targetPosition = hide ? initialPosition + animatedView.getHeight() : initialPosition;
}
ObjectAnimator animation =
ObjectAnimator.ofFloat(animatedView, "y", animatedView.getY(), targetPosition);
animation.setInterpolator(new AccelerateDecelerateInterpolator());
animation.setDuration(animationDuration);
return animation;
}
/**
* Creates a new scroll listener, which allows to animate a view to become hidden or shown
* depending on the observed list view's scrolling direction.
*
* @param view
* The view, which should be animated by the listener, as an instance of the class
* {@link View}. The view may not be null
* @param direction
* The direction, which should be be used to translate the view in order to hide it, as
* a value of the enum {@link Direction}. The direction may either be <code>UP</code>
* or
* <code>DOWN</code>
*/
public HideViewOnScrollAnimation(@NonNull final View view, @NonNull final Direction direction) {
this(view, direction, DEFAULT_ANIMATION_DURATION);
}
/**
* Creates a new scroll listener, which allows to animate a view to become hidden or shown
* depending on the observed list view's scrolling direction.
*
* @param view
* The view, which should be animated by the listener, as an instance of the class
* {@link View}. The view may not be null
* @param direction
* The direction, which should be be used to translate the view in order to hide it, as
* a value of the enum {@link Direction}. The direction may either be <code>UP</code>
* or
* <code>DOWN</code>
* @param animationDuration
* The duration of the animation, which is used to show or hide the view, in
* milliseconds as a {@link Long} value. The duration must be greater than 0
*/
public HideViewOnScrollAnimation(@NonNull final View view, @NonNull final Direction direction,
final long animationDuration) {
Condition.INSTANCE.ensureNotNull(view, "The view may not be null");
Condition.INSTANCE.ensureNotNull(direction, "The direction may not be null");
Condition.INSTANCE.ensureGreater(animationDuration, 0,
"The animation duration must be greater than 0");
this.animatedView = view;
this.direction = direction;
this.animationDuration = animationDuration;
this.listeners = new ListenerList<>();
}
/**
* Shows the view.
*/
public final void showView() {
if (animatedView.getAnimation() != null) {
animatedView.getAnimation().cancel();
}
ObjectAnimator animator = createAnimator(false);
animator.start();
}
/**
* Hides the view.
*/
public final void hideView() {
if (animatedView.getAnimation() != null) {
animatedView.getAnimation().cancel();
}
ObjectAnimator animator = createAnimator(true);
animator.start();
}
/**
* Returns the view, which is animated by the listener.
*
* @return The view, which is animated by the listener as an instance of the class {@link View}
*/
public final View getView() {
return animatedView;
}
/**
* Returns the direction, which should be be used to translate the view in order to hide it.
*
* @return The direction, which should be be used to translate the view in order to hide it, as
* a value of the enum {@link Direction}. The direction may either be <code>UP</code> or
* <code>DOWN</code>
*/
public final Direction getDirection() {
return direction;
}
/**
* Returns the duration of the animation, which is used to show or hide the view.
*
* @return The duration of the animation, which is used to show or hide the view, in
* milliseconds as a {@link Long} value
*/
public final long getAnimationDuration() {
return animationDuration;
}
/**
* Adds a new listener, which should be notified about the animation's internal state, to the
* animation.
*
* @param listener
* The listener, which should be added, as an instance of the type {@link
* HideViewOnScrollAnimationListener}. The listener may not be null
*/
public final void addListener(@NonNull final HideViewOnScrollAnimationListener listener) {
Condition.INSTANCE.ensureNotNull(listener, "The listener may not be null");
listeners.add(listener);
}
/**
* Removes a specific listener, which should not be notified about the animation's internal
* state, from the animation.
*
* @param listener
* The listener, which should be removed, as an instance of the tpye {@link
* HideViewOnScrollAnimationListener}. The listener may not be null
*/
public final void removeListener(@NonNull final HideViewOnScrollAnimationListener listener) {
Condition.INSTANCE.ensureNotNull(listener, "The listener may not be null");
listeners.remove(listener);
}
@Override
public final void onScrolled(final RecyclerView recyclerView, final int dx, final int dy) {
if (ViewCompat.isLaidOut(animatedView)) {
boolean isScrollingUp = dy < 0;
if (this.scrollingUp == null || this.scrollingUp != isScrollingUp) {
this.scrollingUp = isScrollingUp;
if (scrollingUp) {
onScrollingUp();
notifyOnScrollingUp(animatedView, dy);
} else {
onScrollingDown();
notifyOnScrollingDown(animatedView, dy);
}
}
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by http://code.google.com/p/protostuff/ ... DO NOT EDIT!
// Generated from protobuf
package org.apache.drill.exec.proto.beans;
import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.util.ArrayList;
import java.util.List;
import com.dyuproject.protostuff.GraphIOUtil;
import com.dyuproject.protostuff.Input;
import com.dyuproject.protostuff.Message;
import com.dyuproject.protostuff.Output;
import com.dyuproject.protostuff.Schema;
/**
 * Handshake message sent from a drillbit back to the user client in response to a
 * connection attempt. Carries the negotiated RPC version, the handshake
 * {@code HandshakeStatus} and, on failure, error details, as well as optional
 * server info, supported authentication mechanisms/methods and encryption settings.
 *
 * <p>NOTE(review): this class is generated by protostuff from the protobuf definition
 * (see the "DO NOT EDIT" file header) — do not hand-edit the code; regenerate from
 * the .proto instead.</p>
 */
public final class BitToUserHandshake implements Externalizable, Message<BitToUserHandshake>, Schema<BitToUserHandshake>
{

    public static Schema<BitToUserHandshake> getSchema()
    {
        return DEFAULT_INSTANCE;
    }

    public static BitToUserHandshake getDefaultInstance()
    {
        return DEFAULT_INSTANCE;
    }

    // Singleton that serves both as the default message instance and as its schema.
    static final BitToUserHandshake DEFAULT_INSTANCE = new BitToUserHandshake();

    // Message fields; protobuf field numbers start at 2 (see getFieldName()).
    private int rpcVersion;
    private HandshakeStatus status;
    private String errorId;
    private String errorMessage;
    private RpcEndpointInfos serverInfos;
    private List<String> authenticationMechanisms;
    private List<RpcType> supportedMethods;
    private Boolean encrypted;
    private int maxWrappedSize;

    public BitToUserHandshake()
    {

    }

    // getters and setters

    // rpcVersion

    public int getRpcVersion()
    {
        return rpcVersion;
    }

    public BitToUserHandshake setRpcVersion(int rpcVersion)
    {
        this.rpcVersion = rpcVersion;
        return this;
    }

    // status

    /** Returns the handshake status, defaulting to {@code SUCCESS} when unset. */
    public HandshakeStatus getStatus()
    {
        return status == null ? HandshakeStatus.SUCCESS : status;
    }

    public BitToUserHandshake setStatus(HandshakeStatus status)
    {
        this.status = status;
        return this;
    }

    // errorId

    public String getErrorId()
    {
        return errorId;
    }

    public BitToUserHandshake setErrorId(String errorId)
    {
        this.errorId = errorId;
        return this;
    }

    // errorMessage

    public String getErrorMessage()
    {
        return errorMessage;
    }

    public BitToUserHandshake setErrorMessage(String errorMessage)
    {
        this.errorMessage = errorMessage;
        return this;
    }

    // serverInfos

    public RpcEndpointInfos getServerInfos()
    {
        return serverInfos;
    }

    public BitToUserHandshake setServerInfos(RpcEndpointInfos serverInfos)
    {
        this.serverInfos = serverInfos;
        return this;
    }

    // authenticationMechanisms

    public List<String> getAuthenticationMechanismsList()
    {
        return authenticationMechanisms;
    }

    public BitToUserHandshake setAuthenticationMechanismsList(List<String> authenticationMechanisms)
    {
        this.authenticationMechanisms = authenticationMechanisms;
        return this;
    }

    // supportedMethods

    public List<RpcType> getSupportedMethodsList()
    {
        return supportedMethods;
    }

    public BitToUserHandshake setSupportedMethodsList(List<RpcType> supportedMethods)
    {
        this.supportedMethods = supportedMethods;
        return this;
    }

    // encrypted

    public Boolean getEncrypted()
    {
        return encrypted;
    }

    public BitToUserHandshake setEncrypted(Boolean encrypted)
    {
        this.encrypted = encrypted;
        return this;
    }

    // maxWrappedSize

    public int getMaxWrappedSize()
    {
        return maxWrappedSize;
    }

    public BitToUserHandshake setMaxWrappedSize(int maxWrappedSize)
    {
        this.maxWrappedSize = maxWrappedSize;
        return this;
    }

    // java serialization

    public void readExternal(ObjectInput in) throws IOException
    {
        GraphIOUtil.mergeDelimitedFrom(in, this, this);
    }

    public void writeExternal(ObjectOutput out) throws IOException
    {
        GraphIOUtil.writeDelimitedTo(out, this, this);
    }

    // message method

    public Schema<BitToUserHandshake> cachedSchema()
    {
        return DEFAULT_INSTANCE;
    }

    // schema methods

    public BitToUserHandshake newMessage()
    {
        return new BitToUserHandshake();
    }

    public Class<BitToUserHandshake> typeClass()
    {
        return BitToUserHandshake.class;
    }

    public String messageName()
    {
        return BitToUserHandshake.class.getSimpleName();
    }

    public String messageFullName()
    {
        return BitToUserHandshake.class.getName();
    }

    // All fields are optional, so every instance is considered initialized.
    public boolean isInitialized(BitToUserHandshake message)
    {
        return true;
    }

    // Reads fields from the input until field number 0 (end of message) is seen;
    // unknown field numbers are skipped via handleUnknownField().
    public void mergeFrom(Input input, BitToUserHandshake message) throws IOException
    {
        for(int number = input.readFieldNumber(this);; number = input.readFieldNumber(this))
        {
            switch(number)
            {
                case 0:
                    return;
                case 2:
                    message.rpcVersion = input.readInt32();
                    break;
                case 3:
                    message.status = HandshakeStatus.valueOf(input.readEnum());
                    break;
                case 4:
                    message.errorId = input.readString();
                    break;
                case 5:
                    message.errorMessage = input.readString();
                    break;
                case 6:
                    message.serverInfos = input.mergeObject(message.serverInfos, RpcEndpointInfos.getSchema());
                    break;
                case 7:
                    // repeated field: lazily create the backing list on first element
                    if(message.authenticationMechanisms == null)
                        message.authenticationMechanisms = new ArrayList<String>();
                    message.authenticationMechanisms.add(input.readString());
                    break;
                case 8:
                    if(message.supportedMethods == null)
                        message.supportedMethods = new ArrayList<RpcType>();
                    message.supportedMethods.add(RpcType.valueOf(input.readEnum()));
                    break;
                case 9:
                    message.encrypted = input.readBool();
                    break;
                case 10:
                    message.maxWrappedSize = input.readInt32();
                    break;
                default:
                    input.handleUnknownField(number, this);
            }
        }
    }

    // Serializes the message, emitting only non-default (non-zero / non-null) fields.
    public void writeTo(Output output, BitToUserHandshake message) throws IOException
    {
        if(message.rpcVersion != 0)
            output.writeInt32(2, message.rpcVersion, false);
        if(message.status != null)
            output.writeEnum(3, message.status.number, false);
        if(message.errorId != null)
            output.writeString(4, message.errorId, false);
        if(message.errorMessage != null)
            output.writeString(5, message.errorMessage, false);
        if(message.serverInfos != null)
            output.writeObject(6, message.serverInfos, RpcEndpointInfos.getSchema(), false);
        if(message.authenticationMechanisms != null)
        {
            for(String authenticationMechanisms : message.authenticationMechanisms)
            {
                if(authenticationMechanisms != null)
                    output.writeString(7, authenticationMechanisms, true);
            }
        }
        if(message.supportedMethods != null)
        {
            for(RpcType supportedMethods : message.supportedMethods)
            {
                if(supportedMethods != null)
                    output.writeEnum(8, supportedMethods.number, true);
            }
        }
        if(message.encrypted != null)
            output.writeBool(9, message.encrypted, false);
        if(message.maxWrappedSize != 0)
            output.writeInt32(10, message.maxWrappedSize, false);
    }

    // Maps a protobuf field number to its field name (null if unknown).
    public String getFieldName(int number)
    {
        switch(number)
        {
            case 2: return "rpcVersion";
            case 3: return "status";
            case 4: return "errorId";
            case 5: return "errorMessage";
            case 6: return "serverInfos";
            case 7: return "authenticationMechanisms";
            case 8: return "supportedMethods";
            case 9: return "encrypted";
            case 10: return "maxWrappedSize";
            default: return null;
        }
    }

    // Maps a field name back to its protobuf field number (0 if unknown).
    public int getFieldNumber(String name)
    {
        final Integer number = __fieldMap.get(name);
        return number == null ? 0 : number.intValue();
    }

    private static final java.util.HashMap<String,Integer> __fieldMap = new java.util.HashMap<String,Integer>();
    static
    {
        __fieldMap.put("rpcVersion", 2);
        __fieldMap.put("status", 3);
        __fieldMap.put("errorId", 4);
        __fieldMap.put("errorMessage", 5);
        __fieldMap.put("serverInfos", 6);
        __fieldMap.put("authenticationMechanisms", 7);
        __fieldMap.put("supportedMethods", 8);
        __fieldMap.put("encrypted", 9);
        __fieldMap.put("maxWrappedSize", 10);
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.test.rest.test;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.common.xcontent.yaml.YamlXContent;
import org.elasticsearch.test.rest.parser.DoSectionParser;
import org.elasticsearch.test.rest.parser.RestTestParseException;
import org.elasticsearch.test.rest.parser.RestTestSuiteParseContext;
import org.elasticsearch.test.rest.section.ApiCallSection;
import org.elasticsearch.test.rest.section.DoSection;
import org.hamcrest.MatcherAssert;
import java.io.IOException;
import java.util.Map;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
/**
 * Unit tests for {@link DoSectionParser}: verify that the "do" sections of a REST
 * test suite are parsed into the expected {@link DoSection} / {@link ApiCallSection}
 * structure (api name, parameters, request bodies, catch clause and headers).
 * The {@code parser} field is inherited from {@link AbstractParserTestCase}.
 */
public class DoSectionParserTests extends AbstractParserTestCase {

    public void testParseDoSectionNoBody() throws Exception {
        parser = YamlXContent.yamlXContent.createParser(
                "get:\n" +
                " index: test_index\n" +
                " type: test_type\n" +
                " id: 1"
        );

        DoSectionParser doSectionParser = new DoSectionParser();
        DoSection doSection = doSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser));
        ApiCallSection apiCallSection = doSection.getApiCallSection();

        assertThat(apiCallSection, notNullValue());
        assertThat(apiCallSection.getApi(), equalTo("get"));
        assertThat(apiCallSection.getParams().size(), equalTo(3));
        assertThat(apiCallSection.getParams().get("index"), equalTo("test_index"));
        assertThat(apiCallSection.getParams().get("type"), equalTo("test_type"));
        assertThat(apiCallSection.getParams().get("id"), equalTo("1"));
        assertThat(apiCallSection.hasBody(), equalTo(false));
    }

    public void testParseDoSectionNoParamsNoBody() throws Exception {
        parser = YamlXContent.yamlXContent.createParser(
                "cluster.node_info: {}"
        );

        DoSectionParser doSectionParser = new DoSectionParser();
        DoSection doSection = doSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser));
        ApiCallSection apiCallSection = doSection.getApiCallSection();

        assertThat(apiCallSection, notNullValue());
        assertThat(apiCallSection.getApi(), equalTo("cluster.node_info"));
        assertThat(apiCallSection.getParams().size(), equalTo(0));
        assertThat(apiCallSection.hasBody(), equalTo(false));
    }

    public void testParseDoSectionWithJsonBody() throws Exception {
        String body = "{ \"include\": { \"field1\": \"v1\", \"field2\": \"v2\" }, \"count\": 1 }";
        parser = YamlXContent.yamlXContent.createParser(
                "index:\n" +
                " index: test_1\n" +
                " type: test\n" +
                " id: 1\n" +
                " body: " + body
        );

        DoSectionParser doSectionParser = new DoSectionParser();
        DoSection doSection = doSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser));
        ApiCallSection apiCallSection = doSection.getApiCallSection();

        assertThat(apiCallSection, notNullValue());
        assertThat(apiCallSection.getApi(), equalTo("index"));
        assertThat(apiCallSection.getParams().size(), equalTo(3));
        assertThat(apiCallSection.getParams().get("index"), equalTo("test_1"));
        assertThat(apiCallSection.getParams().get("type"), equalTo("test"));
        assertThat(apiCallSection.getParams().get("id"), equalTo("1"));
        assertThat(apiCallSection.hasBody(), equalTo(true));

        assertJsonEquals(apiCallSection.getBodies().get(0), body);
    }

    public void testParseDoSectionWithJsonMultipleBodiesAsLongString() throws Exception {
        // NOTE: brackets on the type (String[]), consistent with the other tests in this class.
        String[] bodies = new String[]{
                "{ \"index\": { \"_index\":\"test_index\", \"_type\":\"test_type\", \"_id\":\"test_id\" } }\n",
                "{ \"f1\":\"v1\", \"f2\":42 }\n",
                "{ \"index\": { \"_index\":\"test_index2\", \"_type\":\"test_type2\", \"_id\":\"test_id2\" } }\n",
                "{ \"f1\":\"v2\", \"f2\":47 }\n"
        };
        parser = YamlXContent.yamlXContent.createParser(
                "bulk:\n" +
                " refresh: true\n" +
                " body: |\n" +
                " " + bodies[0] +
                " " + bodies[1] +
                " " + bodies[2] +
                " " + bodies[3]
        );

        DoSectionParser doSectionParser = new DoSectionParser();
        DoSection doSection = doSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser));
        ApiCallSection apiCallSection = doSection.getApiCallSection();

        assertThat(apiCallSection, notNullValue());
        assertThat(apiCallSection.getApi(), equalTo("bulk"));
        assertThat(apiCallSection.getParams().size(), equalTo(1));
        assertThat(apiCallSection.getParams().get("refresh"), equalTo("true"));
        assertThat(apiCallSection.hasBody(), equalTo(true));
        assertThat(apiCallSection.getBodies().size(), equalTo(4));
    }

    public void testParseDoSectionWithJsonMultipleBodiesRepeatedProperty() throws Exception {
        String[] bodies = new String[] {
                "{ \"index\": { \"_index\":\"test_index\", \"_type\":\"test_type\", \"_id\":\"test_id\" } }",
                "{ \"f1\":\"v1\", \"f2\":42 }",
        };
        // The "body" property appears twice; both occurrences must be collected.
        parser = YamlXContent.yamlXContent.createParser(
                "bulk:\n" +
                " refresh: true\n" +
                " body: \n" +
                " " + bodies[0] + "\n" +
                " body: \n" +
                " " + bodies[1]
        );

        DoSectionParser doSectionParser = new DoSectionParser();
        DoSection doSection = doSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser));
        ApiCallSection apiCallSection = doSection.getApiCallSection();

        assertThat(apiCallSection, notNullValue());
        assertThat(apiCallSection.getApi(), equalTo("bulk"));
        assertThat(apiCallSection.getParams().size(), equalTo(1));
        assertThat(apiCallSection.getParams().get("refresh"), equalTo("true"));
        assertThat(apiCallSection.hasBody(), equalTo(true));
        assertThat(apiCallSection.getBodies().size(), equalTo(bodies.length));
        for (int i = 0; i < bodies.length; i++) {
            assertJsonEquals(apiCallSection.getBodies().get(i), bodies[i]);
        }
    }

    public void testParseDoSectionWithYamlBody() throws Exception {
        parser = YamlXContent.yamlXContent.createParser(
                "search:\n" +
                " body:\n" +
                " _source: [ include.field1, include.field2 ]\n" +
                " query: { match_all: {} }"
        );
        String body = "{ \"_source\": [ \"include.field1\", \"include.field2\" ], \"query\": { \"match_all\": {} }}";

        DoSectionParser doSectionParser = new DoSectionParser();
        DoSection doSection = doSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser));
        ApiCallSection apiCallSection = doSection.getApiCallSection();

        assertThat(apiCallSection, notNullValue());
        assertThat(apiCallSection.getApi(), equalTo("search"));
        assertThat(apiCallSection.getParams().size(), equalTo(0));
        assertThat(apiCallSection.hasBody(), equalTo(true));
        assertThat(apiCallSection.getBodies().size(), equalTo(1));
        assertJsonEquals(apiCallSection.getBodies().get(0), body);
    }

    public void testParseDoSectionWithYamlMultipleBodies() throws Exception {
        parser = YamlXContent.yamlXContent.createParser(
                "bulk:\n" +
                " refresh: true\n" +
                " body:\n" +
                " - index:\n" +
                " _index: test_index\n" +
                " _type: test_type\n" +
                " _id: test_id\n" +
                " - f1: v1\n" +
                " f2: 42\n" +
                " - index:\n" +
                " _index: test_index2\n" +
                " _type: test_type2\n" +
                " _id: test_id2\n" +
                " - f1: v2\n" +
                " f2: 47"
        );
        String[] bodies = new String[4];
        bodies[0] = "{\"index\": {\"_index\": \"test_index\", \"_type\": \"test_type\", \"_id\": \"test_id\"}}";
        bodies[1] = "{ \"f1\":\"v1\", \"f2\": 42 }";
        bodies[2] = "{\"index\": {\"_index\": \"test_index2\", \"_type\": \"test_type2\", \"_id\": \"test_id2\"}}";
        bodies[3] = "{ \"f1\":\"v2\", \"f2\": 47 }";

        DoSectionParser doSectionParser = new DoSectionParser();
        DoSection doSection = doSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser));
        ApiCallSection apiCallSection = doSection.getApiCallSection();

        assertThat(apiCallSection, notNullValue());
        assertThat(apiCallSection.getApi(), equalTo("bulk"));
        assertThat(apiCallSection.getParams().size(), equalTo(1));
        assertThat(apiCallSection.getParams().get("refresh"), equalTo("true"));
        assertThat(apiCallSection.hasBody(), equalTo(true));
        assertThat(apiCallSection.getBodies().size(), equalTo(bodies.length));
        for (int i = 0; i < bodies.length; i++) {
            assertJsonEquals(apiCallSection.getBodies().get(i), bodies[i]);
        }
    }

    public void testParseDoSectionWithYamlMultipleBodiesRepeatedProperty() throws Exception {
        // The "body" property appears twice; both occurrences must be collected.
        parser = YamlXContent.yamlXContent.createParser(
                "bulk:\n" +
                " refresh: true\n" +
                " body:\n" +
                " index:\n" +
                " _index: test_index\n" +
                " _type: test_type\n" +
                " _id: test_id\n" +
                " body:\n" +
                " f1: v1\n" +
                " f2: 42\n"
        );
        String[] bodies = new String[2];
        bodies[0] = "{\"index\": {\"_index\": \"test_index\", \"_type\": \"test_type\", \"_id\": \"test_id\"}}";
        bodies[1] = "{ \"f1\":\"v1\", \"f2\": 42 }";

        DoSectionParser doSectionParser = new DoSectionParser();
        DoSection doSection = doSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser));
        ApiCallSection apiCallSection = doSection.getApiCallSection();

        assertThat(apiCallSection, notNullValue());
        assertThat(apiCallSection.getApi(), equalTo("bulk"));
        assertThat(apiCallSection.getParams().size(), equalTo(1));
        assertThat(apiCallSection.getParams().get("refresh"), equalTo("true"));
        assertThat(apiCallSection.hasBody(), equalTo(true));
        assertThat(apiCallSection.getBodies().size(), equalTo(bodies.length));
        for (int i = 0; i < bodies.length; i++) {
            assertJsonEquals(apiCallSection.getBodies().get(i), bodies[i]);
        }
    }

    public void testParseDoSectionWithYamlBodyMultiGet() throws Exception {
        parser = YamlXContent.yamlXContent.createParser(
                "mget:\n" +
                " body:\n" +
                " docs:\n" +
                " - { _index: test_2, _type: test, _id: 1}\n" +
                " - { _index: test_1, _type: none, _id: 1}"
        );
        String body = "{ \"docs\": [ " +
                "{\"_index\": \"test_2\", \"_type\":\"test\", \"_id\":1}, " +
                "{\"_index\": \"test_1\", \"_type\":\"none\", \"_id\":1} " +
                "]}";

        DoSectionParser doSectionParser = new DoSectionParser();
        DoSection doSection = doSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser));
        ApiCallSection apiCallSection = doSection.getApiCallSection();

        assertThat(apiCallSection, notNullValue());
        assertThat(apiCallSection.getApi(), equalTo("mget"));
        assertThat(apiCallSection.getParams().size(), equalTo(0));
        assertThat(apiCallSection.hasBody(), equalTo(true));
        assertThat(apiCallSection.getBodies().size(), equalTo(1));
        assertJsonEquals(apiCallSection.getBodies().get(0), body);
    }

    public void testParseDoSectionWithBodyStringified() throws Exception {
        parser = YamlXContent.yamlXContent.createParser(
                "index:\n" +
                " index: test_1\n" +
                " type: test\n" +
                " id: 1\n" +
                " body: \"{ _source: true, query: { match_all: {} } }\""
        );

        DoSectionParser doSectionParser = new DoSectionParser();
        DoSection doSection = doSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser));
        ApiCallSection apiCallSection = doSection.getApiCallSection();

        assertThat(apiCallSection, notNullValue());
        assertThat(apiCallSection.getApi(), equalTo("index"));
        assertThat(apiCallSection.getParams().size(), equalTo(3));
        assertThat(apiCallSection.getParams().get("index"), equalTo("test_1"));
        assertThat(apiCallSection.getParams().get("type"), equalTo("test"));
        assertThat(apiCallSection.getParams().get("id"), equalTo("1"));
        assertThat(apiCallSection.hasBody(), equalTo(true));
        assertThat(apiCallSection.getBodies().size(), equalTo(1));
        //stringified body is taken as is
        assertJsonEquals(apiCallSection.getBodies().get(0), "{ _source: true, query: { match_all: {} } }");
    }

    public void testParseDoSectionWithBodiesStringifiedAndNot() throws Exception {
        parser = YamlXContent.yamlXContent.createParser(
                "index:\n" +
                " body:\n" +
                " - \"{ _source: true, query: { match_all: {} } }\"\n" +
                " - { size: 100, query: { match_all: {} } }"
        );

        String body = "{ \"size\": 100, \"query\": { \"match_all\": {} } }";

        DoSectionParser doSectionParser = new DoSectionParser();
        DoSection doSection = doSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser));
        ApiCallSection apiCallSection = doSection.getApiCallSection();

        assertThat(apiCallSection.getApi(), equalTo("index"));
        assertThat(apiCallSection.getParams().size(), equalTo(0));
        assertThat(apiCallSection.hasBody(), equalTo(true));
        assertThat(apiCallSection.getBodies().size(), equalTo(2));
        //stringified body is taken as is
        assertJsonEquals(apiCallSection.getBodies().get(0), "{ _source: true, query: { match_all: {} } }");
        assertJsonEquals(apiCallSection.getBodies().get(1), body);
    }

    public void testParseDoSectionWithCatch() throws Exception {
        parser = YamlXContent.yamlXContent.createParser(
                "catch: missing\n" +
                "indices.get_warmer:\n" +
                " index: test_index\n" +
                " name: test_warmer"
        );

        DoSectionParser doSectionParser = new DoSectionParser();
        DoSection doSection = doSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser));

        assertThat(doSection.getCatch(), equalTo("missing"));
        assertThat(doSection.getApiCallSection(), notNullValue());
        assertThat(doSection.getApiCallSection().getApi(), equalTo("indices.get_warmer"));
        assertThat(doSection.getApiCallSection().getParams().size(), equalTo(2));
        assertThat(doSection.getApiCallSection().hasBody(), equalTo(false));
    }

    public void testParseDoSectionWithHeaders() throws Exception {
        parser = YamlXContent.yamlXContent.createParser(
                "headers:\n" +
                " Authorization: \"thing one\"\n" +
                " Content-Type: \"application/json\"\n" +
                "indices.get_warmer:\n" +
                " index: test_index\n" +
                " name: test_warmer"
        );

        DoSectionParser doSectionParser = new DoSectionParser();
        DoSection doSection = doSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser));

        assertThat(doSection.getApiCallSection(), notNullValue());
        assertThat(doSection.getApiCallSection().getApi(), equalTo("indices.get_warmer"));
        assertThat(doSection.getApiCallSection().getParams().size(), equalTo(2));
        assertThat(doSection.getApiCallSection().hasBody(), equalTo(false));
        assertThat(doSection.getApiCallSection().getHeaders(), notNullValue());
        assertThat(doSection.getApiCallSection().getHeaders().size(), equalTo(2));
        assertThat(doSection.getApiCallSection().getHeaders().get("Authorization"), equalTo("thing one"));
        assertThat(doSection.getApiCallSection().getHeaders().get("Content-Type"), equalTo("application/json"));
    }

    public void testParseDoSectionWithoutClientCallSection() throws Exception {
        parser = YamlXContent.yamlXContent.createParser(
                "catch: missing\n"
        );

        DoSectionParser doSectionParser = new DoSectionParser();
        try {
            doSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser));
            fail("Expected RestTestParseException");
        } catch (RestTestParseException e) {
            assertThat(e.getMessage(), is("client call section is mandatory within a do section"));
        }
    }

    public void testParseDoSectionMultivaluedField() throws Exception {
        parser = YamlXContent.yamlXContent.createParser(
                "indices.get_field_mapping:\n" +
                " index: test_index\n" +
                " type: test_type\n" +
                " field: [ text , text1 ]"
        );

        DoSectionParser doSectionParser = new DoSectionParser();
        DoSection doSection = doSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser));

        assertThat(doSection.getCatch(), nullValue());
        assertThat(doSection.getApiCallSection(), notNullValue());
        assertThat(doSection.getApiCallSection().getApi(), equalTo("indices.get_field_mapping"));
        assertThat(doSection.getApiCallSection().getParams().size(), equalTo(3));
        assertThat(doSection.getApiCallSection().getParams().get("index"), equalTo("test_index"));
        assertThat(doSection.getApiCallSection().getParams().get("type"), equalTo("test_type"));
        // multivalued fields are joined into a single comma-separated parameter
        assertThat(doSection.getApiCallSection().getParams().get("field"), equalTo("text,text1"));
        assertThat(doSection.getApiCallSection().hasBody(), equalTo(false));
        assertThat(doSection.getApiCallSection().getBodies().size(), equalTo(0));
    }

    /**
     * Parses {@code expected} as JSON (order-preserving) and asserts that it
     * equals the already-parsed {@code actual} body map.
     */
    private static void assertJsonEquals(Map<String, Object> actual, String expected) throws IOException {
        Map<String,Object> expectedMap;
        try (XContentParser parser = JsonXContent.jsonXContent.createParser(expected)) {
            expectedMap = parser.mapOrdered();
        }
        MatcherAssert.assertThat(actual, equalTo(expectedMap));
    }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.glue.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
* A workflow graph represents the complete workflow containing all the Glue components present in the workflow and all
* the directed connections between them.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/glue-2017-03-31/WorkflowGraph" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
// NOTE(review): generated by the AWS Java SDK code generator (see @Generated above);
// avoid hand-edits — they will be lost on regeneration.
public class WorkflowGraph implements Serializable, Cloneable, StructuredPojo {

    /**
     * <p>
     * A list of the the Glue components belong to the workflow represented as nodes.
     * </p>
     */
    private java.util.List<Node> nodes;
    /**
     * <p>
     * A list of all the directed connections between the nodes belonging to the workflow.
     * </p>
     */
    private java.util.List<Edge> edges;

    /**
     * <p>
     * A list of the the Glue components belong to the workflow represented as nodes.
     * </p>
     *
     * @return A list of the the Glue components belong to the workflow represented as nodes.
     */
    public java.util.List<Node> getNodes() {
        return nodes;
    }

    /**
     * <p>
     * A list of the the Glue components belong to the workflow represented as nodes.
     * </p>
     *
     * @param nodes
     *        A list of the the Glue components belong to the workflow represented as nodes.
     */
    public void setNodes(java.util.Collection<Node> nodes) {
        if (nodes == null) {
            this.nodes = null;
            return;
        }
        // defensive copy: the stored list is independent of the caller's collection
        this.nodes = new java.util.ArrayList<Node>(nodes);
    }

    /**
     * <p>
     * A list of the the Glue components belong to the workflow represented as nodes.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setNodes(java.util.Collection)} or {@link #withNodes(java.util.Collection)} if you want to override the
     * existing values.
     * </p>
     *
     * @param nodes
     *        A list of the the Glue components belong to the workflow represented as nodes.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public WorkflowGraph withNodes(Node... nodes) {
        if (this.nodes == null) {
            setNodes(new java.util.ArrayList<Node>(nodes.length));
        }
        for (Node ele : nodes) {
            this.nodes.add(ele);
        }
        return this;
    }

    /**
     * <p>
     * A list of the the Glue components belong to the workflow represented as nodes.
     * </p>
     *
     * @param nodes
     *        A list of the the Glue components belong to the workflow represented as nodes.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public WorkflowGraph withNodes(java.util.Collection<Node> nodes) {
        setNodes(nodes);
        return this;
    }

    /**
     * <p>
     * A list of all the directed connections between the nodes belonging to the workflow.
     * </p>
     *
     * @return A list of all the directed connections between the nodes belonging to the workflow.
     */
    public java.util.List<Edge> getEdges() {
        return edges;
    }

    /**
     * <p>
     * A list of all the directed connections between the nodes belonging to the workflow.
     * </p>
     *
     * @param edges
     *        A list of all the directed connections between the nodes belonging to the workflow.
     */
    public void setEdges(java.util.Collection<Edge> edges) {
        if (edges == null) {
            this.edges = null;
            return;
        }
        // defensive copy: the stored list is independent of the caller's collection
        this.edges = new java.util.ArrayList<Edge>(edges);
    }

    /**
     * <p>
     * A list of all the directed connections between the nodes belonging to the workflow.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setEdges(java.util.Collection)} or {@link #withEdges(java.util.Collection)} if you want to override the
     * existing values.
     * </p>
     *
     * @param edges
     *        A list of all the directed connections between the nodes belonging to the workflow.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public WorkflowGraph withEdges(Edge... edges) {
        if (this.edges == null) {
            setEdges(new java.util.ArrayList<Edge>(edges.length));
        }
        for (Edge ele : edges) {
            this.edges.add(ele);
        }
        return this;
    }

    /**
     * <p>
     * A list of all the directed connections between the nodes belonging to the workflow.
     * </p>
     *
     * @param edges
     *        A list of all the directed connections between the nodes belonging to the workflow.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public WorkflowGraph withEdges(java.util.Collection<Edge> edges) {
        setEdges(edges);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getNodes() != null)
            sb.append("Nodes: ").append(getNodes()).append(",");
        if (getEdges() != null)
            sb.append("Edges: ").append(getEdges());
        sb.append("}");
        return sb.toString();
    }

    // Generated structural equality; the XOR (^) expressions detect the case
    // where exactly one side's field is null.
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof WorkflowGraph == false)
            return false;
        WorkflowGraph other = (WorkflowGraph) obj;
        if (other.getNodes() == null ^ this.getNodes() == null)
            return false;
        if (other.getNodes() != null && other.getNodes().equals(this.getNodes()) == false)
            return false;
        if (other.getEdges() == null ^ this.getEdges() == null)
            return false;
        if (other.getEdges() != null && other.getEdges().equals(this.getEdges()) == false)
            return false;
        return true;
    }

    // Standard prime-31 hash over both list fields, consistent with equals().
    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getNodes() == null) ? 0 : getNodes().hashCode());
        hashCode = prime * hashCode + ((getEdges() == null) ? 0 : getEdges().hashCode());
        return hashCode;
    }

    // Shallow copy via Object.clone(); the class is Cloneable, so the
    // CloneNotSupportedException branch is unreachable in practice.
    @Override
    public WorkflowGraph clone() {
        try {
            return (WorkflowGraph) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    // Delegates wire-format marshalling of this POJO to the SDK-generated marshaller.
    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.glue.model.transform.WorkflowGraphMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.jackrabbit.oak.query.ast;
import java.text.ParseException;
import java.util.ArrayList;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.api.PropertyValue;
import org.apache.jackrabbit.oak.api.Tree;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.query.ast.ComparisonImpl.LikePattern;
import org.apache.jackrabbit.oak.query.index.FilterImpl;
import org.apache.jackrabbit.oak.spi.query.PropertyValues;
import static org.apache.jackrabbit.oak.api.Type.STRING;
import static org.apache.jackrabbit.oak.api.Type.STRINGS;
/**
* A fulltext "contains(...)" condition.
*/
public class FullTextSearchImpl extends ConstraintImpl {
private final String selectorName;
private final String propertyName;
private final StaticOperandImpl fullTextSearchExpression;
private SelectorImpl selector;
/**
 * Creates a fulltext "contains(...)" condition.
 *
 * @param selectorName the name of the selector the condition applies to
 * @param propertyName the property to search, or {@code null} to search all
 *            properties of the node ("s.*")
 * @param fullTextSearchExpression the static operand holding the fulltext
 *            search expression
 */
public FullTextSearchImpl(String selectorName, String propertyName,
        StaticOperandImpl fullTextSearchExpression) {
    this.selectorName = selectorName;
    this.propertyName = propertyName;
    this.fullTextSearchExpression = fullTextSearchExpression;
}
/**
 * Returns the static operand holding the fulltext search expression.
 */
public StaticOperandImpl getFullTextSearchExpression() {
    return fullTextSearchExpression;
}
// Visitor entry point: dispatches to the visitor's visit(FullTextSearchImpl) overload.
@Override
boolean accept(AstVisitor v) {
    return v.visit(this);
}
/**
 * Renders this condition in its query syntax form, e.g.
 * {@code contains([sel].[prop], <expression>)} or, when no property is set,
 * {@code contains([sel].*, <expression>)}.
 */
@Override
public String toString() {
    String propertyPart = propertyName != null
            ? "." + quote(propertyName) + ", "
            : ".*, ";
    return "contains(" + quote(selectorName) + propertyPart
            + getFullTextSearchExpression() + ")";
}
@Override
public boolean evaluate() {
StringBuilder buff = new StringBuilder();
if (propertyName != null) {
PropertyValue p = selector.currentProperty(propertyName);
if (p == null) {
return false;
}
appendString(buff, p);
} else {
Tree tree = getTree(selector.currentPath());
if (tree == null) {
return false;
}
for (PropertyState p : tree.getProperties()) {
appendString(buff, PropertyValues.create(p));
}
}
// TODO fulltext conditions: need a way to disable evaluation
// if a fulltext index is used, to avoid filtering too much
// (we don't know what exact options are used in the fulltext index)
// (stop word, special characters,...)
PropertyValue v = fullTextSearchExpression.currentValue();
try {
FullTextExpression expr = FullTextParser.parse(v.getValue(Type.STRING));
return expr.evaluate(buff.toString());
} catch (ParseException e) {
throw new IllegalArgumentException("Invalid expression: " + fullTextSearchExpression, e);
}
}
private static void appendString(StringBuilder buff, PropertyValue p) {
if (p.isArray()) {
for (String v : p.getValue(STRINGS)) {
buff.append(v).append(' ');
}
} else {
buff.append(p.getValue(STRING)).append(' ');
}
}
public void bindSelector(SourceImpl source) {
selector = source.getExistingSelector(selectorName);
}
@Override
public void restrict(FilterImpl f) {
if (propertyName != null) {
if (f.getSelector() == selector) {
f.restrictProperty(propertyName, Operator.NOT_EQUAL, null);
}
}
f.restrictFulltextCondition(fullTextSearchExpression.currentValue().getValue(Type.STRING));
}
@Override
public void restrictPushDown(SelectorImpl s) {
if (s == selector) {
selector.restrictSelector(this);
}
}
/**
* A parser for fulltext condition literals. The grammar is defined in the
* <a href="http://www.day.com/specs/jcr/2.0/6_Query.html#6.7.19">
* JCR 2.0 specification, 6.7.19 FullTextSearch</a>,
* as follows (a bit simplified):
* <pre>
* FullTextSearchLiteral ::= Disjunct {' OR ' Disjunct}
* Disjunct ::= Term {' ' Term}
* Term ::= ['-'] SimpleTerm
* SimpleTerm ::= Word | '"' Word {' ' Word} '"'
* </pre>
*/
public static class FullTextParser {
String text;
int parseIndex;
public static FullTextExpression parse(String text) throws ParseException {
FullTextParser p = new FullTextParser();
p.text = text;
FullTextExpression e = p.parseOr();
return e;
}
FullTextExpression parseOr() throws ParseException {
FullTextOr or = new FullTextOr();
or.list.add(parseAnd());
while (parseIndex < text.length()) {
if (text.substring(parseIndex).startsWith("OR ")) {
parseIndex += 3;
or.list.add(parseAnd());
} else {
break;
}
}
return or.simplify();
}
FullTextExpression parseAnd() throws ParseException {
FullTextAnd and = new FullTextAnd();
and.list.add(parseTerm());
while (parseIndex < text.length()) {
if (text.substring(parseIndex).startsWith("OR ")) {
break;
}
and.list.add(parseTerm());
}
return and.simplify();
}
FullTextExpression parseTerm() throws ParseException {
if (parseIndex >= text.length()) {
throw getSyntaxError("term");
}
boolean not = false;
StringBuilder buff = new StringBuilder();
char c = text.charAt(parseIndex);
if (c == '-') {
if (++parseIndex >= text.length()) {
throw getSyntaxError("term");
}
not = true;
}
boolean escaped = false;
if (c == '\"') {
parseIndex++;
while (true) {
if (parseIndex >= text.length()) {
throw getSyntaxError("double quote");
}
c = text.charAt(parseIndex++);
if (c == '\\') {
escaped = true;
if (parseIndex >= text.length()) {
throw getSyntaxError("escaped char");
}
c = text.charAt(parseIndex++);
buff.append(c);
} else if (c == '\"') {
if (parseIndex < text.length() && text.charAt(parseIndex) != ' ') {
throw getSyntaxError("space");
}
parseIndex++;
break;
} else {
buff.append(c);
}
}
} else {
do {
c = text.charAt(parseIndex++);
if (c == '\\') {
escaped = true;
if (parseIndex >= text.length()) {
throw getSyntaxError("escaped char");
}
c = text.charAt(parseIndex++);
buff.append(c);
} else if (c == ' ') {
break;
} else {
buff.append(c);
}
} while (parseIndex < text.length());
}
if (buff.length() == 0) {
throw getSyntaxError("term");
}
String text = buff.toString();
FullTextTerm term = new FullTextTerm(text, not, escaped);
return term.simplify();
}
private ParseException getSyntaxError(String expected) {
int index = Math.max(0, Math.min(parseIndex, text.length() - 1));
String query = text.substring(0, index) + "(*)" + text.substring(index).trim();
if (expected != null) {
query += "; expected: " + expected;
}
return new ParseException("FullText expression: " + query, index);
}
}
/**
* The base class for fulltext condition expression.
*/
public abstract static class FullTextExpression {
public abstract boolean evaluate(String value);
abstract FullTextExpression simplify();
}
/**
* A fulltext "and" condition.
*/
static class FullTextAnd extends FullTextExpression {
ArrayList<FullTextExpression> list = new ArrayList<FullTextExpression>();
@Override
public boolean evaluate(String value) {
for (FullTextExpression e : list) {
if (!e.evaluate(value)) {
return false;
}
}
return true;
}
@Override
FullTextExpression simplify() {
return list.size() == 1 ? list.get(0) : this;
}
@Override
public String toString() {
StringBuilder buff = new StringBuilder();
int i = 0;
for (FullTextExpression e : list) {
if (i++ > 0) {
buff.append(' ');
}
buff.append(e.toString());
}
return buff.toString();
}
}
/**
* A fulltext "or" condition.
*/
static class FullTextOr extends FullTextExpression {
ArrayList<FullTextExpression> list = new ArrayList<FullTextExpression>();
@Override
public boolean evaluate(String value) {
for (FullTextExpression e : list) {
if (e.evaluate(value)) {
return true;
}
}
return false;
}
@Override
FullTextExpression simplify() {
return list.size() == 1 ? list.get(0).simplify() : this;
}
@Override
public String toString() {
StringBuilder buff = new StringBuilder();
int i = 0;
for (FullTextExpression e : list) {
if (i++ > 0) {
buff.append(" OR ");
}
buff.append(e.toString());
}
return buff.toString();
}
}
/**
* A fulltext term, or a "not" term.
*/
static class FullTextTerm extends FullTextExpression {
private final boolean not;
private final String text;
private final String filteredText;
private final LikePattern like;
FullTextTerm(String text, boolean not, boolean escaped) {
this.text = text;
this.not = not;
// for testFulltextIntercapSQL
// filter special characters such as '
// to make tests pass, for example the
// FulltextQueryTest.testFulltextExcludeSQL,
// which searches for:
// "text ''fox jumps'' -other"
// (please note the two single quotes instead of
// double quotes before for and after jumps)
boolean pattern = false;
if (escaped) {
filteredText = text;
} else {
StringBuilder buff = new StringBuilder();
for (int i = 0; i < text.length(); i++) {
char c = text.charAt(i);
if (c == '*') {
buff.append('%');
pattern = true;
} else if (c == '?') {
buff.append('_');
pattern = true;
} else if (c == '_') {
buff.append("\\_");
pattern = true;
} else if (Character.isLetterOrDigit(c) || " +-:&".indexOf(c) >= 0) {
buff.append(c);
}
}
this.filteredText = buff.toString().toLowerCase();
}
if (pattern) {
like = new LikePattern("%" + filteredText + "%");
} else {
like = null;
}
}
@Override
public boolean evaluate(String value) {
// for testFulltextIntercapSQL
value = value.toLowerCase();
if (like != null) {
return like.matches(value);
}
if (not) {
return value.indexOf(filteredText) < 0;
}
return value.indexOf(filteredText) >= 0;
}
@Override
FullTextExpression simplify() {
return this;
}
@Override
public String toString() {
return (not ? "-" : "") + "\"" + text.replaceAll("\"", "\\\"") + "\"";
}
}
}
| |
package org.knowm.xchange.cexio;
import static org.knowm.xchange.utils.DateUtils.fromISODateString;
import com.fasterxml.jackson.databind.exc.InvalidFormatException;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.knowm.xchange.cexio.dto.account.CexIOBalance;
import org.knowm.xchange.cexio.dto.account.CexIOBalanceInfo;
import org.knowm.xchange.cexio.dto.account.CexIOFeeInfo.FeeDetails;
import org.knowm.xchange.cexio.dto.marketdata.CexIODepth;
import org.knowm.xchange.cexio.dto.marketdata.CexIOTicker;
import org.knowm.xchange.cexio.dto.marketdata.CexIOTrade;
import org.knowm.xchange.cexio.dto.trade.*;
import org.knowm.xchange.currency.Currency;
import org.knowm.xchange.currency.CurrencyPair;
import org.knowm.xchange.dto.Order;
import org.knowm.xchange.dto.Order.OrderType;
import org.knowm.xchange.dto.account.Balance;
import org.knowm.xchange.dto.account.Fee;
import org.knowm.xchange.dto.account.Wallet;
import org.knowm.xchange.dto.marketdata.OrderBook;
import org.knowm.xchange.dto.marketdata.Ticker;
import org.knowm.xchange.dto.marketdata.Trade;
import org.knowm.xchange.dto.marketdata.Trades;
import org.knowm.xchange.dto.marketdata.Trades.TradeSortType;
import org.knowm.xchange.dto.trade.LimitOrder;
import org.knowm.xchange.dto.trade.OpenOrders;
import org.knowm.xchange.dto.trade.UserTrade;
import org.knowm.xchange.utils.DateUtils;
/** Author: brox Since: 2/6/14 */
public class CexIOAdapters {

  /** Order side string used by the CEX.IO API for buy trades. */
  private static final String ORDER_TYPE_BUY = "buy";

  /**
   * Adapts a CexIOTrade to a Trade Object
   *
   * @param trade CexIO trade object
   * @param currencyPair trade currencies
   * @return The XChange Trade
   */
  public static Trade adaptTrade(CexIOTrade trade, CurrencyPair currencyPair) {
    BigDecimal amount = trade.getAmount();
    BigDecimal price = trade.getPrice();
    // the CEX.IO trade date is in epoch seconds; convert to milliseconds
    Date date = DateUtils.fromMillisUtc(trade.getDate() * 1000L);
    OrderType type = trade.getType().equals(ORDER_TYPE_BUY) ? OrderType.BID : OrderType.ASK;
    return new Trade(type, amount, currencyPair, price, date, String.valueOf(trade.getTid()));
  }

  /**
   * Adapts a CexIOTrade[] to a Trades Object
   *
   * @param cexioTrades The CexIO trade data returned by API (newest first)
   * @param currencyPair trade currencies
   * @return The trades, oldest first
   */
  public static Trades adaptTrades(CexIOTrade[] cexioTrades, CurrencyPair currencyPair) {
    List<Trade> tradesList = new ArrayList<>(cexioTrades.length);
    long lastTradeId = 0;
    // The API returns trades in reverse chronological order. Iterating the
    // array backwards yields a chronological list without the O(n^2) cost of
    // the previous "insert every trade at index 0 of an ArrayList" approach.
    for (int i = cexioTrades.length - 1; i >= 0; i--) {
      CexIOTrade trade = cexioTrades[i];
      long tradeId = trade.getTid();
      if (tradeId > lastTradeId) {
        lastTradeId = tradeId;
      }
      tradesList.add(adaptTrade(trade, currencyPair));
    }
    return new Trades(tradesList, lastTradeId, TradeSortType.SortByID);
  }

  /**
   * Adapts a CexIOTicker to a Ticker Object
   *
   * @param ticker The exchange specific ticker; must carry its currency pair
   * @return The ticker
   * @throws IllegalArgumentException if the ticker has no currency pair
   */
  public static Ticker adaptTicker(CexIOTicker ticker) {
    if (ticker.getPair() == null) {
      throw new IllegalArgumentException("Missing currency pair in ticker: " + ticker);
    }
    return adaptTicker(ticker, adaptCurrencyPair(ticker.getPair()));
  }

  /**
   * Adapts a CexIOTicker to a Ticker Object
   *
   * @param ticker The exchange specific ticker
   * @param currencyPair The currency pair (e.g. BTC/USD)
   * @return The ticker
   */
  public static Ticker adaptTicker(CexIOTicker ticker, CurrencyPair currencyPair) {
    BigDecimal last = ticker.getLast();
    BigDecimal bid = ticker.getBid();
    BigDecimal ask = ticker.getAsk();
    BigDecimal high = ticker.getHigh();
    BigDecimal low = ticker.getLow();
    BigDecimal volume = ticker.getVolume();
    // the ticker timestamp is in epoch seconds
    Date timestamp = new Date(ticker.getTimestamp() * 1000L);
    return new Ticker.Builder()
        .currencyPair(currencyPair)
        .last(last)
        .bid(bid)
        .ask(ask)
        .high(high)
        .low(low)
        .volume(volume)
        .timestamp(timestamp)
        .build();
  }

  /**
   * Adapts Cex.IO Depth to OrderBook Object
   *
   * @param depth Cex.IO order book
   * @param currencyPair The currency pair (e.g. BTC/USD)
   * @return The XChange OrderBook
   */
  public static OrderBook adaptOrderBook(CexIODepth depth, CurrencyPair currencyPair) {
    List<LimitOrder> asks = createOrders(currencyPair, OrderType.ASK, depth.getAsks());
    List<LimitOrder> bids = createOrders(currencyPair, OrderType.BID, depth.getBids());
    // fixed: use a long multiplier (1000L) for the seconds->millis
    // conversion, consistent with the other adapters and safe against
    // int overflow
    Date date = new Date(depth.getTimestamp() * 1000L);
    return new OrderBook(date, asks, bids);
  }

  /**
   * Adapts CexIOBalanceInfo to Wallet
   *
   * @param cexIOBalanceInfo CexIOBalanceInfo balance
   * @return The account info
   */
  public static Wallet adaptWallet(CexIOBalanceInfo cexIOBalanceInfo) {
    List<Balance> balances = new ArrayList<>();
    // iterate map entries directly instead of keySet() plus a get() per key
    for (Map.Entry<String, CexIOBalance> entry : cexIOBalanceInfo.getBalances().entrySet()) {
      balances.add(adaptBalance(Currency.getInstance(entry.getKey()), entry.getValue()));
    }
    return Wallet.Builder.from(balances).build();
  }

  /**
   * Adapts a raw CexIOBalance to a Balance.
   *
   * @param currency the currency of the balance
   * @param balance the raw CEX.IO balance
   * @return the adapted balance
   */
  public static Balance adaptBalance(Currency currency, CexIOBalance balance) {
    BigDecimal inOrders = balance.getOrders();
    // a missing "orders" field means nothing is frozen in open orders
    BigDecimal frozen = inOrders == null ? BigDecimal.ZERO : inOrders;
    return new Balance(currency, null, balance.getAvailable(), frozen);
  }

  /**
   * Converts raw (price, amount) rows into limit orders.
   *
   * @param currencyPair the traded pair
   * @param orderType BID or ASK
   * @param orders raw rows; each row must be a (price, amount) pair
   * @return the adapted orders; an empty list when {@code orders} is null
   */
  public static List<LimitOrder> createOrders(
      CurrencyPair currencyPair, OrderType orderType, List<List<BigDecimal>> orders) {
    List<LimitOrder> limitOrders = new ArrayList<>();
    if (orders == null) return limitOrders;
    for (List<BigDecimal> o : orders) {
      checkArgument(
          o.size() == 2, "Expected a pair (price, amount) but got {0} elements.", o.size());
      limitOrders.add(createOrder(currencyPair, o, orderType));
    }
    return limitOrders;
  }

  /** Builds a single limit order from a raw (price, amount) row. */
  public static LimitOrder createOrder(
      CurrencyPair currencyPair, List<BigDecimal> priceAndAmount, OrderType orderType) {
    return new LimitOrder(
        orderType, priceAndAmount.get(1), currencyPair, "", null, priceAndAmount.get(0));
  }

  /**
   * Throws an IllegalArgumentException with the formatted message when the
   * condition does not hold.
   *
   * @param argument the condition that must be true
   * @param msgPattern a {@link MessageFormat} pattern for the error message
   * @param msgArgs arguments for the pattern
   */
  public static void checkArgument(boolean argument, String msgPattern, Object... msgArgs) {
    if (!argument) {
      throw new IllegalArgumentException(MessageFormat.format(msgPattern, msgArgs));
    }
  }

  /** Adapts the list of open CEX.IO orders to the generic OpenOrders DTO. */
  public static OpenOrders adaptOpenOrders(List<CexIOOrder> cexIOOrderList) {
    List<LimitOrder> limitOrders = new ArrayList<>();
    for (CexIOOrder cexIOOrder : cexIOOrderList) {
      OrderType orderType =
          cexIOOrder.getType() == CexIOOrder.Type.buy ? OrderType.BID : OrderType.ASK;
      String id = Long.toString(cexIOOrder.getId());
      limitOrders.add(
          new LimitOrder(
              orderType,
              cexIOOrder.getAmount(),
              // the filled amount is whatever is no longer pending
              cexIOOrder.getAmount().subtract(cexIOOrder.getPending()),
              new CurrencyPair(cexIOOrder.getSymbol1(), cexIOOrder.getSymbol2()),
              id,
              DateUtils.fromMillisUtc(cexIOOrder.getTime()),
              cexIOOrder.getPrice()));
    }
    return new OpenOrders(limitOrders);
  }

  /**
   * Adapts an archived (historical) CEX.IO order to a UserTrade.
   *
   * @param cexIOArchivedOrder the raw archived order
   * @return the adapted user trade
   * @throws IllegalStateException when the order timestamp cannot be parsed
   */
  public static UserTrade adaptArchivedOrder(CexIOArchivedOrder cexIOArchivedOrder) {
    try {
      Date timestamp = fromISODateString(cexIOArchivedOrder.time);
      OrderType orderType = cexIOArchivedOrder.type.equals("sell") ? OrderType.ASK : OrderType.BID;
      BigDecimal originalAmount = cexIOArchivedOrder.amount;
      CurrencyPair currencyPair =
          new CurrencyPair(cexIOArchivedOrder.symbol1, cexIOArchivedOrder.symbol2);
      BigDecimal price = cexIOArchivedOrder.averageExecutionPrice;
      String id = cexIOArchivedOrder.id;
      String orderId = cexIOArchivedOrder.orderId;
      Currency feeCcy =
          cexIOArchivedOrder.feeCcy == null
              ? null
              : Currency.getInstance(cexIOArchivedOrder.feeCcy);
      BigDecimal fee = cexIOArchivedOrder.feeValue;
      return new UserTrade(
          orderType, originalAmount, currencyPair, price, timestamp, id, orderId, fee, feeCcy);
    } catch (InvalidFormatException e) {
      throw new IllegalStateException("Cannot format date " + cexIOArchivedOrder.time, e);
    }
  }

  /** Adapts an open CEX.IO order (string-based DTO) to a generic Order. */
  public static Order adaptOrder(CexIOOpenOrder cexIOOrder) {
    OrderType orderType = cexIOOrder.type.equals("sell") ? OrderType.ASK : OrderType.BID;
    BigDecimal originalAmount = new BigDecimal(cexIOOrder.amount);
    CurrencyPair currencyPair = new CurrencyPair(cexIOOrder.symbol1, cexIOOrder.symbol2);
    Date timestamp = new Date(cexIOOrder.time);
    BigDecimal limitPrice = new BigDecimal(cexIOOrder.price);
    Order.OrderStatus status = adaptOrderStatus(cexIOOrder);
    BigDecimal cumulativeAmount = null;
    try {
      BigDecimal remains = new BigDecimal(cexIOOrder.remains);
      cumulativeAmount = originalAmount.subtract(remains);
    } catch (Exception ignored) {
      // best-effort: "remains" may be absent or malformed; the cumulative
      // amount is then unknown (kept null) rather than failing the adaptation
    }
    return new LimitOrder(
        orderType,
        originalAmount,
        currencyPair,
        cexIOOrder.orderId,
        timestamp,
        limitPrice,
        null,
        cumulativeAmount,
        null,
        status);
  }

  /** Adapts a full CEX.IO order (with maker/taker details) to a LimitOrder. */
  public static LimitOrder adaptOrder(CexIOFullOrder cexIOOrder) {
    OrderType orderType = cexIOOrder.type.equals("sell") ? OrderType.ASK : OrderType.BID;
    BigDecimal originalAmount = new BigDecimal(cexIOOrder.amount);
    CurrencyPair currencyPair = new CurrencyPair(cexIOOrder.symbol1, cexIOOrder.symbol2);
    Date timestamp = new Date(cexIOOrder.time);
    BigDecimal limitPrice = new BigDecimal(cexIOOrder.price);
    Order.OrderStatus status = adaptOrderStatus(cexIOOrder);
    BigDecimal cumulativeAmount = null;
    if (cexIOOrder.remains != null) {
      BigDecimal remains = new BigDecimal(cexIOOrder.remains);
      cumulativeAmount = originalAmount.subtract(remains);
    }
    BigDecimal totalAmountMaker =
        cexIOOrder.totalAmountMaker != null
            ? new BigDecimal(cexIOOrder.totalAmountMaker)
            : BigDecimal.ZERO;
    BigDecimal totalAmountTaker =
        cexIOOrder.totalAmountTaker != null
            ? new BigDecimal(cexIOOrder.totalAmountTaker)
            : BigDecimal.ZERO;
    BigDecimal tradedAmount = totalAmountMaker.add(totalAmountTaker);
    BigDecimal averagePrice = null;
    // NOTE(review): assumes cumulativeAmount is non-zero whenever some amount
    // was traded; otherwise divide() would throw ArithmeticException
    if (cumulativeAmount != null && tradedAmount.compareTo(BigDecimal.ZERO) > 0) {
      averagePrice = tradedAmount.divide(cumulativeAmount, 2, RoundingMode.HALF_UP);
    }
    BigDecimal feeMaker =
        cexIOOrder.feeMaker != null ? new BigDecimal(cexIOOrder.feeMaker) : BigDecimal.ZERO;
    BigDecimal feeTaker =
        cexIOOrder.feeTaker != null ? new BigDecimal(cexIOOrder.feeTaker) : BigDecimal.ZERO;
    BigDecimal fee = feeMaker.add(feeTaker);
    return new LimitOrder(
        orderType,
        originalAmount,
        currencyPair,
        cexIOOrder.orderId,
        timestamp,
        limitPrice,
        averagePrice,
        cumulativeAmount,
        fee.compareTo(BigDecimal.ZERO) > 0 ? fee : null,
        status);
  }

  /**
   * From CEX API <a href="https://cex.io/rest-api#/definitions/OrderStatus">documentation </a> <br>
   * Order status can assume follow values ('d' = done, fully executed OR 'c' = canceled, not
   * executed OR 'cd' = cancel-done, partially executed OR 'a' = active, created)
   *
   * @param cexIOOrder cex raw order
   * @return OrderStatus
   */
  private static Order.OrderStatus adaptOrderStatus(CexIOOpenOrder cexIOOrder) {
    if ("c".equalsIgnoreCase(cexIOOrder.status)) return Order.OrderStatus.CANCELED;
    if ("d".equalsIgnoreCase(cexIOOrder.status)) return Order.OrderStatus.FILLED;
    if ("a".equalsIgnoreCase(cexIOOrder.status)) {
      try {
        BigDecimal remains = new BigDecimal(cexIOOrder.remains);
        BigDecimal amount = new BigDecimal(cexIOOrder.amount);
        if (remains.compareTo(BigDecimal.ZERO) > 0 && remains.compareTo(amount) < 0)
          return Order.OrderStatus.PARTIALLY_FILLED;
        else return Order.OrderStatus.PENDING_NEW;
      } catch (NumberFormatException ex) {
        // unparsable amounts: treat the active order as not yet filled
        return Order.OrderStatus.PENDING_NEW;
      }
    }
    if ("cd".equalsIgnoreCase(cexIOOrder.status)) {
      try {
        BigDecimal remains = new BigDecimal(cexIOOrder.remains);
        BigDecimal amount = new BigDecimal(cexIOOrder.amount);
        if (remains.compareTo(BigDecimal.ZERO) > 0 && remains.compareTo(amount) < 0)
          return Order.OrderStatus.PARTIALLY_CANCELED;
        else return Order.OrderStatus.CANCELED;
      } catch (NumberFormatException ex) {
        return Order.OrderStatus.CANCELED;
      }
    }
    return Order.OrderStatus.UNKNOWN;
  }

  /**
   * CexIO position status is not documented, testing API we can infer that they are similar to
   * order status {@link #adaptOrderStatus(CexIOOpenOrder)}
   *
   * @param cexioPosition cex raw order
   * @return OrderStatus
   */
  public static Order.OrderStatus adaptPositionStatus(CexioPosition cexioPosition) {
    if ("c".equalsIgnoreCase(cexioPosition.getStatus())) return Order.OrderStatus.CANCELED;
    if ("d".equalsIgnoreCase(cexioPosition.getStatus())) return Order.OrderStatus.FILLED;
    if ("a".equalsIgnoreCase(cexioPosition.getStatus())) return Order.OrderStatus.NEW;
    return Order.OrderStatus.UNKNOWN;
  }

  private static CurrencyPair adaptCurrencyPair(String pair) {
    // Currency pair is in the format: "BCH:USD"
    return new CurrencyPair(pair.replace(":", "/"));
  }

  private static Fee adaptFeeDetails(FeeDetails feeDetails) {
    // It might be worth expanding the Fee structure in xchange-core to contain both buy and sell
    // fees
    return new Fee(
        feeDetails.getBuyMaker().max(feeDetails.getSellMaker()),
        feeDetails.getBuy().max(feeDetails.getSell()));
  }

  /** Adapts the per-pair fee details map to the generic per-pair Fee map. */
  public static Map<CurrencyPair, Fee> adaptDynamicTradingFees(Map<CurrencyPair, FeeDetails> fees) {
    Map<CurrencyPair, Fee> result = new HashMap<>();
    for (Map.Entry<CurrencyPair, FeeDetails> entry : fees.entrySet()) {
      result.put(entry.getKey(), adaptFeeDetails(entry.getValue()));
    }
    return result;
  }
}
| |
/*
* Copyright (C) 2010 Preston Lacey http://javaflacencoder.sourceforge.net/
* All Rights Reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
package javaFlacEncoder;
import java.util.Vector;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.io.IOException;
import java.io.FileOutputStream;
import java.io.FileNotFoundException;
import java.io.PrintWriter;
import java.util.concurrent.LinkedBlockingQueue;
/**
* This class defines a FLAC Encoder with a simple interface for enabling FLAC
* encoding support in an application. This class is appropriate for use in the
* case where you have raw pcm audio samples that you wish to encode. Currently,
* fixed-blocksize only is implemented, and the "Maximum Block Size" set in the
* StreamConfiguration object is used as the actual block size.
* <br><br><br>
* An encoding process is simple, and should follow these steps:<br>
* <BLOCKQUOTE>
* 1) Set StreamConfiguration to appropriate values. After a stream is opened,
* this must not be altered until the stream is closed.<br>
* 2) Set FLACOutputStream, object to write results to.<br>
* 3) Open FLAC Stream<br>
* 4) Set EncodingConfiguration(if defaults are insufficient).<br>
* 5) Add samples to encoder<br>
* 6) Encode Samples<br>
* 7) Close stream<br>
* (note: steps 4,5, and 6 may be done repeatedly, in any order. However, see
* related method documentation for info on concurrent use)
* </BLOCKQUOTE><br><br>
*
* @author Preston Lacey
*/
public class FLACEncoder {
    /* For debugging: higher level means more output */
    int DEBUG_LEV = 0;
    /**
     * Maximum Threads to use for encoding frames(more threads than this will
     * exist, these threads are reserved for encoding of frames only).
     */
    public int MAX_THREADED_FRAMES = 2;
    /* encodingConfig: Must never stay null(default supplied by constructor) */
    EncodingConfiguration encodingConfig = null;
    /* streamConfig: Must never stay null(default supplied by constructor) */
    StreamConfiguration streamConfig = null;
    /* Set true if frames are actively being encoded(can't change settings
     * while this is true) */
    volatile Boolean isEncoding = false;
    /* synchronize on this object when encoding or changing configurations */
    private final Object configWriteLock = new Object();
    /* Store for blocks which are ready to encode. Always insert at the end,
     * pop from the head */
    private Vector<int[]> blockQueue = null;
    /* Stores samples for a block which is not yet full(not ready for queue) */
    private int[] unfinishedBlock = null;
    /* Stores count of inter-frame samples in unfinishedBlock */
    private int unfinishedBlockUsed = 0;
    /* Object to write results to. Must be set before opening stream */
    private FLACOutputStream out = null;
    /* contains FLAC_id used in the flac stream header to signify FLAC format */
    EncodedElement FLAC_id = FLACStreamIdentifier.getIdentifier();
    /* Frame object used to encode when not using threads */
    Frame frame = null;
    /* md object used to calculate the running MD5 hash of all input audio */
    MessageDigest md = null;
    /* threadManager used with threaded encoding */
    BlockThreadManager2 threadManager = null;
    /* threadedFrames keeps track of frames given to threadManager. We must
     * still update the configurations of them as needed. If we ever create new
     * frames(e.g, when changing stream configuration), we must create a new
     * threadManager as well.
     */
    Frame[] threadedFrames = null;
    /* minimum frame size seen so far. Used in the stream header */
    int minFrameSize = 0x7FFFFFFF;
    /* maximum frame size seen so far. Used in stream header */
    int maxFrameSize = 0;
    /* minimum block size used so far. Used in stream header */
    int minBlockSize = 0x7FFFFFFF;
    /* maximum block size used so far. Used in stream header */
    int maxBlockSize = 0;
    /* total number of samples encoded to output. Used in stream header */
    volatile long samplesInStream;
    /* next frame number to use */
    long nextFrameNumber = 0;
    /* position of the stream-info header in the output stream(needed so we
     * can update the header info(md5, minBlockSize, etc) once encoding is
     * done)
     */
    long streamHeaderPos = 0;
    /* should be set when any error has occurred that invalidates results.
     * NOTE(review): this flag is not consistently maintained; do not rely
     * on it yet.
     */
    boolean error = false;
    /* store used encodeRequests so we don't have to reallocate space for them*/
    LinkedBlockingQueue<BlockEncodeRequest> usedBlockEncodeRequests = null;
    LinkedBlockingQueue<int[]> usedIntArrays = null;
    /**
     * Constructor which creates a new encoder object with the default settings.
     * The StreamConfiguration should be reset to match the audio used and an
     * output stream set, but the default EncodingConfiguration should be ok for
     * most purposes. When using threaded encoding, the default number of
     * threads used is equal to FLACEncoder.MAX_THREADED_FRAMES.
     */
    public FLACEncoder() {
        usedBlockEncodeRequests = new LinkedBlockingQueue<BlockEncodeRequest>();
        usedIntArrays = new LinkedBlockingQueue<int[]>();
        blockQueue = new Vector<int[]>();
        StreamConfiguration defaultStreamConfig = new StreamConfiguration();
        encodingConfig = new EncodingConfiguration();
        frame = new Frame(defaultStreamConfig);
        frame.registerConfiguration(encodingConfig);
        //frameThread = new FrameThread(frame);
        // pre-create the frame encoders for threaded encoding; they share the
        // default stream configuration until setStreamConfiguration is called
        threadManager = new BlockThreadManager2(this);
        threadedFrames = new Frame[MAX_THREADED_FRAMES];
        for(int i = 0; i < MAX_THREADED_FRAMES; i++) {
            threadedFrames[i] = new Frame(defaultStreamConfig);
            threadManager.addFrameThread(threadedFrames[i]);
        }
        try {
            // JCA algorithm names are case-insensitive, so "md5" == "MD5"
            md = MessageDigest.getInstance("md5");
            reset();
        }catch(NoSuchAlgorithmException e) {
            // MD5 is a required algorithm on compliant JREs, so this path is
            // effectively unreachable; if it ever happens, md stays null and
            // later encoding would fail.
            System.err.println("Critical Error: No md5 algorithm exists. " +
                "This encoder can not function.");
        }
    }
    /**
     * Set the encoding configuration to that specified.
     * NOTE(review): despite what the original documentation claimed, the
     * given configuration object is stored by reference, NOT copied; callers
     * must not mutate it while an encode is in progress.
     *
     * @param ec EncodingConfiguration to use.
     * @return true if the configuration was altered; false if the configuration
     * cannot be altered(such as if another thread is currently encoding).
     */
    public boolean setEncodingConfiguration(EncodingConfiguration ec) {
        boolean changed = false;
        if(!isEncoding && ec != null) {
            synchronized(configWriteLock) {
                encodingConfig = ec;
                frame.registerConfiguration(ec);
                // keep the threaded frame encoders in sync with the new config
                for(int i = 0; i < MAX_THREADED_FRAMES; i++)
                    threadedFrames[i].registerConfiguration(ec);
            }
            changed = true;
        }
        return changed;
    }
    /**
     * Set the stream configuration to that specified.
     * NOTE(review): despite what the original documentation claimed, the
     * given configuration object is stored by reference, NOT copied; callers
     * must not alter it until the stream is closed.
     * This method must not be called in the middle of a stream, stream contents
     * may become invalid. A call to setStreamConfiguration() should
     * be followed next by setting the output stream if not yet done, and then
     * calling openFLACStream();
     *
     * @param sc StreamConfiguration to use.
     * @return true if the configuration was altered; false if the configuration
     * cannot be altered(such as if another thread is currently encoding).
     */
    public boolean setStreamConfiguration(StreamConfiguration sc) {
        boolean changed = false;
        if(!isEncoding && sc != null) {
            synchronized(configWriteLock) {
                streamConfig = sc;
                // the frame encoders depend on the stream configuration, so
                // they (and the thread manager that owns them) are recreated
                frame = new Frame(sc);
                threadManager = new BlockThreadManager2(this);
                threadedFrames = new Frame[MAX_THREADED_FRAMES];
                for(int i = 0; i < MAX_THREADED_FRAMES; i++) {
                    threadedFrames[i] = new Frame(sc);
                    threadManager.addFrameThread(threadedFrames[i]);
                }
                // re-register the current encoding config on the new frames
                this.setEncodingConfiguration(this.encodingConfig);
            }
            changed = true;
        }
        return changed;
    }
/**
* Reset the values to their initial state, in preparation of starting a
* new stream.
*/
private void reset() {
//reset stream
md.reset();
minFrameSize = minFrameSize = 0x7FFFFFFF;
maxFrameSize = 0;
minBlockSize = 0x7FFFFFFF;
maxBlockSize = 0;
samplesInStream = 0;
streamHeaderPos = 0;
unfinishedBlock = null;
unfinishedBlockUsed = 0;
blockQueue.clear();
nextFrameNumber = 0;
}
/**
* Close the current FLAC stream. Updates the stream header information.
* If called on an open stream, operation is undefined. Do not do this.
*/
private void closeFLACStream() {
//reset position in output stream to beginning.
//re-write the updated stream info.
if(DEBUG_LEV > 0)
System.err.println("FLACEncoder::closeFLACStream : Begin");
streamConfig.setMaxBlockSize(maxBlockSize);
streamConfig.setMinBlockSize(minBlockSize);
byte[] md5 = md.digest();
EncodedElement streamInfo = MetadataBlockStreamInfo.getStreamInfo(
streamConfig, minFrameSize, maxFrameSize, samplesInStream,
md5);
out.seek(streamHeaderPos);
try {
this.writeDataToOutput(streamInfo);
}catch(IOException e) {
System.err.println("FLACEncoder::closeFLACStream(): ERROR WRiting to output");
}
}
    /**
     * Begin a new FLAC stream. Prior to calling this, you must have already
     * set the StreamConfiguration and the output stream, both of which must not
     * change until encoding is finished and the stream is closed.
     * @throws IOException if there is an error writing the headers to output.
     */
    public void openFLACStream() throws IOException {
        //reset all data.
        reset();
        //write FLAC stream identifier
        out.write(FLAC_id.getData(), 0, FLAC_id.getUsableBits()/8);
        //write stream headers. These are placeholders that must be updated
        //at close of stream(sizes, sample count and MD5 are not yet known).
        byte[] md5Hash = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0};//blank hash. Don't know it yet.
        EncodedElement streamInfo = MetadataBlockStreamInfo.getStreamInfo(
                streamConfig, minFrameSize, maxFrameSize, samplesInStream,
                md5Hash);
        //mark stream info location(so we can return to it and re-write headers,
        // assuming stream is seekable). Then write header.
        int size = streamInfo.getUsableBits()/8;
        EncodedElement metadataBlockHeader =
                MetadataBlockHeader.getMetadataBlockHeader(true,
                        MetadataBlockHeader.MetadataBlockType.STREAMINFO, size);
        this.writeDataToOutput(metadataBlockHeader);
        streamHeaderPos = out.getPos();
        out.write(streamInfo.getData(), 0, size);
    }
/**
* Add samples to the encoder, so they may then be encoded. This method uses
* breaks the samples into blocks, which will then be made available to
* encode.
*
* @param samples Array holding the samples to encode. For all multi-channel
* audio, the samples must be interleaved in this array. For example, with
* stereo: sample 0 will belong to the first channel, 1 the second, 2 the
* first, 3 the second, etc. Samples are interpreted according to the
* current configuration(for things such as channel and bits-per-sample).
*
* @param count Number of interchannel samples to add. For example, with
* stero: if this is 4000, then "samples" must contain 4000 left samples and
* 4000 right samples, interleaved in the array.
*
* @return true if samples were added, false otherwise. A value of false may
* result if "count" is set to a size that is too large to be valid with the
* given array and current configuration.
*/
    public boolean addSamples(int[] samples, int count) {
        boolean added = false;
        //get number of channels
        int channels = streamConfig.getChannelCount();
        int maxFrames = samples.length/channels;//input wav frames, not flac
        int validSamples = count*channels;//total interleaved values requested
        if(DEBUG_LEV > 0) {
            System.err.println("addSamples(...): ");
            System.err.println("maxFrames: "+maxFrames);
            System.err.println("validSamples: "+validSamples);
            if(DEBUG_LEV > 10)
                System.err.println("count:"+count+":channels:"+channels);
        }
        if(count <= maxFrames) {//sample count is ok
            added = true;
            //break sample input into appropriately sized blocks
            int samplesUsed = 0;//number of input samples used
            if(unfinishedBlock != null) {
                //finish off last block first.
                //unfinishedBlock holds a partially-filled block left over from
                //a previous addSamples call; top it up before making new ones.
                if(DEBUG_LEV > 10) {
                    System.err.println("addSamples(...): filling unfinishedBlock");
                }
                int blockSize = streamConfig.getMaxBlockSize();
                int[] block = unfinishedBlock;
                //remaining capacity of the partial block, in interleaved values.
                int unfinishedBlockRemaining = blockSize*channels-unfinishedBlockUsed;
                if(unfinishedBlockRemaining <=0) {
                    //NOTE(review): internal invariant violation aborts the whole
                    //JVM; an IllegalStateException would be friendlier to hosts.
                    System.err.println("MAJOR ERROR HERE. Unfinsihed block remaining invalid: "+
                            unfinishedBlockRemaining);
                    System.exit(-1);
                }
                int nextSampleStop = samplesUsed+unfinishedBlockRemaining;
                if(nextSampleStop > validSamples) {
                    nextSampleStop = validSamples;
                }
                //copy input into the tail of the partial block.
                //(samplesUsed is 0 here, so "i < nextSampleStop" is equivalent
                // to "samplesUsed+i < nextSampleStop".)
                int i;
                for(i = 0; i < unfinishedBlockRemaining && i < nextSampleStop; i++) {
                    block[unfinishedBlockUsed+i] = samples[samplesUsed+i];
                }
                unfinishedBlockUsed += i;
                samplesUsed = nextSampleStop;
                if(unfinishedBlockUsed == blockSize*channels) {
                    //partial block is now full: queue it for encoding.
                    blockQueue.add(block);
                    unfinishedBlockUsed = 0;
                    unfinishedBlock = null;
                }
                else if(unfinishedBlockUsed > blockSize*channels) {
                    //NOTE(review): another invariant-violation JVM abort.
                    System.err.println("Error: FLACEncoder.addSamples(...) " +
                            "unfinished block = "+unfinishedBlockUsed);
                    System.exit(-1);
                }
            }
            //consume the rest of the input, one full block at a time.
            while(samplesUsed < validSamples) {
                if(DEBUG_LEV > 20)
                    System.err.println("addSamples(...): creating new block");
                //copy values to appropriate locations
                //add each finished array to the queue
                /*<implement_for_variable_blocksize>
                 * blockSize = this.getNextBlockSize(samples, validSamples);*/
                int blockSize = streamConfig.getMaxBlockSize();
                //reuse a pooled array when possible rather than allocating.
                int[] block = getBlock(blockSize*channels);
                int nextSampleStop = samplesUsed+blockSize*channels;
                if(nextSampleStop > validSamples) {
                    //We don't have enough samples to make a full block.
                    if(DEBUG_LEV > 20)
                        System.err.println("addSamples(...): setting partial Block");
                    //fill unfinishedBlock; it is NOT queued — it will be topped
                    //up by a later addSamples call (or flushed at stream end).
                    nextSampleStop = validSamples;
                    unfinishedBlock = block;
                    unfinishedBlockUsed = validSamples-samplesUsed;
                }
                else {
                    blockQueue.add(block);
                }
                for(int i = 0; i < nextSampleStop-samplesUsed; i++)
                    block[i] = samples[samplesUsed+i];
                samplesUsed = nextSampleStop;
            }
        }
        else {
            //count claims more frames than the input array can hold.
            System.err.println("Error: FLACEncoder.addSamples "+
                    "given count out of bounds");
        }
        if(DEBUG_LEV > 20) {
            System.err.println("Blocks stored: " +blockQueue.size());
            System.err.println("Samples in partial block: " + unfinishedBlockUsed);
        }
        return added;
    }
/**
* This function is for development purposes only. It likely serves no
* further point and perhaps is worthy of being removed.
* @param block
* @param count
* @param iter
*/
private void outputBlockToFile(int[] block, int count, int iter) {
//DEBUGGING, for development only!
try {
FileOutputStream fout = new FileOutputStream("samples.txt");
//OutputStreamWriter tOut = new OutputStreamWriter(fout);
PrintWriter pOut = new PrintWriter(fout);
for(int i = 0; i < count; i++) {
String temp = Integer.toString(i)+":";
temp = temp + Integer.toString(block[i*iter]);
System.err.print(temp);
pOut.println(temp);
}
pOut.flush();
pOut.close();
fout.close();
System.exit(0);
System.err.println("sample file written:");
}
catch(FileNotFoundException e) {
System.err.println("Error creating file");
}catch(IOException e) {
System.err.println("Error handling file");
}
}
/**
* Notify the encoder that a BlockEncodeRequest has finished, and is now
* ready to be written to file. The encoder expects that these requests come
* back in the same order the encoder sent them out. This is intended to
* be used in threading mode only at the moment(sending them to a
* BlockThreadManager object)
*
* @param ber BlockEncodeRequest that is ready to write to file.
*/
public void blockFinished(BlockEncodeRequest ber) {
synchronized (ber) {
try {
writeDataToOutput(ber.result.getNext());
}catch(IOException e) {
System.err.println("blockFinished: Error writing to output");
e.printStackTrace();
error = true;
}
//update encodedCount and count, and blocks, MD5
if(ber.count != ber.encodedSamples) {
System.err.println("Error encoding frame number: "+
ber.frameNumber+", FLAC stream potentially invalid");
}
samplesInStream += ber.encodedSamples;
if(ber.encodedSamples > maxBlockSize)
maxBlockSize = ber.encodedSamples;
if(ber.encodedSamples < minBlockSize)
minBlockSize = ber.encodedSamples;
int frameSize = ber.result.getTotalBits()%8;
if(frameSize > maxFrameSize) maxFrameSize = frameSize;
if(frameSize < minFrameSize) minFrameSize = frameSize;
addSamplesToMD5(ber.samples, ber.encodedSamples, ber.skip+1,
streamConfig.getBitsPerSample());
usedIntArrays.add(ber.samples);
ber.samples = null;
ber.result = null;
usedBlockEncodeRequests.add(ber);
}
}
/**
* Attempt to Encode a certain number of samples(threaded version).
* Encodes as close to count as possible. Uses multiple threads to speed up
* encoding.
*
* @param count number of samples to attempt to encode. Actual number
* encoded may be greater or less if count does not end on a block boundary.
*
* @param end true to finalize stream after encode, false otherwise. If set
* to true, no more encoding must be attempted until a new stream is began.
*
* @return number of samples encoded. This may be greater or less than
* requested count if count does not end on a block boundary. This is NOT an
* error condition.
*
* @throws IOException if there was an error writing the results to file.
*/
public int t_encodeSamples(int count, boolean end) throws IOException {
int encodedCount = 0;
//pull blocks from the queue, check size, and encode if size is smaller
//than remaining count.
int blocksLeft = blockQueue.size();
int channels = streamConfig.getChannelCount();
while(count > 0 && blocksLeft > 0) {
if(DEBUG_LEV > 20) {
System.err.println("while: count:blocksLeft : "+
count+":"+blocksLeft);
}
int[] block = blockQueue.elementAt(0);
if(block.length <= count*channels) {
//encode
int encodedSamples = block.length/channels;//interchannel samples
EncodedElement result = new EncodedElement();
//BlockEncodeRequest ber = new BlockEncodeRequest();
BlockEncodeRequest ber = usedBlockEncodeRequests.poll();
if(ber == null) ber = new BlockEncodeRequest();
ber.setAll(block, encodedSamples, 0,channels-1, nextFrameNumber++,
result);
threadManager.addRequest(ber);
blockQueue.remove(0);
blocksLeft--;
count -= encodedSamples;
encodedCount += encodedSamples;
}
else {
//can't encode a full block.
System.err.println("Error with block in queue?");
break;
}
}
//block while requests remain!!!!
threadManager.blockWhileQueueExceeds(5);
if(end) {
threadManager.stop();
threadManager.blockWhileQueueExceeds(0);
}
//handle "end" setting
if(end && count >= 0 && this.samplesAvailableToEncode() >= count) {
//handle remaining count
if(count > 0 && unfinishedBlockUsed >= count) {
int[] block = null;
if(blockQueue.size() > 0) {
block = blockQueue.elementAt(0);
}
else
block = unfinishedBlock;
int encodedSamples = count;//interchannel samples
EncodedElement result = new EncodedElement();
int encoded = frame.encodeSamples(block, encodedSamples, 0,
channels-1, result, nextFrameNumber);
if(encoded != encodedSamples) {
//ERROR! Return immediately. Do not add results to output.
System.err.println("FLACEncoder::encodeSamples : (end)Error in encoding");
count = -1;
}
else {
writeDataToOutput(result.getNext());
//update encodedCount and count
encodedCount += encodedSamples;
count -= encodedSamples;
//addSamplesToMD5(block, encodedSamples, 0,channels);
addSamplesToMD5(block, encodedSamples, channels,
streamConfig.getBitsPerSample());
samplesInStream += encodedSamples;
nextFrameNumber++;
if(encodedSamples > maxBlockSize) maxBlockSize = encodedSamples;
if(encodedSamples < minBlockSize) minBlockSize = encodedSamples;
int frameSize = result.getTotalBits()%8;
if(frameSize > maxFrameSize) maxFrameSize = frameSize;
if(frameSize < minFrameSize) minFrameSize = frameSize;
//System.err.println("Count: " + count);
}
}
//close stream if all requested were written.
if(count == 0) {
closeFLACStream();
}
}
else if (end == true) {
System.err.println("End set but not done. Error likely. "+
"This can happen if number of samples requested to " +
"encode exeeds available samples");
}
return encodedCount;
}
/**
* Attempt to Encode a certain number of samples. Encodes as close to count
* as possible.
*
* @param count number of samples to attempt to encode. Actual number
* encoded may be greater or less if count does not end on a block boundary.
*
* @param end true to finalize stream after encode, false otherwise. If set
* to true, no more encoding must be attempted until a new stream is began.
* @return number of samples encoded. This may be greater or less than
* requested count if count does not end on a block boundary. This is NOT an
* error condition.
* @throws IOException if there was an error writing the results to file.
*/
public int encodeSamples(int count, boolean end) throws IOException {
// System.err.println("starting encoding :");
int encodedCount = 0;
//pull blocks from the queue, check size, and encode if size is smaller
//than remaining count.
int blocksLeft = blockQueue.size();
int channels = streamConfig.getChannelCount();
while(count > 0 && blocksLeft > 0) {
if(DEBUG_LEV > 20) {
System.err.println("while: count:blocksLeft : "+
count+":"+blocksLeft);
}
int[] block = blockQueue.elementAt(0);
if(block.length <= count*channels) {
//encode
int encodedSamples = block.length/channels;//interchannel samples
//count -= encodedSamples;
EncodedElement result = new EncodedElement();
int encoded = frame.encodeSamples(block, encodedSamples, 0,
channels-1, result, nextFrameNumber);
if(encoded != encodedSamples) {
//ERROR! Return immediately. Do not add results to output.
System.err.println("FLACEncoder::encodeSamples : Error in encoding");
count = -1;
break;
}
//write encoded results to output.
//System.err.println("writing frame: "+nextFrameNumber);
writeDataToOutput(result.getNext());
//update encodedCount and count, and blocks, MD5
blockQueue.remove(0);
blocksLeft--;
encodedCount += encodedSamples;
//System.err.println("Count pre: " + count);
count -= encodedSamples;
samplesInStream += encodedSamples;
nextFrameNumber++;
if(encodedSamples > maxBlockSize)
maxBlockSize = encodedSamples;
if(encodedSamples < minBlockSize)
minBlockSize = encodedSamples;
int frameSize = result.getTotalBits()%8;
if(frameSize > maxFrameSize) maxFrameSize = frameSize;
if(frameSize < minFrameSize) minFrameSize = frameSize;
//addSamplesToMD5(block, encodedSamples, 0,channels);
addSamplesToMD5(block, encodedSamples, channels,
streamConfig.getBitsPerSample());
usedIntArrays.add(block);
//System.err.println("Count post: " + count);
}
else {
if(blockQueue.size() > 0) {
System.err.println("Can't encode full but blocksize != 0");
System.err.println("Blockqueue size: "+blockQueue.size());
System.err.println("Block size: "+block.length);
System.err.println("Count: "+count);
}
//can't encode a full block.
break;
}
}
//handle "end" setting
if(end)
threadManager.stop();
if(end && count >= 0 && this.samplesAvailableToEncode() >= count) {
//handle remaining count
if(count > 0 && unfinishedBlockUsed >= count) {
int[] block = null;
if(blockQueue.size() > 0) {
block = blockQueue.elementAt(0);
}
else
block = unfinishedBlock;
int encodedSamples = count;//interchannel samples
EncodedElement result = new EncodedElement();
int encoded = frame.encodeSamples(block, encodedSamples, 0,
channels-1, result, nextFrameNumber);
if(encoded != encodedSamples) {
//ERROR! Return immediately. Do not add results to output.
System.err.println("FLACEncoder::encodeSamples : (end)Error in encoding");
count = -1;
}
else {
writeDataToOutput(result.getNext());
//update encodedCount and count
encodedCount += encodedSamples;
count -= encodedSamples;
//addSamplesToMD5(block, encodedSamples, 0,channels);
addSamplesToMD5(block, encodedSamples, channels,
streamConfig.getBitsPerSample());
samplesInStream += encodedSamples;
nextFrameNumber++;
if(encodedSamples > maxBlockSize) maxBlockSize = encodedSamples;
if(encodedSamples < minBlockSize) minBlockSize = encodedSamples;
int frameSize = result.getTotalBits()%8;
if(frameSize > maxFrameSize) maxFrameSize = frameSize;
if(frameSize < minFrameSize) minFrameSize = frameSize;
System.err.println("Count: " + count);
}
}
//close stream if all requested were written.
if(count == 0) {
closeFLACStream();
}
}
else if (end == true) {
System.err.println("End set but not done. Error likely." );
}
return encodedCount;
}
/**
* Add samples to the MD5 hash.
* CURRENTLY ONLY MAY WORK FOR: sample sizes which are divisible by 8. Need
* to create some audio to test with.
* @param samples
* @param count
* @param channels
*/
private void addSamplesToMD5(int[] samples, int count, int channels,
int sampleSize) {
int bytesPerSample = sampleSize/8;
if(sampleSize%8 != 0)
bytesPerSample++;
byte[] dataMD5 = new byte[count*bytesPerSample*channels];
for(int i = 0; i < count*channels; i++) {
for(int x = 0; x < bytesPerSample; x++) {
dataMD5[i*bytesPerSample+x] = (byte)(samples[i] >> x*8);
}
}
md.update(dataMD5, 0, count*bytesPerSample*channels);
}
/**
* Write the data stored in an EncodedElement to the output stream.
* All data will be written along byte boundaries, but the elements in the
* given list need not end on byte boundaries. If the data of an element
* does not end on a byte boundary, then the space remaining in that last
* byte will be used as an offset, and merged(using an "OR"), with the first
* byte of the following element.
*
* @param data
* @return
* @throws IOException
*/
private int writeDataToOutput(EncodedElement data) throws IOException {
int writtenBytes = 0;
int offset = 0;
EncodedElement current = data;
int currentByte = 0;
byte unfullByte = 0;
byte[] eleData = null;
int usableBits = 0;
int lastByte = 0;
while(current != null) {
//System.err.println("current != null" );
eleData = current.getData();
usableBits = current.getUsableBits();
currentByte = 0;
//if offset is not zero, merge first byte with existing byte
if(offset != 0) {
unfullByte = (byte)(unfullByte | eleData[currentByte++]);
out.write(unfullByte);
}
//write all full bytes of element.
lastByte = usableBits/8;
//System.err.println("eleData.length:currentByte:length : "+
// eleData.length+":"+currentByte+":"+(lastByte-currentByte));
if(lastByte > 0)
out.write(eleData, currentByte, lastByte-currentByte);
//save non-full byte(if present), and set "offset" for next element.
//offset = usableBits - lastByte*8;
offset = usableBits %8;
if(offset != 0) {
unfullByte = eleData[lastByte];
}
//update current.
current = current.getNext();
}
//if non-full byte remains. write.
if(offset != 0) {
out.write(eleData, lastByte, 1);
}
return writtenBytes;
}
/**
* Get number of samples which are ready to encode. More samples may exist
* in the encoder as a partial block. Use samplesAvailableToEncode() if you
* wish to include those as well.
* @return number of samples in full blocks, ready to encode.
*/
public int fullBlockSamplesAvailableToEncode() {
int available = 0;
int channels = streamConfig.getChannelCount();
for(int[] block: blockQueue) {
available += block.length/channels;
}
return available;
}
/**
* Get number of samples that are available to encode. This includes samples
* which are in a partial block(and so would only be written if "end" was
* set true in encodeSamples(int count,boolean end);
* @return number of samples availble to encode.
*/
public int samplesAvailableToEncode() {
int available = 0;
//sum all in blockQueue
int channels = streamConfig.getChannelCount();
for(int[] block : blockQueue) {
available += block.length/channels;
}
//add remaining in unfinishedBlock.
available += unfinishedBlockUsed;
return available;
}
    /**
     * Set the output stream to use. This must not be called while an encode
     * process is active, since in-flight frames would be split across streams.
     *
     * @param fos output stream to use. This must not be null.
     */
    public void setOutputStream(FLACOutputStream fos) {
        out = fos;
    }
public int[] getBlock(int size) {
int[] result = usedIntArrays.poll();
if(result == null) {
result = new int[size];
//System.err.println("Created new int array from null");
}
else if(result.length < size) {
usedIntArrays.offer(result);
result = new int[size];
//System.err.println("created new int array from bad size");
}
return result;
}
//int[] block = new int[blockSize*channels];
}
| |
/* Copyright (c) 2001-2011, The HSQL Development Group
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* Neither the name of the HSQL Development Group nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL HSQL DEVELOPMENT GROUP, HSQLDB.ORG,
* OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hsqldb;
import org.hsqldb.error.Error;
import org.hsqldb.error.ErrorCode;
import org.hsqldb.lib.ArrayUtil;
import org.hsqldb.lib.OrderedHashSet;
import org.hsqldb.persist.PersistentStore;
/**
* Represents the chain of insert / delete / rollback / commit actions on a row.
*
* @author Fred Toussi (fredt@users dot sourceforge dot net)
* @version 2.3.2
* @since 2.0.0
*/
public class RowAction extends RowActionBase {
//
final TableBase table;
final PersistentStore store;
Row memoryRow;
long rowId;
boolean isMemory;
RowAction updatedAction;
public static RowAction addInsertAction(Session session, TableBase table,
Row row) {
RowAction action = new RowAction(session, table, ACTION_INSERT, row,
null);
row.rowAction = action;
return action;
}
public static RowAction addDeleteAction(Session session, TableBase table,
Row row, int[] colMap) {
RowAction action = row.rowAction;
if (action == null) {
action = new RowAction(session, table, ACTION_DELETE, row, colMap);
row.rowAction = action;
return action;
}
return action.addDeleteAction(session, colMap);
}
public static boolean addRefAction(Session session, Row row,
int[] colMap) {
RowAction action = row.rowAction;
if (action == null) {
action = new RowAction(session, row.getTable(), ACTION_REF, row,
colMap);
row.rowAction = action;
return true;
}
return action.addRefAction(session, colMap);
}
public RowAction(Session session, TableBase table, byte type, Row row,
int[] colMap) {
this.session = session;
this.type = type;
this.actionTimestamp = session.actionTimestamp;
this.table = table;
this.store = table.getRowStore(session);
this.isMemory = row.isMemory();
this.memoryRow = row;
this.rowId = row.getPos();
this.changeColumnMap = colMap;
}
private RowAction(RowAction other) {
this.session = other.session;
this.type = other.type;
this.actionTimestamp = other.actionTimestamp;
this.table = other.table;
this.store = other.store;
this.isMemory = other.isMemory;
this.memoryRow = other.memoryRow;
this.rowId = other.rowId;
this.changeColumnMap = other.changeColumnMap;
}
synchronized public int getType() {
return type;
}
synchronized RowAction addDeleteAction(Session session, int[] colMap) {
if (type == ACTION_NONE) {
setAsAction(session, ACTION_DELETE);
changeColumnMap = colMap;
} else {
RowActionBase action = this;
while (true) {
if (action.rolledback) {
if (action.next == null) {
break;
}
action = action.next;
continue;
}
switch (action.type) {
case ACTION_INSERT : {
if (action.commitTimestamp == 0
&& session != action.session) {
throw Error.runtimeError(ErrorCode.U_S0500,
"RowAction");
}
break;
}
case ACTION_DELETE_FINAL :
case ACTION_DELETE : {
if (session != action.session) {
if (action.commitTimestamp == 0) {
if (!session.tempSet.isEmpty()) {
session.tempSet.clear();
}
session.tempSet.add(action);
}
return null;
}
break;
}
case ACTION_REF : {
if (session != action.session
&& action.commitTimestamp == 0) {
if (colMap == null
|| ArrayUtil.haveCommonElement(
colMap, action.changeColumnMap)) {
if (!session.tempSet.isEmpty()) {
session.tempSet.clear();
}
session.tempSet.add(action);
return null;
}
}
break;
}
}
if (action.next == null) {
break;
}
action = action.next;
}
RowActionBase newAction = new RowActionBase(session,
ACTION_DELETE);
newAction.changeColumnMap = colMap;
action.next = newAction;
}
return this;
}
synchronized boolean addRefAction(Session session, int[] colMap) {
if (type == ACTION_NONE) {
setAsAction(session, ACTION_REF);
changeColumnMap = colMap;
return true;
}
RowActionBase action = this;
do {
if (session == action.session) {
if (action.type == ACTION_REF
&& action.changeColumnMap == colMap
&& action.commitTimestamp == 0) {
return false;
}
if (action.type == ACTION_INSERT) {
if (action.commitTimestamp == 0) {
return false;
}
}
} else {
if (action.type == ACTION_DELETE
&& action.commitTimestamp == 0) {
if (action.changeColumnMap == null
|| ArrayUtil.haveCommonElement(
colMap, action.changeColumnMap)) {
if (!session.tempSet.isEmpty()) {
session.tempSet.clear();
}
session.tempSet.add(action);
return false;
}
}
}
if (action.next == null) {
break;
}
action = action.next;
} while (true);
RowActionBase newAction = new RowActionBase(session, ACTION_REF);
newAction.changeColumnMap = colMap;
action.next = newAction;
return true;
}
public boolean checkDeleteActions() {
return false;
}
public synchronized RowAction duplicate(Row newRow) {
RowAction action = new RowAction(session, table, type, newRow,
changeColumnMap);
return action;
}
synchronized void setAsAction(Session session, byte type) {
this.session = session;
this.type = type;
actionTimestamp = session.actionTimestamp;
changeColumnMap = null;
}
synchronized void setAsAction(RowActionBase action) {
super.setAsAction(action);
}
public void setAsNoOp() {
// memoryRow = null;
session = null;
actionTimestamp = 0;
commitTimestamp = 0;
rolledback = false;
deleteComplete = false;
changeColumnMap = null;
prepared = false;
type = ACTION_NONE;
next = null;
}
private void setAsDeleteFinal(long timestamp) {
actionTimestamp = 0;
commitTimestamp = timestamp;
rolledback = false;
deleteComplete = false;
prepared = false;
changeColumnMap = null;
type = ACTION_DELETE_FINAL;
next = null;
}
/** for two-phased pre-commit */
synchronized void prepareCommit(Session session) {
RowActionBase action = this;
do {
if (action.session == session && action.commitTimestamp == 0) {
action.prepared = true;
}
action = action.next;
} while (action != null);
}
synchronized int commit(Session session) {
RowActionBase action = this;
int actiontype = ACTION_NONE;
do {
if (action.session == session && action.commitTimestamp == 0) {
action.commitTimestamp = session.actionTimestamp;
action.prepared = false;
if (action.type == ACTION_INSERT) {
actiontype = action.type;
} else if (action.type == ACTION_DELETE) {
if (actiontype == ACTION_INSERT) {
// ACTION_INSERT + ACTION_DELETE
actiontype = ACTION_INSERT_DELETE;
} else {
actiontype = action.type;
}
}
}
action = action.next;
} while (action != null);
return actiontype;
}
public boolean isDeleted() {
RowActionBase action = this;
do {
if (action.commitTimestamp != 0) {
if (action.type == ACTION_DELETE
|| action.type == ACTION_DELETE_FINAL) {
return true;
}
}
action = action.next;
} while (action != null);
return false;
}
/**
* returns type of commit performed on timestamp. ACTION_NONE if none.
* assumes rolled-back actions have already been merged
*/
synchronized int getCommitTypeOn(long timestamp) {
RowActionBase action = this;
int actionType = ACTION_NONE;
do {
if (action.commitTimestamp == timestamp) {
if (action.type == ACTION_INSERT) {
actionType = action.type;
} else if (action.type == ACTION_DELETE) {
if (actionType == ACTION_INSERT) {
// ACTION_INSERT + ACTION_DELETE
actionType = ACTION_INSERT_DELETE;
} else {
actionType = action.type;
}
}
}
action = action.next;
} while (action != null);
return actionType;
}
/**
* returns false if another committed session has altered the same row
*/
synchronized boolean canCommit(Session session, OrderedHashSet set) {
RowActionBase action;
long timestamp = session.transactionTimestamp;
long commitTimestamp = 0;
final boolean readCommitted = session.isolationLevel
== SessionInterface.TX_READ_COMMITTED;
boolean hasDelete = false;
action = this;
if (readCommitted) {
do {
if (action.session == session
&& action.type == ACTION_DELETE) {
// for READ_COMMITTED, use action timestamp for later conflicts
if (action.commitTimestamp == 0) {
timestamp = action.actionTimestamp;
}
}
action = action.next;
} while (action != null);
action = this;
}
do {
if (action.session == session) {
if (action.type == ACTION_DELETE) {
hasDelete = true;
}
} else {
if (action.rolledback || action.type != ACTION_DELETE) {
action = action.next;
continue;
}
if (action.prepared) {
return false;
}
if (action.commitTimestamp == 0) {
set.add(action);
} else if (action.commitTimestamp > commitTimestamp) {
commitTimestamp = action.commitTimestamp;
}
}
action = action.next;
} while (action != null);
if (!hasDelete) {
return true;
}
return commitTimestamp < timestamp;
}
synchronized void complete(Session session) {
RowActionBase action;
action = this;
do {
if (action.session == session) {
if (action.actionTimestamp == 0) {
action.actionTimestamp = session.actionTimestamp;
}
}
action = action.next;
} while (action != null);
}
/**
* returns false if cannot complete
* when READ COMMITTED, false result always means repeat action and adds
* to set parameter the sessions to wait on (may be no wait)
*/
synchronized boolean complete(Session session, OrderedHashSet set) {
RowActionBase action;
boolean readCommitted = session.isolationLevel
== SessionInterface.TX_READ_COMMITTED;
boolean result = true;
action = this;
do {
if (action.rolledback || action.type == ACTION_NONE) {
action = action.next;
continue;
}
if (action.session == session) {
//
} else {
if (action.prepared) {
set.add(action.session);
return false;
}
if (readCommitted) {
if (action.commitTimestamp > session.actionTimestamp) {
// 2.0 -- investigate
// can redo - if deletes
// can redo - if dup, but will likely fail at retry
// can redo - if ref, but will likely fail at retry
set.add(session);
result = false;
} else if (action.commitTimestamp == 0) {
set.add(action.session);
result = false;
}
} else if (action.commitTimestamp
> session.transactionTimestamp) {
return false;
}
}
action = action.next;
} while (action != null);
return result;
}
public synchronized long getPos() {
return rowId;
}
synchronized void setPos(long pos) {
rowId = pos;
}
private int getRollbackType(Session session) {
int actionType = ACTION_NONE;
RowActionBase action = this;
do {
if (action.session == session && action.rolledback) {
if (action.type == ACTION_DELETE) {
if (actionType == ACTION_INSERT) {
actionType = ACTION_INSERT_DELETE;
} else {
actionType = action.type;
}
} else if (action.type == ACTION_INSERT) {
actionType = action.type;
}
}
action = action.next;
} while (action != null);
return actionType;
}
/**
* Rollback actions for a session including and after the given timestamp
*/
synchronized void rollback(Session session, long timestamp) {
RowActionBase action = this;
do {
if (action.session == session && action.commitTimestamp == 0) {
if (action.actionTimestamp >= timestamp) {
action.commitTimestamp = session.actionTimestamp;
action.rolledback = true;
action.prepared = false;
}
}
action = action.next;
} while (action != null);
}
/**
* merge rolled back actions
*/
synchronized int mergeRollback(Session session, long timestamp, Row row) {
RowActionBase action = this;
RowActionBase head = null;
RowActionBase tail = null;
int rollbackAction = getRollbackType(session);
do {
if (action.session == session && action.rolledback) {
if (tail != null) {
tail.next = null;
}
} else {
if (head == null) {
head = tail = action;
} else {
tail.next = action;
tail = action;
}
}
action = action.next;
} while (action != null);
if (head == null) {
switch (rollbackAction) {
case ACTION_INSERT :
case ACTION_INSERT_DELETE :
setAsDeleteFinal(timestamp);
break;
case ACTION_DELETE :
case ACTION_NONE :
default :
setAsNoOp();
break;
}
} else {
if (head != this) {
setAsAction(head);
}
}
return rollbackAction;
}
/**
* merge session actions committed on given timestamp.
*
* may be called more than once on same action
*
*/
synchronized void mergeToTimestamp(long timestamp) {
RowActionBase action = this;
RowActionBase head = null;
RowActionBase tail = null;
int commitType = getCommitTypeOn(timestamp);
if (type == ACTION_DELETE_FINAL || type == ACTION_NONE) {
return;
}
if (commitType == ACTION_DELETE
|| commitType == ACTION_INSERT_DELETE) {
setAsDeleteFinal(timestamp);
return;
}
do {
boolean expired = false;;
if (action.commitTimestamp != 0) {
if (action.commitTimestamp <= timestamp) {
expired = true;
} else if (action.type == ACTION_REF) {
expired = true;
}
}
if (expired) {
if (tail != null) {
tail.next = null;
}
} else {
if (head == null) {
head = tail = action;
} else {
tail.next = action;
tail = action;
}
}
action = action.next;
} while (action != null);
if (head == null) {
switch (commitType) {
case ACTION_DELETE :
case ACTION_INSERT_DELETE :
setAsDeleteFinal(timestamp);
break;
case ACTION_NONE :
case ACTION_INSERT :
default :
setAsNoOp();
break;
}
} else if (head != this) {
setAsAction(head);
}
mergeExpiredRefActions();
}
/**
 * Determines whether the given session can read the row owned by this action
 * chain, for the given access {@code mode}.
 * <p>
 * The chain is walked from this action; each entry's type, commit state and
 * commit timestamp are folded into an effective {@code actionType}. The row is
 * readable when the effective type is {@code ACTION_NONE} or
 * {@code ACTION_INSERT}.
 *
 * @param session the reading session; {@code null} means "see everything"
 *                (threshold {@code Long.MAX_VALUE})
 * @param mode    one of {@code TransactionManager.ACTION_READ},
 *                {@code ACTION_DUP} or {@code ACTION_REF}
 * @return true if the row is visible to the session
 */
public synchronized boolean canRead(Session session, int mode) {

    long threshold;
    int  actionType = ACTION_NONE;

    if (type == ACTION_DELETE_FINAL) {
        return false;
    }

    if (type == ACTION_NONE) {
        return true;
    }

    RowActionBase action = this;

    // Visibility threshold: actions committed with a timestamp below this
    // value are visible to the session, per its isolation level.
    if (session == null) {
        threshold = Long.MAX_VALUE;
    } else {
        switch (session.isolationLevel) {

            case SessionInterface.TX_READ_UNCOMMITTED :
                threshold = Long.MAX_VALUE;
                break;

            case SessionInterface.TX_READ_COMMITTED :
                threshold = session.actionTimestamp;
                break;

            case SessionInterface.TX_REPEATABLE_READ :
            case SessionInterface.TX_SERIALIZABLE :
            default :
                threshold = session.transactionTimestamp;
                break;
        }
    }

    do {
        // REF actions never affect read visibility on their own.
        if (action.type == ACTION_REF) {
            action = action.next;

            continue;
        }

        // A rolled-back INSERT behaves like a DELETE for visibility.
        if (action.rolledback) {
            if (action.type == ACTION_INSERT) {
                actionType = ACTION_DELETE;
            }

            action = action.next;

            continue;
        }

        if (session == action.session) {

            // The session sees the effect of its own actions directly.
            if (action.type == ACTION_DELETE) {
                actionType = action.type;
            } else if (action.type == ACTION_INSERT) {
                actionType = action.type;
            }

            action = action.next;

            continue;
        } else if (action.commitTimestamp == 0) {

            // Uncommitted action belonging to another session.
            if (action.type == ACTION_NONE) {
                throw Error.runtimeError(ErrorCode.U_S0500, "RowAction");
            } else if (action.type == ACTION_INSERT) {
                if (mode == TransactionManager.ACTION_READ) {

                    // fixed: was action.ACTION_DELETE - a static constant
                    // accessed through an instance reference
                    actionType = ACTION_DELETE;
                } else if (mode == TransactionManager.ACTION_DUP) {
                    actionType = ACTION_INSERT;

                    session.tempSet.clear();
                    session.tempSet.add(action);
                } else if (mode == TransactionManager.ACTION_REF) {
                    actionType = ACTION_DELETE;
                }

                break;
            } else if (action.type == ACTION_DELETE) {
                if (mode == TransactionManager.ACTION_DUP) {

                    //
                } else if (mode == TransactionManager.ACTION_REF) {
                    actionType = ACTION_DELETE;
                }
            }

            action = action.next;

            continue;
        } else if (action.commitTimestamp < threshold) {

            // Committed before the threshold - visible to the session.
            if (action.type == ACTION_DELETE) {
                actionType = ACTION_DELETE;
            } else if (action.type == ACTION_INSERT) {
                actionType = ACTION_INSERT;
            }
        } else {

            // Committed at/after the threshold - not yet visible; an INSERT
            // is therefore treated per the requested access mode.
            if (action.type == ACTION_INSERT) {
                if (mode == TransactionManager.ACTION_READ) {

                    // fixed: was action.ACTION_DELETE (static via instance)
                    actionType = ACTION_DELETE;
                } else if (mode == TransactionManager.ACTION_DUP) {
                    actionType = ACTION_INSERT;

                    session.tempSet.clear();
                    session.tempSet.add(action);
                } else if (mode == TransactionManager.ACTION_REF) {
                    actionType = ACTION_DELETE;
                }
            }
        }

        action = action.next;
    } while (action != null);

    // was: if (a || b) return true; return false;
    return actionType == ACTION_NONE || actionType == ACTION_INSERT;
}
/**
 * Returns true if this action chain contains an uncommitted
 * (commitTimestamp == 0) REF action.
 */
public boolean hasCurrentRefAction() {

    for (RowActionBase current = this; current != null; current = current.next) {
        if (current.type == ACTION_REF && current.commitTimestamp == 0) {
            return true;
        }
    }

    return false;
}
/**
 * Eliminates all expired updatedAction entries in the chain, then returns
 * this action if it still carries a current REF action, otherwise the
 * (already pruned) updatedAction.
 */
private RowAction mergeExpiredRefActions() {

    if (updatedAction != null) {
        updatedAction = updatedAction.mergeExpiredRefActions();
    }

    return hasCurrentRefAction() ? this : updatedAction;
}
/**
 * Renders the whole action chain as a diagnostic string: the row id, then
 * for each action its session id, type, action timestamp and commit
 * timestamp, with 'r' / 'c' flagging rolled-back / committed entries.
 */
public synchronized String describe(Session session) {

    StringBuilder out = new StringBuilder();

    for (RowActionBase current = this; current != null; current = current.next) {
        if (current == this) {
            out.append(this.rowId).append(' ');
        }

        out.append(current.session.getId()).append(' ');
        out.append(current.type).append(' ').append(current.actionTimestamp);
        out.append(' ').append(current.commitTimestamp);

        if (current.commitTimestamp != 0) {
            out.append(current.rolledback ? 'r' : 'c');
        }

        out.append(" - ");
    }

    return out.toString();
}
}
| |
package com.ushaqi.zhuishushenqi.widget;
import android.content.Context;
import android.content.res.Resources;
import android.content.res.TypedArray;
import android.graphics.Bitmap;
import android.graphics.Bitmap.Config;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.Paint.Style;
import android.graphics.Path;
import android.graphics.Path.Direction;
import android.graphics.PorterDuff.Mode;
import android.graphics.PorterDuffXfermode;
import android.graphics.RectF;
import android.graphics.drawable.ShapeDrawable;
import android.graphics.drawable.shapes.RoundRectShape;
import android.util.AttributeSet;
import android.util.DisplayMetrics;
import android.view.MotionEvent;
import android.view.animation.AccelerateInterpolator;
import android.view.animation.DecelerateInterpolator;
import android.widget.ImageView;
import com.c.a.D;
import com.c.a.b;
import com.ushaqi.zhuishushenqi.R.styleable;
/**
 * Overlay view drawn on top of a book-cover image that visualises a download:
 * a dimmed mask with a circular cut-out, a progress arc, a pause glyph (two
 * vertical bars) and a small corner badge once downloaded.
 *
 * NOTE(review): this file is decompiler output (JD-Core) - the obfuscated
 * names (a, b, c, ..., com.c.a.w) and the unusual do/while + label control
 * flow are decompilation artifacts. All comments below are best-effort
 * reconstructions from usage and should be verified against original sources.
 */
public class CoverLoadingLayer extends ImageView
{
  // NOTE(review): com.c.a.w appears to be an obfuscated value-animator type,
  // D an update-listener and b an animation-listener - TODO confirm.
  private b A = new x(this);              // listener attached to the hide animator 'l'
  private int a = -1308622848;            // mask color 0xB2000000 (~70% black)
  private float b = -1.0F;                // radius of the punched-out circle
  private float c = -1.0F;                // progress-arc radius (resource dimension)
  private float d = -1.0F;                // pause-state circle radius
  private float e = -1.0F;                // inner ring radius (initialised to c)
  private float f = -1.0F;                // corner radius for clipping; -1 = 10% of width
  private int g;                          // current arc start angle in degrees (see a(float))
  private float h;                        // pause-bar height
  private float i;                        // pause-bar width
  private float j;                        // gap between the two pause bars
  private com.c.a.w k;                    // "show" animator (0.001 -> 1)
  private com.c.a.w l;                    // "hide" animator (1 -> 0.001)
  private com.c.a.w m;                    // progress-arc animator
  private float n;                        // base circle radius (resource dimension)
  private int o;                          // current progress, 0..100
  private int p;                          // progress remembered while paused/animating
  private y q;                            // tap callback (a()/b()/c() notifications)
  private CoverLoadingLayer.Status r;     // current state machine value
  private com.c.a.w s;                    // completion animator created by h()
  private float t;                        // corner-badge size (resource dimension)
  private int u;                          // corner-badge color (resource color)
  private float v;                        // reset to 0 in a(); purpose unclear from here
  private D w = new t(this);              // update listener for the hide animator
  private b x = new u(this);              // listener for the show animator
  private D y = new v(this);              // update listener for the progress animator
  private D z = new w(this);              // update listener for the show animator

  public CoverLoadingLayer(Context paramContext)
  {
    super(paramContext);
    a(paramContext, null);
  }

  public CoverLoadingLayer(Context paramContext, AttributeSet paramAttributeSet)
  {
    super(paramContext, paramAttributeSet);
    a(paramContext, paramAttributeSet);
  }

  /** Maps a progress percentage (0..100) to an arc start angle in degrees. */
  private static int a(float paramFloat)
  {
    return (int)(360.0F * (paramFloat / 100.0F) - 90.0F);
  }

  /**
   * Common constructor body: reads the corner-radius attribute, loads the
   * dimension/color resources (numeric ids are decompiled constants) and
   * builds the show/hide animators.
   */
  private void a(Context paramContext, AttributeSet paramAttributeSet)
  {
    if (paramAttributeSet != null)
    {
      TypedArray localTypedArray = paramContext.obtainStyledAttributes(paramAttributeSet, R.styleable.CoverLoadingLayer);
      this.f = localTypedArray.getDimension(1, -1.0F);
      localTypedArray.recycle();
    }
    this.v = 0.0F;
    this.n = getResources().getDimension(2131099909);
    this.c = getResources().getDimension(2131099908);
    this.j = getResources().getDimension(2131099910);
    this.i = getResources().getDimension(2131099912);
    this.h = getResources().getDimension(2131099911);
    this.t = getResources().getDimension(2131099907);
    this.u = getResources().getColor(2131427513);
    this.e = this.c;
    i();
    // Show animator: scales from ~0 to 1 over the platform medium anim time
    // (17694721 == android.R.integer.config_mediumAnimTime, presumably).
    this.k = com.c.a.w.a(new float[] { 0.001F, 1.0F });
    int i1 = getResources().getInteger(17694721);
    this.k.a(i1);
    this.k.a(this.z);
    this.k.a(this.x);
    this.k.a(new DecelerateInterpolator());
    // Hide animator: the reverse of the show animator.
    this.l = com.c.a.w.a(new float[] { 1.0F, 0.001F });
    this.l.a(i1);
    this.l.a(this.w);
    this.l.a(this.A);
    this.l.a(new AccelerateInterpolator());
  }

  /**
   * Returns true when the given progress must NOT be animated now: value out
   * of range, progress animation already running, or currently paused (in
   * which case the value is stashed in 'p').
   */
  private boolean a(int paramInt)
  {
    if (paramInt > 100)
      return true;
    if (((this.m != null) && (this.m.d())) || (this.r == CoverLoadingLayer.Status.PAUSE))
    {
      this.p = paramInt;
      return true;
    }
    return false;
  }

  /** Converts dp to pixels using the display density. */
  private float b(float paramFloat)
  {
    return paramFloat * getResources().getDisplayMetrics().density;
  }

  /**
   * Builds the completion animator that grows the hole radius from n to 2n.
   */
  private com.c.a.w h()
  {
    float[] arrayOfFloat = new float[2];
    arrayOfFloat[0] = this.n;
    arrayOfFloat[1] = (2.0F * this.n);
    com.c.a.w localw = com.c.a.w.a(arrayOfFloat);
    localw.a(getResources().getInteger(17694721));
    localw.a(new AccelerateInterpolator());
    localw.a(new r(this));
    localw.a(new s(this));
    return localw;
  }

  /** Resets the state machine and all progress-related values. */
  private void i()
  {
    this.r = CoverLoadingLayer.Status.NONE;
    this.o = 0;
    this.p = 0;
    this.b = this.n;
    this.d = -1.0F;
  }

  /** True while the hide animator is running. */
  private boolean j()
  {
    return (this.l != null) && (this.l.d());
  }

  /** True when the download has reached 100%. */
  public final boolean a()
  {
    return this.o == 100;
  }

  /** Switches to the PAUSE state, freezing the pause-circle radius. */
  public final void b()
  {
    this.r = CoverLoadingLayer.Status.PAUSE;
    this.d = this.e;
    invalidate();
  }

  /** Resets to the NONE state (overlay hidden). */
  public final void c()
  {
    i();
    invalidate();
  }

  /** Switches to the PENDING state. */
  public final void d()
  {
    this.r = CoverLoadingLayer.Status.PENDING;
    invalidate();
  }

  /** Switches to the PREPARE state. */
  public final void e()
  {
    this.r = CoverLoadingLayer.Status.PREPARE;
    invalidate();
  }

  /** Switches to the DOWNLOADED state (corner badge only). */
  public final void f()
  {
    this.r = CoverLoadingLayer.Status.DOWNLOADED;
    invalidate();
  }

  /** True while the completion animator is running. */
  public final boolean g()
  {
    return (this.s != null) && (this.s.d());
  }

  /**
   * Renders the overlay for the current state.
   *
   * NOTE(review): the empty if, the do/while(...) wrapper and the trailing
   * while-condition are decompiler-mangled control flow - effectively:
   * NONE draws nothing; DOWNLOADED draws only the corner badge; otherwise
   * the dimmed mask with a punched hole (plus arc / pending glyph) is drawn,
   * and in PAUSE state the two pause bars are drawn on top.
   */
  protected void onDraw(Canvas paramCanvas)
  {
    super.onDraw(paramCanvas);
    int i1 = getWidth();
    getHeight();
    if (this.f == -1.0F)
      this.f = (0.1F * i1);
    if (this.r == CoverLoadingLayer.Status.NONE);
    Canvas localCanvas1;
    Paint localPaint1;
    int i4;
    int i5;
    Paint localPaint2;
    do
    {
      return;
      if (this.r == CoverLoadingLayer.Status.DOWNLOADED)
      {
        // Downloaded: draw only the small badge in the top-right corner.
        paramCanvas.save();
        paramCanvas.translate(getWidth() - this.t, 0.0F);
        ShapeDrawable localShapeDrawable3 = new ShapeDrawable(new p(this.f));
        localShapeDrawable3.setBounds(0, 0, (int)this.t, (int)this.t);
        localShapeDrawable3.getPaint().setColor(this.u);
        localShapeDrawable3.draw(paramCanvas);
        paramCanvas.restore();
        return;
      }
      // NOTE(review): these two throwaway Paints look like leftover debug
      // code from the original source.
      new Paint().setColor(-65536);
      new Paint().setColor(-16711936);
      // Off-screen bitmap for the dimmed mask with a transparent hole.
      Bitmap localBitmap1 = Bitmap.createBitmap(getWidth(), getHeight(), Bitmap.Config.ARGB_8888);
      localBitmap1.eraseColor(0);
      localCanvas1 = new Canvas(localBitmap1);
      localPaint1 = new Paint();
      localPaint1.setColor(getResources().getColor(17170445));
      // CLEAR xfermode punches the circle out of the mask.
      localPaint1.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.CLEAR));
      localPaint1.setAntiAlias(true);
      int i2 = getWidth();
      int i3 = getHeight();
      localCanvas1.drawColor(this.a);
      i4 = i2 / 2;
      i5 = i3 / 2;
      localCanvas1.drawCircle(i4, i5, this.b, localPaint1);
      localPaint2 = new Paint(1);
      localPaint2.setAntiAlias(true);
      localPaint2.setColor(this.a);
      RectF localRectF1 = new RectF(i4 - this.c, i5 - this.c, i4 + this.c, i5 + this.c);
      // Progress: fill the not-yet-downloaded part of the circle back in.
      if (this.r == CoverLoadingLayer.Status.PROGRESS)
        localCanvas1.drawArc(localRectF1, this.g, 270 - this.g, true, localPaint2);
      // Clip the whole overlay to the rounded cover corners.
      if (this.f != 0.0F)
      {
        Path localPath = new Path();
        localPath.addRoundRect(new RectF(0.0F, 0.0F, getWidth(), getHeight()), this.f, this.f, Path.Direction.CCW);
        paramCanvas.clipPath(localPath);
      }
      // Pending/prepare: ring plus a small clock-hand style glyph.
      if ((this.r == CoverLoadingLayer.Status.PENDING) || (this.r == CoverLoadingLayer.Status.PREPARE))
      {
        localCanvas1.drawCircle(getWidth() / 2, getHeight() / 2, this.n, localPaint2);
        Paint localPaint3 = new Paint(1);
        localPaint3.setColor(getResources().getColor(2131427386));
        float f1 = this.n - this.e;
        localPaint3.setStrokeWidth(f1);
        localPaint3.setStyle(Paint.Style.STROKE);
        localCanvas1.drawCircle(getWidth() / 2, getHeight() / 2, this.e + f1 / 2.0F, localPaint3);
        localCanvas1.save();
        localCanvas1.translate(getWidth() / 2 - b(1.5F), getHeight() / 2 - b(5.0F));
        ShapeDrawable localShapeDrawable1 = new ShapeDrawable(new RoundRectShape(new float[] { 1.0F, 1.0F, 1.0F, 1.0F, 0.0F, 0.0F, 0.0F, 0.0F }, null, null));
        localShapeDrawable1.setBounds(0, 0, (int)b(2.0F), (int)b(7.0F));
        localShapeDrawable1.getPaint().setColor(localPaint3.getColor());
        localShapeDrawable1.draw(localCanvas1);
        ShapeDrawable localShapeDrawable2 = new ShapeDrawable(new RoundRectShape(new float[] { 0.0F, 0.0F, 1.0F, 1.0F, 1.0F, 1.0F, 0.0F, 0.0F }, null, null));
        localShapeDrawable2.setBounds((int)b(2.0F), (int)b(5.0F), (int)b(6.0F), (int)b(7.0F));
        localShapeDrawable2.getPaint().setColor(localPaint3.getColor());
        localShapeDrawable2.draw(localCanvas1);
        localCanvas1.restore();
      }
      paramCanvas.drawBitmap(localBitmap1, 0.0F, 0.0F, null);
    }
    // Decompiled loop exit: only fall through when PAUSE and 2*d > 1.
    while ((this.r != CoverLoadingLayer.Status.PAUSE) || (2.0F * this.d <= 1.0F));
    // Pause state: redraw the hole at radius d and draw the two pause bars.
    localCanvas1.drawCircle(i4, i5, this.d, localPaint1);
    Bitmap localBitmap2 = Bitmap.createBitmap(getWidth(), getHeight(), Bitmap.Config.ARGB_8888);
    Canvas localCanvas2 = new Canvas(localBitmap2);
    localCanvas2.drawCircle(i4, i5, this.d, localPaint2);
    Paint localPaint4 = new Paint();
    localPaint4.setColor(-1);
    int i6 = (int)(i4 - this.j / 2.0F - this.i / 2.0F);
    RectF localRectF2 = new RectF();
    localRectF2.left = (i6 - this.i / 2.0F);
    localRectF2.right = (i6 + this.i / 2.0F);
    localRectF2.top = (i5 - this.h / 2.0F);
    localRectF2.bottom = (i5 + this.h / 2.0F);
    localCanvas2.drawRoundRect(localRectF2, 1.0F, 1.0F, localPaint4);
    int i7 = (int)(i4 + this.j / 2.0F + this.i / 2.0F);
    RectF localRectF3 = new RectF();
    localRectF3.left = (i7 - this.i / 2.0F);
    localRectF3.right = (i7 + this.i / 2.0F);
    localRectF3.top = (i5 - this.h / 2.0F);
    localRectF3.bottom = (i5 + this.h / 2.0F);
    localCanvas2.drawRoundRect(localRectF3, 1.0F, 1.0F, localPaint4);
    paramCanvas.drawBitmap(localBitmap2, 0.0F, 0.0F, null);
  }

  /**
   * On ACTION_UP (and no animation running): PROGRESS -> pause (q.a()),
   * PAUSE -> resume (q.b()), PENDING -> pause then cancel (q.c()).
   *
   * NOTE(review): the labels and nested do/while blocks are decompiler
   * artifacts of the original if/else chain - verify against source.
   */
  public boolean onTouchEvent(MotionEvent paramMotionEvent)
  {
    int i2;
    if (paramMotionEvent.getAction() == 1)
    {
      int i1;
      if ((!g()) && (!j()))
      {
        if ((this.m == null) || (!this.m.d()))
          break label85;
        i2 = 1;
        i1 = 0;
        if (i2 == 0);
      }
      else
      {
        i1 = 1;
      }
      if (i1 == 0)
      {
        if (this.r != CoverLoadingLayer.Status.PROGRESS)
          break label90;
        b();
        if (this.q != null)
          this.q.a();
      }
    }
    label85: label90:
    do
    {
      do
      {
        do
        {
          return true;
          i2 = 0;
          break;
          if (this.r != CoverLoadingLayer.Status.PAUSE)
            break label134;
          this.d = 0.0F;
          this.r = CoverLoadingLayer.Status.PROGRESS;
          invalidate();
        }
        while (this.q == null);
        this.q.b();
        return true;
      }
      while (this.r != CoverLoadingLayer.Status.PENDING);
      b();
    }
    while (this.q == null);
    label134: this.q.c();
    return true;
  }

  /** Sets the rounded-corner radius used to clip the overlay. */
  public void setCornerRadius(float paramFloat)
  {
    this.f = paramFloat;
  }

  /** Registers the tap callback. */
  public void setCoverListener(y paramy)
  {
    this.q = paramy;
  }

  /**
   * Updates the progress (0..100) and animates the arc from the previous
   * value to the new one; at 100% the completion animator from h() runs.
   *
   * NOTE(review): the while(true) wrapper is a decompiler artifact.
   */
  public void setProgress(int paramInt)
  {
    int i1 = 1;
    if (a(paramInt));
    while (true)
    {
      return;
      if (paramInt < this.o)
      {
        // Progress went backwards: restart the display from this value.
        clearAnimation();
        if (a(paramInt))
          continue;
        this.o = paramInt;
        this.g = a(this.o);
        this.r = CoverLoadingLayer.Status.PROGRESS;
        invalidate();
        if (!a())
          continue;
        h().a();
        return;
      }
      this.r = CoverLoadingLayer.Status.PROGRESS;
      int i2 = this.o;
      this.o = paramInt;
      if (this.m != null)
        this.m.b();
      this.d = 0.0F;
      // Animate the arc angle from the old progress to the new one.
      int[] arrayOfInt = new int[2];
      arrayOfInt[0] = a(i2);
      arrayOfInt[i1] = a(paramInt);
      com.c.a.w localw = com.c.a.w.a(arrayOfInt);
      localw.a(300L);
      localw.a(this.y);
      localw.a(new q(this));
      this.m = localw;
      if ((this.k != null) && (this.k.d()));
      while ((i1 == 0) && (!j()))
      {
        this.m.a();
        return;
        i1 = 0;
      }
    }
  }
}
/* Location: E:\Progs\Dev\Android\Decompile\apktool\zssq\zssq-dex2jar.jar
* Qualified Name: com.ushaqi.zhuishushenqi.widget.CoverLoadingLayer
* JD-Core Version: 0.6.0
*/
| |
/*
* Copyright 2015 Adaptris Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.adaptris.mail;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.io.StringWriter;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import javax.mail.MessagingException;
import javax.mail.Session;
import javax.mail.URLName;
import javax.mail.internet.MimeMessage;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.input.ReaderInputStream;
import org.apache.commons.net.pop3.POP3Client;
import org.apache.commons.net.pop3.POP3MessageInfo;
/**
 * POP3 mail client implemented on top of an Apache Commons Net
 * {@link POP3Client}. Subclasses supply the concrete client (plain / SSL /
 * TLS) via {@link #createClient()} and {@link #postConnectAction(POP3Client)}.
 * <p>
 * Messages "marked read" are remembered by Message-ID and, when
 * {@code deleteFlag} is set, deleted from the server on disconnect.
 */
abstract class ApacheMailClient<T extends POP3Client> extends MailClientImp {

  private transient URLName mailboxUrl;
  private transient ApacheClientConfig clientConfig;
  private transient T pop3;
  // Message-ID -> POP3 message number, for every message collected this session.
  private transient Map<String, Integer> collectedMessages;
  // Message-IDs marked as read; deleted on disconnect when deleteFlag is set.
  private transient Set<String> messagesToDelete;

  ApacheMailClient() {
    super();
  }

  public ApacheMailClient(URLName url, ApacheClientConfig configurator) {
    this();
    mailboxUrl = url;
    this.clientConfig = configurator;
  }

  /**
   * Connects and logs in to the POP3 server described by the mailbox URL.
   * On any failure the client is disconnected before the (wrapped) exception
   * is rethrown; the per-session bookkeeping maps are always reset.
   *
   * @throws MailException wrapping any connection or login failure
   */
  @Override
  public void connectLocal() throws MailException {
    try {
      pop3 = createClient();
      clientConfig.preConnectConfigure(pop3);
      pop3.connect(mailboxUrl.getHost(), mailboxUrl.getPort());
      postConnectAction(pop3);
      clientConfig.postConnectConfigure(pop3);
      if (!pop3.login(mailboxUrl.getUsername(), mailboxUrl.getPassword())) {
        // Converted into a MailException by rethrowMailException below.
        throw new Exception("Could not login to server, check username/password.");
      }
    }
    catch (Exception e) {
      disconnectLocal();
      rethrowMailException(e);
    }
    finally {
      collectedMessages = new HashMap<>();
      messagesToDelete = new LinkedHashSet<>();
    }
  }

  /** Creates the concrete (not yet connected) POP3 client. */
  abstract T createClient() throws MailException;

  /** Hook invoked immediately after {@code connect()} (e.g. STLS). */
  abstract void postConnectAction(T client) throws MailException, IOException;

  /**
   * Lists all messages in the mailbox and parses each one into a
   * {@link MimeMessage}, recording its POP3 message number for later deletion.
   *
   * @return the parsed messages (possibly empty, never null)
   * @throws MailException wrapping any listing/retrieval/parsing failure
   */
  @Override
  public ArrayList<MimeMessage> collectMessages() throws MailException {
    ArrayList<MimeMessage> result = new ArrayList<MimeMessage>();
    try {
      POP3MessageInfo[] pop3messages = pop3.listMessages();
      Session session = Session.getDefaultInstance(new Properties(), null);
      // Convert messages to MimeMessages.
      // (Removed dead locals writer/bufferedReader/mimeMessageInput that were
      // never used here and only closed - always null or no-op - in a finally.)
      if (pop3messages != null && pop3messages.length > 0) {
        for (POP3MessageInfo message : pop3messages) {
          Reader reader = pop3.retrieveMessage(message.number);
          if (reader == null) {
            throw new Exception("Could not retrieve message header.");
          }
          // createMimeMessage closes the reader.
          MimeMessage mimeMsg = createMimeMessage(session, reader);
          log.trace("Parsing message [{}] (msgNum={})", mimeMsg.getMessageID(), message.number);
          result.add(mimeMsg);
          collectedMessages.put(mimeMsg.getMessageID(), message.number);
        }
      }
    }
    catch (Exception e) {
      throw new MailException(e);
    }
    return result;
  }

  /**
   * Reads the full message text from {@code src} (closing it) and parses it
   * into a MimeMessage.
   */
  private MimeMessage createMimeMessage(Session session, Reader src) throws IOException, MessagingException {
    MimeMessage result = null;
    StringWriter writer = new StringWriter();
    try (BufferedReader bufferedReader = new BufferedReader(src)) {
      IOUtils.copy(bufferedReader, writer);
    }
    // Replaces the deprecated IOUtils.toInputStream(String) with an explicit
    // equivalent; Charset.defaultCharset() preserves the old behaviour.
    try (InputStream mimeMessageInput =
        new ByteArrayInputStream(writer.toString().getBytes(Charset.defaultCharset()))) {
      result = new MimeMessage(session, mimeMessageInput);
    }
    return result;
  }

  /**
   * Deletes marked messages (best-effort) and disconnects, then resets the
   * per-session state. Safe to call when not connected.
   */
  @Override
  public void disconnectLocal() {
    if (clientConnected()) {
      deleteQuietly();
      disconnectQuietly();
    }
    collectedMessages = new HashMap<>();
    messagesToDelete = new LinkedHashSet<>();
    pop3 = null;
  }

  /** Best-effort deletion of all marked messages; failures are logged and skipped. */
  private void deleteQuietly() {
    // Delete messages
    if (deleteFlag) {
      for (String msgId : messagesToDelete) {
        try {
          Integer msgNum = collectedMessages.get(msgId);
          log.warn("Deleting [{}] (msgNum={})", msgId, msgNum);
          if (msgNum != null) {
            pop3.deleteMessage(msgNum.intValue());
          }
        }
        catch (Exception e) {
          // Best effort: keep trying the remaining messages.
          log.trace("Failed to delete [{}], continuing", msgId, e);
        }
      }
    }
  }

  /** Best-effort logout/disconnect; failures are logged and ignored. */
  private void disconnectQuietly() {
    try {
      pop3.logout();
      pop3.disconnect();
    }
    catch (Exception e) {
      // Best effort: nothing useful to do if logout/disconnect fails.
      log.trace("Exception during disconnect, ignored", e);
    }
  }

  private boolean clientConnected() {
    return pop3 != null && pop3.isConnected();
  }

  @Override
  // Messages marked as read (as in they match the filter) will be added to the list of messages to delete.
  public void setMessageRead(MimeMessage msg) throws MailException {
    try {
      messagesToDelete.add(msg.getMessageID());
    }
    catch (MessagingException e) {
      rethrowMailException(e);
    }
  }

  /** Unmarks a message so it is not deleted on disconnect. */
  @Override
  public void resetMessage(MimeMessage msg) throws MailException {
    try {
      messagesToDelete.remove(msg.getMessageID());
    }
    catch (MessagingException e) {
      rethrowMailException(e);
    }
  }

  /** Wraps {@code e} in a MailException (unless it already is one) and throws it. */
  static void rethrowMailException(Throwable e) throws MailException {
    rethrowMailException(e.getMessage(), e);
  }

  /** Rethrows MailExceptions as-is; wraps anything else with the given message. */
  static void rethrowMailException(String msg, Throwable e) throws MailException {
    if (e instanceof MailException) {
      throw (MailException) e;
    }
    throw new MailException(msg, e);
  }
}
| |
package org.apache.solr.util;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.solr.JSONTestUtil;
import org.apache.solr.SolrJettyTestBase;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.MultiMapSolrParams;
import org.apache.solr.common.util.StrUtils;
import org.apache.solr.servlet.SolrRequestParsers;
import org.eclipse.jetty.servlet.ServletHolder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xml.sax.SAXException;
import javax.xml.xpath.XPathExpressionException;
import java.io.IOException;
import java.util.Map;
import java.util.SortedMap;
/**
 * Base class for REST-oriented Solr tests: starts a Jetty instance and offers
 * assertion helpers that run XML/JSON requests through a
 * {@link RestTestHarness}.
 */
abstract public class RestTestBase extends SolrJettyTestBase {
  private static final Logger log = LoggerFactory.getLogger(RestTestBase.class);

  protected static RestTestHarness restTestHarness;

  /**
   * Starts Jetty via {@link #createJetty} and creates a REST test harness
   * pointing at its base URL.
   */
  public static void createJettyAndHarness
      (String solrHome, String configFile, String schemaFile, String context,
       boolean stopAtShutdown, SortedMap<ServletHolder,String> extraServlets) throws Exception {

    createJetty(solrHome, configFile, schemaFile, context, stopAtShutdown, extraServlets);

    restTestHarness = new RestTestHarness(new RESTfulServerProvider() {
      @Override
      public String getBaseURL() {
        return jetty.getBaseUrl().toString();
      }
    });
  }

  /** Validates an update XML String is successful
   */
  public static void assertU(String update) {
    assertU(null, update);
  }

  /** Validates an update XML String is successful
   */
  public static void assertU(String message, String update) {
    checkUpdateU(message, update, true);
  }

  /** Validates an update XML String failed
   */
  public static void assertFailedU(String update) {
    assertFailedU(null, update);
  }

  /** Validates an update XML String failed
   */
  public static void assertFailedU(String message, String update) {
    checkUpdateU(message, update, false);
  }

  /**
   * Checks the success or failure of an update message
   *
   * @param message optional prefix for the failure message (may be null)
   * @param update the update XML to validate
   * @param shouldSucceed whether validation is expected to succeed
   */
  private static void checkUpdateU(String message, String update, boolean shouldSucceed) {
    try {
      String m = (null == message) ? "" : message + " ";
      if (shouldSucceed) {
        String response = restTestHarness.validateUpdate(update);
        if (response != null) fail(m + "update was not successful: " + response);
      } else {
        String response = restTestHarness.validateErrorUpdate(update);
        if (response != null) fail(m + "update succeeded, but should have failed: " + response);
      }
    } catch (SAXException e) {
      throw new RuntimeException("Invalid XML", e);
    }
  }

  /**
   * Splits a request into path and query, forces the given response writer
   * and turns indentation on. Extracted because assertQ/JQ/assertJQ each
   * duplicated this logic.
   *
   * @param request a URL path with optional query params
   * @param wt the response writer to force, e.g. "xml" or "json"
   * @param preserveSchemaXmlWt when true, an existing wt=schema.xml param is
   *        preserved instead of being overwritten
   */
  private static String buildRequest(String request, String wt, boolean preserveSchemaXmlWt) {
    int queryStartPos = request.indexOf('?');
    String path  = (-1 == queryStartPos) ? request : request.substring(0, queryStartPos);
    String query = (-1 == queryStartPos) ? ""      : request.substring(queryStartPos + 1);
    if ( ! (preserveSchemaXmlWt && query.matches(".*wt=schema\\.xml.*"))) {
      query = setParam(query, "wt", wt);
    }
    return path + '?' + setParam(query, "indent", "on");
  }

  /** Runs the request through the harness, logging the request on any failure. */
  private static String queryAndLogOnFailure(String request) throws Exception {
    boolean failed = true;
    try {
      String response = restTestHarness.query(request);
      failed = false;
      return response;
    } finally {
      if (failed) {
        log.error("REQUEST FAILED: {}", request);
      }
    }
  }

  /**
   * Validates a query matches some XPath test expressions
   *
   * @param request a URL path with optional query params, e.g. "/schema/fields?fl=id,_version_"
   */
  public static void assertQ(String request, String... tests) {
    try {
      // don't overwrite an explicit wt=schema.xml
      request = buildRequest(request, "xml", true);
      String response = restTestHarness.query(request);

      // TODO: should the facet handling be converted to parse the URL?
      String results = restTestHarness.validateXPath(response, tests);

      if (null != results) {
        String msg = "REQUEST FAILED: xpath=" + results
            + "\n\txml response was: " + response
            + "\n\trequest was:" + request;
        log.error(msg);
        throw new RuntimeException(msg);
      }
    } catch (XPathExpressionException e1) {
      throw new RuntimeException("XPath is invalid", e1);
    } catch (Exception e2) {
      SolrException.log(log, "REQUEST FAILED: " + request, e2);
      throw new RuntimeException("Exception during query", e2);
    }
  }

  /**
   * Makes a query request and returns the JSON string response
   *
   * @param request a URL path with optional query params, e.g. "/schema/fields?fl=id,_version_"
   */
  public static String JQ(String request) throws Exception {
    return queryAndLogOnFailure(buildRequest(request, "json", false));
  }

  /**
   * Validates a query matches some JSON test expressions using the default double delta tolerance.
   * @see org.apache.solr.JSONTestUtil#DEFAULT_DELTA
   * @see #assertJQ(String,double,String...)
   */
  public static void assertJQ(String request, String... tests) throws Exception {
    assertJQ(request, JSONTestUtil.DEFAULT_DELTA, tests);
  }

  /**
   * Validates a query matches some JSON test expressions and closes the
   * query. The text expression is of the form path:JSON. To facilitate
   * easy embedding in Java strings, the JSON can have double quotes
   * replaced with single quotes.
   * <p>
   * Please use this with care: this makes it easy to match complete
   * structures, but doing so can result in fragile tests if you are
   * matching more than what you want to test.
   * </p>
   * @param request a URL path with optional query params, e.g. "/schema/fields?fl=id,_version_"
   * @param delta tolerance allowed in comparing float/double values
   * @param tests JSON path expression + '==' + expected value
   */
  public static void assertJQ(String request, double delta, String... tests) throws Exception {
    request = buildRequest(request, "json", false);
    String response = queryAndLogOnFailure(request);

    for (String test : tests) {
      if (null == test || 0 == test.length()) continue;
      String testJSON = test.replace('\'', '"');

      boolean failed = true;
      try {
        String err = JSONTestUtil.match(response, testJSON, delta);
        failed = false;
        if (err != null) {
          log.error("query failed JSON validation. error={}\n expected ={}\n response = {}\n request = {}\n",
                    err, testJSON, response, request);
          throw new RuntimeException(err);
        }
      } finally {
        if (failed) {
          log.error("JSON query validation threw an exception.\n expected ={}\n response = {}\n request = {}\n",
                    testJSON, response, request);
        }
      }
    }
  }

  /**
   * Insures that the given param is included in the query with the given value.
   *
   * <ol>
   * <li>If the param is already included with the given value, the request is returned unchanged.</li>
   * <li>If the param is not already included, it is added with the given value.</li>
   * <li>If the param is already included, but with a different value, the value is replaced with the given value.</li>
   * <li>If the param is already included multiple times, they are replaced with a single param with given value.</li>
   * </ol>
   *
   * The passed-in valueToSet should NOT be URL encoded, as it will be URL encoded by this method.
   *
   * @param query The query portion of a request URL, e.g. "wt=json&indent=on&fl=id,_version_"
   * @param paramToSet The parameter name to insure the presence of in the returned request
   * @param valueToSet The parameter value to insure in the returned request
   * @return The query with the given param set to the given value
   */
  private static String setParam(String query, String paramToSet, String valueToSet) {
    if (null == valueToSet) {
      valueToSet = "";
    }
    try {
      StringBuilder builder = new StringBuilder();
      if (null == query || query.trim().isEmpty()) {
        // empty query -> return "paramToSet=valueToSet"
        builder.append(paramToSet);
        builder.append('=');
        StrUtils.partialURLEncodeVal(builder, valueToSet);
        return builder.toString();
      }
      MultiMapSolrParams requestParams = SolrRequestParsers.parseQueryString(query);
      String[] values = requestParams.getParams(paramToSet);
      if (null == values) {
        // paramToSet isn't present in the request -> append "&paramToSet=valueToSet"
        builder.append(query);
        builder.append('&');
        builder.append(paramToSet);
        builder.append('=');
        StrUtils.partialURLEncodeVal(builder, valueToSet);
        return builder.toString();
      }
      if (1 == values.length && valueToSet.equals(values[0])) {
        // paramToSet=valueToSet is already in the query - just return the query as-is.
        return query;
      }
      // More than one value for paramToSet on the request, or paramToSet's value is not valueToSet
      // -> rebuild the query
      boolean isFirst = true;
      for (Map.Entry<String,String[]> entry : requestParams.getMap().entrySet()) {
        String key = entry.getKey();
        String[] valarr = entry.getValue();
        if ( ! key.equals(paramToSet)) {
          for (String val : valarr) {
            builder.append(isFirst ? "" : '&');
            isFirst = false;
            builder.append(key);
            builder.append('=');
            StrUtils.partialURLEncodeVal(builder, null == val ? "" : val);
          }
        }
      }
      builder.append(isFirst ? "" : '&');
      builder.append(paramToSet);
      builder.append('=');
      StrUtils.partialURLEncodeVal(builder, valueToSet);
      return builder.toString();
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }
}
| |
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.delete;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction;
import org.elasticsearch.action.delete.index.IndexDeleteRequest;
import org.elasticsearch.action.delete.index.IndexDeleteResponse;
import org.elasticsearch.action.delete.index.ShardDeleteResponse;
import org.elasticsearch.action.delete.index.TransportIndexDeleteAction;
import org.elasticsearch.action.support.replication.TransportShardReplicationOperationAction;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.action.shard.ShardStateAction;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.cluster.routing.ShardIterator;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.shard.service.IndexShard;
import org.elasticsearch.indices.IndexAlreadyExistsException;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
/**
* Performs the delete operation.
*/
public class TransportDeleteAction extends TransportShardReplicationOperationAction<DeleteRequest, DeleteRequest, DeleteResponse> {
    // Whether a missing target index is auto-created before deleting
    // (setting "action.auto_create_index", defaults to true in the ctor).
    private final boolean autoCreateIndex;

    // Creates the index when it does not exist yet (see doExecute).
    private final TransportCreateIndexAction createIndexAction;

    // Performs a broadcast index-wide delete when required routing is missing
    // (see resolveRequest).
    private final TransportIndexDeleteAction indexDeleteAction;
    /**
     * Constructor; dependencies are injected. Reads the
     * "action.auto_create_index" setting (default true).
     */
    @Inject
    public TransportDeleteAction(Settings settings, TransportService transportService, ClusterService clusterService,
                                 IndicesService indicesService, ThreadPool threadPool, ShardStateAction shardStateAction,
                                 TransportCreateIndexAction createIndexAction, TransportIndexDeleteAction indexDeleteAction) {
        super(settings, transportService, clusterService, indicesService, threadPool, shardStateAction);
        this.createIndexAction = createIndexAction;
        this.indexDeleteAction = indexDeleteAction;
        this.autoCreateIndex = settings.getAsBoolean("action.auto_create_index", true);
    }
    /** Runs the shard operation on the INDEX thread pool. */
    @Override
    protected String executor() {
        return ThreadPool.Names.INDEX;
    }
    /**
     * Entry point for the delete action. When auto-create is enabled and the
     * target index does not exist in the cluster state, the index is created
     * first and the delete runs once creation completes; a concurrent
     * "already exists" failure is treated as success. Otherwise the delete
     * runs immediately.
     */
    @Override
    protected void doExecute(final DeleteRequest request, final ActionListener<DeleteResponse> listener) {
        if (autoCreateIndex && !clusterService.state().metaData().hasConcreteIndex(request.index())) {
            request.beforeLocalFork();
            createIndexAction.execute(new CreateIndexRequest(request.index()).cause("auto(delete api)").masterNodeTimeout(request.timeout()), new ActionListener<CreateIndexResponse>() {
                @Override
                public void onResponse(CreateIndexResponse result) {
                    innerExecute(request, listener);
                }

                @Override
                public void onFailure(Throwable e) {
                    if (ExceptionsHelper.unwrapCause(e) instanceof IndexAlreadyExistsException) {
                        // we have the index, do it
                        innerExecute(request, listener);
                    } else {
                        listener.onFailure(e);
                    }
                }
            });
        } else {
            innerExecute(request, listener);
        }
    }
    /**
     * Resolves routing and the concrete index name on the request. If the
     * type's mapping requires routing but none was supplied, the delete is
     * dispatched as a broadcast to every shard of the index and the listener
     * is completed from the aggregated shard responses; in that case this
     * method returns false so the normal single-shard path is skipped.
     *
     * @return true to continue with regular shard-level processing
     */
    @Override
    protected boolean resolveRequest(final ClusterState state, final DeleteRequest request, final ActionListener<DeleteResponse> listener) {
        request.routing(state.metaData().resolveIndexRouting(request.routing(), request.index()));
        request.index(state.metaData().concreteIndex(request.index()));
        if (state.metaData().hasIndex(request.index())) {
            // check if routing is required, if so, do a broadcast delete
            MappingMetaData mappingMd = state.metaData().index(request.index()).mappingOrDefault(request.type());
            if (mappingMd != null && mappingMd.routing().required()) {
                if (request.routing() == null) {
                    indexDeleteAction.execute(new IndexDeleteRequest(request.index(), request.type(), request.id()), new ActionListener<IndexDeleteResponse>() {
                        @Override
                        public void onResponse(IndexDeleteResponse indexDeleteResponse) {
                            // go over the response, see if we have found one, and the version if found
                            long version = 0;
                            boolean found = false;
                            for (ShardDeleteResponse deleteResponse : indexDeleteResponse.responses()) {
                                if (!deleteResponse.notFound()) {
                                    found = true;
                                    version = deleteResponse.version();
                                    break;
                                }
                            }
                            listener.onResponse(new DeleteResponse(request.index(), request.type(), request.id(), version, !found));
                        }

                        @Override
                        public void onFailure(Throwable e) {
                            listener.onFailure(e);
                        }
                    });
                    return false;
                }
            }
        }
        return true;
    }
private void innerExecute(final DeleteRequest request, final ActionListener<DeleteResponse> listener) {
super.doExecute(request, listener);
}
@Override
protected boolean checkWriteConsistency() {
return true;
}
@Override
protected DeleteRequest newRequestInstance() {
return new DeleteRequest();
}
@Override
protected DeleteRequest newReplicaRequestInstance() {
return new DeleteRequest();
}
@Override
protected DeleteResponse newResponseInstance() {
return new DeleteResponse();
}
@Override
protected String transportAction() {
return DeleteAction.NAME;
}
@Override
protected ClusterBlockException checkGlobalBlock(ClusterState state, DeleteRequest request) {
return state.blocks().globalBlockedException(ClusterBlockLevel.WRITE);
}
@Override
protected ClusterBlockException checkRequestBlock(ClusterState state, DeleteRequest request) {
return state.blocks().indexBlockedException(ClusterBlockLevel.WRITE, request.index());
}
@Override
protected PrimaryResponse<DeleteResponse, DeleteRequest> shardOperationOnPrimary(ClusterState clusterState, PrimaryOperationRequest shardRequest) {
DeleteRequest request = shardRequest.request;
IndexShard indexShard = indicesService.indexServiceSafe(shardRequest.request.index()).shardSafe(shardRequest.shardId);
Engine.Delete delete = indexShard.prepareDelete(request.type(), request.id(), request.version())
.versionType(request.versionType())
.origin(Engine.Operation.Origin.PRIMARY);
indexShard.delete(delete);
// update the request with teh version so it will go to the replicas
request.version(delete.version());
if (request.refresh()) {
try {
indexShard.refresh(new Engine.Refresh(false));
} catch (Exception e) {
// ignore
}
}
DeleteResponse response = new DeleteResponse(request.index(), request.type(), request.id(), delete.version(), delete.notFound());
return new PrimaryResponse<DeleteResponse, DeleteRequest>(shardRequest.request, response, null);
}
@Override
protected void shardOperationOnReplica(ReplicaOperationRequest shardRequest) {
DeleteRequest request = shardRequest.request;
IndexShard indexShard = indicesService.indexServiceSafe(shardRequest.request.index()).shardSafe(shardRequest.shardId);
Engine.Delete delete = indexShard.prepareDelete(request.type(), request.id(), request.version())
.origin(Engine.Operation.Origin.REPLICA);
indexShard.delete(delete);
if (request.refresh()) {
try {
indexShard.refresh(new Engine.Refresh(false));
} catch (Exception e) {
// ignore
}
}
}
@Override
protected ShardIterator shards(ClusterState clusterState, DeleteRequest request) {
return clusterService.operationRouting()
.deleteShards(clusterService.state(), request.index(), request.type(), request.id(), request.routing());
}
}
| |
package io.github.ibuildthecloud.dstack.engine.process.log;
import static io.github.ibuildthecloud.dstack.util.time.TimeUtils.*;
import io.github.ibuildthecloud.dstack.engine.process.ExitReason;
import io.github.ibuildthecloud.dstack.engine.process.ProcessStateTransition;
import io.github.ibuildthecloud.dstack.util.type.Named;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
/**
 * Log record for a single process execution: identity of the process and
 * resource, timing, lock acquisition details, state transitions, per-handler
 * logic executions, and the final exit reason / exception.
 */
public class ProcessExecutionLog extends AbstractParentLog implements ParentLog {

    /* Identity */
    String id = UUID.randomUUID().toString();
    Long processId;
    String processName;
    String resourceType;
    String resourceId;
    String processingServerId;

    /* Timing */
    long startTime;
    Long stopTime;

    /* Locking */
    String processLock;
    Long lockAcquireStart;
    Long lockAcquired;
    Long lockAcquireEnd;
    Long lockAcquireFailed;
    String failedToAcquireLock;
    Long lockHoldTime;

    /* Outcome */
    ExitReason exitReason;
    ExceptionLog exception;
    List<ProcessStateTransition> transitions = new ArrayList<ProcessStateTransition>();
    List<ProcessLogicExecutionLog> processHandlerExecutions = new ArrayList<ProcessLogicExecutionLog>();

    /**
     * Records the stop time and exit reason.
     *
     * @return the given reason, for convenient chaining by callers
     */
    public ExitReason exit(ExitReason reason) {
        stopTime = System.currentTimeMillis();
        exitReason = reason;
        return exitReason;
    }

    /**
     * Finalizes derived values; computes the lock hold time when the lock
     * was both acquired and released.
     */
    public void close() {
        if ( processLock == null || lockAcquired == null || lockAcquireEnd == null ) {
            return;
        }
        lockHoldTime = lockAcquireEnd - lockAcquired;
    }

    /**
     * Creates a logic-execution log entry. For a named handler the entry is
     * time-stamped, named and tracked in {@link #getProcessHandlerExecutions()};
     * for a {@code null} handler a detached, untracked entry is returned.
     */
    public ProcessLogicExecutionLog newProcessLogicExecution(Named handler) {
        ProcessLogicExecutionLog execution = new ProcessLogicExecutionLog();
        if ( handler == null ) {
            return execution;
        }
        execution.setStartTime(now());
        execution.setName(handler.getName());
        processHandlerExecutions.add(execution);
        return execution;
    }

    /* Standard accessors below */

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public Long getProcessId() {
        return processId;
    }

    public void setProcessId(Long processId) {
        this.processId = processId;
    }

    public String getProcessName() {
        return processName;
    }

    public void setProcessName(String processName) {
        this.processName = processName;
    }

    /** The process name doubles as this log's display name. */
    @Override
    public String getName() {
        return processName;
    }

    public String getResourceType() {
        return resourceType;
    }

    public void setResourceType(String resourceType) {
        this.resourceType = resourceType;
    }

    public String getResourceId() {
        return resourceId;
    }

    public void setResourceId(String resourceId) {
        this.resourceId = resourceId;
    }

    public String getProcessingServerId() {
        return processingServerId;
    }

    public void setProcessingServerId(String processingServerId) {
        this.processingServerId = processingServerId;
    }

    public Long getStartTime() {
        return startTime;
    }

    public void setStartTime(long startTime) {
        this.startTime = startTime;
    }

    public Long getStopTime() {
        return stopTime;
    }

    public void setStopTime(Long stopTime) {
        this.stopTime = stopTime;
    }

    public String getProcessLock() {
        return processLock;
    }

    public void setProcessLock(String processLock) {
        this.processLock = processLock;
    }

    public Long getLockAcquireStart() {
        return lockAcquireStart;
    }

    public void setLockAcquireStart(Long lockAcquireStart) {
        this.lockAcquireStart = lockAcquireStart;
    }

    public Long getLockAcquired() {
        return lockAcquired;
    }

    public void setLockAcquired(Long lockAcquired) {
        this.lockAcquired = lockAcquired;
    }

    public Long getLockAcquireEnd() {
        return lockAcquireEnd;
    }

    public void setLockAcquireEnd(Long lockAcquireEnd) {
        this.lockAcquireEnd = lockAcquireEnd;
    }

    public Long getLockAcquireFailed() {
        return lockAcquireFailed;
    }

    public void setLockAcquireFailed(Long lockAcquireFailed) {
        this.lockAcquireFailed = lockAcquireFailed;
    }

    public String getFailedToAcquireLock() {
        return failedToAcquireLock;
    }

    public void setFailedToAcquireLock(String failedToAcquireLock) {
        this.failedToAcquireLock = failedToAcquireLock;
    }

    public Long getLockHoldTime() {
        return lockHoldTime;
    }

    public void setLockHoldTime(Long lockHoldTime) {
        this.lockHoldTime = lockHoldTime;
    }

    public List<ProcessStateTransition> getTransitions() {
        return transitions;
    }

    public void setTransitions(List<ProcessStateTransition> transitions) {
        this.transitions = transitions;
    }

    public List<ProcessLogicExecutionLog> getProcessHandlerExecutions() {
        return processHandlerExecutions;
    }

    public void setProcessHandlerExecutions(List<ProcessLogicExecutionLog> processHandlerExecutions) {
        this.processHandlerExecutions = processHandlerExecutions;
    }

    public ExitReason getExitReason() {
        return exitReason;
    }

    public void setExitReason(ExitReason exitReason) {
        this.exitReason = exitReason;
    }

    public ExceptionLog getException() {
        return exception;
    }

    public void setException(ExceptionLog exception) {
        this.exception = exception;
    }
}
| |
package bsh;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import static bsh.TestUtil.eval;
import static bsh.TestUtil.script;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.arrayWithSize;
import static org.hamcrest.Matchers.arrayContaining;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import org.junit.Rule;
@RunWith(FilteredTestRunner.class)
/**
 * Tests for BeanShell's scripted enum support: constant declaration, values()/valueOf(),
 * constructors, fields/methods (static and instance), interface implementation,
 * switch on enum values, and constant-specific class bodies.
 */
public class EnumTest {

    @Rule
    public ExpectedException thrown = ExpectedException.none();

    @Test
    public void enum_with_no_constants() throws Exception {
        Class<?> clas = (Class<?>) eval(
            "enum E0 { }",
            "E0.class;"
        );
        assertTrue("VAL instance of Enum", Enum.class.isAssignableFrom(clas));
    }

    @Test
    public void enum_with_single_constant() throws Exception {
        Object obj = eval(
            "enum E1 {",
                "VAL",
            "}",
            "E1.VAL;"
        );
        assertThat("VAL instance of Enum", obj, instanceOf(Enum.class));
        assertThat("VAL string value", obj.toString(), equalTo("VAL"));
    }

    @Test
    public void enum_with_two_constant() throws Exception {
        Object obj = eval(
            "enum E2 {",
                "VAL1, VAL2",
            "}",
            "E2.VAL1;"
        );
        assertThat("VAL1 instance of Enum", obj, instanceOf(Enum.class));
        assertThat("VAL1 string value", obj.toString(), equalTo("VAL1"));
    }

    @Test
    public void enum_with_multiple_constant() throws Exception {
        Object obj = eval(
            "enum E3 {",
                "VAL1, VAL2, VAL3, VAL4",
            "}",
            "E3.VAL3;"
        );
        assertThat("VAL3 instance of Enum", obj, instanceOf(Enum.class));
        assertThat("VAL3 string value", obj.toString(), equalTo("VAL3"));
    }

    @Test
    public void enum_with_more_than_6_constant() throws Exception {
        Object obj = eval(
            "enum E8 {",
                "VAL1, VAL2, VAL3, VAL4, VAL5, VAL6, VAL7, VAL8",
            "}",
            "E8.VAL7;"
        );
        // Fixed descriptions: this test asserts on VAL7, not VAL3.
        assertThat("VAL7 instance of Enum", obj, instanceOf(Enum.class));
        assertThat("VAL7 string value", obj.toString(), equalTo("VAL7"));
    }

    @Test
    public void enum_values_zero() throws Exception {
        Object[] obj = (Object[]) eval(
            "enum E0 { }",
            "E0.values();"
        );
        assertThat("array length is 0", obj, arrayWithSize(0));
    }

    @Test
    public void enum_values() throws Exception {
        final Interpreter bsh = new Interpreter();
        Object[] obj = (Object[]) bsh.eval(script(
            "enum E4 {",
                "VAL1, VAL2, VAL3, VAL4",
            "}",
            "E4.values();"
        ));
        assertThat("array length is 4", obj, arrayWithSize(4));
        assertThat("array containing VAL1, VAL2, VAL3, VAL4", obj, arrayContaining(
            bsh.eval("E4.VAL1"), bsh.eval("E4.VAL2"),
            bsh.eval("E4.VAL3"), bsh.eval("E4.VAL4")));
    }

    @Test
    public void enum_values_excludes_enum_field() throws Exception {
        final Interpreter bsh = new Interpreter();
        Object[] obj = (Object[]) bsh.eval(script(
            "enum E4 {",
                "VAL1, VAL2, VAL3, VAL4;",
                "E4 enm;",
            "}",
            "E4.values();"
        ));
        assertThat("array length is 4", obj, arrayWithSize(4));
        assertThat("array containing VAL1, VAL2, VAL3, VAL4", obj, arrayContaining(
            bsh.eval("E4.VAL1"), bsh.eval("E4.VAL2"),
            bsh.eval("E4.VAL3"), bsh.eval("E4.VAL4")));
    }

    @Test
    public void enum_values_excludes_enum_static_field() throws Exception {
        final Interpreter bsh = new Interpreter();
        Object[] obj = (Object[]) bsh.eval(script(
            "enum E4 {",
                "VAL1, VAL2, VAL3, VAL4;",
                "static E4 enm = E4.VAL1;",
            "}",
            "E4.values();"
        ));
        assertThat("array length is 4", obj, arrayWithSize(4));
        assertThat("array containing VAL1, VAL2, VAL3, VAL4", obj, arrayContaining(
            bsh.eval("E4.VAL1"), bsh.eval("E4.VAL2"),
            bsh.eval("E4.VAL3"), bsh.eval("E4.VAL4")));
    }

    @Test
    public void enum_values_excludes_enum_static_private_field() throws Exception {
        final Interpreter bsh = new Interpreter();
        Object[] obj = (Object[]) bsh.eval(script(
            "enum E4 {",
                "VAL1, VAL2, VAL3, VAL4;",
                "private static final E4 enm = E4.VAL1;",
            "}",
            "E4.values();"
        ));
        assertThat("array length is 4", obj, arrayWithSize(4));
        assertThat("array containing VAL1, VAL2, VAL3, VAL4", obj, arrayContaining(
            bsh.eval("E4.VAL1"), bsh.eval("E4.VAL2"),
            bsh.eval("E4.VAL3"), bsh.eval("E4.VAL4")));
    }

    @Test
    public void enum_values_length() throws Exception {
        Object obj = eval(
            "enum E4 {",
                "VAL1, VAL2, VAL3, VAL4",
            "}",
            "E4.values().length;"
        );
        assertThat("values length is 4", obj, equalTo(4));
    }

    @Test
    public void enum_valueOf() throws Exception {
        Object obj = eval(
            "enum E5 {",
                "VAL1, VAL2, VAL3, VAL4",
            "}",
            "E5.valueOf('VAL3');"
        );
        assertThat("VAL3 instance of Enum", obj, instanceOf(Enum.class));
        assertThat("VAL3 string value", obj.toString(), equalTo("VAL3"));
    }

    @Test
    public void enum_assign() throws Exception {
        Object obj = eval(
            "enum E6 {",
                "VAL1, VAL2, VAL3, VAL4",
            "}",
            "val3 = E6.VAL3;",
            "val3;"
        );
        assertThat("VAL3 instance of Enum", obj, instanceOf(Enum.class));
        assertThat("VAL3 string value", obj.toString(), equalTo("VAL3"));
    }

    @Test
    public void enum_equals() throws Exception {
        Object obj = eval(
            "enum E6 {",
                "VAL1, VAL2, VAL3, VAL4",
            "}",
            "val3 = E6.VAL3;",
            "val3 == E6.VAL3;"
        );
        assertThat("VAL3 == val3", obj, equalTo(true));
    }

    @Test
    public void enum_with_static_field() throws Exception {
        Object obj = eval(
            "enum E7 {",
                "VAL1, VAL2, VAL3, VAL4;",
                "static int val = 5;",
            "}",
            "E7.val;"
        );
        assertThat("static int value", obj, equalTo(5));
    }

    @Test
    public void enum_with_intance_field() throws Exception {
        Object obj = eval(
            "enum E8 {",
                "VAL1, VAL2, VAL3, VAL4;",
                "int val = 5;",
            "}",
            "E8.VAL2.val;"
        );
        // Fixed description: this reads an instance field, not a static one.
        assertThat("instance int value", obj, equalTo(5));
    }

    @Test
    public void enum_with_static_method() throws Exception {
        Object obj = eval(
            "enum E9 {",
                "VAL1, VAL2, VAL3, VAL4;",
                "static int val = 5;",
                "static int getVal() { val; }",
            "}",
            "E9.getVal();"
        );
        assertThat("static int value", obj, equalTo(5));
    }

    @Test
    public void enum_switch() throws Exception {
        final Interpreter bsh = new Interpreter();
        bsh.eval(script(
            "enum Name { VAL1, VAL2 }",
            "switchit(val) {",
                "switch (val) {",
                    "case VAL1:",
                        "return 'val1';",
                    "break;",
                    "case VAL2:",
                        "return 'val2';",
                    "break;",
                    "default:",
                        "return 'default';",
                "}",
            "}"
        ));
        assertThat("val2 switched", bsh.eval("switchit(Name.VAL2);"), equalTo("val2"));
        assertThat("val1 switched", bsh.eval("switchit(Name.VAL1);"), equalTo("val1"));
        assertThat("default switched null", bsh.eval("switchit(null);"), equalTo("default"));
        assertThat("default switched string", bsh.eval("switchit('VAL1');"), equalTo("default"));
    }

    @Test
    public void enum_args_constructor_required() throws Exception {
        thrown.expect(EvalError.class);
        thrown.expectMessage(containsString("Can't find constructor: Name(int)"));

        eval(
            "enum Name {",
                "VAL1(1), VAL2(2);",
                "int val;",
            "}",
            "Name.VAL1.val;"
        );
    }

    @Test
    public void enum_new_enum_default_constructor() throws Exception {
        thrown.expect(EvalError.class);
        thrown.expectMessage(containsString("Can't find default constructor for: class Name"));

        eval(
            "enum Name {",
                "VAL1, VAL2",
            "}",
            "new Name();"
        );
    }

    @Test
    public void enum_new_enum_default_enum_constructor() throws Exception {
        thrown.expect(EvalError.class);
        thrown.expectMessage(
            containsString("Can't find constructor: Name(String)"));

        eval(
            "enum Name {",
                "VAL1, VAL2",
            "}",
            "new Name('VAL3');",
            // with accessibility we can use the default private constructor
            "//new Name('VAL3',3);"
        );
    }

    @Test
    public void enum_args_constructor() throws Exception {
        Object obj = eval(
            "enum Name {",
                "VAL1(1), VAL2(2);",
                "int val;",
                "private Name(int a) {",
                    "val = a;",
                "}",
            "}",
            "Name.VAL1.val;"
        );
        assertThat("enum args constructor set value", obj, equalTo(1));
    }

    @Test
    public void enum_args_constructor_multi() throws Exception {
        final Interpreter bsh = new Interpreter();
        bsh.eval(script(
            "enum Name {",
                "VAL1(1, 1.0, 'v1'), VAL2(2, 2.0, 'v2');",
                "int i;",
                "double d;",
                "String s;",
                "Name(int i, double d, String s) {",
                    "this.i = i;",
                    "this.d = d;",
                    "this.s = s;",
                "}",
            "}"
        ));
        assertThat("enum args VAL1 constructor set value i", bsh.eval("Name.VAL1.i"), equalTo(1));
        assertThat("enum args VAL1 constructor set value d", bsh.eval("Name.VAL1.d"), equalTo(1.0));
        assertThat("enum args VAL1 constructor set value s", bsh.eval("Name.VAL1.s"), equalTo("v1"));
        assertThat("enum args VAL2 constructor set value i", bsh.eval("Name.VAL2.i"), equalTo(2));
        assertThat("enum args VAL2 constructor set value d", bsh.eval("Name.VAL2.d"), equalTo(2.0));
        assertThat("enum args VAL2 constructor set value s", bsh.eval("Name.VAL2.s"), equalTo("v2"));
    }

    @Test
    public void enum_implements_interface_constants() throws Exception {
        Object obj = eval(
            "interface AA {",
                "AB=99;",
            "}",
            "enum Name implements AA {",
                "VAL1, VAL2",
            "}",
            "Name.AB;"
        );
        assertThat("interface inherited constant", obj, equalTo(99));
    }

    @Test
    public void enum_implements_interface_default_method() throws Exception {
        Object obj = eval(
            "interface AA {",
                "default int def(a) { a; }",
            "}",
            "enum Name implements AA {",
                "VAL1, VAL2;",
            "}",
            "Name.VAL2.def(4);"
        );
        assertThat("interface inherited default method", obj, equalTo(4));
    }

    @Test
    public void enum_implements_interface_static_method() throws Exception {
        Object obj = eval(
            "interface AA {",
                "static int def(a) { a; }",
            "}",
            "enum Name implements AA {",
                "VAL1, VAL2;",
            "}",
            "Name.def(44);"
        );
        assertThat("interface inherited static method", obj, equalTo(44));
    }

    @Test
    public void enum_block_variables() throws Exception {
        final Interpreter bsh = new Interpreter();
        bsh.eval(script(
            "enum Name {",
                "VAL1 {",
                    "val = 'val1';",
                "},",
                "VAL2 {",
                    "val = 'val2';",
                "}",
            "}"
        ));
        assertThat("enum block variable VAL2", bsh.eval("Name.VAL2.val"), equalTo("val2"));
        assertThat("enum block variable VAL1", bsh.eval("Name.VAL1.val"), equalTo("val1"));
    }

    @Test
    public void enum_block_method() throws Exception {
        final Interpreter bsh = new Interpreter();
        bsh.eval(script(
            "enum Name {",
                "VAL1 {",
                    "get() { 'val1'; }",
                "},",
                "VAL2 {",
                    "get() { 'val2'; }",
                "}",
            "}"
        ));
        assertThat("enum block variable VAL2", bsh.eval("Name.VAL2.get()"), equalTo("val2"));
        assertThat("enum block variable VAL1", bsh.eval("Name.VAL1.get()"), equalTo("val1"));
    }

    @Test
    public void enum_block_method_override() throws Exception {
        final Interpreter bsh = new Interpreter();
        bsh.eval(script(
            "enum Name {",
                "VAL1 {",
                    "get() { 'val1'; }",
                "},",
                "VAL2 {",
                    "get() { 'val2'; }",
                "};",
                "abstract String get();",
            "}"
        ));
        assertThat("enum block variable VAL2", bsh.eval("Name.VAL2.get()"), equalTo("val2"));
        assertThat("enum block variable VAL1", bsh.eval("Name.VAL1.get()"), equalTo("val1"));
    }

    @Test
    public void enum_block_method_and_constructor() throws Exception {
        final Interpreter bsh = new Interpreter();
        bsh.eval(script(
            "enum Name {",
                "VAL1('1val') {",
                    "get() { str + '1'; }",
                "},",
                "VAL2('2val') {",
                    "get() { str + '2'; }",
                "};",
                "String str = '';",
                "Name(String s) {",
                    "str = s;",
                "}",
            "}"
        ));
        assertThat("enum block variable VAL2", bsh.eval("Name.VAL2.get()"), equalTo("2val2"));
        assertThat("enum block variable VAL1", bsh.eval("Name.VAL1.get()"), equalTo("1val1"));
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.ode.store;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.ode.bpel.compiler.api.CompilationException;
import org.apache.ode.bpel.dd.DeployDocument;
import org.apache.ode.bpel.dd.TDeployment;
import org.apache.ode.bpel.iapi.ContextException;
import org.apache.ode.bpel.iapi.ProcessConf;
import org.apache.ode.bpel.iapi.ProcessState;
import org.apache.ode.bpel.iapi.ProcessStore;
import org.apache.ode.bpel.iapi.ProcessStoreEvent;
import org.apache.ode.bpel.iapi.ProcessStoreListener;
import org.apache.ode.bpel.iapi.EndpointReferenceContext;
import org.apache.ode.store.DeploymentUnitDir.CBPInfo;
import org.apache.ode.utils.DOMUtils;
import org.apache.ode.utils.GUID;
import org.apache.ode.utils.msg.MessageBundle;
import org.apache.ode.il.config.OdeConfigProperties;
import org.hsqldb.jdbc.jdbcDataSource;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import javax.sql.DataSource;
import javax.xml.namespace.QName;
import java.io.File;
import java.io.IOException;
import java.sql.SQLException;
import java.util.*;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
/**
* <p>
* JDBC-based implementation of a process store. Also provides an "in-memory" store by way of HSQL database.
* </p>
*
* <p>
* The philsophy here is to keep things simple. Process store operations are relatively infrequent. Performance of the public
* methods is not a concern. However, note that the {@link org.apache.ode.bpel.iapi.ProcessConf} objects returned by the class are
* going to be used from within the engine runtime, and hence their performance needs to be very good. Similarly, these objects
* should be immutable so as not to confuse the engine.
*
* Note the way that the database is used in this class, it is more akin to a recovery log, this is intentional: we want to start
* up, load stuff from the database and then pretty much forget about it when it comes to reads.
*
* @author Maciej Szefler <mszefler at gmail dot com>
* @author mriou <mriou at apache dot org>
*/
public class ProcessStoreImpl implements ProcessStore {
private static final Log __log = LogFactory.getLog(ProcessStoreImpl.class);
private static final Messages __msgs = MessageBundle.getMessages(Messages.class);
private final CopyOnWriteArrayList<ProcessStoreListener> _listeners = new CopyOnWriteArrayList<ProcessStoreListener>();
private Map<QName, ProcessConfImpl> _processes = new HashMap<QName, ProcessConfImpl>();
private Map<String, DeploymentUnitDir> _deploymentUnits = new HashMap<String, DeploymentUnitDir>();
/** Guards access to the _processes and _deploymentUnits */
private final ReadWriteLock _rw = new ReentrantReadWriteLock();
private ConfStoreConnectionFactory _cf;
private EndpointReferenceContext eprContext;
protected File _deployDir;
protected File _configDir;
/**
* Executor used to process DB transactions. Allows us to isolate the TX context, and to ensure that only one TX gets executed a
* time. We don't really care to parallelize these operations because: i) HSQL does not isolate transactions and we don't want
* to get confused ii) we're already serializing all the operations with a read/write lock. iii) we don't care about
* performance, these are infrequent operations.
*/
private ExecutorService _executor = Executors.newSingleThreadExecutor(new SimpleThreadFactory());
/**
* In-memory DataSource, or <code>null</code> if we are using a real DS. We need this to shutdown the DB.
*/
private DataSource _inMemDs;
    /**
     * Creates a store backed by a fresh in-memory HSQL database with default
     * (empty) configuration properties. Mainly a convenience for tests.
     */
    public ProcessStoreImpl() {
        this(null, null, "", new OdeConfigProperties(new Properties(), ""), true);
    }
public ProcessStoreImpl(EndpointReferenceContext eprContext, DataSource ds, String persistenceType, OdeConfigProperties props, boolean createDatamodel) {
this.eprContext = eprContext;
if (ds != null) {
// ugly hack
if (persistenceType.toLowerCase().indexOf("hib") != -1) {
_cf = new org.apache.ode.store.hib.DbConfStoreConnectionFactory(ds, props.getProperties(), createDatamodel, props.getTxFactoryClass());
} else {
_cf = new org.apache.ode.store.jpa.DbConfStoreConnectionFactory(ds, createDatamodel, props.getTxFactoryClass());
}
} else {
// If the datasource is not provided, then we create a HSQL-based
// in-memory database. Makes testing a bit simpler.
DataSource hsqlds = createInternalDS(new GUID().toString());
if ("hibernate".equalsIgnoreCase(persistenceType)) {
_cf = new org.apache.ode.store.hib.DbConfStoreConnectionFactory(hsqlds, props.getProperties(), createDatamodel, props.getTxFactoryClass());
} else {
_cf = new org.apache.ode.store.jpa.DbConfStoreConnectionFactory(hsqlds, createDatamodel, props.getTxFactoryClass());
}
_inMemDs = hsqlds;
}
}
    /**
     * Constructor that hardwires OpenJPA on a new in-memory database. Suitable for tests.
     *
     * NOTE(review): the {@code inMemDs} parameter is ignored — a fresh internal HSQL
     * data source is created and used instead. Confirm whether callers expect the
     * data source they pass in to actually be used.
     */
    public ProcessStoreImpl(EndpointReferenceContext eprContext, DataSource inMemDs) {
        this.eprContext = eprContext;
        DataSource hsqlds = createInternalDS(new GUID().toString());
        // when in memory we always create the model as we are starting from scratch
        _cf = new org.apache.ode.store.jpa.DbConfStoreConnectionFactory(hsqlds, true, OdeConfigProperties.DEFAULT_TX_FACTORY_CLASS_NAME);
        _inMemDs = hsqlds;
    }
public void shutdown() {
if (_inMemDs != null) {
shutdownInternalDB(_inMemDs);
_inMemDs = null;
}
}
    /**
     * Safety net: ensures the in-memory HSQL database is shut down even if the
     * owner forgot to call {@link #shutdown()}.
     *
     * NOTE(review): relies on finalization, which is deprecated in modern JVMs;
     * callers should invoke {@link #shutdown()} explicitly.
     */
    @Override
    protected void finalize() throws Throwable {
        // force a shutdown so that HSQL cleans up its mess.
        try {
            shutdown();
        } catch (Throwable t) {
            ; // we tried, no worries.
        }
        super.finalize();
    }
    /**
     * Deploys a process, activating it immediately and deriving the package
     * name from the deployment directory's name.
     *
     * @param deploymentUnitDirectory directory containing the deployment unit
     * @return the deployed (versioned) process ids
     */
    public Collection<QName> deploy(final File deploymentUnitDirectory) {
        return deploy(deploymentUnitDirectory, true, null);
    }
/**
* Deploys a process.
*/
public Collection<QName> deploy(final File deploymentUnitDirectory, boolean activate, String duName) {
__log.info(__msgs.msgDeployStarting(deploymentUnitDirectory));
final Date deployDate = new Date();
// Create the DU and compile/scan it before acquiring lock.
final DeploymentUnitDir du = new DeploymentUnitDir(deploymentUnitDirectory);
if( duName != null ) {
// Override the package name if given from the parameter
du.setName(duName);
}
try {
du.compile();
} catch (CompilationException ce) {
String errmsg = __msgs.msgDeployFailCompileErrors(ce);
__log.error(errmsg, ce);
throw new ContextException(errmsg, ce);
}
du.scan();
final DeployDocument dd = du.getDeploymentDescriptor();
final ArrayList<ProcessConfImpl> processes = new ArrayList<ProcessConfImpl>();
Collection<QName> deployed;
_rw.writeLock().lock();
// Process and DU use a monotonically increased single version number.
long version = exec(new Callable<Long>() {
public Long call(ConfStoreConnection conn) {
return conn.getNextVersion();
}
});
try {
if (_deploymentUnits.containsKey(du.getName())) {
String errmsg = __msgs.msgDeployFailDuplicateDU(du.getName());
__log.error(errmsg);
throw new ContextException(errmsg);
}
du.setVersion(version);
for (TDeployment.Process processDD : dd.getDeploy().getProcessList()) {
QName pid = toPid(processDD.getName(), version);
// Retires older version if we can find one
DeploymentUnitDir oldDU = findOldDU(du.getName());
if (oldDU != null)
setRetiredPackage(oldDU.getName(), true);
if (_processes.containsKey(pid)) {
String errmsg = __msgs.msgDeployFailDuplicatePID(processDD.getName(), du.getName());
__log.error(errmsg);
throw new ContextException(errmsg);
}
QName type = processDD.getType() != null ? processDD.getType() : processDD.getName();
CBPInfo cbpInfo = du.getCBPInfo(type);
if (cbpInfo == null) {
String errmsg = __msgs.msgDeployFailedProcessNotFound(processDD.getName(), du.getName());
__log.error(errmsg);
throw new ContextException(errmsg);
}
ProcessConfImpl pconf = new ProcessConfImpl(pid, processDD.getName(), version, du, processDD, deployDate,
calcInitialProperties(processDD), calcInitialState(processDD), eprContext, _configDir);
processes.add(pconf);
}
_deploymentUnits.put(du.getName(), du);
for (ProcessConfImpl process : processes) {
__log.info(__msgs.msgProcessDeployed(du.getDeployDir(), process.getProcessId()));
_processes.put(process.getProcessId(), process);
}
} finally {
_rw.writeLock().unlock();
}
// Do the deployment in the DB. We need this so that we remember deployments across system shutdowns.
// We don't fail if there is a DB error, simply print some errors.
deployed = exec(new Callable<Collection<QName>>() {
public Collection<QName> call(ConfStoreConnection conn) {
// Check that this deployment unit is not deployed.
DeploymentUnitDAO dudao = conn.getDeploymentUnit(du.getName());
if (dudao != null) {
String errmsg = "Database out of synch for DU " + du.getName();
__log.warn(errmsg);
dudao.delete();
}
dudao = conn.createDeploymentUnit(du.getName());
try {
dudao.setDeploymentUnitDir(deploymentUnitDirectory.getCanonicalPath());
} catch (IOException e1) {
String errmsg = "Error getting canonical path for " + du.getName()
+ "; deployment unit will not be available after restart!";
__log.error(errmsg);
}
ArrayList<QName> deployed = new ArrayList<QName>();
// Going trough each process declared in the dd
for (ProcessConfImpl pc : processes) {
try {
ProcessConfDAO newDao = dudao.createProcess(pc.getProcessId(), pc.getType(), pc.getVersion());
newDao.setState(pc.getState());
for (Map.Entry<QName, Node> prop : pc.getProcessProperties().entrySet()) {
newDao.setProperty(prop.getKey(), DOMUtils.domToString(prop.getValue()));
}
deployed.add(pc.getProcessId());
conn.setVersion(pc.getVersion());
} catch (Throwable e) {
String errmsg = "Error persisting deployment record for " + pc.getProcessId()
+ "; process will not be available after restart!";
__log.error(errmsg, e);
}
}
return deployed;
}
});
// We want the events to be fired outside of the bounds of the writelock.
try {
for (ProcessConfImpl process : processes) {
fireEvent(new ProcessStoreEvent(ProcessStoreEvent.Type.DEPLOYED, process.getProcessId(), process.getDeploymentUnit()
.getName()));
fireStateChange(process.getProcessId(), process.getState(), process.getDeploymentUnit().getName());
}
} catch (Exception e) {
// A problem at that point means that engine deployment failed, we don't want the store to keep the du
__log.warn("Deployment failed within the engine, store undeploying process.", e);
undeploy(deploymentUnitDirectory);
if (e instanceof ContextException) throw (ContextException) e;
else throw new ContextException("Deployment failed within the engine.", e);
}
return deployed;
}
    /**
     * Undeploys the package whose name matches the given directory's name.
     *
     * @return the process ids that were undeployed (possibly empty)
     */
    public Collection<QName> undeploy(final File dir) {
        return undeploy(dir.getName());
    }
    /**
     * Undeploys a package: removes its record from the database (best-effort),
     * drops the deployment unit from memory, fires UNDEPLOYED events, and finally
     * removes the affected process configurations.
     *
     * @param duName name of the deployment unit (package) to remove
     * @return the process ids that were undeployed; empty when the package was unknown
     */
    public Collection<QName> undeploy(final String duName) {
        // Best-effort DB cleanup first; an error here only risks the DU reappearing after restart.
        try {
            exec(new Callable<Collection<QName>>() {
                public Collection<QName> call(ConfStoreConnection conn) {
                    DeploymentUnitDAO dudao = conn.getDeploymentUnit(duName);
                    if (dudao != null)
                        dudao.delete();
                    return null;
                }
            });
        } catch (Exception ex) {
            __log.error("Error synchronizing with data store; " + duName + " may be reappear after restart!");
        }
        Collection<QName> undeployed = Collections.emptyList();
        DeploymentUnitDir du;
        _rw.writeLock().lock();
        try {
            du = _deploymentUnits.remove(duName);
            if (du != null) {
                undeployed = toPids(du.getProcessNames(), du.getVersion());
            }
        } finally {
            _rw.writeLock().unlock();
        }
        // Fire events outside the lock. Safe even though du may be null: when du is null,
        // undeployed stays empty and this loop never runs.
        for (QName pn : undeployed) {
            fireEvent(new ProcessStoreEvent(ProcessStoreEvent.Type.UNDEPLOYED, pn, du.getName()));
            __log.info(__msgs.msgProcessUndeployed(pn));
        }
        // Second lock window: remove the process configs after listeners were notified.
        _rw.writeLock().lock();
        try {
            _processes.keySet().removeAll(undeployed);
        } finally {
            _rw.writeLock().unlock();
        }
        return undeployed;
    }
public Collection<String> getPackages() {
_rw.readLock().lock();
try {
return new ArrayList<String>(_deploymentUnits.keySet());
} finally {
_rw.readLock().unlock();
}
}
/**
 * Lists the process ids contained in a package.
 *
 * @param packageName deployment unit name
 * @return the versioned process ids, or null when the package is unknown
 *         (callers such as setRetiredPackage rely on the null return)
 */
public List<QName> listProcesses(String packageName) {
    _rw.readLock().lock();
    try {
        DeploymentUnitDir unit = _deploymentUnits.get(packageName);
        return unit == null ? null : toPids(unit.getProcessNames(), unit.getVersion());
    } finally {
        _rw.readLock().unlock();
    }
}
/**
 * Changes the persisted and the in-memory state of process {@code pid} and
 * fires a state-change event when the state actually changed.
 *
 * @param pid process id whose state should change
 * @param state the new state
 * @throws ContextException if the process is unknown in memory or in the db
 */
public void setState(final QName pid, final ProcessState state) {
    __log.debug("Changing process state for " + pid + " to " + state);
    final ProcessConfImpl pconf;
    _rw.readLock().lock();
    try {
        pconf = _processes.get(pid);
        if (pconf == null) {
            String msg = __msgs.msgProcessNotFound(pid);
            __log.info(msg);
            throw new ContextException(msg);
        }
    } finally {
        _rw.readLock().unlock();
    }
    final DeploymentUnitDir dudir = pconf.getDeploymentUnit();
    // Update in the database.
    ProcessState old = exec(new Callable<ProcessState>() {
        public ProcessState call(ConfStoreConnection conn) {
            DeploymentUnitDAO dudao = conn.getDeploymentUnit(dudir.getName());
            if (dudao == null) {
                String errmsg = __msgs.msgProcessNotFound(pid);
                __log.error(errmsg);
                throw new ContextException(errmsg);
            }
            ProcessConfDAO dao = dudao.getProcess(pid);
            if (dao == null) {
                String errmsg = __msgs.msgProcessNotFound(pid);
                __log.error(errmsg);
                throw new ContextException(errmsg);
            }
            // NOTE(review): this local 'old' shadows the outer variable, and
            // pconf is mutated before the transaction commits -- confirm that
            // a rollback leaving the in-memory state changed is acceptable.
            ProcessState old = dao.getState();
            dao.setState(state);
            pconf.setState(state);
            return old;
        }
    });
    // Applied again after the transaction; redundant with the assignment
    // inside the callable but harmless (same value).
    pconf.setState(state);
    if (old != null && old != state)
        fireStateChange(pid, state, pconf.getDeploymentUnit().getName());
}
/**
 * Retires or (re-)activates every process of the given package.
 *
 * @param packageName deployment unit name
 * @param retired true to retire, false to activate
 * @throws ContextException if the package is unknown
 */
public void setRetiredPackage(String packageName, boolean retired) {
    // Read _deploymentUnits under the read lock, consistent with every
    // other accessor of this map in the class (previously unguarded).
    DeploymentUnitDir duDir;
    _rw.readLock().lock();
    try {
        duDir = _deploymentUnits.get(packageName);
    } finally {
        _rw.readLock().unlock();
    }
    if (duDir == null) throw new ContextException("Could not find package " + packageName);
    for (QName processName : duDir.getProcessNames()) {
        setState(toPid(processName, duDir.getVersion()), retired ? ProcessState.RETIRED : ProcessState.ACTIVE);
    }
}
/** @return the configuration of the given process, or null if unknown */
public ProcessConf getProcessConfiguration(final QName processId) {
    _rw.readLock().lock();
    try {
        ProcessConfImpl conf = _processes.get(processId);
        return conf;
    } finally {
        _rw.readLock().unlock();
    }
}
/**
 * Sets a process property from a DOM node by serializing the node and
 * delegating to the string-valued overload.
 */
public void setProperty(final QName pid, final QName propName, final Node value) {
    setProperty(pid, propName, DOMUtils.domToStringLevel2(value));
}
/**
 * Persists a property value on a process and fires a PROPERTY_CHANGED event.
 *
 * @param pid process id
 * @param propName property name
 * @param value serialized property value
 * @throws ContextException if the process is not known
 */
public void setProperty(final QName pid, final QName propName, final String value) {
    if (__log.isDebugEnabled())
        __log.debug("Setting property " + propName + " on process " + pid);
    // Look the process up under the read lock, consistent with
    // getProcessConfiguration() and setState() (previously unguarded).
    ProcessConfImpl pconf;
    _rw.readLock().lock();
    try {
        pconf = _processes.get(pid);
    } finally {
        _rw.readLock().unlock();
    }
    if (pconf == null) {
        String msg = __msgs.msgProcessNotFound(pid);
        __log.info(msg);
        throw new ContextException(msg);
    }
    final DeploymentUnitDir dudir = pconf.getDeploymentUnit();
    exec(new ProcessStoreImpl.Callable<Object>() {
        public Object call(ConfStoreConnection conn) {
            DeploymentUnitDAO dudao = conn.getDeploymentUnit(dudir.getName());
            if (dudao == null)
                return null;
            ProcessConfDAO proc = dudao.getProcess(pid);
            if (proc == null)
                return null;
            proc.setProperty(propName, value);
            return null;
        }
    });
    fireEvent(new ProcessStoreEvent(ProcessStoreEvent.Type.PROPERTY_CHANGED, pid, dudir.getName()));
}
/**
 * Load all the deployment units out of the store. Called on start-up.
 *
 * Failures loading one unit are logged and skipped so they do not prevent
 * the rest of the store from coming up.
 */
public void loadAll() {
    final ArrayList<ProcessConfImpl> loaded = new ArrayList<ProcessConfImpl>();
    exec(new Callable<Object>() {
        public Object call(ConfStoreConnection conn) {
            Collection<DeploymentUnitDAO> dus = conn.getDeploymentUnits();
            for (DeploymentUnitDAO du : dus)
                try {
                    loaded.addAll(load(du));
                } catch (Exception ex) {
                    // One bad DU must not stop the others from loading.
                    __log.error("Error loading DU from store: " + du.getName(), ex);
                }
            return null;
        }
    });
    // Dispatch DISABLED, RETIRED and ACTIVE events in that order
    Collections.sort(loaded, new Comparator<ProcessConf>() {
        public int compare(ProcessConf o1, ProcessConf o2) {
            return stateValue(o1.getState()) - stateValue(o2.getState());
        }
        // Total order over the three known states; anything else is a bug.
        int stateValue(ProcessState state) {
            if (ProcessState.DISABLED.equals(state)) return 0;
            if (ProcessState.RETIRED.equals(state)) return 1;
            if (ProcessState.ACTIVE.equals(state)) return 2;
            throw new IllegalStateException("Unexpected process state: "+state);
        }
    });
    for (ProcessConfImpl p : loaded) {
        try {
            // A failing listener must not prevent activation of the rest.
            fireStateChange(p.getProcessId(), p.getState(), p.getDeploymentUnit().getName());
        } catch (Exception except) {
            __log.error("Error while activating process: pid=" + p.getProcessId() + " package="+p.getDeploymentUnit().getName(), except);
        }
    }
}
/** @return a snapshot of all currently known process ids */
public List<QName> getProcesses() {
    _rw.readLock().lock();
    try {
        // Copy so callers never see the live (lock-protected) key set.
        List<QName> pids = new ArrayList<QName>(_processes.keySet());
        return pids;
    } finally {
        _rw.readLock().unlock();
    }
}
/**
 * Returns the store's version counter.
 * NOTE(review): despite the name this delegates to getNextVersion() on the
 * connection -- confirm against the DAO contract that this is intended.
 */
public long getCurrentVersion() {
    return exec(new Callable<Long>() {
        public Long call(ConfStoreConnection conn) {
            return conn.getNextVersion();
        }
    });
}
/** Delivers the event to every registered listener, in registration order. */
protected void fireEvent(ProcessStoreEvent pse) {
    __log.debug("firing event: " + pse);
    for (ProcessStoreListener listener : _listeners) {
        listener.onProcessStoreEvent(pse);
    }
}
/**
 * Translates a process state into the corresponding store event and fires
 * it. States other than ACTIVE/DISABLED/RETIRED are silently ignored, as
 * before.
 */
private void fireStateChange(QName processId, ProcessState state, String duname) {
    if (state == ProcessState.ACTIVE) {
        // (sic) "ACTVIATED" is the actual spelling of the event constant.
        fireEvent(new ProcessStoreEvent(ProcessStoreEvent.Type.ACTVIATED, processId, duname));
    } else if (state == ProcessState.DISABLED) {
        fireEvent(new ProcessStoreEvent(ProcessStoreEvent.Type.DISABLED, processId, duname));
    } else if (state == ProcessState.RETIRED) {
        fireEvent(new ProcessStoreEvent(ProcessStoreEvent.Type.RETIRED, processId, duname));
    }
}
/** Subscribes a listener to process store events. */
public void registerListener(ProcessStoreListener listener) {
    __log.debug("Registering listener " + listener);
    _listeners.add(listener);
}
/** Removes a previously registered process store listener. */
public void unregisterListener(ProcessStoreListener listener) {
    __log.debug("Unregistering listener " + listener);
    _listeners.remove(listener);
}
/**
 * Execute database transactions in an isolated context.
 *
 * @param <T>
 *            return type
 * @param callable
 *            transaction
 * @return the value produced by the transaction
 * @throws ContextException if the transaction fails or the wait is interrupted
 */
synchronized <T> T exec(Callable<T> callable) {
    // We want to submit db jobs to an executor to isolate
    // them from the current thread,
    Future<T> future = _executor.submit(callable);
    try {
        return future.get();
    } catch (Exception e) {
        // Restore the interrupt flag so callers further up the stack can
        // still observe the interruption (previously swallowed).
        if (e instanceof InterruptedException)
            Thread.currentThread().interrupt();
        throw new ContextException("DbError", e);
    }
}
/** @return a connection obtained from the configured store connection factory */
private ConfStoreConnection getConnection() {
    return _cf.getConnection();
}
/**
 * Create a property mapping based on the initial values in the deployment descriptor.
 *
 * @param dd the process element of the deployment descriptor
 * @return mapping from property name to its initial DOM value
 */
public static Map<QName, Node> calcInitialProperties(TDeployment.Process dd) {
    HashMap<QName, Node> ret = new HashMap<QName, Node>();
    // Iterating directly is safe on an empty list; the previous size()>0
    // guard (and second getPropertyList() call) was redundant.
    for (TDeployment.Process.Property property : dd.getPropertyList()) {
        Element elmtContent = DOMUtils.getElementContent(property.getDomNode());
        if (elmtContent != null) {
            // Import into a fresh document so the value is detached from the
            // descriptor's DOM. (We'll need DOM Level 3.)
            Document doc = DOMUtils.newDocument();
            doc.appendChild(doc.importNode(elmtContent, true));
            ret.put(property.getName(), doc.getDocumentElement());
        } else {
            // Simple (text) property: use the first child node as the value.
            ret.put(property.getName(), property.getDomNode().getFirstChild());
        }
    }
    return ret;
}
/**
 * Figure out the initial process state from the state in the deployment descriptor.
 *
 * @param dd
 *            deployment descriptor
 * @return DISABLED or RETIRED when explicitly requested; ACTIVE otherwise
 */
private static ProcessState calcInitialState(TDeployment.Process dd) {
    ProcessState state = ProcessState.ACTIVE;
    if (dd.isSetActive() && !dd.getActive())
        state = ProcessState.DISABLED;
    // "retired" wins over "active=false" when both are set, as before.
    if (dd.isSetRetired() && dd.getRetired())
        state = ProcessState.RETIRED;
    return state;
}
/**
 * Load a deployment unit record stored in the db into memory.
 *
 * Registers the unit and all its processes in the in-memory maps under the
 * write lock.
 *
 * @param dudao persisted deployment unit record
 * @return the process configurations that were loaded
 * @throws ContextException if the on-disk deployment directory is gone
 */
protected List<ProcessConfImpl> load(DeploymentUnitDAO dudao) {
    __log.debug("Loading deployment unit record from db: " + dudao.getName());
    File dudir = findDeployDir(dudao);
    if (dudir == null || !dudir.exists())
        throw new ContextException("Deployed directory " + (dudir == null ? "(unknown)" : dudir) + " no longer there!");
    DeploymentUnitDir dud = new DeploymentUnitDir(dudir);
    // set the name with the one from database
    dud.setName(dudao.getName());
    dud.scan();
    ArrayList<ProcessConfImpl> loaded = new ArrayList<ProcessConfImpl>();
    _rw.writeLock().lock();
    try {
        _deploymentUnits.put(dud.getName(), dud);
        long version = 0;
        for (ProcessConfDAO p : dudao.getProcesses()) {
            TDeployment.Process pinfo = dud.getProcessDeployInfo(p.getType());
            if (pinfo == null) {
                // Descriptor missing on disk: skip this process, keep the rest.
                __log.warn("Cannot load " + p.getPID() + "; cannot find descriptor.");
                continue;
            }
            Map<QName, Node> props = calcInitialProperties(pinfo);
            // TODO: update the props based on the values in the DB.
            ProcessConfImpl pconf = new ProcessConfImpl(p.getPID(), p.getType(), p.getVersion(), dud, pinfo, dudao
                    .getDeployDate(), props, p.getState(), eprContext, _configDir);
            version = p.getVersion();
            _processes.put(pconf.getProcessId(), pconf);
            loaded.add(pconf);
        }
        // All processes and the DU have the same version
        dud.setVersion(version);
    } finally {
        _rw.writeLock().unlock();
    }
    return loaded;
}
/**
 * Locates the on-disk directory of a deployment unit: first the absolute
 * path recorded in the db, then a same-named directory under the configured
 * deploy dir.
 *
 * @return the directory, or null when neither location exists
 */
protected File findDeployDir(DeploymentUnitDAO dudao) {
    File recorded = new File(dudao.getDeploymentUnitDir());
    if (recorded.exists())
        return recorded;
    File fallback = new File(_deployDir, dudao.getName());
    return fallback.exists() ? fallback : null;
}
/**
 * Make sure that the deployment unit is loaded.
 *
 * @param duName
 *            deployment unit name
 * @return true if the unit is (now) loaded; false if unknown or load failed
 */
protected boolean load(final String duName) {
    // Fast path: already in memory.
    _rw.writeLock().lock();
    try {
        if (_deploymentUnits.containsKey(duName))
            return true;
    } finally {
        _rw.writeLock().unlock();
    }
    try {
        return exec(new Callable<Boolean>() {
            public Boolean call(ConfStoreConnection conn) {
                DeploymentUnitDAO dudao = conn.getDeploymentUnit(duName);
                if (dudao == null)
                    return false;
                load(dudao);
                return true;
            }
        });
    } catch (Exception ex) {
        // Include the exception so the root cause is not lost (previously
        // only the message was logged).
        __log.error("Error loading deployment unit: " + duName, ex);
        return false;
    }
}
/**
 * Wrapper for database transactions.
 *
 * Runs the abstract {@link #call(ConfStoreConnection)} between
 * beginTransaction/commitTransaction on the connection factory, rolling
 * back (and logging any rollback failure) when the body does not complete
 * normally.
 *
 * @author Maciej Szefler
 *
 * @param <V>
 *            return type
 */
abstract class Callable<V> implements java.util.concurrent.Callable<V> {
    public V call() {
        boolean success = false;
        // in JTA, transaction is bigger than the session
        _cf.beginTransaction();
        ConfStoreConnection conn = getConnection();
        try {
            V r = call(conn);
            _cf.commitTransaction();
            success = true;
            return r;
        } finally {
            // Roll back only when call()/commit did not complete normally.
            if (!success)
                try {
                    _cf.rollbackTransaction();
                } catch (Exception ex) {
                    __log.error("DbError", ex);
                }
        }
        // session is closed automatically when committed or rolled back under JTA
    }
    // The actual transactional work, executed with an open connection.
    abstract V call(ConfStoreConnection conn);
}
/**
 * Sets the directory deployment units are deployed from, creating it if
 * necessary.
 *
 * @param depDir the deploy directory, or null to unset
 * @throws IllegalArgumentException if the path exists but is not a
 *         directory, or the directory cannot be created
 */
public void setDeployDir(File depDir) {
    if (depDir != null) {
        if (!depDir.exists()) {
            // Fail loudly when the directory cannot be created instead of
            // silently continuing with an unusable path (mkdirs() result
            // was previously ignored).
            if (!depDir.mkdirs())
                throw new IllegalArgumentException("Cannot create deploy directory: " + depDir);
            __log.warn("Deploy directory: " + depDir.getAbsolutePath() + " does not exist; created it.");
        } else if (!depDir.isDirectory()) {
            throw new IllegalArgumentException("Deploy directory is not a directory: " + depDir);
        }
    }
    _deployDir = depDir;
}
/** @return the configured deploy directory (may be null) */
public File getDeployDir() {
    return _deployDir;
}
/** @return the configured endpoint-configuration directory (may be null) */
public File getConfigDir() {
    return _configDir;
}
/**
 * Sets the directory holding endpoint configuration.
 *
 * @throws IllegalArgumentException when non-null and not an existing directory
 */
public void setConfigDir(File configDir) {
    boolean invalid = configDir != null && !configDir.isDirectory();
    if (invalid)
        throw new IllegalArgumentException("Config directory is not a directory or does not exist: " + configDir);
    this._configDir = configDir;
}
/**
 * Creates an in-memory HSQL data source whose database name is the given
 * unique id, using the default "sa" account with an empty password.
 */
public static DataSource createInternalDS(String guid) {
    jdbcDataSource ds = new jdbcDataSource();
    ds.setDatabase("jdbc:hsqldb:mem:" + guid);
    ds.setUser("sa");
    ds.setPassword("");
    return ds;
}
/**
 * Shuts down the in-memory HSQL database behind the given data source,
 * closing the JDBC connection used to issue the command (previously leaked).
 */
public static void shutdownInternalDB(DataSource ds) {
    try {
        java.sql.Connection conn = ds.getConnection();
        try {
            conn.createStatement().execute("SHUTDOWN;");
        } finally {
            conn.close();
        }
    } catch (SQLException e) {
        __log.error("Error shutting down.", e);
    }
}
/** Maps each process type to its versioned process id. */
private List<QName> toPids(Collection<QName> processTypes, long version) {
    ArrayList<QName> pids = new ArrayList<QName>(processTypes.size());
    for (QName type : processTypes) {
        pids.add(toPid(type, version));
    }
    return pids;
}
/** Builds the versioned process id ("localPart-version") for a process type. */
private QName toPid(QName processType, long version) {
    String localPart = processType.getLocalPart() + "-" + version;
    return new QName(processType.getNamespaceURI(), localPart);
}
/**
 * Given a "name-version" deployment unit name, finds the closest existing
 * older version of the same unit.
 *
 * @param newName name of the new deployment unit, e.g. "foo-3"
 * @return the closest older deployment unit, or null when none exists or
 *         the name carries no parseable version suffix
 */
private DeploymentUnitDir findOldDU(String newName) {
    DeploymentUnitDir old = null;
    int dashIdx = newName.lastIndexOf("-");
    if (dashIdx > 0 && dashIdx + 1 < newName.length()) {
        String radical = newName.substring(0, dashIdx);
        int newVersion = -1;
        try {
            // Reuse dashIdx; the second lastIndexOf("-") scan was redundant.
            newVersion = Integer.parseInt(newName.substring(dashIdx + 1));
        } catch (NumberFormatException e) {
            // Swallowing, if we can't parse then we just can't find an old version
        }
        // Walk version numbers downwards until an existing unit is found.
        while (old == null && newVersion >= 0)
            old = _deploymentUnits.get(radical + "-" + (newVersion--));
    }
    return old;
}
/** Thread factory producing named daemon threads for the store's db executor. */
private class SimpleThreadFactory implements ThreadFactory {
    // NOTE(review): unsynchronized counter; assumes newThread() is never
    // invoked concurrently -- confirm the owning executor guarantees that.
    int threadNumber = 0;
    public Thread newThread(Runnable r) {
        threadNumber += 1;
        Thread t = new Thread(r, "ProcessStoreImpl-"+threadNumber);
        // Daemon so the VM can exit without an explicit executor shutdown.
        t.setDaemon(true);
        return t;
    }
}
/**
 * Re-fires schedule-settings-changed events for every process in a package.
 *
 * @param packageName deployment unit name; unknown packages are ignored
 */
public void refreshSchedules(String packageName) {
    List<QName> pids = listProcesses(packageName);
    // listProcesses() returns null for an unknown package; iterating it
    // directly (as before) threw a NullPointerException.
    if (pids == null)
        return;
    for (QName pid : pids) {
        fireEvent(new ProcessStoreEvent(ProcessStoreEvent.Type.SCHEDULE_SETTINGS_CHANGED, pid, packageName));
    }
}
}
| |
/*
* Copyright 2014 Ben Manes. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.benmanes.caffeine.cache;
import static com.github.benmanes.caffeine.cache.testing.CacheWriterVerifier.verifyWriter;
import static com.github.benmanes.caffeine.cache.testing.HasRemovalNotifications.hasRemovalNotifications;
import static com.github.benmanes.caffeine.testing.IsEmptyMap.emptyMap;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.nullValue;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;
import org.testng.annotations.Listeners;
import org.testng.annotations.Test;
import com.github.benmanes.caffeine.cache.Policy.Expiration;
import com.github.benmanes.caffeine.cache.testing.CacheContext;
import com.github.benmanes.caffeine.cache.testing.CacheProvider;
import com.github.benmanes.caffeine.cache.testing.CacheSpec;
import com.github.benmanes.caffeine.cache.testing.CacheSpec.Expire;
import com.github.benmanes.caffeine.cache.testing.CacheSpec.Implementation;
import com.github.benmanes.caffeine.cache.testing.CacheSpec.Listener;
import com.github.benmanes.caffeine.cache.testing.CacheSpec.Loader;
import com.github.benmanes.caffeine.cache.testing.CacheSpec.Population;
import com.github.benmanes.caffeine.cache.testing.CacheValidationListener;
import com.github.benmanes.caffeine.cache.testing.ExpireAfterWrite;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
/**
* The test cases for caches that support the expire after write (time-to-live) policy.
*
* @author ben.manes@gmail.com (Ben Manes)
*/
@Listeners(CacheValidationListener.class)
@Test(dataProviderClass = CacheProvider.class)
@Listeners(CacheValidationListener.class)
@Test(dataProviderClass = CacheProvider.class)
public final class ExpireAfterWriteTest {

  /* ---------------- Cache -------------- */

  @Test(dataProvider = "caches")
  @CacheSpec(expireAfterWrite = Expire.ONE_MINUTE,
      population = { Population.PARTIAL, Population.FULL })
  public void getIfPresent(Cache<Integer, Integer> cache, CacheContext context) {
    context.ticker().advance(30, TimeUnit.SECONDS);
    cache.getIfPresent(context.firstKey());
    context.ticker().advance(45, TimeUnit.SECONDS);
    // Reads do not refresh the write timestamp, so the entry has expired.
    assertThat(cache.getIfPresent(context.firstKey()), is(nullValue()));

    cache.cleanUp();
    assertThat(cache.estimatedSize(), is(0L));

    long count = context.initialSize();
    assertThat(cache, hasRemovalNotifications(context, count, RemovalCause.EXPIRED));
    verifyWriter(context, (verifier, writer) -> verifier.deletions(count, RemovalCause.EXPIRED));
  }

  @Test(dataProvider = "caches")
  @CacheSpec(expireAfterWrite = Expire.ONE_MINUTE,
      population = { Population.PARTIAL, Population.FULL })
  public void get(Cache<Integer, Integer> cache, CacheContext context) {
    Function<Integer, Integer> mappingFunction = context.original()::get;
    context.ticker().advance(30, TimeUnit.SECONDS);
    cache.get(context.firstKey(), mappingFunction);
    context.ticker().advance(45, TimeUnit.SECONDS);
    cache.get(context.lastKey(), mappingFunction); // recreated

    cache.cleanUp();
    assertThat(cache.estimatedSize(), is(1L));

    long count = context.initialSize();
    assertThat(cache, hasRemovalNotifications(context, count, RemovalCause.EXPIRED));
    verifyWriter(context, (verifier, writer) -> verifier.deletions(count, RemovalCause.EXPIRED));
  }

  @Test(dataProvider = "caches")
  @CacheSpec(expireAfterWrite = Expire.ONE_MINUTE,
      population = { Population.PARTIAL, Population.FULL })
  public void getAllPresent(Cache<Integer, Integer> cache, CacheContext context) {
    context.ticker().advance(30, TimeUnit.SECONDS);
    cache.getAllPresent(context.firstMiddleLastKeys());
    context.ticker().advance(45, TimeUnit.SECONDS);
    assertThat(cache.getAllPresent(context.firstMiddleLastKeys()).size(), is(0));

    cache.cleanUp();
    assertThat(cache.estimatedSize(), is(0L));

    long count = context.initialSize();
    assertThat(cache, hasRemovalNotifications(context, count, RemovalCause.EXPIRED));
    verifyWriter(context, (verifier, writer) -> verifier.deletions(count, RemovalCause.EXPIRED));
  }

  /* ---------------- LoadingCache -------------- */

  @Test(dataProvider = "caches")
  @CacheSpec(expireAfterWrite = Expire.ONE_MINUTE,
      population = { Population.PARTIAL, Population.FULL })
  public void get(LoadingCache<Integer, Integer> cache, CacheContext context) {
    context.ticker().advance(30, TimeUnit.SECONDS);
    cache.get(context.firstKey());
    cache.get(context.absentKey());
    context.ticker().advance(45, TimeUnit.SECONDS);

    cache.cleanUp();
    assertThat(cache.estimatedSize(), is(1L));
    assertThat(cache.getIfPresent(context.absentKey()), is(-context.absentKey()));

    long count = context.initialSize();
    assertThat(cache, hasRemovalNotifications(context, count, RemovalCause.EXPIRED));
    verifyWriter(context, (verifier, writer) -> verifier.deletions(count, RemovalCause.EXPIRED));
  }

  @Test(dataProvider = "caches")
  @CacheSpec(expireAfterWrite = Expire.ONE_MINUTE, loader = {Loader.IDENTITY, Loader.BULK_IDENTITY},
      population = { Population.PARTIAL, Population.FULL })
  public void getAll(LoadingCache<Integer, Integer> cache, CacheContext context) {
    context.ticker().advance(30, TimeUnit.SECONDS);
    assertThat(cache.getAll(ImmutableList.of(context.firstKey(), context.absentKey())),
        is(ImmutableMap.of(context.firstKey(), -context.firstKey(),
            context.absentKey(), context.absentKey())));

    context.ticker().advance(45, TimeUnit.SECONDS);
    cache.cleanUp();
    // After expiry both keys are reloaded by the identity loader.
    assertThat(cache.getAll(ImmutableList.of(context.firstKey(), context.absentKey())),
        is(ImmutableMap.of(context.firstKey(), context.firstKey(),
            context.absentKey(), context.absentKey())));

    assertThat(cache.estimatedSize(), is(2L));
    long count = context.initialSize();
    assertThat(cache, hasRemovalNotifications(context, count, RemovalCause.EXPIRED));
    verifyWriter(context, (verifier, writer) -> verifier.deletions(count, RemovalCause.EXPIRED));
  }

  /* ---------------- AsyncLoadingCache -------------- */

  @Test(dataProvider = "caches")
  @CacheSpec(expireAfterWrite = Expire.ONE_MINUTE,
      population = { Population.PARTIAL, Population.FULL })
  public void getIfPresent(AsyncLoadingCache<Integer, Integer> cache, CacheContext context) {
    context.ticker().advance(30, TimeUnit.SECONDS);
    cache.getIfPresent(context.firstKey());
    context.ticker().advance(45, TimeUnit.SECONDS);
    assertThat(cache.getIfPresent(context.firstKey()), is(nullValue()));
    assertThat(cache.getIfPresent(context.lastKey()), is(nullValue()));
    assertThat(cache.synchronous().estimatedSize(), is(0L));

    long count = context.initialSize();
    assertThat(cache, hasRemovalNotifications(context, count, RemovalCause.EXPIRED));
  }

  /* ---------------- Map -------------- */

  @Test(dataProvider = "caches")
  @CacheSpec(expireAfterWrite = Expire.ONE_MINUTE, population = Population.FULL)
  public void putIfAbsent(Map<Integer, Integer> map, CacheContext context) {
    context.ticker().advance(30, TimeUnit.SECONDS);
    assertThat(map.putIfAbsent(context.firstKey(), context.absentValue()), is(not(nullValue())));

    context.ticker().advance(30, TimeUnit.SECONDS);
    assertThat(map.putIfAbsent(context.lastKey(), context.absentValue()), is(nullValue()));

    long count = context.initialSize();
    assertThat(map.size(), is(1));
    assertThat(map, hasRemovalNotifications(context, count, RemovalCause.EXPIRED));
    verifyWriter(context, (verifier, writer) -> verifier.deletions(count, RemovalCause.EXPIRED));
  }

  /* ---------------- Policy -------------- */

  @Test(dataProvider = "caches")
  @CacheSpec(implementation = Implementation.Caffeine, expireAfterWrite = Expire.ONE_MINUTE)
  public void getExpiresAfter(
      @ExpireAfterWrite Expiration<Integer, Integer> expireAfterWrite) {
    assertThat(expireAfterWrite.getExpiresAfter(TimeUnit.MINUTES), is(1L));
  }

  @Test(dataProvider = "caches")
  @CacheSpec(implementation = Implementation.Caffeine, expireAfterWrite = Expire.ONE_MINUTE)
  public void setExpiresAfter(Cache<Integer, Integer> cache, CacheContext context,
      @ExpireAfterWrite Expiration<Integer, Integer> expireAfterWrite) {
    expireAfterWrite.setExpiresAfter(2, TimeUnit.MINUTES);
    assertThat(expireAfterWrite.getExpiresAfter(TimeUnit.MINUTES), is(2L));

    // 90s is past the original 1m bound but inside the new 2m bound.
    context.ticker().advance(90, TimeUnit.SECONDS);
    cache.cleanUp();
    assertThat(cache.estimatedSize(), is(context.initialSize()));
  }

  @Test(dataProvider = "caches")
  @CacheSpec(implementation = Implementation.Caffeine, expireAfterWrite = Expire.ONE_MINUTE,
      population = { Population.SINGLETON, Population.PARTIAL, Population.FULL })
  public void ageOf(CacheContext context,
      @ExpireAfterWrite Expiration<Integer, Integer> expireAfterWrite) {
    assertThat(expireAfterWrite.ageOf(context.firstKey(), TimeUnit.SECONDS).getAsLong(), is(0L));
    context.ticker().advance(30, TimeUnit.SECONDS);
    assertThat(expireAfterWrite.ageOf(context.firstKey(), TimeUnit.SECONDS).getAsLong(), is(30L));
    context.ticker().advance(45, TimeUnit.SECONDS);
    assertThat(expireAfterWrite.ageOf(context.firstKey(), TimeUnit.SECONDS).isPresent(), is(false));
  }

  /* ---------------- Policy: oldest -------------- */

  @CacheSpec(implementation = Implementation.Caffeine, expireAfterWrite = Expire.ONE_MINUTE)
  @Test(dataProvider = "caches", expectedExceptions = UnsupportedOperationException.class)
  public void oldest_unmodifiable(
      @ExpireAfterWrite Expiration<Integer, Integer> expireAfterWrite) {
    expireAfterWrite.oldest(Integer.MAX_VALUE).clear();
  }

  @CacheSpec(implementation = Implementation.Caffeine, expireAfterWrite = Expire.ONE_MINUTE)
  @Test(dataProvider = "caches", expectedExceptions = IllegalArgumentException.class)
  public void oldest_negative(@ExpireAfterWrite Expiration<Integer, Integer> expireAfterWrite) {
    expireAfterWrite.oldest(-1);
  }

  @Test(dataProvider = "caches")
  @CacheSpec(implementation = Implementation.Caffeine, expireAfterWrite = Expire.ONE_MINUTE)
  public void oldest_zero(@ExpireAfterWrite Expiration<Integer, Integer> expireAfterWrite) {
    assertThat(expireAfterWrite.oldest(0), is(emptyMap()));
  }

  @Test(dataProvider = "caches")
  @CacheSpec(implementation = Implementation.Caffeine,
      population = Population.FULL, expireAfterWrite = Expire.ONE_MINUTE)
  public void oldest_partial(CacheContext context,
      @ExpireAfterWrite Expiration<Integer, Integer> expireAfterWrite) {
    int count = (int) context.initialSize() / 2;
    assertThat(expireAfterWrite.oldest(count).size(), is(count));
  }

  @Test(dataProvider = "caches")
  @CacheSpec(implementation = Implementation.Caffeine, expireAfterWrite = Expire.ONE_MINUTE,
      removalListener = { Listener.DEFAULT, Listener.REJECTING })
  public void oldest_order(CacheContext context,
      @ExpireAfterWrite Expiration<Integer, Integer> expireAfterWrite) {
    Map<Integer, Integer> oldest = expireAfterWrite.oldest(Integer.MAX_VALUE);
    assertThat(Iterables.elementsEqual(oldest.keySet(), context.original().keySet()), is(true));
  }

  @Test(dataProvider = "caches")
  @CacheSpec(implementation = Implementation.Caffeine, expireAfterWrite = Expire.ONE_MINUTE)
  public void oldest_snapshot(Cache<Integer, Integer> cache, CacheContext context,
      @ExpireAfterWrite Expiration<Integer, Integer> expireAfterWrite) {
    Map<Integer, Integer> oldest = expireAfterWrite.oldest(Integer.MAX_VALUE);
    cache.invalidateAll();
    assertThat(oldest, is(equalTo(context.original())));
  }

  /* ---------------- Policy: youngest -------------- */

  @CacheSpec(implementation = Implementation.Caffeine, expireAfterWrite = Expire.ONE_MINUTE)
  @Test(dataProvider = "caches", expectedExceptions = UnsupportedOperationException.class)
  public void youngest_unmodifiable(
      @ExpireAfterWrite Expiration<Integer, Integer> expireAfterWrite) {
    expireAfterWrite.youngest(Integer.MAX_VALUE).clear();
  }

  @CacheSpec(implementation = Implementation.Caffeine, expireAfterWrite = Expire.ONE_MINUTE)
  @Test(dataProvider = "caches", expectedExceptions = IllegalArgumentException.class)
  public void youngest_negative(@ExpireAfterWrite Expiration<Integer, Integer> expireAfterWrite) {
    expireAfterWrite.youngest(-1);
  }

  @Test(dataProvider = "caches")
  @CacheSpec(implementation = Implementation.Caffeine, expireAfterWrite = Expire.ONE_MINUTE)
  public void youngest_zero(@ExpireAfterWrite Expiration<Integer, Integer> expireAfterWrite) {
    assertThat(expireAfterWrite.youngest(0), is(emptyMap()));
  }

  @Test(dataProvider = "caches")
  @CacheSpec(implementation = Implementation.Caffeine,
      population = Population.FULL, expireAfterWrite = Expire.ONE_MINUTE)
  public void youngest_partial(CacheContext context,
      @ExpireAfterWrite Expiration<Integer, Integer> expireAfterWrite) {
    int count = (int) context.initialSize() / 2;
    assertThat(expireAfterWrite.youngest(count).size(), is(count));
  }

  @Test(dataProvider = "caches")
  @CacheSpec(implementation = Implementation.Caffeine,
      population = Population.FULL, expireAfterWrite = Expire.ONE_MINUTE,
      removalListener = { Listener.DEFAULT, Listener.REJECTING })
  public void youngest_order(CacheContext context,
      @ExpireAfterWrite Expiration<Integer, Integer> expireAfterWrite) {
    Map<Integer, Integer> youngest = expireAfterWrite.youngest(Integer.MAX_VALUE);
    // youngest-first reversed must equal insertion (write) order.
    Set<Integer> keys = new LinkedHashSet<>(ImmutableList.copyOf(youngest.keySet()).reverse());
    assertThat(Iterables.elementsEqual(keys, context.original().keySet()), is(true));
  }

  @Test(dataProvider = "caches")
  @CacheSpec(implementation = Implementation.Caffeine, expireAfterWrite = Expire.ONE_MINUTE)
  public void youngest_snapshot(Cache<Integer, Integer> cache, CacheContext context,
      @ExpireAfterWrite Expiration<Integer, Integer> expireAfterWrite) {
    Map<Integer, Integer> youngest = expireAfterWrite.youngest(Integer.MAX_VALUE);
    cache.invalidateAll();
    assertThat(youngest, is(equalTo(context.original())));
  }
}
| |
/*
* Copyright (c) 2008-2018, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.internal.ascii;
import com.hazelcast.cluster.ClusterState;
import com.hazelcast.config.Config;
import com.hazelcast.config.JoinConfig;
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.LifecycleEvent;
import com.hazelcast.core.LifecycleListener;
import com.hazelcast.instance.BuildInfoProvider;
import com.hazelcast.instance.HazelcastInstanceFactory;
import com.hazelcast.spi.properties.GroupProperty;
import com.hazelcast.test.AssertTask;
import com.hazelcast.test.HazelcastSerialClassRunner;
import com.hazelcast.test.HazelcastTestSupport;
import com.hazelcast.test.annotation.SlowTest;
import org.apache.http.NoHttpResponseException;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import java.io.IOException;
import java.net.ConnectException;
import java.net.HttpURLConnection;
import java.util.concurrent.CountDownLatch;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.fail;
@RunWith(HazelcastSerialClassRunner.class)
@Category(SlowTest.class)
public class RestClusterTest extends HazelcastTestSupport {
private static final String STATUS_FORBIDDEN = "{\"status\":\"forbidden\"}";
private Config config = new Config();
/**
 * Enables the REST API and the HTTP health-check endpoint, and restricts
 * cluster joining to TCP/IP on localhost so each test starts from a
 * deterministic REST-enabled configuration.
 */
@Before
public void setup() {
    config.setProperty(GroupProperty.REST_ENABLED.getName(), "true");
    config.setProperty(GroupProperty.HTTP_HEALTHCHECK_ENABLED.getName(), "true");
    JoinConfig joinConfig = config.getNetworkConfig().getJoin();
    joinConfig.getMulticastConfig().setEnabled(false);
    joinConfig.getTcpIpConfig().setEnabled(true).clear().addMember("127.0.0.1");
}
/** Terminates every Hazelcast instance created by the test. */
@After
public void tearDown() {
    HazelcastInstanceFactory.terminateAll();
}
@Test
public void testDisabledRest() throws Exception {
    // REST should be disabled by default
    HazelcastInstance instance = Hazelcast.newHazelcastInstance(new Config());
    HTTPCommunicator communicator = new HTTPCommunicator(instance);
    try {
        communicator.getClusterInfo();
        fail("Rest is disabled. Not expected to reach here!");
    } catch (NoHttpResponseException ignored) {
        // expected: the member refuses HTTP requests when REST is off
    }
}
@Test
public void testClusterShutdown() throws Exception {
    final HazelcastInstance memberOne = Hazelcast.newHazelcastInstance(config);
    final HazelcastInstance memberTwo = Hazelcast.newHazelcastInstance(config);
    // A REST shutdown issued to one member must succeed...
    HTTPCommunicator communicator = new HTTPCommunicator(memberTwo);
    assertEquals(HttpURLConnection.HTTP_OK, communicator.shutdownCluster("dev", "dev-pass"));
    // ...and eventually stop the whole cluster.
    assertTrueEventually(new AssertTask() {
        @Override
        public void run() throws Exception {
            assertFalse(memberOne.getLifecycleService().isRunning());
            assertFalse(memberTwo.getLifecycleService().isRunning());
        }
    });
}
@Test
public void testGetClusterState() throws Exception {
    HazelcastInstance memberOne = Hazelcast.newHazelcastInstance(config);
    HazelcastInstance memberTwo = Hazelcast.newHazelcastInstance(config);
    // A state change applied via one member must be observable over REST
    // on any member.
    memberOne.getCluster().changeClusterState(ClusterState.FROZEN);
    assertEquals("{\"status\":\"success\",\"state\":\"frozen\"}",
            new HTTPCommunicator(memberOne).getClusterState("dev", "dev-pass"));
    memberOne.getCluster().changeClusterState(ClusterState.PASSIVE);
    assertEquals("{\"status\":\"success\",\"state\":\"passive\"}",
            new HTTPCommunicator(memberTwo).getClusterState("dev", "dev-pass"));
}
@Test
public void testChangeClusterState() throws Exception {
    final HazelcastInstance memberOne = Hazelcast.newHazelcastInstance(config);
    final HazelcastInstance memberTwo = Hazelcast.newHazelcastInstance(config);
    HTTPCommunicator communicator = new HTTPCommunicator(memberOne);
    // Wrong group name is rejected; correct credentials freeze the cluster.
    assertEquals(STATUS_FORBIDDEN, communicator.changeClusterState("dev1", "dev-pass", "frozen").response);
    assertEquals(HttpURLConnection.HTTP_OK, communicator.changeClusterState("dev", "dev-pass", "frozen").responseCode);
    assertClusterStateEventually(ClusterState.FROZEN, memberOne);
    assertClusterStateEventually(ClusterState.FROZEN, memberTwo);
}
@Test
public void testGetClusterVersion() throws IOException {
    final HazelcastInstance instance = Hazelcast.newHazelcastInstance(config);
    final HTTPCommunicator communicator = new HTTPCommunicator(instance);
    // The REST endpoint must report the member's actual cluster version.
    String expected = "{\"status\":\"success\","
            + "\"version\":\"" + instance.getCluster().getClusterVersion().toString() + "\"}";
    assertEquals(expected, communicator.getClusterVersion());
}
@Test
public void testChangeClusterVersion() throws IOException {
    final HazelcastInstance instance = Hazelcast.newHazelcastInstance(config);
    final HTTPCommunicator communicator = new HTTPCommunicator(instance);
    // Setting the current version with valid credentials succeeds; a wrong
    // group name is rejected regardless of the version string.
    String currentVersion = instance.getCluster().getClusterVersion().toString();
    assertEquals(HttpURLConnection.HTTP_OK,
            communicator.changeClusterVersion("dev", "dev-pass", currentVersion).responseCode);
    assertEquals(STATUS_FORBIDDEN, communicator.changeClusterVersion("dev1", "dev-pass", "1.2.3").response);
}
@Test
public void testHotBackup() throws IOException {
    HazelcastInstance hz = Hazelcast.newHazelcastInstance(config);
    HTTPCommunicator rest = new HTTPCommunicator(hz);
    // Hot backup and backup-interrupt: both succeed with valid credentials
    // and are both rejected for a wrong cluster group name.
    assertEquals(HttpURLConnection.HTTP_OK, rest.hotBackup("dev", "dev-pass").responseCode);
    assertEquals(STATUS_FORBIDDEN, rest.hotBackup("dev1", "dev-pass").response);
    assertEquals(HttpURLConnection.HTTP_OK, rest.hotBackupInterrupt("dev", "dev-pass").responseCode);
    assertEquals(STATUS_FORBIDDEN, rest.hotBackupInterrupt("dev1", "dev-pass").response);
}
@Test
public void testForceAndPartialStart() throws IOException {
    HazelcastInstance hz = Hazelcast.newHazelcastInstance(config);
    HTTPCommunicator rest = new HTTPCommunicator(hz);
    // Force-start and partial-start: both succeed with valid credentials
    // and are both rejected for a wrong cluster group name.
    assertEquals(HttpURLConnection.HTTP_OK, rest.forceStart("dev", "dev-pass").responseCode);
    assertEquals(STATUS_FORBIDDEN, rest.forceStart("dev1", "dev-pass").response);
    assertEquals(HttpURLConnection.HTTP_OK, rest.partialStart("dev", "dev-pass").responseCode);
    assertEquals(STATUS_FORBIDDEN, rest.partialStart("dev1", "dev-pass").response);
}
@Test
public void testManagementCenterUrlChange() throws IOException {
    HazelcastInstance hz = Hazelcast.newHazelcastInstance(config);
    HTTPCommunicator rest = new HTTPCommunicator(hz);
    // A successful Management Center URL change replies with 204 No Content.
    assertEquals(HttpURLConnection.HTTP_NO_CONTENT,
            rest.changeManagementCenterUrl("dev", "dev-pass", "http://bla").responseCode);
}
@Test
public void testListNodes() throws Exception {
    HazelcastInstance hz = Hazelcast.newHazelcastInstance(config);
    HTTPCommunicator rest = new HTTPCommunicator(hz);
    HazelcastTestSupport.waitInstanceForSafeState(hz);
    // The response embeds the local member address, the Hazelcast build version
    // and the JVM version, newline-separated.
    String want = String.format("{\"status\":\"success\",\"response\":\"[%s]\n%s\n%s\"}",
            hz.getCluster().getLocalMember().toString(),
            BuildInfoProvider.getBuildInfo().getVersion(),
            System.getProperty("java.version"));
    assertEquals(want, rest.listClusterNodes("dev", "dev-pass"));
}
@Test
public void testListNodesWithWrongCredentials() throws Exception {
    HazelcastInstance hz = Hazelcast.newHazelcastInstance(config);
    HTTPCommunicator rest = new HTTPCommunicator(hz);
    HazelcastTestSupport.waitInstanceForSafeState(hz);
    // A wrong cluster group name must not reveal any member information.
    assertEquals(STATUS_FORBIDDEN, rest.listClusterNodes("dev1", "dev-pass"));
}
@Test
public void testShutdownNode() throws Exception {
    HazelcastInstance hz = Hazelcast.newHazelcastInstance(config);
    HTTPCommunicator rest = new HTTPCommunicator(hz);
    final CountDownLatch shutdownLatch = new CountDownLatch(1);
    // Observe the lifecycle so we can wait for the shutdown triggered over REST.
    hz.getLifecycleService().addLifecycleListener(new LifecycleListener() {
        @Override
        public void stateChanged(LifecycleEvent event) {
            if (event.getState() == LifecycleEvent.LifecycleState.SHUTDOWN) {
                shutdownLatch.countDown();
            }
        }
    });
    try {
        assertEquals("{\"status\":\"success\"}", rest.shutdownMember("dev", "dev-pass"));
    } catch (ConnectException ignored) {
        // if node shuts down before response is received, `java.net.ConnectException: Connection refused` is expected
    } catch (NoHttpResponseException ignored) {
        // `NoHttpResponseException` is also a possible outcome when the node shuts down
        // before it has a chance to send a response back to the client.
    }
    assertOpenEventually(shutdownLatch);
    assertFalse(hz.getLifecycleService().isRunning());
}
@Test
public void testShutdownNodeWithWrongCredentials() throws Exception {
    HazelcastInstance hz = Hazelcast.newHazelcastInstance(config);
    HTTPCommunicator rest = new HTTPCommunicator(hz);
    // A wrong cluster group name must not be able to shut the member down.
    assertEquals(STATUS_FORBIDDEN, rest.shutdownMember("dev1", "dev-pass"));
}
@Test
public void simpleHealthCheck() throws Exception {
    HazelcastInstance hz = Hazelcast.newHazelcastInstance(config);
    HTTPCommunicator rest = new HTTPCommunicator(hz);
    String body = rest.getClusterHealth();
    // A healthy single-member cluster reports a fixed, fully-known health summary.
    assertEquals("Hazelcast::NodeState=ACTIVE\n"
            + "Hazelcast::ClusterState=ACTIVE\n"
            + "Hazelcast::ClusterSafe=TRUE\n"
            + "Hazelcast::MigrationQueueSize=0\n"
            + "Hazelcast::ClusterSize=1\n", body);
}
@Test
public void healthCheckWithPathParameters() throws Exception {
    HazelcastInstance hz = Hazelcast.newHazelcastInstance(config);
    HTTPCommunicator rest = new HTTPCommunicator(hz);
    // Each health-check sub-path exposes exactly one metric of the summary.
    assertEquals("ACTIVE", rest.getClusterHealth("/node-state"));
    assertEquals("ACTIVE", rest.getClusterHealth("/cluster-state"));
    assertEquals(HttpURLConnection.HTTP_OK, rest.getClusterHealthResponseCode("/cluster-safe"));
    assertEquals("0", rest.getClusterHealth("/migration-queue-size"));
    assertEquals("1", rest.getClusterHealth("/cluster-size"));
}
@Test
public void healthCheckWithUnknownPathParameter() throws Exception {
    HazelcastInstance hz = Hazelcast.newHazelcastInstance(config);
    HTTPCommunicator rest = new HTTPCommunicator(hz);
    // Unrecognized health-check sub-paths are answered with 400 Bad Request.
    assertEquals(HttpURLConnection.HTTP_BAD_REQUEST, rest.getClusterHealthResponseCode("/unknown-parameter"));
}
@Test(expected = NoHttpResponseException.class)
public void fail_with_deactivatedHealthCheck() throws Exception {
    // Healthcheck REST URL is deactivated by default - no passed config on purpose,
    // so the request is expected to get no HTTP response at all.
    HazelcastInstance hz = Hazelcast.newHazelcastInstance();
    HTTPCommunicator rest = new HTTPCommunicator(hz);
    rest.getClusterHealth();
}
@Test
public void fail_on_healthcheck_url_with_garbage() throws Exception {
    HazelcastInstance hz = Hazelcast.newHazelcastInstance(config);
    HTTPCommunicator rest = new HTTPCommunicator(hz);
    // Trailing garbage after the health-check URL must yield 400 Bad Request.
    assertEquals(HttpURLConnection.HTTP_BAD_REQUEST, rest.getFailingClusterHealthWithTrailingGarbage());
}
@Test
public void testHeadRequest_ClusterVersion() throws Exception {
    HazelcastInstance hz = Hazelcast.newHazelcastInstance(config);
    HTTPCommunicator rest = new HTTPCommunicator(hz);
    // A HEAD request against the cluster-version URI must be answered with 200 OK.
    assertEquals(HttpURLConnection.HTTP_OK, rest.headRequestToClusterVersionURI().responseCode);
}
@Test
public void testHeadRequest_ClusterInfo() throws Exception {
    HazelcastInstance hz = Hazelcast.newHazelcastInstance(config);
    HTTPCommunicator rest = new HTTPCommunicator(hz);
    // A HEAD request against the cluster-info URI must be answered with 200 OK.
    assertEquals(HttpURLConnection.HTTP_OK, rest.headRequestToClusterInfoURI().responseCode);
}
@Test
public void testHeadRequest_ClusterHealth() throws Exception {
    HazelcastInstance instance = Hazelcast.newHazelcastInstance(config);
    // Second member so the reported cluster size below is 2.
    Hazelcast.newHazelcastInstance(config);
    HTTPCommunicator communicator = new HTTPCommunicator(instance);
    HTTPCommunicator.ConnectionResponse response = communicator.headRequestToClusterHealthURI();
    assertEquals(HttpURLConnection.HTTP_OK, response.responseCode);
    // Fixed assertEquals argument order: the expected value must come first;
    // the original passed (actual, expected), which produces misleading failure messages.
    assertEquals(1, response.responseHeaders.get("Hazelcast-NodeState").size());
    assertContains(response.responseHeaders.get("Hazelcast-NodeState"), "ACTIVE");
    assertEquals(1, response.responseHeaders.get("Hazelcast-ClusterState").size());
    assertContains(response.responseHeaders.get("Hazelcast-ClusterState"), "ACTIVE");
    assertEquals(1, response.responseHeaders.get("Hazelcast-ClusterSize").size());
    assertContains(response.responseHeaders.get("Hazelcast-ClusterSize"), "2");
    assertEquals(1, response.responseHeaders.get("Hazelcast-MigrationQueueSize").size());
    assertContains(response.responseHeaders.get("Hazelcast-MigrationQueueSize"), "0");
}
@Test
public void testHeadRequest_GarbageClusterHealth() throws Exception {
    HazelcastInstance hz = Hazelcast.newHazelcastInstance(config);
    HTTPCommunicator rest = new HTTPCommunicator(hz);
    // A HEAD request with trailing garbage must yield 400 Bad Request.
    assertEquals(HttpURLConnection.HTTP_BAD_REQUEST, rest.headRequestToGarbageClusterHealthURI().responseCode);
}
}
| |
/**
* Copyright 2010 Neuroph Project http://neuroph.sourceforge.net
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.neuroph.core.learning;
import java.io.Serializable;
import java.util.Iterator;
import org.neuroph.core.Connection;
import org.neuroph.core.Layer;
import org.neuroph.core.Neuron;
import org.neuroph.core.Weight;
// TODO: random pattern order
/**
 * Base class for all supervised learning algorithms.
 * It extends IterativeLearning, and provides general supervised learning principles:
 * iterating over a training set, accumulating squared pattern errors into a mean
 * squared error (MSE), and stopping once the total error (or its change between
 * epochs) is small enough.
 *
 * @author Zoran Sevarac <sevarac@gmail.com>
 */
abstract public class SupervisedLearning extends IterativeLearning implements
        Serializable {

    /**
     * The class fingerprint that is set to indicate serialization
     * compatibility with a previous version of the class
     */
    private static final long serialVersionUID = 3L;

    /**
     * Total network error (MSE) of the current epoch
     */
    protected transient double totalNetworkError;

    /**
     * Total squared sum of all pattern errors accumulated during the current epoch
     */
    protected transient double totalSquaredErrorSum;

    /**
     * Total network error in previous epoch
     */
    protected transient double previousEpochError;

    /**
     * Max allowed network error (condition to stop learning)
     */
    protected double maxError = 0.01d;

    /**
     * Stopping condition: training stops if total network error change is smaller than minErrorChange
     * for minErrorChangeIterationsLimit number of iterations
     */
    private double minErrorChange = Double.POSITIVE_INFINITY;

    /**
     * Stopping condition: training stops if total network error change is smaller than minErrorChange
     * for minErrorChangeIterationsLimit number of iterations
     */
    private int minErrorChangeIterationsLimit = Integer.MAX_VALUE;

    /**
     * Counts consecutive iterations where the error change is smaller than minErrorChange
     */
    private transient int minErrorChangeIterationsCount;

    /**
     * Setting to determine if learning (weights update) is in batch mode.
     * False by default.
     */
    private boolean batchMode = false;

    // Number of rows in the current training set; cached at the start of each epoch
    // and used as the MSE denominator in doLearningEpoch().
    private int trainingSetSize;

    /**
     * Creates new supervised learning rule
     */
    public SupervisedLearning() {
        super();
    }

    /**
     * Trains network until the total network error drops below the given value
     *
     * @param trainingSet training set to learn
     * @param maxError maximum allowed total network error (stopping condition)
     */
    public void learn(DataSet trainingSet, double maxError) {
        this.maxError = maxError;
        this.learn(trainingSet);
    }

    /**
     * Trains network until either the given total network error or the given
     * number of iterations is reached
     *
     * @param trainingSet training set to learn
     * @param maxError maximum allowed total network error (stopping condition)
     * @param maxIterations maximum number of learning iterations
     */
    public void learn(DataSet trainingSet, double maxError, int maxIterations) {
        this.maxError = maxError;
        this.setMaxIterations(maxIterations);
        this.learn(trainingSet);
    }

    @Override
    protected void onStart() {
        super.onStart(); // reset iteration counter
        this.minErrorChangeIterationsCount = 0;
        this.totalNetworkError = 0d;
        this.previousEpochError = 0d;
    }

    @Override
    protected void beforeEpoch() {
        // remember the previous epoch error so errorChangeStalled() can compare against it
        this.previousEpochError = this.totalNetworkError;
        this.totalNetworkError = 0d;
        this.totalSquaredErrorSum = 0d;
        this.trainingSetSize = getTrainingSet().size();
    }

    @Override
    protected void afterEpoch() {
        // if learning is performed in batch mode, apply accumulated weight changes from this epoch
        if (this.batchMode) {
            doBatchWeightsUpdate();
        }
    }

    /**
     * This method implements basic logic for one learning epoch for the
     * supervised learning algorithms. Epoch is the one pass through the
     * training set. This method iterates through the training set
     * and trains network for each element. It also sets flag if conditions
     * to stop learning has been reached: network error below some allowed
     * value, or maximum iteration count
     *
     * @param trainingSet
     *            training set for training network
     */
    @Override
    public void doLearningEpoch(DataSet trainingSet) {
        // feed network with all elements from training set
        Iterator<DataSetRow> iterator = trainingSet.iterator();
        while (iterator.hasNext() && !isStopped()) {
            DataSetRow dataSetRow = iterator.next();
            // learn current input/output pattern defined by SupervisedTrainingElement
            this.learnPattern(dataSetRow);
        }
        // calculate total network error as MSE. Use MSE so network does not grow with bigger training sets
        // NOTE(review): assumes a non-empty training set; an empty set would divide by zero here
        this.totalNetworkError = this.totalSquaredErrorSum / this.trainingSetSize;
        // moved stopping condition to separate method hasReachedStopCondition() so it can be overridden / customized in subclasses
        if (hasReachedStopCondition()) {
            stopLearning();
        }
    }

    /**
     * Trains network with the input and desired output pattern from the specified training element
     *
     * @param trainingElement
     *            supervised training element which contains input and desired
     *            output
     */
    protected void learnPattern(DataSetRow trainingElement) {
        double[] input = trainingElement.getInput();
        this.neuralNetwork.setInput(input);
        this.neuralNetwork.calculate();
        double[] output = this.neuralNetwork.getOutput();
        double[] desiredOutput = trainingElement.getDesiredOutput();
        double[] outputError = this.calculateOutputError(desiredOutput, output);
        this.addToSquaredErrorSum(outputError);
        this.updateNetworkWeights(outputError);
    }

    /**
     * This method updates network weights in batch mode - use accumulated weights change stored in Weight.weightChange
     * It is executed after each learning epoch, only if learning is done in batch mode.
     *
     * @see SupervisedLearning#doLearningEpoch(org.neuroph.core.learning.DataSet)
     */
    protected void doBatchWeightsUpdate() {
        // iterate layers from output to input
        Layer[] layers = neuralNetwork.getLayers();
        for (int i = neuralNetwork.getLayersCount() - 1; i > 0; i--) {
            // iterate neurons at each layer
            for (Neuron neuron : layers[i].getNeurons()) {
                // iterate connections/weights for each neuron
                for (Connection connection : neuron.getInputConnections()) {
                    // for each connection weight apply accumulated weight change
                    Weight weight = connection.getWeight();
                    weight.value += weight.weightChange; // apply delta weight which is the sum of delta weights in batch mode
                    weight.weightChange = 0; // reset deltaWeight
                }
            }
        }
    }

    /**
     * Returns true if stop condition has been reached, false otherwise.
     * Override this method in derived classes to implement custom stop criteria.
     *
     * @return true if stop condition is reached, false otherwise
     */
    protected boolean hasReachedStopCondition() {
        // TODO: consider whether the error-change condition should be explicitly enabled,
        // or enabled implicitly by using a very small default (Double.MIN_VALUE) for minErrorChange
        return (this.totalNetworkError < this.maxError) || this.errorChangeStalled();
    }

    /**
     * Returns true if absolute error change is sufficiently small (<=minErrorChange) for minErrorChangeIterationsLimit number of iterations
     *
     * @return true if absolute error change is stalled (error is sufficiently small for some number of iterations)
     */
    protected boolean errorChangeStalled() {
        double absErrorChange = Math.abs(previousEpochError - totalNetworkError);
        if (absErrorChange <= this.minErrorChange) {
            this.minErrorChangeIterationsCount++;
            if (this.minErrorChangeIterationsCount >= this.minErrorChangeIterationsLimit) {
                return true;
            }
        } else {
            // a large enough change resets the stall counter
            this.minErrorChangeIterationsCount = 0;
        }
        return false;
    }

    /**
     * Calculates the network error for the current input pattern - difference between
     * desired and actual output
     *
     * @param desiredOutput
     *            desired network output
     * @param output
     *            actual network output
     * @return error vector, element-wise desiredOutput - output
     */
    protected double[] calculateOutputError(double[] desiredOutput, double[] output) {
        // NOTE(review): assumes desiredOutput and output have the same length - confirm upstream
        double[] outputError = new double[desiredOutput.length];
        for (int i = 0; i < output.length; i++) {
            outputError[i] = desiredOutput[i] - output[i];
        }
        return outputError;
    }

    /**
     * Returns true if learning is performed in batch mode, false otherwise
     *
     * @return true if learning is performed in batch mode, false otherwise
     */
    public boolean isInBatchMode() {
        return batchMode;
    }

    /**
     * Sets batch mode on/off (true/false)
     *
     * @param batchMode batch mode setting
     */
    public void setBatchMode(boolean batchMode) {
        this.batchMode = batchMode;
    }

    /**
     * Sets allowed network error, which indicates when to stopLearning training
     *
     * @param maxError
     *            network error
     */
    public void setMaxError(double maxError) {
        this.maxError = maxError;
    }

    /**
     * Returns learning error tolerance - the value of total network error to stop learning.
     *
     * @return learning error tolerance
     */
    public double getMaxError() {
        return maxError;
    }

    /**
     * Returns total network error in current learning epoch
     *
     * @return total network error in current learning epoch
     */
    public synchronized double getTotalNetworkError() {
        return totalNetworkError;
    }

    /**
     * Returns total network error in previous learning epoch
     *
     * @return total network error in previous learning epoch
     */
    public double getPreviousEpochError() {
        return previousEpochError;
    }

    /**
     * Returns min error change stopping criteria
     *
     * @return min error change stopping criteria
     */
    public double getMinErrorChange() {
        return minErrorChange;
    }

    /**
     * Sets min error change stopping criteria
     *
     * @param minErrorChange value for min error change stopping criteria
     */
    public void setMinErrorChange(double minErrorChange) {
        this.minErrorChange = minErrorChange;
    }

    /**
     * Returns number of iterations for min error change stopping criteria
     *
     * @return number of iterations for min error change stopping criteria
     */
    public int getMinErrorChangeIterationsLimit() {
        return minErrorChangeIterationsLimit;
    }

    /**
     * Sets number of iterations for min error change stopping criteria
     *
     * @param minErrorChangeIterationsLimit number of iterations for min error change stopping criteria
     */
    public void setMinErrorChangeIterationsLimit(int minErrorChangeIterationsLimit) {
        this.minErrorChangeIterationsLimit = minErrorChangeIterationsLimit;
    }

    /**
     * Returns number of iterations count for min error change stopping criteria
     *
     * @return number of iterations count for min error change stopping criteria
     */
    public int getMinErrorChangeIterationsCount() {
        return minErrorChangeIterationsCount;
    }

    /**
     * Calculates and updates sum of squared errors for single pattern, and updates total sum of squared pattern errors
     *
     * @param outputError output error vector
     */
    // see: http://www.vni.com/products/imsl/documentation/CNL06/stat/NetHelp/default.htm?turl=multilayerfeedforwardneuralnetworks.htm
    protected void addToSquaredErrorSum(double[] outputError) {
        double outputErrorSqrSum = 0;
        for (double error : outputError) {
            outputErrorSqrSum += (error * error) * 0.5; // also multiply with 1/2 (the 1/n factor is applied in doLearningEpoch)
        }
        this.totalSquaredErrorSum += outputErrorSqrSum;
    }

    /**
     * This method should implement the weights update procedure for the whole network
     * for the given output error vector.
     *
     * @param outputError
     *            output error vector for some network input (aka. patternError, network error)
     *            usually the difference between desired and actual output
     *
     * @see SupervisedLearning#calculateOutputError(double[], double[]) calculateOutputError
     * @see SupervisedLearning#addToSquaredErrorSum(double[])
     */
    abstract protected void updateNetworkWeights(double[] outputError);
}
| |
/*
GNU LESSER GENERAL PUBLIC LICENSE
Copyright (C) 2006 The XAMJ Project
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
Contact info: info@xamjwg.org
*/
/*
* Created on Jan 14, 2006
*/
package org.xamjwg.html.domimpl;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.html2.HTMLCollection;
import org.w3c.dom.html2.HTMLFormElement;
import org.xamjwg.html.FormInput;
import org.xamjwg.html.HtmlParserContext;
import org.xamjwg.html.HtmlRendererContext;
/**
 * DOM implementation of an HTML FORM element. Provides access to the form's
 * input-like children (input, textarea, select) and implements form submission
 * by collecting {@link FormInput} values and delegating to the renderer context.
 */
public class HTMLFormElementImpl extends HTMLElementImpl implements
        HTMLFormElement {

    public HTMLFormElementImpl(String name, boolean noStyleSheet) {
        super(name, noStyleSheet);
    }

    public HTMLFormElementImpl(String name) {
        super(name);
    }

    public HTMLFormElementImpl() {
        super("FORM");
    }

    /**
     * Returns the first input-like descendant whose "name" attribute equals the
     * given name, or null if there is none. Uses StopVisitorException to abort
     * the tree traversal as soon as a match is found.
     */
    public Object namedItem(final String name) {
        try {
            this.visit(new NodeVisitor() {
                public void visit(Node node) {
                    if(HTMLFormElementImpl.isInput(node)) {
                        if(name.equals(((Element) node).getAttribute("name"))) {
                            throw new StopVisitorException(node);
                        }
                    }
                }
            });
        } catch(StopVisitorException sve) {
            return sve.getTag();
        }
        return null;
    }

    /**
     * Returns the input-like descendant at the given zero-based position in
     * document order, or null if the index is out of range.
     */
    public Object item(final int index) {
        try {
            this.visit(new NodeVisitor() {
                private int current = 0;

                public void visit(Node node) {
                    if(HTMLFormElementImpl.isInput(node)) {
                        if(this.current == index) {
                            throw new StopVisitorException(node);
                        }
                        this.current++;
                    }
                }
            });
        } catch(StopVisitorException sve) {
            return sve.getTag();
        }
        return null;
    }

    /** Returns a live collection of the form's input-like children. */
    public HTMLCollection getElements() {
        return new ChildHTMLCollection(this, new InputFilter(), this.getTreeLock());
    }

    /** Returns the number of input-like children of this form. */
    public int getLength() {
        return new ChildHTMLCollection(this, new InputFilter(), this.getTreeLock()).getLength();
    }

    public String getName() {
        return this.getAttribute("name");
    }

    public void setName(String name) {
        this.setAttribute("name", name);
    }

    public String getAcceptCharset() {
        return this.getAttribute("acceptCharset");
    }

    public void setAcceptCharset(String acceptCharset) {
        this.setAttribute("acceptCharset", acceptCharset);
    }

    public String getAction() {
        return this.getAttribute("action");
    }

    public void setAction(String action) {
        this.setAttribute("action", action);
    }

    public String getEnctype() {
        return this.getAttribute("enctype");
    }

    public void setEnctype(String enctype) {
        this.setAttribute("enctype", enctype);
    }

    /** Returns the form's HTTP method, defaulting to "GET" when unset. */
    public String getMethod() {
        String method = this.getAttribute("method");
        if(method == null) {
            method = "GET";
        }
        return method;
    }

    public void setMethod(String method) {
        this.setAttribute("method", method);
    }

    public String getTarget() {
        return this.getAttribute("target");
    }

    public void setTarget(String target) {
        this.setAttribute("target", target);
    }

    /** Submits the form without a designated submit button. */
    public void submit() {
        this.submit(null);
    }

    /**
     * Collects the form inputs (including the pressed submit button, if any)
     * and submits them through the renderer context. Inputs without a name are
     * rejected. Does nothing when no renderer context is available.
     *
     * @param submitButton the button that triggered the submission, or null
     */
    final void submit(final HTMLInputElementImpl submitButton) {
        HtmlRendererContext context = this.getHtmlRendererContext();
        if(context != null) {
            // Parameterized the previously raw ArrayList so the toArray call below
            // needs no unchecked cast.
            final ArrayList<FormInput> formInputs = new ArrayList<FormInput>();
            this.visit(new NodeVisitor() {
                public void visit(Node node) {
                    if(node instanceof HTMLElementImpl) {
                        FormInput fi = ((HTMLElementImpl) node).getFormInput(submitButton);
                        if(fi != null) {
                            if(fi.getName() == null) {
                                throw new IllegalStateException("Form input does not have a name: " + node);
                            }
                            formInputs.add(fi);
                        }
                    }
                }
            });
            FormInput[] fia = formInputs.toArray(FormInput.EMPTY_ARRAY);
            String href = this.getAction();
            if(href == null) {
                // no action attribute: submit to the document's base URI
                href = this.getBaseURI();
            }
            try {
                URL url = this.getFullURL(href);
                context.submitForm(this.getMethod(), url, this.getTarget(), this.getEnctype(), fia);
            } catch(MalformedURLException mfu) {
                this.warn("submit()", mfu);
            }
        }
    }

    /** Resets the form controls to their default values. Not implemented yet. */
    public void reset() {
        // TODO Auto-generated method stub
    }

    /**
     * Returns true when the node is an input-like element (input, textarea, select).
     * NOTE(review): uses default-locale toLowerCase(); tag names are ASCII so this
     * is normally safe, but a Locale.ROOT variant would be stricter - confirm.
     */
    static boolean isInput(Node node) {
        String name = node.getNodeName().toLowerCase();
        return name.equals("input") || name.equals("textarea") || name.equals("select");
    }

    /** Node filter accepting only input-like elements; used by getElements()/getLength(). */
    private class InputFilter implements NodeFilter {
        /* (non-Javadoc)
         * @see org.xamjwg.html.domimpl.NodeFilter#accept(org.w3c.dom.Node)
         */
        public boolean accept(Node node) {
            return HTMLFormElementImpl.isInput(node);
        }
    }
}
| |
/*
* Copyright 2018-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.rules.modern.impl;
import static org.easymock.EasyMock.anyObject;
import static org.easymock.EasyMock.createStrictMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.expectLastCall;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;
import static org.junit.Assert.assertEquals;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargetFactory;
import com.facebook.buck.rules.AddsToRuleKey;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.CellPathResolver;
import com.facebook.buck.rules.ExplicitBuildTargetSourcePath;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.rules.SourcePathRuleFinder;
import com.facebook.buck.rules.modern.Buildable;
import com.facebook.buck.rules.modern.Deserializer;
import com.facebook.buck.rules.modern.Deserializer.DataProvider;
import com.facebook.buck.rules.modern.Serializer;
import com.facebook.buck.rules.modern.Serializer.Delegate;
import com.facebook.buck.util.types.Either;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import com.google.common.hash.HashCode;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.Function;
import org.junit.Before;
import org.junit.Test;
// Round-trip tests for modern-build-rule serialization: each test serializes a
// Buildable, deserializes it again, and asserts the stringified value graphs match.
// NOTE: strict EasyMock mocks are used, so call order and replay/verify pairing matter.
public class BuildableSerializerTest extends AbstractValueVisitorTest {
private SourcePathRuleFinder ruleFinder;
private CellPathResolver cellResolver;
// Creates strict mocks and stubs the cell-path lookups used by the Serializer.
// replay() is deliberately deferred to test() so individual tests can add expectations.
@Before
public void setUp() throws IOException, InterruptedException {
ruleFinder = createStrictMock(SourcePathRuleFinder.class);
cellResolver = createStrictMock(CellPathResolver.class);
expect(cellResolver.getCellPaths())
.andReturn(ImmutableMap.of("other", otherFilesystem.getRootPath()))
.anyTimes();
// the empty cell name resolves to the root filesystem
expect(cellResolver.getCellPath(Optional.empty()))
.andReturn(Optional.of(rootFilesystem.getRootPath()))
.anyTimes();
}
// Builds a DataProvider view over the in-memory hash->data / hash->children maps;
// child lookups recurse into the same maps.
static DataProvider getDataProvider(
Map<HashCode, byte[]> dataMap, Map<HashCode, List<HashCode>> childMap, HashCode hash) {
return new DataProvider() {
@Override
public InputStream getData() {
// the data for `hash` must already have been recorded by the serializer delegate
return new ByteArrayInputStream(Preconditions.checkNotNull(dataMap.get(hash)));
}
@Override
public DataProvider getChild(HashCode hash) {
return getDataProvider(dataMap, childMap, hash);
}
};
}
// Core round-trip helper: serialize `instance` into in-memory maps, deserialize it
// back, and compare stringified forms. `expectedMapper` lets a test adjust the
// expected string when deserialization legitimately changes a value's rendering.
<T extends Buildable> void test(T instance, Function<String, String> expectedMapper)
throws IOException {
replay(cellResolver, ruleFinder);
Map<HashCode, byte[]> dataMap = new HashMap<>();
Map<HashCode, List<HashCode>> childMap = new HashMap<>();
// delegate records each serialized value under a synthetic hash (its insertion index)
Delegate serializerDelegate =
(value, data, children) -> {
int id = dataMap.size();
HashCode hash = HashCode.fromInt(id);
dataMap.put(hash, data);
childMap.put(hash, children);
return hash;
};
Either<HashCode, byte[]> serialized =
new Serializer(ruleFinder, cellResolver, serializerDelegate)
.serialize(instance, DefaultClassInfoFactory.forInstance(instance));
// a present cell name maps to the "other" filesystem, absent maps to root
AddsToRuleKey reconstructed =
new Deserializer(s -> s.isPresent() ? otherFilesystem : rootFilesystem, Class::forName)
.deserialize(
new DataProvider() {
@Override
public InputStream getData() {
// small values are serialized inline (right); large ones by hash (left)
return new ByteArrayInputStream(
serialized.transform(left -> dataMap.get(left), right -> right));
}
@Override
public DataProvider getChild(HashCode hash) {
return getDataProvider(dataMap, childMap, hash);
}
},
AddsToRuleKey.class);
verify(cellResolver, ruleFinder);
assertEquals(expectedMapper.apply(stringify(instance)), stringify(reconstructed));
}
// Renders an instance's value graph as "ClassName {\n  field lines\n}" for comparison.
private String stringify(AddsToRuleKey instance) {
StringifyingValueVisitor visitor = new StringifyingValueVisitor();
DefaultClassInfoFactory.forInstance(instance).visit(instance, visitor);
return String.format(
"%s {\n  %s\n}",
instance.getClass().getName(), Joiner.on("\n  ").join(visitor.getValue().split("\n")));
}
@Override
@Test
public void outputPath() throws IOException {
test(new WithOutputPath(), expected -> expected);
}
@Test
@Override
public void sourcePath() throws IOException {
test(new WithSourcePath(), expected -> expected);
}
@Override
@Test
public void set() throws IOException {
test(new WithSet(), expected -> expected);
}
@Test
@Override
public void list() throws IOException {
test(new WithList(), expected -> expected);
}
@Test
@Override
public void optional() throws IOException {
test(new WithOptional(), expected -> expected);
}
@Test
@Override
public void simple() throws IOException {
test(new Simple(), expected -> expected);
}
@Test
@Override
public void superClass() throws IOException {
test(new Derived(), expected -> expected);
}
@Test
@Override
public void empty() throws IOException {
test(new Empty(), expected -> expected);
}
@Test
@Override
public void addsToRuleKey() throws IOException {
test(new WithAddsToRuleKey(), expected -> expected);
}
// Exercises a SourcePath that resolves through a BuildRule: deserialization
// renders it as the (target, output-path) pair, hence the expected-string rewrite.
@Test
@Override
public void complex() throws IOException {
BuildRule mockRule = createStrictMock(BuildRule.class);
BuildTarget target =
BuildTargetFactory.newInstance(rootFilesystem.getRootPath(), "//some/build:target");
expect(ruleFinder.getRule((SourcePath) anyObject())).andReturn(Optional.of(mockRule));
mockRule.getSourcePathToOutput();
expectLastCall().andReturn(ExplicitBuildTargetSourcePath.of(target, Paths.get("and.path")));
replay(mockRule);
test(
new Complex(),
expected ->
expected.replace(
"SourcePath(//some/build:target)",
"SourcePath(Pair(//some/build:target, and.path))"));
verify(mockRule);
}
@Test
@Override
public void buildTarget() throws IOException {
test(new WithBuildTarget(), expected -> expected);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.directory.api.ldap.model.schema;
import java.util.List;
import java.util.Map;
import org.apache.directory.api.ldap.model.constants.MetaSchemaConstants;
import org.apache.directory.api.ldap.model.constants.SchemaConstants;
import org.apache.directory.api.ldap.model.entry.Attribute;
import org.apache.directory.api.ldap.model.entry.DefaultAttribute;
import org.apache.directory.api.ldap.model.entry.DefaultEntry;
import org.apache.directory.api.ldap.model.entry.Entry;
import org.apache.directory.api.ldap.model.exception.LdapException;
import org.apache.directory.api.ldap.model.schema.registries.Schema;
import org.apache.directory.api.util.DateUtils;
/**
* A factory that generates an entry using the meta schema for schema
* elements.
*
* @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
*/
public class AttributesFactory
{
    /**
     * Converts any supported SchemaObject into its meta-schema Entry
     * representation by dispatching on the object's concrete type.
     *
     * @param obj the schema object to convert
     * @param schema the schema the object belongs to
     * @param schemaManager the SchemaManager used to resolve attribute types
     * @return the entry representing the schema object
     * @throws LdapException if the conversion fails
     * @throws IllegalArgumentException if the SchemaObject type is not one of
     *         the eight types handled below
     */
    public Entry getAttributes( SchemaObject obj, Schema schema, SchemaManager schemaManager ) throws LdapException
    {
        if ( obj instanceof LdapSyntax )
        {
            return convert( ( LdapSyntax ) obj, schema, schemaManager );
        }
        else if ( obj instanceof MatchingRule )
        {
            return convert( ( MatchingRule ) obj, schema, schemaManager );
        }
        else if ( obj instanceof AttributeType )
        {
            return convert( ( AttributeType ) obj, schema, schemaManager );
        }
        else if ( obj instanceof ObjectClass )
        {
            return convert( ( ObjectClass ) obj, schema, schemaManager );
        }
        else if ( obj instanceof MatchingRuleUse )
        {
            return convert( ( MatchingRuleUse ) obj, schema, schemaManager );
        }
        else if ( obj instanceof DitStructureRule )
        {
            return convert( ( DitStructureRule ) obj, schema, schemaManager );
        }
        else if ( obj instanceof DitContentRule )
        {
            return convert( ( DitContentRule ) obj, schema, schemaManager );
        }
        else if ( obj instanceof NameForm )
        {
            return convert( ( NameForm ) obj, schema, schemaManager );
        }

        // Fix: message previously read "nknown SchemaObject type" (missing 'U')
        throw new IllegalArgumentException( "Unknown SchemaObject type: " + obj.getClass() );
    }


    /**
     * Convert a Schema to Entry
     *
     * @param schema The Schema to convert
     * @param schemaManager The SchemaManager
     * @return An Entry containing the converted Schema
     * @throws LdapException If the conversion failed
     */
    public Entry convert( Schema schema, SchemaManager schemaManager ) throws LdapException
    {
        Entry entry = new DefaultEntry( schemaManager );

        entry.put( SchemaConstants.OBJECT_CLASS_AT, SchemaConstants.TOP_OC, MetaSchemaConstants.META_SCHEMA_OC );
        entry.put( SchemaConstants.CN_AT, schema.getSchemaName() );
        entry.put( SchemaConstants.CREATORS_NAME_AT, schema.getOwner() );
        entry.put( SchemaConstants.CREATE_TIMESTAMP_AT, DateUtils.getGeneralizedTime() );

        // Disabled schemas are flagged explicitly; enabled is the implicit default.
        if ( schema.isDisabled() )
        {
            entry.put( MetaSchemaConstants.M_DISABLED_AT, "TRUE" );
        }

        // Record the schemas this schema depends on, if any.
        String[] dependencies = schema.getDependencies();

        if ( dependencies != null && dependencies.length > 0 )
        {
            Attribute attr = new DefaultAttribute(
                schemaManager.getAttributeType( MetaSchemaConstants.M_DEPENDENCIES_AT ) );

            for ( String dependency : dependencies )
            {
                attr.add( dependency );
            }

            entry.put( attr );
        }

        return entry;
    }


    /**
     * Converts a SyntaxChecker into its meta-schema Entry (OID, implementing
     * class name and audit attributes).
     *
     * @param syntaxChecker the SyntaxChecker to convert
     * @param schema the schema the checker belongs to
     * @param schemaManager the SchemaManager
     * @return the entry representing the SyntaxChecker
     */
    public Entry convert( SyntaxChecker syntaxChecker, Schema schema, SchemaManager schemaManager )
    {
        Entry entry = new DefaultEntry( schemaManager );

        entry.put( SchemaConstants.OBJECT_CLASS_AT, SchemaConstants.TOP_OC, MetaSchemaConstants.META_SYNTAX_CHECKER_OC );
        entry.put( MetaSchemaConstants.M_OID_AT, syntaxChecker.getOid() );
        entry.put( MetaSchemaConstants.M_FQCN_AT, syntaxChecker.getClass().getName() );
        entry.put( SchemaConstants.CREATORS_NAME_AT, schema.getOwner() );
        entry.put( SchemaConstants.CREATE_TIMESTAMP_AT, DateUtils.getGeneralizedTime() );

        return entry;
    }


    /**
     * Converts an LdapSyntax into its meta-schema Entry.
     *
     * @param syntax the LdapSyntax to convert
     * @param schema the schema the syntax belongs to
     * @param schemaManager the SchemaManager
     * @return the entry representing the LdapSyntax
     * @throws LdapException if injecting the common attributes fails
     */
    public Entry convert( LdapSyntax syntax, Schema schema, SchemaManager schemaManager ) throws LdapException
    {
        Entry entry = new DefaultEntry( schemaManager );

        entry.put( SchemaConstants.OBJECT_CLASS_AT, SchemaConstants.TOP_OC, MetaSchemaConstants.META_SYNTAX_OC );
        entry.put( SchemaConstants.CREATORS_NAME_AT, schema.getOwner() );
        entry.put( SchemaConstants.CREATE_TIMESTAMP_AT, DateUtils.getGeneralizedTime() );
        injectCommon( syntax, entry, schemaManager );

        return entry;
    }


    /**
     * Converts a Normalizer into its meta-schema Entry.
     *
     * @param oid the OID the normalizer is registered under (Normalizer itself
     *        carries no OID, hence the extra parameter)
     * @param normalizer the Normalizer to convert
     * @param schema the schema the normalizer belongs to
     * @param schemaManager the SchemaManager
     * @return the entry representing the Normalizer
     */
    public Entry convert( String oid, Normalizer normalizer, Schema schema, SchemaManager schemaManager )
    {
        Entry entry = new DefaultEntry( schemaManager );

        entry.put( SchemaConstants.OBJECT_CLASS_AT, SchemaConstants.TOP_OC, MetaSchemaConstants.META_NORMALIZER_OC );
        entry.put( MetaSchemaConstants.M_OID_AT, oid );
        entry.put( MetaSchemaConstants.M_FQCN_AT, normalizer.getClass().getName() );
        entry.put( SchemaConstants.CREATORS_NAME_AT, schema.getOwner() );
        entry.put( SchemaConstants.CREATE_TIMESTAMP_AT, DateUtils.getGeneralizedTime() );

        return entry;
    }


    /**
     * Converts an LdapComparator into its meta-schema Entry.
     *
     * @param oid the OID the comparator is registered under
     * @param comparator the comparator to convert
     * @param schema the schema the comparator belongs to
     * @param schemaManager the SchemaManager
     * @return the entry representing the comparator
     */
    public Entry convert( String oid, LdapComparator<? super Object> comparator, Schema schema,
        SchemaManager schemaManager )
    {
        Entry entry = new DefaultEntry( schemaManager );

        entry.put( SchemaConstants.OBJECT_CLASS_AT, SchemaConstants.TOP_OC, MetaSchemaConstants.META_COMPARATOR_OC );
        entry.put( MetaSchemaConstants.M_OID_AT, oid );
        entry.put( MetaSchemaConstants.M_FQCN_AT, comparator.getClass().getName() );
        entry.put( SchemaConstants.CREATORS_NAME_AT, schema.getOwner() );
        entry.put( SchemaConstants.CREATE_TIMESTAMP_AT, DateUtils.getGeneralizedTime() );

        return entry;
    }


    /**
     * Converts a MatchingRule into its meta-schema Entry.
     *
     * @param matchingRule the matching rule to convert
     * @param schema the schema the rule belongs to
     * @param schemaManager the SchemaManager
     * @return the entry representing the matching rule
     * @throws LdapException if injecting the common attributes fails
     */
    public Entry convert( MatchingRule matchingRule, Schema schema, SchemaManager schemaManager )
        throws LdapException
    {
        Entry entry = new DefaultEntry( schemaManager );

        entry.put( SchemaConstants.OBJECT_CLASS_AT, SchemaConstants.TOP_OC, MetaSchemaConstants.META_MATCHING_RULE_OC );
        entry.put( MetaSchemaConstants.M_SYNTAX_AT, matchingRule.getSyntaxOid() );
        entry.put( SchemaConstants.CREATORS_NAME_AT, schema.getOwner() );
        entry.put( SchemaConstants.CREATE_TIMESTAMP_AT, DateUtils.getGeneralizedTime() );
        injectCommon( matchingRule, entry, schemaManager );

        return entry;
    }


    /**
     * Converts a MatchingRuleUse into a (partial) meta-schema Entry.
     *
     * NOTE(review): the structural objectClass is an empty string here and in
     * the three converters below — these look like unfinished placeholders.
     * Confirm against the metaSchema before relying on the produced entries.
     *
     * @param matchingRuleUse the matching rule use to convert
     * @param schema the schema it belongs to
     * @param schemaManager the SchemaManager
     * @return the (partial) entry
     */
    public Entry convert( MatchingRuleUse matchingRuleUse, Schema schema, SchemaManager schemaManager )
    {
        Entry entry = new DefaultEntry( schemaManager );

        entry.put( SchemaConstants.OBJECT_CLASS_AT, SchemaConstants.TOP_OC, "" );
        entry.put( SchemaConstants.CREATORS_NAME_AT, schema.getOwner() );
        entry.put( SchemaConstants.CREATE_TIMESTAMP_AT, DateUtils.getGeneralizedTime() );

        return entry;
    }


    /**
     * Converts a DitStructureRule into a (partial) meta-schema Entry.
     * See the placeholder note on the MatchingRuleUse converter above.
     *
     * @param ditStructureRule the DIT structure rule to convert
     * @param schema the schema it belongs to
     * @param schemaManager the SchemaManager
     * @return the (partial) entry
     */
    public Entry convert( DitStructureRule ditStructureRule, Schema schema, SchemaManager schemaManager )
    {
        Entry entry = new DefaultEntry( schemaManager );

        entry.put( SchemaConstants.OBJECT_CLASS_AT, SchemaConstants.TOP_OC, "" );
        entry.put( SchemaConstants.CREATORS_NAME_AT, schema.getOwner() );
        entry.put( SchemaConstants.CREATE_TIMESTAMP_AT, DateUtils.getGeneralizedTime() );

        return entry;
    }


    /**
     * Converts a DitContentRule into a (partial) meta-schema Entry.
     * See the placeholder note on the MatchingRuleUse converter above.
     *
     * @param dITContentRule the DIT content rule to convert
     * @param schema the schema it belongs to
     * @param schemaManager the SchemaManager
     * @return the (partial) entry
     */
    public Entry convert( DitContentRule dITContentRule, Schema schema, SchemaManager schemaManager )
    {
        Entry entry = new DefaultEntry( schemaManager );

        entry.put( SchemaConstants.OBJECT_CLASS_AT, SchemaConstants.TOP_OC, "" );
        entry.put( SchemaConstants.CREATORS_NAME_AT, schema.getOwner() );
        entry.put( SchemaConstants.CREATE_TIMESTAMP_AT, DateUtils.getGeneralizedTime() );

        return entry;
    }


    /**
     * Converts a NameForm into a (partial) meta-schema Entry.
     * See the placeholder note on the MatchingRuleUse converter above.
     *
     * @param nameForm the name form to convert
     * @param schema the schema it belongs to
     * @param schemaManager the SchemaManager
     * @return the (partial) entry
     */
    public Entry convert( NameForm nameForm, Schema schema, SchemaManager schemaManager )
    {
        Entry entry = new DefaultEntry( schemaManager );

        entry.put( SchemaConstants.OBJECT_CLASS_AT, SchemaConstants.TOP_OC, "" );
        entry.put( SchemaConstants.CREATORS_NAME_AT, schema.getOwner() );
        entry.put( SchemaConstants.CREATE_TIMESTAMP_AT, DateUtils.getGeneralizedTime() );

        return entry;
    }


    /**
     * Converts an AttributeType into its meta-schema Entry.
     *
     * <pre>
     *  objectclass ( 1.3.6.1.4.1.18060.0.4.0.3.3
     *      NAME 'metaAttributeType'
     *      DESC 'meta definition of the AttributeType object'
     *      SUP metaTop
     *      STRUCTURAL
     *      MUST ( m-name $ m-syntax )
     *      MAY ( m-supAttributeType $ m-obsolete $ m-equality $ m-ordering $
     *            m-substr $ m-singleValue $ m-collective $ m-noUserModification $
     *            m-usage $ m-extensionAttributeType )
     *  )
     * </pre>
     *
     * @param attributeType the attribute type to convert
     * @param schema the schema the attribute type belongs to
     * @param schemaManager the SchemaManager
     * @return the entry representing the attribute type
     * @throws LdapException if injecting the common attributes fails
     */
    public Entry convert( AttributeType attributeType, Schema schema, SchemaManager schemaManager )
        throws LdapException
    {
        Entry entry = new DefaultEntry( schemaManager );

        entry.put( SchemaConstants.OBJECT_CLASS_AT, SchemaConstants.TOP_OC, MetaSchemaConstants.META_ATTRIBUTE_TYPE_OC );
        entry.put( MetaSchemaConstants.M_COLLECTIVE_AT, getBoolean( attributeType.isCollective() ) );
        // m-noUserModification is the negation of "user modifiable".
        entry.put( MetaSchemaConstants.M_NO_USER_MODIFICATION_AT, getBoolean( !attributeType.isUserModifiable() ) );
        entry.put( MetaSchemaConstants.M_SINGLE_VALUE_AT, getBoolean( attributeType.isSingleValued() ) );
        entry.put( MetaSchemaConstants.M_USAGE_AT, attributeType.getUsage().toString() );
        entry.put( SchemaConstants.CREATORS_NAME_AT, schema.getOwner() );
        entry.put( SchemaConstants.CREATE_TIMESTAMP_AT, DateUtils.getGeneralizedTime() );

        injectCommon( attributeType, entry, schemaManager );

        // All remaining attributes are optional in the metaAttributeType OC,
        // so only emit the ones that are actually set.
        String superiorOid = attributeType.getSuperiorOid();

        if ( superiorOid != null )
        {
            entry.put( MetaSchemaConstants.M_SUP_ATTRIBUTE_TYPE_AT, superiorOid );
        }

        if ( attributeType.getEqualityOid() != null )
        {
            entry.put( MetaSchemaConstants.M_EQUALITY_AT, attributeType.getEqualityOid() );
        }

        if ( attributeType.getSubstringOid() != null )
        {
            entry.put( MetaSchemaConstants.M_SUBSTR_AT, attributeType.getSubstringOid() );
        }

        if ( attributeType.getOrderingOid() != null )
        {
            entry.put( MetaSchemaConstants.M_ORDERING_AT, attributeType.getOrderingOid() );
        }

        if ( attributeType.getSyntaxOid() != null )
        {
            entry.put( MetaSchemaConstants.M_SYNTAX_AT, attributeType.getSyntaxOid() );
        }

        return entry;
    }


    /**
     * Creates the attributes of an entry representing an objectClass.
     *
     * <pre>
     *  objectclass ( 1.3.6.1.4.1.18060.0.4.0.3.2
     *      NAME 'metaObjectClass'
     *      DESC 'meta definition of the objectclass object'
     *      SUP metaTop
     *      STRUCTURAL
     *      MUST m-oid
     *      MAY ( m-name $ m-obsolete $ m-supObjectClass $ m-typeObjectClass $ m-must $
     *            m-may $ m-extensionObjectClass )
     *  )
     * </pre>
     *
     * @param objectClass the objectClass to produce a meta schema entry for
     * @param schema the schema the objectClass belongs to
     * @param schemaManager the SchemaManager
     * @return the attributes of the metaSchema entry representing the objectClass
     * @throws LdapException if there are any problems
     */
    public Entry convert( ObjectClass objectClass, Schema schema, SchemaManager schemaManager )
        throws LdapException
    {
        Entry entry = new DefaultEntry( schemaManager );

        entry.put( SchemaConstants.OBJECT_CLASS_AT, SchemaConstants.TOP_OC, MetaSchemaConstants.META_OBJECT_CLASS_OC );
        entry.put( MetaSchemaConstants.M_TYPE_OBJECT_CLASS_AT, objectClass.getType().toString() );
        entry.put( SchemaConstants.CREATORS_NAME_AT, schema.getOwner() );
        entry.put( SchemaConstants.CREATE_TIMESTAMP_AT, DateUtils.getGeneralizedTime() );

        injectCommon( objectClass, entry, schemaManager );
        Attribute attr = null;

        // handle the superior objectClasses; without a SchemaManager the
        // attribute is created by name instead of by resolved AttributeType.
        if ( objectClass.getSuperiorOids() != null && objectClass.getSuperiorOids().size() != 0 )
        {
            if ( schemaManager != null )
            {
                attr = new DefaultAttribute(
                    schemaManager.getAttributeType( MetaSchemaConstants.M_SUP_OBJECT_CLASS_AT ) );
            }
            else
            {
                attr = new DefaultAttribute( MetaSchemaConstants.M_SUP_OBJECT_CLASS_AT );
            }

            for ( String superior : objectClass.getSuperiorOids() )
            {
                attr.add( superior );
            }

            entry.put( attr );
        }

        // add the must list
        if ( objectClass.getMustAttributeTypeOids() != null && objectClass.getMustAttributeTypeOids().size() != 0 )
        {
            if ( schemaManager != null )
            {
                attr = new DefaultAttribute( schemaManager.getAttributeType( MetaSchemaConstants.M_MUST_AT ) );
            }
            else
            {
                attr = new DefaultAttribute( MetaSchemaConstants.M_MUST_AT );
            }

            for ( String mustOid : objectClass.getMustAttributeTypeOids() )
            {
                attr.add( mustOid );
            }

            entry.put( attr );
        }

        // add the may list
        if ( objectClass.getMayAttributeTypeOids() != null && objectClass.getMayAttributeTypeOids().size() != 0 )
        {
            if ( schemaManager != null )
            {
                attr = new DefaultAttribute( schemaManager.getAttributeType( MetaSchemaConstants.M_MAY_AT ) );
            }
            else
            {
                attr = new DefaultAttribute( MetaSchemaConstants.M_MAY_AT );
            }

            for ( String mayOid : objectClass.getMayAttributeTypeOids() )
            {
                attr.add( mayOid );
            }

            entry.put( attr );
        }

        return entry;
    }


    /**
     * Injects the attributes shared by every SchemaObject: names, obsolete
     * flag, OID, description and extensions.
     *
     * @param object the schema object being converted
     * @param entry the entry to populate
     * @param schemaManager the SchemaManager used to resolve attribute types
     * @throws LdapException if an attribute cannot be added
     */
    private final void injectCommon( SchemaObject object, Entry entry, SchemaManager schemaManager )
        throws LdapException
    {
        injectNames( object.getNames(), entry, schemaManager );
        entry.put( MetaSchemaConstants.M_OBSOLETE_AT, getBoolean( object.isObsolete() ) );
        entry.put( MetaSchemaConstants.M_OID_AT, object.getOid() );

        if ( object.getDescription() != null )
        {
            entry.put( MetaSchemaConstants.M_DESCRIPTION_AT, object.getDescription() );
        }

        // The extensions: each extension key maps to a list of values, all of
        // which are added under the key's own attribute.
        Map<String, List<String>> extensions = object.getExtensions();

        if ( extensions != null )
        {
            for ( Map.Entry<String, List<String>> mapEntry : extensions.entrySet() )
            {
                String key = mapEntry.getKey();
                List<String> values = mapEntry.getValue();

                for ( String value : values )
                {
                    entry.add( key, value );
                }
            }
        }
    }


    /**
     * Adds the m-name attribute holding every name of the schema object.
     * Does nothing when the object has no names.
     *
     * @param names the object's names, may be null or empty
     * @param entry the entry to populate
     * @param schemaManager the SchemaManager used to resolve the m-name type
     * @throws LdapException if the attribute cannot be added
     */
    private final void injectNames( List<String> names, Entry entry, SchemaManager schemaManager ) throws LdapException
    {
        if ( ( names == null ) || ( names.size() == 0 ) )
        {
            return;
        }

        Attribute attr = null;

        if ( schemaManager != null )
        {
            attr = new DefaultAttribute( schemaManager.getAttributeType( MetaSchemaConstants.M_NAME_AT ) );
        }
        else
        {
            attr = new DefaultAttribute( MetaSchemaConstants.M_NAME_AT );
        }

        for ( String name : names )
        {
            attr.add( name );
        }

        entry.put( attr );
    }


    /**
     * Maps a Java boolean to the LDAP boolean syntax literals.
     *
     * @param value the boolean to map
     * @return "TRUE" or "FALSE"
     */
    private final String getBoolean( boolean value )
    {
        if ( value )
        {
            return "TRUE";
        }
        else
        {
            return "FALSE";
        }
    }
}
| |
/*
* Copyright (c) 2002, 2008, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.sun.jmx.snmp.tasks;
import java.util.ArrayList;
import com.sun.jmx.snmp.tasks.Task;
import com.sun.jmx.snmp.tasks.TaskServer;
/**
* This class implements a {@link com.sun.jmx.snmp.tasks.TaskServer} over
* a thread pool.
* <p><b>This API is a Sun Microsystems internal API and is subject
* to change without notice.</b></p>
**/
public class ThreadService implements TaskServer {

    /**
     * Creates a thread service with a fixed upper bound on pool size.
     * Threads are created lazily (see createThread) and inherit the
     * creating thread's priority and context class loader.
     *
     * @param threadNumber maximum number of worker threads; must be &gt; 0.
     */
    public ThreadService(int threadNumber) {
        if (threadNumber <= 0) {
            throw new IllegalArgumentException("The thread number should bigger than zero.");
        }
        minThreads = threadNumber;
        threadList = new ExecutorThread[threadNumber];

        priority = Thread.currentThread().getPriority();
        cloader = Thread.currentThread().getContextClassLoader();
    }

// public methods
// --------------

    /**
     * Submit a task to be executed.
     * Once a task is submitted, it is guaranteed that either
     * {@link com.sun.jmx.snmp.tasks.Task#run() task.run()} or
     * {@link com.sun.jmx.snmp.tasks.Task#cancel() task.cancel()} will be called.
     * This implementation of TaskServer uses a thread pool to execute
     * the submitted tasks.
     * @param task The task to be executed.
     * @exception IllegalArgumentException if the submitted task is null.
     **/
    public void submitTask(Task task) throws IllegalArgumentException {
        // Delegates to the Runnable overload; Task extends Runnable.
        submitTask((Runnable)task);
    }

    /**
     * Submit a task to be executed.
     * This implementation of TaskServer uses a thread pool to execute
     * the submitted tasks.
     * @param task The task to be executed.
     * @exception IllegalArgumentException if the submitted task is null.
     * @exception IllegalStateException if the service has been terminated.
     **/
    public void submitTask(Runnable task) throws IllegalArgumentException {
        stateCheck();

        if (task == null) {
            throw new IllegalArgumentException("No task specified.");
        }

        synchronized(jobList) {
            // Append at the tail and wake one idle worker.
            jobList.add(jobList.size(), task);

            jobList.notify();
        }

        // Lazily grow the pool if no worker is idle.
        createThread();
    }

    /**
     * Removes a pending (not yet started) task from the queue.
     * If the removed object is a Task, its cancel() hook is invoked,
     * honoring the run-or-cancel guarantee of submitTask.
     *
     * @return the removed task, or null if it was not queued.
     */
    public Runnable removeTask(Runnable task) {
        stateCheck();

        Runnable removed = null;
        synchronized(jobList) {
            int lg = jobList.indexOf(task);
            if (lg >= 0) {
                removed = jobList.remove(lg);
            }
        }
        if (removed != null && removed instanceof Task)
            ((Task) removed).cancel();
        return removed;
    }

    /**
     * Drains the queue and cancels every pending Task.
     * The snapshot is taken under the jobList lock; cancel() is called
     * outside the lock to avoid invoking alien code while holding it.
     */
    public void removeAll() {
        stateCheck();

        final Object[] jobs;
        synchronized(jobList) {
            jobs = jobList.toArray();
            jobList.clear();
        }
        final int len = jobs.length;
        for (int i=0; i<len ; i++) {
            final Object o = jobs[i];
            if (o!= null && o instanceof Task) ((Task)o).cancel();
        }
    }

    // to terminate
    /**
     * Shuts the service down: cancels pending tasks, interrupts workers
     * and releases the thread array. Idempotent.
     *
     * NOTE(review): createThread() synchronizes on threadList, which is set
     * to null here — a submitTask racing with terminate() may NPE. Also
     * `terminated` is not volatile; visibility to workers relies on the
     * jobList monitor. Confirm intended.
     */
    public void terminate() {

        if (terminated == true) {
            return;
        }

        terminated = true;

        synchronized(jobList) {
            // Wake every waiting worker so it can observe `terminated`.
            jobList.notifyAll();
        }

        removeAll();

        for (int i=0; i<currThreds; i++) {
            try {
                threadList[i].interrupt();
            } catch (Exception e) {
                // TODO
            }
        }

        threadList = null;
    }

// private classes
// ---------------

    // A thread used to execute jobs
    //
    private class ExecutorThread extends Thread {
        /**
         * Daemon worker thread; name includes a sequence number.
         * NOTE(review): `counter++` on a static field is not synchronized —
         * concurrent construction could duplicate names. The constructor also
         * pre-increments `idle` before the thread starts; the accounting in
         * run() relies on this initial credit.
         */
        public ExecutorThread() {
            super(threadGroup, "ThreadService-"+counter++);
            setDaemon(true);

            // init
            this.setPriority(priority);
            this.setContextClassLoader(cloader);

            idle++;
        }

        public void run() {

            while(!terminated) {
                Runnable job = null;

                synchronized(jobList) {
                    if (jobList.size() > 0) {
                        job = jobList.remove(0);

                        // More work pending: wake another waiting worker.
                        if (jobList.size() > 0) {
                            jobList.notify();
                        }

                    } else {
                        try {
                            // No work: block until submitTask/terminate notifies.
                            jobList.wait();
                        } catch (InterruptedException ie) {
                            // terminated ?
                        } finally {
                        }
                        continue;
                    }
                }

                if (job != null) {
                    try {
                        // Mark this worker busy for the duration of the job.
                        idle--;
                        job.run();
                    } catch (Exception e) {
                        // TODO
                        e.printStackTrace();
                    } finally {
                        idle++;
                    }
                }

                // re-init: restore priority/classloader a job may have changed
                // and clear any interrupt left by the job.
                this.setPriority(priority);
                // NOTE(review): Thread.interrupted() is static; calling it via
                // `this` clears the *current* thread's flag (same thread here,
                // so behavior is correct, but the idiom is misleading).
                this.interrupted();
                this.setContextClassLoader(cloader);
            }
        }
    }

// private methods
    /** Rejects calls after terminate(). */
    private void stateCheck() throws IllegalStateException {
        if (terminated) {
            throw new IllegalStateException("The thread service has been terminated.");
        }
    }

    /**
     * Starts a new worker when nobody is idle, up to minThreads workers.
     * `idle` is read without synchronization — a stale read only delays
     * thread creation, it cannot exceed the bound (that check is locked).
     */
    private void createThread() {
        if (idle < 1) {
            synchronized(threadList) {
                if (jobList.size() > 0 && currThreds < minThreads) {
                    ExecutorThread et = new ExecutorThread();
                    et.start();
                    threadList[currThreds++] = et;
                }
            }
        }
    }


// protected or private variables
// ------------------------------

    // Pending jobs; doubles as the monitor workers wait on.
    private ArrayList<Runnable> jobList = new ArrayList<Runnable>(0);

    private ExecutorThread[] threadList;
    // Maximum pool size (despite the name "min"); fixed at construction.
    private int minThreads = 1;
    // Number of workers created so far. NOTE(review): name is a typo for
    // "currThreads"; kept as-is since it is private state.
    private int currThreds = 0;
    // Count of workers not currently running a job (approximate; unsynchronized).
    private int idle = 0;

    private boolean terminated = false;
    private int priority;
    private ThreadGroup threadGroup = new ThreadGroup("ThreadService");
    private ClassLoader cloader;

    private static long counter = 0;

    // NOTE(review): addedJobs/doneJobs are never read or written after
    // initialization — apparently dead fields.
    private int addedJobs = 1;
    private int doneJobs = 1;
}
| |
package com.nomachetejuggling.scl;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringEscapeUtils;
import org.joda.time.LocalDate;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.reflect.TypeToken;
import com.nomachetejuggling.scl.model.Exercise;
import com.nomachetejuggling.scl.model.LogEntry;
import android.os.AsyncTask;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.app.ActionBar;
import android.app.AlertDialog;
import android.app.ListActivity;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.text.Html;
import android.util.Log;
import android.view.ContextMenu;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.view.ContextMenu.ContextMenuInfo;
import android.view.View.OnClickListener;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.BaseAdapter;
import android.widget.CheckBox;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;
import android.widget.AdapterView.AdapterContextMenuInfo;
// Release 1.2:
//TODO: "History" activity showing past workouts by day, each with activities and calories burned, with calorie total
//Later Release
//FUTURE: Integrate with RunKeeper API (better suited for Simple Health Suite app)
public class ExerciseList extends ListActivity implements ActionBar.OnNavigationListener{
private ExerciseAdapter exerciseAdapter;
private ArrayList<Exercise> allExercises;
private ArrayList<Exercise> displayExercises;
private boolean dirty;
private boolean loaded;
private Set<String> doneExercises;
private String filter;
private static final int ADD_EXERCISE_REQUEST = 0;
private static final int EDIT_EXERCISE_REQUEST = 1;
private ArrayList<String> itemList;
private ActionBarNavigationAdapter aAdpt;
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_exercise_list);

    // Hide the list and show the spinner until the background load finishes.
    getListView().setVisibility(View.INVISIBLE);
    findViewById(R.id.linlaHeaderProgress).setVisibility(View.VISIBLE);

    allExercises = new ArrayList<Exercise>();
    displayExercises = new ArrayList<Exercise>();
    dirty = false;
    loaded = false;
    doneExercises = new HashSet<String>();

    exerciseAdapter = new ExerciseAdapter(this, R.layout.list_exercises, R.id.line1, displayExercises);
    setListAdapter(exerciseAdapter);
    registerForContextMenu(getListView());

    // Restore the last selected filter; getString's default covers the
    // first-run case where the preference has never been written.
    SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);
    filter = prefs.getString("filter", "All");

    itemList = new ArrayList<String>();
    itemList.add("All");
    itemList.add("Favorites");
    aAdpt = new ActionBarNavigationAdapter(this, itemList);
}
@Override
public void onStart() {
    super.onStart();

    // Configure the action bar as a list-navigation spinner showing the
    // current filter, then kick off the asynchronous data load.
    ActionBar bar = getActionBar();
    bar.setDisplayShowTitleEnabled(false);
    bar.setNavigationMode(ActionBar.NAVIGATION_MODE_LIST);
    bar.setListNavigationCallbacks(aAdpt, this);
    bar.setSelectedNavigationItem(itemList.indexOf(filter));

    new LoadListData(this).execute();
}
@Override
protected void onStop() {
    super.onStop();
    // Flush any unsaved exercise changes when the activity leaves the screen
    // (no-op when the list is not dirty).
    saveExercises();
}
@Override
protected void onPause()
{
    super.onPause();
    // Persist the active filter so it survives the activity being paused
    // or the process being killed.
    PreferenceManager.getDefaultSharedPreferences(this)
            .edit()
            .putString("filter", filter)
            .commit();
}
@Override
public void onCreateContextMenu(ContextMenu menu, View v, ContextMenuInfo menuInfo) {
    // Build the long-press menu for the exercise under the finger.
    AdapterContextMenuInfo info = (AdapterContextMenuInfo) menuInfo;
    Exercise exercise = ((ExerciseAdapter) getListAdapter()).getItem(info.position);

    menu.setHeaderTitle(exercise.name);
    getMenuInflater().inflate(R.menu.exercise_list_context, menu);

    // Exactly one of favorite/unfavorite is visible, matching current state.
    menu.findItem(R.id.favoriteContextMenu).setVisible(!exercise.favorite);
    menu.findItem(R.id.unfavoriteContextMenu).setVisible(exercise.favorite);

    super.onCreateContextMenu(menu, v, menuInfo);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
    // Inflate the action-bar menu (add / random / settings actions).
    getMenuInflater().inflate(R.menu.exercise_list, menu);
    return true;
}
@Override
public boolean onNavigationItemSelected(int position, long id) {
    // Action-bar spinner callback: switch filter ("All"/"Favorites") and refresh.
    this.filter = itemList.get(position);
    displayExercises();
    return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
    int itemId = item.getItemId();

    if (itemId == R.id.add_exercise) {
        // Open the add screen; result handled in onActivityResult.
        startActivityForResult(new Intent(this, AddActivity.class), ADD_EXERCISE_REQUEST);
        return true;
    }

    if (itemId == R.id.random_exercise) {
        // Candidate pool is the currently displayed list, optionally
        // excluding exercises already done today (user preference).
        boolean includeDone = PreferenceManager.getDefaultSharedPreferences(this)
                .getBoolean("randomIncludeDone", false);

        List<Exercise> candidates = new ArrayList<Exercise>();
        if (includeDone) {
            candidates.addAll(displayExercises);
        } else {
            for (Exercise exercise : displayExercises) {
                if (!doneExercises.contains(exercise.name)) {
                    candidates.add(exercise);
                }
            }
        }

        if (candidates.isEmpty()) {
            new AlertDialog.Builder(this)
                    .setTitle(getString(R.string.noExercisesTitle))
                    .setMessage(getString(R.string.noExercisesMessage))
                    .setCancelable(false)
                    .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
                        public void onClick(DialogInterface dialog, int id) {
                            dialog.cancel();
                        }
                    }).create().show();
        } else {
            // Uniform random pick from the candidates.
            logExercise(candidates.get((int) (candidates.size() * Math.random())));
        }
        return true;
    }

    if (itemId == R.id.action_settings) {
        startActivity(new Intent(this, SettingsActivity.class));
        return true;
    }

    return false;
}
@Override
protected void onListItemClick(ListView l, View v, int position, long id) {
    // Tapping a row opens the logging screen for that exercise.
    logExercise(displayExercises.get(position));
}
// Programmatic filter change (e.g. from a menu title); refreshes the list.
public void selectFilter(CharSequence title) {
    this.filter = title.toString();
    displayExercises();
}
@Override
public boolean onContextItemSelected(MenuItem item) {
    AdapterContextMenuInfo info = (AdapterContextMenuInfo) item.getMenuInfo();
    Exercise exercise = ((ExerciseAdapter) getListAdapter()).getItem(info.position);
    int id = item.getItemId();

    if (id == R.id.logContextMenu) {
        logExercise(exercise);
        return true;
    }
    if (id == R.id.deleteContextMenu) {
        deleteExercise(exercise);
        return true;
    }
    if (id == R.id.favoriteContextMenu || id == R.id.unfavoriteContextMenu) {
        // Both items share the handler; the item id tells us the new state.
        markFavorite(exercise, id == R.id.favoriteContextMenu);
        exerciseAdapter.notifyDataSetChanged();
        return true;
    }
    if (id == R.id.editContextMenu) {
        // Reuse the add screen in edit mode by passing the exercise along.
        Intent intent = new Intent(this, AddActivity.class);
        intent.putExtra("exercise", exercise);
        startActivityForResult(intent, EDIT_EXERCISE_REQUEST);
        return true;
    }

    return false;
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent intent) {
    super.onActivityResult(requestCode, resultCode, intent);

    // Both the add and edit screens return the exercise under the same key.
    if (resultCode != RESULT_OK) {
        return;
    }
    if (requestCode != ADD_EXERCISE_REQUEST && requestCode != EDIT_EXERCISE_REQUEST) {
        return;
    }

    Bundle extras = intent.getExtras();
    if (extras == null) {
        return;
    }

    Exercise exercise = (Exercise) extras.getSerializable("newExercise");
    if (requestCode == ADD_EXERCISE_REQUEST) {
        addExercise(exercise);
    } else {
        modifyExercise(exercise);
    }
}
// Applies an edit to the in-memory exercise matching by name, then persists
// and refreshes the display.
private void modifyExercise(Exercise editedExercise) {
    for (Exercise existing : allExercises) {
        if (!existing.name.equals(editedExercise.name)) {
            continue;
        }
        // Copy into the existing instance: other components may hold
        // references to it, so we must not replace the object.
        existing.copyFrom(editedExercise);
        this.dirty = true;
        saveExercises();
        this.displayExercises();
    }
}
// Sets the favorite flag and persists immediately; no-op when unchanged.
private void markFavorite(Exercise exercise, boolean favorite) {
    if (favorite == exercise.favorite) {
        return;
    }
    exercise.favorite = favorite;
    dirty = true;
    saveExercises();
}
// Adds a new exercise, de-duplicating its name by appending " (2)", " (3)",
// ... until unique, then persists and refreshes the display.
private void addExercise(Exercise newExercise) {
    String baseName = newExercise.name;
    int suffix = 1;
    while (exercisesContain(newExercise)) {
        suffix++;
        newExercise.name = baseName + " (" + suffix + ")";
    }
    allExercises.add(newExercise);
    displayExercises();
    dirty = true;
    saveExercises();
}
// True when an exercise with the same name is already in the catalogue.
private boolean exercisesContain(Exercise exercise) {
    for (int i = 0; i < allExercises.size(); i++) {
        if (allExercises.get(i).name.equals(exercise.name)) {
            return true;
        }
    }
    return false;
}
// Hands the chosen exercise off to the logging screen.
private void logExercise(Exercise exercise) {
    startActivity(new Intent(this, LogActivity.class).putExtra("exercise", exercise));
}
// Confirms, then removes the exercise from the catalogue. Past log entries
// are untouched, as the dialog text says. The name is HTML-escaped because
// it is interpolated into Html.fromHtml input.
private void deleteExercise(final Exercise exercise) {
    new AlertDialog.Builder(this)
            .setMessage(Html.fromHtml("Are you sure you want to delete '" + StringEscapeUtils.escapeHtml3(exercise.name) + "'? <br/><small>(Note: this will not delete any logs)</small>"))
            .setCancelable(false)
            .setPositiveButton("Yes", new DialogInterface.OnClickListener() {
                public void onClick(DialogInterface dialog, int id) {
                    allExercises.remove(exercise);
                    dirty = true;
                    displayExercises();
                    saveExercises();
                }
            })
            .setNegativeButton("No", new DialogInterface.OnClickListener() {
                public void onClick(DialogInterface dialog, int id) {
                    dialog.cancel();
                }
            })
            .show();
}
// Serializes the catalogue to its JSON file; skipped when nothing changed.
// The dirty flag is cleared even on failure (the user was already notified
// via toast), matching prior behavior.
private void saveExercises() {
    if (!dirty) {
        return;
    }

    String json = new GsonBuilder().setPrettyPrinting().create().toJson(allExercises);
    File file = Util.getExerciseFile(getApplicationContext());
    Log.d("IO", "Writing to " + file.getAbsolutePath() + "\n" + json);

    try {
        FileUtils.write(file, json, "UTF-8");
    } catch (IOException e) {
        Log.e("IO", "Could not write exercise file", e);
        Toast.makeText(getApplicationContext(), getString(R.string.error_cannot_save_exercises), Toast.LENGTH_SHORT).show();
    }
    dirty = false;
}
// List adapter for exercise rows: name, "done today" check mark, and a
// favorite checkbox that toggles persistence directly on the activity.
private static class ExerciseAdapter extends ArrayAdapter<Exercise> {
    Context mContext;

    public ExerciseAdapter(Context context, int layout, int resId, List<Exercise> items) {
        super(context, layout, resId, items);
        mContext = context;
    }

    @Override
    public View getView(int position, View convertView, ViewGroup parent) {
        // Recycle the row view when the list framework hands one back.
        View row = (convertView != null)
                ? convertView
                : LayoutInflater.from(getContext()).inflate(R.layout.list_exercises, parent, false);

        final Exercise item = getItem(position);
        ExerciseList list = (ExerciseList) mContext;

        ((TextView) row.findViewById(R.id.line1)).setText(item.name);

        boolean done = list.doneExercises.contains(item.name);
        row.findViewById(R.id.doneCheckMarkView).setVisibility(done ? View.VISIBLE : View.GONE);

        CheckBox favoriteBox = (CheckBox) row.findViewById(R.id.favoriteCheckbox);
        favoriteBox.setChecked(item.favorite);
        favoriteBox.setOnClickListener(new OnClickListener() {
            public void onClick(View v) {
                // Push the toggle straight to the activity so it persists.
                ExerciseList activity = (ExerciseList) Util.getActivityFromContext(mContext);
                activity.markFavorite(item, ((CheckBox) v).isChecked());
            }
        });

        return row;
    }
}
// Rebuilds the displayed subset from the full catalogue according to the
// active filter and updates the action-bar title and empty-state view.
private void displayExercises() {
    displayExercises.clear();
    String title = "";

    if (filter == null || "All".equals(filter)) {
        displayExercises.addAll(allExercises);
        title = getResources().getString(R.string.title_exercise_list);
    } else if ("Favorites".equals(filter)) {
        for (Exercise exercise : allExercises) {
            if (exercise.favorite) {
                displayExercises.add(exercise);
            }
        }
        title = filter;
    }

    exerciseAdapter.notifyDataSetChanged();

    TextView spinnerBox = (TextView) findViewById(R.id.ab_basemaps_title);
    if (spinnerBox != null) {
        spinnerBox.setText(title);
    }

    // INVISIBLE rather than GONE so the empty view keeps its space and
    // checking something off doesn't make the name wrap.
    boolean showEmpty = displayExercises.isEmpty() && loaded;
    findViewById(R.id.noExercisesView).setVisibility(showEmpty ? View.VISIBLE : View.INVISIBLE);
}
// Background loader for the exercise catalogue and today's completed set.
// Fix: Gson.fromJson returns null for empty/blank input, which previously
// caused an NPE in Collections.sort (catalogue) and in the for-loop (logs)
// when a file existed but held no valid JSON. Both paths now fall back
// gracefully (bundled defaults / empty set).
private static class LoadListData extends AsyncTask<Void, Void, LoadListData.Output> {

    // Value object carrying the loaded state back to the activity.
    public static class Output {
        public Set<String> doneExercices; // (sic) name kept: read by completeBackgroundLoad()
        public ArrayList<Exercise> allExercises;
        public boolean dirty;
    }

    private ExerciseList act;

    public LoadListData(ExerciseList act) {
        this.act = act;
    }

    @Override
    protected void onPreExecute() {
        super.onPreExecute();
    }

    @Override
    protected Output doInBackground(Void... params) {
        Output output = new Output();
        loadCurrentWorkout(output);
        loadExerciseList(output);
        return output;
    }

    // Loads the exercise catalogue from disk; falls back to the bundled
    // default list when the file is missing, unreadable, or empty.
    private void loadExerciseList(Output output) {
        File file = Util.getExerciseFile(act.getApplicationContext());
        Gson gson = new GsonBuilder().setPrettyPrinting().create();
        Type collectionType = new TypeToken<Collection<Exercise>>() {}.getType();
        List<Exercise> exercisesRead = null;
        try {
            String json = FileUtils.readFileToString(file, "UTF-8");
            Log.d("IO", "Start Reading from " + file.getAbsolutePath() + "\n" + json);
            exercisesRead = gson.fromJson(json, collectionType);
        } catch (IOException e) {
            // Missing/unreadable file: handled by the null check below.
        }
        if (exercisesRead == null) {
            // fromJson returns null for blank input — treat exactly like a
            // missing file and seed from the bundled defaults.
            InputStream raw = act.getResources().openRawResource(R.raw.exerciselist_default);
            exercisesRead = gson.fromJson(new InputStreamReader(raw), collectionType);
            output.dirty = true; // Save the defaults on exit
        }
        Collections.sort(exercisesRead);
        output.allExercises = new ArrayList<Exercise>(exercisesRead);
    }

    // Loads today's log file (if any) to know which exercises are done.
    private void loadCurrentWorkout(Output output) {
        File dir = Util.getLogStorageDir(act.getApplicationContext());
        String today = new LocalDate().toString("yyyy-MM-dd");
        File currentLogFile = new File(dir, today + ".json");
        if (currentLogFile.exists()) {
            Gson gson = new GsonBuilder().setPrettyPrinting().create();
            Type collectionType = new TypeToken<Collection<LogEntry>>() {}.getType();
            try {
                String json = FileUtils.readFileToString(currentLogFile, "UTF-8");
                List<LogEntry> logs = gson.fromJson(json, collectionType);
                Set<String> currentExerciseSet = new HashSet<String>();
                if (logs != null) { // null when the log file is empty/blank
                    for (LogEntry entry : logs) {
                        currentExerciseSet.add(entry.exercise);
                    }
                }
                output.doneExercices = currentExerciseSet;
            } catch (IOException e) {
                Log.e("IO", "Couldn't read current log file in list view", e);
            }
        }
    }

    @Override
    protected void onPostExecute(Output result) {
        super.onPostExecute(result);
        // Back on the UI thread: hand the result to the activity.
        act.completeBackgroundLoad(result);
    }
}
/**
 * Receives the background load result on the UI thread: installs the loaded
 * data into the Activity's fields, refreshes the list, and swaps the
 * progress spinner for the (now populated) list view.
 */
public void completeBackgroundLoad(LoadListData.Output result) {
    loaded = true;

    Set<String> done = result.doneExercices;
    if (done != null) {
        doneExercises = done;
    }

    // A dirty result means defaults were loaded and must be saved on exit.
    dirty = dirty || result.dirty;

    ArrayList<Exercise> loadedExercises = result.allExercises;
    if (loadedExercises != null) {
        allExercises.clear();
        allExercises.addAll(loadedExercises);
    }

    exerciseAdapter.notifyDataSetChanged();
    displayExercises();

    findViewById(R.id.linlaHeaderProgress).setVisibility(View.GONE);
    getListView().setVisibility(View.VISIBLE);
}
/**
 * Adapter backing the action-bar navigation spinner. Position 0 shows the
 * app title in the collapsed view; every position shows its own label in the
 * drop-down.
 *
 * Fixes over the original: reuses {@code convertView} instead of inflating a
 * fresh view on every call (standard adapter recycling), returns a stable
 * per-position id from {@link #getItemId(int)}, and makes the fields
 * {@code private final}.
 */
private static class ActionBarNavigationAdapter extends BaseAdapter {
    private final Context context;
    private final ArrayList<String> data;
    private final LayoutInflater inflater;

    public ActionBarNavigationAdapter(Context context, ArrayList<String> data) {
        this.data = data;
        this.inflater = (LayoutInflater) context.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
        this.context = context;
    }

    @Override
    public View getView(int position, View convertView, ViewGroup parent) {
        View actionBarView = convertView;
        if (actionBarView == null) { // only inflate when there is nothing to recycle
            actionBarView = inflater.inflate(R.layout.ab_main_view, null);
        }
        TextView title = (TextView) actionBarView.findViewById(R.id.ab_basemaps_title);
        if (position == 0) {
            // Collapsed spinner shows the screen title rather than the filter name.
            title.setText(context.getResources().getString(R.string.title_exercise_list));
        } else {
            title.setText(data.get(position));
        }
        return actionBarView;
    }

    @Override
    public View getDropDownView(int position, View convertView, ViewGroup parent) {
        View actionBarDropDownView = convertView;
        if (actionBarDropDownView == null) {
            actionBarDropDownView = inflater.inflate(R.layout.ab_dropdown_view, null);
        }
        TextView dropDownTitle = (TextView) actionBarDropDownView.findViewById(R.id.ab_basemaps_dropdown_title);
        dropDownTitle.setText(data.get(position));
        return actionBarDropDownView;
    }

    @Override
    public int getCount() {
        return data.size();
    }

    @Override
    public Object getItem(int position) {
        return data.get(position);
    }

    @Override
    public long getItemId(int position) {
        // Previously always 0; position is the conventional id for a static list.
        return position;
    }
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.redis.fluent;
import com.azure.core.annotation.ReturnType;
import com.azure.core.annotation.ServiceMethod;
import com.azure.core.http.rest.PagedFlux;
import com.azure.core.http.rest.PagedIterable;
import com.azure.core.http.rest.Response;
import com.azure.core.util.Context;
import com.azure.resourcemanager.redis.fluent.models.RedisFirewallRuleInner;
import com.azure.resourcemanager.redis.models.RedisFirewallRuleCreateParameters;
import reactor.core.publisher.Mono;
/**
 * An instance of this class provides access to all the operations defined in FirewallRulesClient.
 *
 * <p>AutoRest-generated interface; each operation is exposed in reactive (Mono/PagedFlux),
 * blocking (plain/PagedIterable), and explicit-Context variants.
 */
public interface FirewallRulesClient {
    /**
     * Gets all firewall rules in the specified redis cache.
     *
     * @param resourceGroupName The name of the resource group.
     * @param cacheName The name of the Redis cache.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return all firewall rules in the specified redis cache, as a reactive paged stream.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    PagedFlux<RedisFirewallRuleInner> listByRedisResourceAsync(String resourceGroupName, String cacheName);

    /**
     * Gets all firewall rules in the specified redis cache.
     *
     * @param resourceGroupName The name of the resource group.
     * @param cacheName The name of the Redis cache.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return all firewall rules in the specified redis cache.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    PagedIterable<RedisFirewallRuleInner> listByRedisResource(String resourceGroupName, String cacheName);

    /**
     * Gets all firewall rules in the specified redis cache.
     *
     * @param resourceGroupName The name of the resource group.
     * @param cacheName The name of the Redis cache.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return all firewall rules in the specified redis cache.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    PagedIterable<RedisFirewallRuleInner> listByRedisResource(
        String resourceGroupName, String cacheName, Context context);

    /**
     * Create or update a redis cache firewall rule.
     *
     * @param resourceGroupName The name of the resource group.
     * @param cacheName The name of the Redis cache.
     * @param ruleName The name of the firewall rule.
     * @param parameters Parameters required for creating a firewall rule on redis cache.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a firewall rule on a redis cache has a name, and describes a contiguous range of IP addresses permitted
     *     to connect.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Mono<Response<RedisFirewallRuleInner>> createOrUpdateWithResponseAsync(
        String resourceGroupName, String cacheName, String ruleName, RedisFirewallRuleCreateParameters parameters);

    /**
     * Create or update a redis cache firewall rule.
     *
     * @param resourceGroupName The name of the resource group.
     * @param cacheName The name of the Redis cache.
     * @param ruleName The name of the firewall rule.
     * @param parameters Parameters required for creating a firewall rule on redis cache.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a firewall rule on a redis cache has a name, and describes a contiguous range of IP addresses permitted
     *     to connect.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Mono<RedisFirewallRuleInner> createOrUpdateAsync(
        String resourceGroupName, String cacheName, String ruleName, RedisFirewallRuleCreateParameters parameters);

    /**
     * Create or update a redis cache firewall rule.
     *
     * @param resourceGroupName The name of the resource group.
     * @param cacheName The name of the Redis cache.
     * @param ruleName The name of the firewall rule.
     * @param parameters Parameters required for creating a firewall rule on redis cache.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a firewall rule on a redis cache has a name, and describes a contiguous range of IP addresses permitted
     *     to connect.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    RedisFirewallRuleInner createOrUpdate(
        String resourceGroupName, String cacheName, String ruleName, RedisFirewallRuleCreateParameters parameters);

    /**
     * Create or update a redis cache firewall rule.
     *
     * @param resourceGroupName The name of the resource group.
     * @param cacheName The name of the Redis cache.
     * @param ruleName The name of the firewall rule.
     * @param parameters Parameters required for creating a firewall rule on redis cache.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a firewall rule on a redis cache has a name, and describes a contiguous range of IP addresses permitted
     *     to connect.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Response<RedisFirewallRuleInner> createOrUpdateWithResponse(
        String resourceGroupName,
        String cacheName,
        String ruleName,
        RedisFirewallRuleCreateParameters parameters,
        Context context);

    /**
     * Gets a single firewall rule in a specified redis cache.
     *
     * @param resourceGroupName The name of the resource group.
     * @param cacheName The name of the Redis cache.
     * @param ruleName The name of the firewall rule.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a single firewall rule in a specified redis cache.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Mono<Response<RedisFirewallRuleInner>> getWithResponseAsync(
        String resourceGroupName, String cacheName, String ruleName);

    /**
     * Gets a single firewall rule in a specified redis cache.
     *
     * @param resourceGroupName The name of the resource group.
     * @param cacheName The name of the Redis cache.
     * @param ruleName The name of the firewall rule.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a single firewall rule in a specified redis cache.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Mono<RedisFirewallRuleInner> getAsync(String resourceGroupName, String cacheName, String ruleName);

    /**
     * Gets a single firewall rule in a specified redis cache.
     *
     * @param resourceGroupName The name of the resource group.
     * @param cacheName The name of the Redis cache.
     * @param ruleName The name of the firewall rule.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a single firewall rule in a specified redis cache.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    RedisFirewallRuleInner get(String resourceGroupName, String cacheName, String ruleName);

    /**
     * Gets a single firewall rule in a specified redis cache.
     *
     * @param resourceGroupName The name of the resource group.
     * @param cacheName The name of the Redis cache.
     * @param ruleName The name of the firewall rule.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a single firewall rule in a specified redis cache.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Response<RedisFirewallRuleInner> getWithResponse(
        String resourceGroupName, String cacheName, String ruleName, Context context);

    /**
     * Deletes a single firewall rule in a specified redis cache.
     *
     * @param resourceGroupName The name of the resource group.
     * @param cacheName The name of the Redis cache.
     * @param ruleName The name of the firewall rule.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a {@link Mono} whose {@link Response} signals completion of the delete operation.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Mono<Response<Void>> deleteWithResponseAsync(String resourceGroupName, String cacheName, String ruleName);

    /**
     * Deletes a single firewall rule in a specified redis cache.
     *
     * @param resourceGroupName The name of the resource group.
     * @param cacheName The name of the Redis cache.
     * @param ruleName The name of the firewall rule.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a {@link Mono} that completes when the firewall rule has been deleted.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Mono<Void> deleteAsync(String resourceGroupName, String cacheName, String ruleName);

    /**
     * Deletes a single firewall rule in a specified redis cache.
     *
     * @param resourceGroupName The name of the resource group.
     * @param cacheName The name of the Redis cache.
     * @param ruleName The name of the firewall rule.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    void delete(String resourceGroupName, String cacheName, String ruleName);

    /**
     * Deletes a single firewall rule in a specified redis cache.
     *
     * @param resourceGroupName The name of the resource group.
     * @param cacheName The name of the Redis cache.
     * @param ruleName The name of the firewall rule.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the HTTP response of the delete operation (no body).
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Response<Void> deleteWithResponse(String resourceGroupName, String cacheName, String ruleName, Context context);
}
| |
package com.gwtfb.client;
import com.google.gwt.core.client.EntryPoint;
import com.google.gwt.core.client.GWT;
import com.google.gwt.core.client.JavaScriptObject;
import com.google.gwt.event.logical.shared.ValueChangeEvent;
import com.google.gwt.event.logical.shared.ValueChangeHandler;
import com.google.gwt.user.client.History;
import com.google.gwt.user.client.Window;
import com.google.gwt.user.client.ui.Anchor;
import com.google.gwt.user.client.ui.DockPanel;
import com.google.gwt.user.client.ui.HTML;
import com.google.gwt.user.client.ui.HorizontalPanel;
import com.google.gwt.user.client.ui.RootPanel;
import com.google.gwt.user.client.ui.SimplePanel;
import com.google.gwt.user.client.ui.VerticalPanel;
import com.gwtfb.client.examples.Example;
import com.gwtfb.client.examples.FriendsExample;
import com.gwtfb.client.examples.StreamPublishExample;
import com.gwtfb.sdk.FBCore;
import com.gwtfb.sdk.FBEvent;
import com.gwtfb.sdk.FBXfbml;
/**
 * Entry point classes define <code>onModuleLoad()</code>.
 *
 * <p>Wires GWT History to a simple hash-token router, initializes the
 * Facebook JS SDK, and renders the home / wave / example views.
 *
 * @author ola
 */
public class GwtFB implements EntryPoint, ValueChangeHandler<String> {

    public String APPID = "d8724776da77150465c351df97a80d95";
    // prod
    //public String APPID = "0d51db8fd8b95ef0c2337ccbdc00d736";

    private final DockPanel mainPanel = new DockPanel ();
    private final SimplePanel mainView = new SimplePanel ();
    private final SimplePanel sideBarView = new SimplePanel ();

    private final FBCore fbCore = GWT.create(FBCore.class);
    private final FBEvent fbEvent = GWT.create(FBEvent.class);
    private final FBXfbml fbXfbml = GWT.create(FBXfbml.class);

    // FB.init flags: check login status, parse XFBML tags, persist session cookie.
    private final boolean status = true;
    private final boolean xfbml = true;
    private final boolean cookie = true;

    /**
     * This is the entry point method. Builds the layout, initializes the FB
     * SDK, and registers session/login callbacks.
     */
    public void onModuleLoad() {
        History.addValueChangeHandler ( this );
        fbCore.init(APPID, status, cookie, xfbml);

        RootPanel root = RootPanel.get();
        root.getElement().setId ( "TheApp" );
        mainView.getElement().setId("MainView");
        sideBarView.getElement().setId("SideBarView");

        mainPanel.add( new TopMenuPanel () , DockPanel.NORTH );
        mainPanel.add ( new TopMenuLinksPanel (), DockPanel.NORTH );
        mainPanel.add( sideBarView, DockPanel.WEST );
        mainPanel.add( mainView, DockPanel.CENTER );
        root.add ( mainPanel );

        //
        // Callback used when session status is changed
        //
        class SessionChangeCallback extends Callback<JavaScriptObject> {
            @Override
            public void onSuccess ( JavaScriptObject response ) {
                // Make sure cookie is set so we can use the non async method
                renderHomeView ();
            }
        }

        //
        // Get notified when user session is changed
        //
        SessionChangeCallback sessionChangeCallback = new SessionChangeCallback ();
        fbEvent.subscribe("auth.sessionChange",sessionChangeCallback);

        // Callback used when checking login status
        class LoginStatusCallback extends Callback<JavaScriptObject> {
            @Override
            public void onSuccess ( JavaScriptObject response ) {
                LoginStatusJso status = response.cast();
                GWT.log( "LoginStatus response: " + status.getStatus() );
                renderApp( Window.Location.getHash() );
            }
        }
        LoginStatusCallback loginStatusCallback = new LoginStatusCallback ();

        // Get login status
        fbCore.getLoginStatus( loginStatusCallback );

        class UserCallback extends Callback<JavaScriptObject> {
            @Override
            public void onSuccess(JavaScriptObject result) {
                UserJso user = result.cast();
                GWT.log( "FB ID=" + user.getId() );
                GWT.log( "FB fname="+ user.getName() );
            }
        }
        // NOTE(review): issued before login status is known; presumably fails
        // silently for logged-out users — confirm intended.
        fbCore.api("/me", new UserCallback());
    }

    /**
     * Render GUI for the given history token (e.g. "home", "wave",
     * "example/friends"). Unknown tokens produce an alert.
     *
     * @param token raw hash token, may be null or empty (defaults to "home")
     */
    private void renderApp ( String token ) {
        // Null-check BEFORE dereferencing; the original called token.replace()
        // first, so the null guard below it could never be reached.
        if ( token == null ) {
            token = "";
        }
        token = token.replace("#", "");
        if ( "".equals ( token ) ) {
            token = "home";
        }

        if ( token.endsWith("home") ) {
            renderHomeView ();
        } else if ( token.endsWith ( "wave" ) ) {
            renderWaveView ();
        } else if ( token.startsWith("example" ) ) {
            /*
             * Wrap example, display sourcecode link etc.
             */
            String[] parts = token.split("/");
            if ( parts.length < 2 ) {
                // "example" with no name previously threw ArrayIndexOutOfBoundsException.
                Window.alert ( "Unknown url " + token );
                return;
            }
            String example = parts[1];

            Example e = null;
            if ( "stream.publish".equals ( example ) ) {
                e = new StreamPublishExample ( fbCore );
            } else if ( "friends".equals ( example ) ) {
                e = new FriendsExample ( fbCore );
            }
            if ( e == null ) {
                // Unknown example name previously fell through to e.getMethod()
                // and threw a NullPointerException.
                Window.alert ( "Unknown url " + token );
                return;
            }

            VerticalPanel examplePanel = new VerticalPanel ();
            examplePanel.setWidth ( "700px" );
            examplePanel.getElement().setId("ExampleView");

            HorizontalPanel headerPanel = new HorizontalPanel ();
            headerPanel.addStyleName( "header" );
            headerPanel.add ( new HTML ( "Method: " + e.getMethod() ) );

            Anchor sourceLink = new Anchor ( "Source" );
            sourceLink.addStyleName("sourceLink");
            sourceLink.setTarget( "blank");
            sourceLink.setHref("http://code.google.com/p/gwtfb/source/browse/trunk/GwtFB/src/com/gwtfb/client/examples/" + e.getSimpleName() + ".java" );
            headerPanel.add ( sourceLink );

            examplePanel.add( headerPanel );
            examplePanel.addStyleName ( "example" );
            e.addStyleName( "example" );
            examplePanel.add ( e );

            // Add example
            mainView.setWidget( examplePanel );
        } else {
            Window.alert ( "Unknown url " + token );
        }
    }

    /**
     * Render GUI when logged in
     */
    private void renderWhenLoggedIn () {
        mainView.setWidget ( new UserInfoViewController ( fbCore ) );
        fbXfbml.parse();
    }

    /**
     * Render GUI when not logged in
     */
    private void renderWhenNotLoggedIn () {
        mainView.setWidget ( new FrontpageViewController () );
        fbXfbml.parse();
    }

    /**
     * Render home view. If user is logged in display welcome message, otherwise
     * display login dialog.
     */
    private void renderHomeView () {
        sideBarView.clear();
        if ( fbCore.getSession() == null ) {
            renderWhenNotLoggedIn ();
        } else {
            sideBarView.setWidget( new HomeSideBarPanel () );
            renderWhenLoggedIn();
        }
    }

    /**
     * Render Wave
     */
    private void renderWaveView () {
        WaveView waveView = new WaveView ();
        sideBarView.setWidget(new DocSideBarPanel () );
        mainView.setWidget( waveView );
    }

    /** Routes browser history changes through the same token router. */
    public void onValueChange(ValueChangeEvent<String> event) {
        renderApp ( event.getValue() );
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.catalina.core;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URL;
import java.util.Arrays;
import java.util.List;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import org.junit.Test;
import org.apache.catalina.Context;
import org.apache.catalina.Lifecycle;
import org.apache.catalina.LifecycleEvent;
import org.apache.catalina.LifecycleListener;
import org.apache.catalina.startup.Constants;
import org.apache.catalina.startup.ContextConfig;
import org.apache.catalina.startup.Tomcat;
import org.apache.catalina.startup.TomcatBaseTest;
import org.apache.tomcat.util.buf.ByteChunk;
import org.apache.tomcat.util.descriptor.web.WebXml;
public class TestStandardContextResources extends TomcatBaseTest {

    @Override
    public void setUp() throws Exception {
        super.setUp();

        Tomcat tomcat = getTomcatInstance();

        // BZ 49218: The test fails if JreMemoryLeakPreventionListener is not
        // present. The listener affects the JVM, and thus not only the current,
        // but also the subsequent tests that are run in the same JVM. So it is
        // fair to add it in every test.
        tomcat.getServer().addLifecycleListener(
                new JreMemoryLeakPreventionListener());
    }

    @Test
    public void testResources() throws Exception {
        Tomcat tomcat = getTomcatInstance();

        File appDir = new File("test/webapp-fragments");
        // app dir is relative to server home
        Context ctx = tomcat.addWebapp(null, "/test", appDir.getAbsolutePath());

        tomcat.start();

        assertPageContains("/test/resourceA.jsp",
                "<p>resourceA.jsp in the web application</p>");
        assertPageContains("/test/resourceB.jsp",
                "<p>resourceB.jsp in resources.jar</p>");

        assertPageContains("/test/folder/resourceC.jsp",
                "<p>resourceC.jsp in the web application</p>");
        assertPageContains("/test/folder/resourceD.jsp",
                "<p>resourceD.jsp in resources.jar</p>");
        assertPageContains("/test/folder/resourceE.jsp",
                "<p>resourceE.jsp in the web application</p>");
        // WEB-INF/classes resources are not served unless explicitly enabled.
        assertPageContains("/test/resourceG.jsp",
                "<p>resourceG.jsp in WEB-INF/classes</p>", 404);

        // For BZ 54391. Relative ordering is specified in resources2.jar.
        // It is not absolute-ordering, so there may be other jars in the list
        @SuppressWarnings("unchecked")
        List<String> orderedLibs = (List<String>) ctx.getServletContext()
                .getAttribute(ServletContext.ORDERED_LIBS);
        if (orderedLibs.size() > 2) {
            log.warn("testResources(): orderedLibs: " + orderedLibs);
        }
        int index = orderedLibs.indexOf("resources.jar");
        int index2 = orderedLibs.indexOf("resources2.jar");
        assertTrue(orderedLibs.toString(), index >= 0 && index2 >= 0
                && index < index2);
    }

    @Test
    public void testResourcesWebInfClasses() throws Exception {
        Tomcat tomcat = getTomcatInstance();

        // app dir is relative to server home
        File appDir = new File("test/webapp-fragments");

        // Need to cast to be able to set StandardContext specific attribute
        StandardContext ctxt = (StandardContext)
                tomcat.addWebapp(null, "/test", appDir.getAbsolutePath());
        ctxt.setAddWebinfClassesResources(true);

        tomcat.start();

        assertPageContains("/test/resourceA.jsp",
                "<p>resourceA.jsp in the web application</p>");
        assertPageContains("/test/resourceB.jsp",
                "<p>resourceB.jsp in resources.jar</p>");

        assertPageContains("/test/folder/resourceC.jsp",
                "<p>resourceC.jsp in the web application</p>");
        assertPageContains("/test/folder/resourceD.jsp",
                "<p>resourceD.jsp in resources.jar</p>");
        assertPageContains("/test/folder/resourceE.jsp",
                "<p>resourceE.jsp in the web application</p>");
        // With addWebinfClassesResources enabled, resourceG is now served.
        assertPageContains("/test/resourceG.jsp",
                "<p>resourceG.jsp in WEB-INF/classes</p>");
    }

    @Test
    public void testResourcesAbsoluteOrdering() throws Exception {
        Tomcat tomcat = getTomcatInstance();

        File appDir = new File("test/webapp-fragments");
        // app dir is relative to server home
        StandardContext ctx = (StandardContext) tomcat.addWebapp(null, "/test",
                appDir.getAbsolutePath());

        LifecycleListener[] listener = ctx.findLifecycleListeners();
        assertEquals(3,listener.length);
        assertTrue(listener[1] instanceof ContextConfig);
        ContextConfig config = new ContextConfig() {
            @Override
            protected WebXml createWebXml() {
                WebXml wxml = new WebXml();
                wxml.addAbsoluteOrdering("resources");
                wxml.addAbsoluteOrdering("resources2");
                return wxml;
            }
        };
        // prevent it from looking ( if it finds one - it'll have dup error )
        config.setDefaultWebXml(Constants.NoDefaultWebXml);
        listener[1] = config;
        Tomcat.addServlet(ctx, "getresource", new GetResourceServlet());
        ctx.addServletMapping("/getresource", "getresource");

        tomcat.start();
        assertPageContains("/test/getresource?path=/resourceF.jsp",
                "<p>resourceF.jsp in resources2.jar</p>");
        assertPageContains("/test/getresource?path=/resourceB.jsp",
                "<p>resourceB.jsp in resources.jar</p>");

        // Check ordering, for BZ 54391
        assertEquals(Arrays.asList("resources.jar", "resources2.jar"), ctx
                .getServletContext().getAttribute(ServletContext.ORDERED_LIBS));

        ctx.stop();

        LifecycleListener[] listener1 = ctx.findLifecycleListeners();
        // change ordering and reload
        ContextConfig config1 = new ContextConfig() {
            @Override
            protected WebXml createWebXml() {
                WebXml wxml = new WebXml();
                wxml.addAbsoluteOrdering("resources2");
                wxml.addAbsoluteOrdering("resources");
                return wxml;
            }
        };
        // prevent it from looking ( if it finds one - it'll have dup error )
        config1.setDefaultWebXml(Constants.NoDefaultWebXml);
        listener1[1] = config1;
        // Need to init since context won't call init
        config1.lifecycleEvent(
                new LifecycleEvent(ctx, Lifecycle.AFTER_INIT_EVENT, null));
        Tomcat.addServlet(ctx, "getresource", new GetResourceServlet());
        ctx.addServletMapping("/getresource", "getresource");

        ctx.start();

        assertPageContains("/test/getresource?path=/resourceF.jsp",
                "<p>resourceF.jsp in resources2.jar</p>");
        assertPageContains("/test/getresource?path=/resourceB.jsp",
                "<p>resourceB.jsp in resources2.jar</p>");

        // Check ordering, for BZ 54391
        assertEquals(Arrays.asList("resources2.jar", "resources.jar"), ctx
                .getServletContext().getAttribute(ServletContext.ORDERED_LIBS));
    }

    @Test
    public void testResources2() throws Exception {
        Tomcat tomcat = getTomcatInstance();

        File appDir = new File("test/webapp-fragments");
        // app dir is relative to server home
        StandardContext ctx = (StandardContext) tomcat.addWebapp(null, "/test",
                appDir.getAbsolutePath());

        Tomcat.addServlet(ctx, "getresource", new GetResourceServlet());
        ctx.addServletMapping("/getresource", "getresource");

        tomcat.start();

        assertPageContains("/test/getresource?path=/resourceF.jsp",
                "<p>resourceF.jsp in resources2.jar</p>");
        assertPageContains("/test/getresource?path=/resourceA.jsp",
                "<p>resourceA.jsp in the web application</p>");
        assertPageContains("/test/getresource?path=/resourceB.jsp",
                "<p>resourceB.jsp in resources.jar</p>");

        assertPageContains("/test/getresource?path=/folder/resourceC.jsp",
                "<p>resourceC.jsp in the web application</p>");
        assertPageContains("/test/getresource?path=/folder/resourceD.jsp",
                "<p>resourceD.jsp in resources.jar</p>");
        assertPageContains("/test/getresource?path=/folder/resourceE.jsp",
                "<p>resourceE.jsp in the web application</p>");
    }

    /**
     * A servlet that prints the requested resource. The path to the requested
     * resource is passed as a parameter, <code>path</code>.
     */
    public static class GetResourceServlet extends HttpServlet {

        private static final long serialVersionUID = 1L;

        @Override
        protected void doGet(HttpServletRequest req, HttpServletResponse resp)
                throws ServletException, IOException {
            resp.setContentType("text/plain");
            ServletContext context = getServletContext();

            // Check resources individually
            URL url = context.getResource(req.getParameter("path"));
            if (url == null) {
                resp.getWriter().println("Not found");
                return;
            }

            try (InputStream input = url.openStream();
                    OutputStream output = resp.getOutputStream()) {
                byte[] buffer = new byte[4000];
                for (int len; (len = input.read(buffer)) > 0;) {
                    output.write(buffer, 0, len);
                }
            }
        }
    }

    /** Asserts the page at {@code pageUrl} returns 200 and contains {@code expectedBody}. */
    private void assertPageContains(String pageUrl, String expectedBody)
            throws IOException {
        assertPageContains(pageUrl, expectedBody, 200);
    }

    private void assertPageContains(String pageUrl, String expectedBody,
            int expectedStatus) throws IOException {
        ByteChunk res = new ByteChunk();
        int sc = getUrl("http://localhost:" + getPort() + pageUrl, res, null);
        assertEquals(expectedStatus, sc);

        if (expectedStatus == 200) {
            String result = res.toString();
            // Fix: the original used indexOf(expectedBody) > 0, which wrongly
            // fails when the expected body starts at index 0 of the response.
            assertTrue(result, result.contains(expectedBody));
        }
    }
}
| |
package com.thebluealliance.spectrum;
import com.thebluealliance.spectrum.internal.ColorItem;
import com.thebluealliance.spectrum.internal.ColorUtil;
import com.thebluealliance.spectrum.internal.SelectedColorChangedEvent;
import org.greenrobot.eventbus.EventBus;
import org.greenrobot.eventbus.Subscribe;
import android.content.Context;
import android.content.res.TypedArray;
import androidx.annotation.ColorInt;
import android.util.AttributeSet;
import android.util.Log;
import android.view.Gravity;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.LinearLayout;
import java.util.ArrayList;
import java.util.List;
/**
* General-purpose class that displays colors in a grid.
*/
public class SpectrumPalette extends LinearLayout {
private static final int DEFAULT_COLUMN_COUNT = 4;
private int mColorItemDimension;
private int mColorItemMargin;
private @ColorInt int[] mColors;
private @ColorInt int mSelectedColor;
private OnColorSelectedListener mListener;
private boolean mAutoPadding = false;
private boolean mHasFixedColumnCount = false;
private int mFixedColumnCount = -1;
private int mOutlineWidth = 0;
private int mComputedVerticalPadding = 0;
private int mOriginalPaddingTop = 0;
private int mOriginalPaddingBottom = 0;
private boolean mSetPaddingCalledInternally = false;
private int mNumColumns = 2;
private int mOldNumColumns = -1;
private boolean mViewInitialized = false;
private EventBus mEventBus;
private List<ColorItem> mItems = new ArrayList<>();
/** Programmatic constructor; all styleable attributes keep their defaults. */
public SpectrumPalette(Context context) {
    super(context);
    init();
}
/**
 * XML inflation constructor: reads the {@code spectrum_*} styleable
 * attributes (color array, auto-padding, outline width, fixed column count)
 * before delegating to {@link #init()}.
 */
public SpectrumPalette(Context context, AttributeSet attrs) {
    super(context, attrs);

    TypedArray a = getContext().getTheme().obtainStyledAttributes(attrs, R.styleable.SpectrumPalette, 0, 0);

    // Optional array resource declaring the palette's colors.
    int id = a.getResourceId(R.styleable.SpectrumPalette_spectrum_colors, 0);
    if (id != 0) {
        mColors = getContext().getResources().getIntArray(id);
    }

    mAutoPadding = a.getBoolean(R.styleable.SpectrumPalette_spectrum_autoPadding, false);
    mOutlineWidth = a.getDimensionPixelSize(R.styleable.SpectrumPalette_spectrum_outlineWidth, 0);
    // -1 sentinel means "no fixed column count"; compute columns dynamically.
    mFixedColumnCount = a.getInt(R.styleable.SpectrumPalette_spectrum_columnCount, -1);
    if (mFixedColumnCount != -1) {
        mHasFixedColumnCount = true;
    }

    a.recycle(); // TypedArrays are pooled and must be recycled after use

    // Capture the XML-declared padding so auto-padding can add to it later.
    mOriginalPaddingTop = getPaddingTop();
    mOriginalPaddingBottom = getPaddingBottom();

    init();
}
private void init() {
mEventBus = new EventBus();
mEventBus.register(this);
mColorItemDimension = getResources().getDimensionPixelSize(R.dimen.color_item_small);
mColorItemMargin = getResources().getDimensionPixelSize(R.dimen.color_item_margins_small);
setOrientation(LinearLayout.VERTICAL);
}
/**
* Sets the colors that this palette will display
*
* @param colors an array of ARGB colors
*/
public void setColors(@ColorInt int[] colors) {
mColors = colors;
mViewInitialized = false;
createPaletteView();
}
/**
* Sets the currently selected color. This should be one of the colors specified via
* {@link #setColors(int[])}; behavior is undefined if {@code color} is not among those colors.
*
* @param color the color to be marked as selected
*/
public void setSelectedColor(@ColorInt int color) {
mSelectedColor = color;
mEventBus.post(new SelectedColorChangedEvent(mSelectedColor));
}
/**
* Registers a callback to be invoked when a new color is selected.
*/
public void setOnColorSelectedListener(OnColorSelectedListener listener) {
mListener = listener;
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
int widthMode = MeasureSpec.getMode(widthMeasureSpec);
int widthSize = MeasureSpec.getSize(widthMeasureSpec);
int heightMode = MeasureSpec.getMode(heightMeasureSpec);
int heightSize = MeasureSpec.getSize(heightMeasureSpec);
int width, height;
if (!mHasFixedColumnCount) {
if (widthMode == MeasureSpec.EXACTLY) {
width = widthSize;
mNumColumns = computeColumnCount(widthSize - (getPaddingLeft() + getPaddingRight()));
} else if (widthMode == MeasureSpec.AT_MOST) {
width = widthSize;
mNumColumns = computeColumnCount(widthSize - (getPaddingLeft() + getPaddingRight()));
} else {
width = computeWidthForNumColumns(DEFAULT_COLUMN_COUNT) + getPaddingLeft() + getPaddingRight();
mNumColumns = DEFAULT_COLUMN_COUNT;
}
} else {
width = computeWidthForNumColumns(mFixedColumnCount) + getPaddingLeft() + getPaddingRight();
mNumColumns = mFixedColumnCount;
}
mComputedVerticalPadding = (width - (computeWidthForNumColumns(mNumColumns) + getPaddingLeft() + getPaddingRight())) / 2;
if (heightMode == MeasureSpec.EXACTLY) {
height = heightSize;
} else if (heightMode == MeasureSpec.AT_MOST) {
int desiredHeight = computeHeight(mNumColumns) + mOriginalPaddingTop + mOriginalPaddingBottom;
if (mAutoPadding) {
desiredHeight += (2 * mComputedVerticalPadding);
}
height = Math.min(desiredHeight, heightSize);
} else {
height = computeHeight(mNumColumns) + mOriginalPaddingTop + mOriginalPaddingBottom;
if (mAutoPadding) {
height += (2 * mComputedVerticalPadding);
}
}
if (mAutoPadding) {
setPaddingInternal(getPaddingLeft(), mOriginalPaddingTop + mComputedVerticalPadding, getPaddingRight(), mOriginalPaddingBottom + mComputedVerticalPadding);
}
createPaletteView();
super.onMeasure(MeasureSpec.makeMeasureSpec(width, MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(height, MeasureSpec.EXACTLY));
}
private int computeColumnCount(int maxWidth) {
int numColumns = 0;
while (((numColumns + 1) * mColorItemDimension) + ((numColumns + 1) * 2 * mColorItemMargin) <= maxWidth) {
numColumns++;
}
return numColumns;
}
private int computeWidthForNumColumns(int columnCount) {
return columnCount * (mColorItemDimension + 2 * mColorItemMargin);
}
private int computeHeight(int columnCount) {
if (mColors == null) {
// View does not have any colors to display, so we won't take up any room
return 0;
}
int rowCount = mColors.length / columnCount;
if (mColors.length % columnCount != 0) {
rowCount++;
}
return rowCount * (mColorItemDimension + 2 * mColorItemMargin);
}
private void setPaddingInternal(int left, int top, int right, int bottom) {
mSetPaddingCalledInternally = true;
setPadding(left, top, right, bottom);
}
@Override public void setPadding(int left, int top, int right, int bottom) {
super.setPadding(left, top, right, bottom);
if (!mSetPaddingCalledInternally) {
mOriginalPaddingTop = top;
mOriginalPaddingBottom = bottom;
}
}
private int getOriginalPaddingTop() {
return mOriginalPaddingTop;
}
private int getOriginalPaddingBottom() {
return mOriginalPaddingBottom;
}
/**
* Generates the views to represent this palette's colors. The grid is implemented with
* {@link LinearLayout}s. This class itself subclasses {@link LinearLayout} and is set up in
* the vertical orientation. Rows consist of horizontal {@link LinearLayout}s which themselves
* hold views that display the individual colors.
*/
protected void createPaletteView() {
// Only create the view if it hasn't been created yet or if the number of columns has changed
if (mViewInitialized && mNumColumns == mOldNumColumns) {
return;
}
mViewInitialized = true;
mOldNumColumns = mNumColumns;
removeAllViews();
if (mColors == null) {
return;
}
// Add rows
int numItemsInRow = 0;
LinearLayout row = createRow();
for (int i = 0; i < mColors.length; i++) {
View colorItem = createColorItem(mColors[i], mSelectedColor);
row.addView(colorItem);
numItemsInRow++;
if (numItemsInRow == mNumColumns) {
addView(row);
row = createRow();
numItemsInRow = 0;
}
}
if (numItemsInRow > 0) {
while (numItemsInRow < mNumColumns) {
row.addView(createSpacer());
numItemsInRow++;
}
addView(row);
}
}
private LinearLayout createRow() {
LinearLayout row = new LinearLayout(getContext());
row.setOrientation(LinearLayout.HORIZONTAL);
ViewGroup.LayoutParams params = new ViewGroup.LayoutParams(LayoutParams.MATCH_PARENT,
LayoutParams.WRAP_CONTENT);
row.setLayoutParams(params);
row.setGravity(Gravity.CENTER_HORIZONTAL);
return row;
}
private ColorItem createColorItem(int color, int selectedColor) {
ColorItem view = new ColorItem(getContext(), color, color == selectedColor, mEventBus);
LinearLayout.LayoutParams params = new LinearLayout.LayoutParams(mColorItemDimension, mColorItemDimension);
params.setMargins(mColorItemMargin, mColorItemMargin, mColorItemMargin, mColorItemMargin);
view.setLayoutParams(params);
if (mOutlineWidth != 0) {
view.setOutlineWidth(mOutlineWidth);
}
mItems.add(view);
return view;
}
private ImageView createSpacer() {
ImageView view = new ImageView(getContext());
LinearLayout.LayoutParams params = new LinearLayout.LayoutParams(mColorItemDimension, mColorItemDimension);
params.setMargins(mColorItemMargin, mColorItemMargin, mColorItemMargin, mColorItemMargin);
view.setLayoutParams(params);
return view;
}
@Subscribe
public void onSelectedColorChanged(SelectedColorChangedEvent event) {
mSelectedColor = event.getSelectedColor();
if (mListener != null) {
mListener.onColorSelected(mSelectedColor);
}
}
public interface OnColorSelectedListener {
void onColorSelected(@ColorInt int color);
}
/**
* Returns true if for the given color a dark checkmark is used.
*
* @return true if color is "dark"
*/
public boolean usesDarkCheckmark(@ColorInt int color) {
return ColorUtil.isColorDark(color);
}
/**
* Change the size of the outlining
*
* @param width in px
*/
public void setOutlineWidth(int width) {
mOutlineWidth = width;
for (ColorItem item : mItems) {
item.setOutlineWidth(width);
}
}
/**
* Tells the palette to use a fixed number of columns during layout.
*
* @param columnCount how many columns to use
*/
public void setFixedColumnCount(int columnCount) {
if (columnCount > 0) {
Log.d("spectrum", "set column count to " + columnCount);
mHasFixedColumnCount = true;
mFixedColumnCount = columnCount;
requestLayout();
invalidate();
} else {
mHasFixedColumnCount = false;
mFixedColumnCount = -1;
requestLayout();
invalidate();
}
}
}
| |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.ec2.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceResult;
/**
 * <p>
 * Contains the output of DescribeRouteTables.
 * </p>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DescribeRouteTablesResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /** Information about one or more route tables; lazily initialized. */
    private com.amazonaws.internal.SdkInternalList<RouteTable> routeTables;

    /** Pagination token; {@code null} when there are no more results to return. */
    private String nextToken;

    /**
     * Returns the route tables, creating an empty list on first access so this
     * method never returns {@code null}.
     *
     * @return Information about one or more route tables.
     */
    public java.util.List<RouteTable> getRouteTables() {
        com.amazonaws.internal.SdkInternalList<RouteTable> tables = routeTables;
        if (tables == null) {
            tables = new com.amazonaws.internal.SdkInternalList<RouteTable>();
            routeTables = tables;
        }
        return tables;
    }

    /**
     * Replaces the route tables with a copy of the given collection.
     *
     * @param routeTables
     *        Information about one or more route tables; {@code null} clears the field.
     */
    public void setRouteTables(java.util.Collection<RouteTable> routeTables) {
        this.routeTables = (routeTables == null)
                ? null
                : new com.amazonaws.internal.SdkInternalList<RouteTable>(routeTables);
    }

    /**
     * Appends the given route tables to any already present.
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setRouteTables(java.util.Collection)} or {@link #withRouteTables(java.util.Collection)} if you want to
     * override the existing values.
     * </p>
     *
     * @param routeTables
     *        Information about one or more route tables.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeRouteTablesResult withRouteTables(RouteTable... routeTables) {
        if (this.routeTables == null) {
            this.routeTables = new com.amazonaws.internal.SdkInternalList<RouteTable>(routeTables.length);
        }
        for (RouteTable routeTable : routeTables) {
            this.routeTables.add(routeTable);
        }
        return this;
    }

    /**
     * Replaces the route tables with the given collection (fluent variant).
     *
     * @param routeTables
     *        Information about one or more route tables.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeRouteTablesResult withRouteTables(java.util.Collection<RouteTable> routeTables) {
        setRouteTables(routeTables);
        return this;
    }

    /**
     * Sets the pagination token.
     *
     * @param nextToken
     *        The token to use to retrieve the next page of results. This value is <code>null</code> when there are no
     *        more results to return.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * Returns the pagination token.
     *
     * @return The token to use to retrieve the next page of results. This value is <code>null</code> when there are no
     *         more results to return.
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * Sets the pagination token (fluent variant).
     *
     * @param nextToken
     *        The token to use to retrieve the next page of results.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeRouteTablesResult withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder builder = new StringBuilder("{");
        if (getRouteTables() != null) {
            builder.append("RouteTables: ").append(getRouteTables()).append(",");
        }
        if (getNextToken() != null) {
            builder.append("NextToken: ").append(getNextToken());
        }
        return builder.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof DescribeRouteTablesResult)) {
            return false;
        }
        DescribeRouteTablesResult other = (DescribeRouteTablesResult) obj;
        return java.util.Objects.equals(other.getRouteTables(), this.getRouteTables())
                && java.util.Objects.equals(other.getNextToken(), this.getNextToken());
    }

    @Override
    public int hashCode() {
        // Same 31-based accumulation as the hand-rolled version.
        return java.util.Objects.hash(getRouteTables(), getNextToken());
    }

    @Override
    public DescribeRouteTablesResult clone() {
        try {
            return (DescribeRouteTablesResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() even though we're Cloneable!", e);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache;
import java.util.Collection;
import java.util.UUID;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.events.CacheEvent;
import org.apache.ignite.events.CacheRebalancingEvent;
import org.apache.ignite.internal.managers.eventstorage.GridLocalEventListener;
import org.apache.ignite.internal.processors.cache.transactions.IgniteInternalTx;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.X;
import org.apache.ignite.internal.util.typedef.internal.LT;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteUuid;
import org.jetbrains.annotations.Nullable;
import static org.apache.ignite.events.EventType.EVT_CACHE_REBALANCE_PART_UNLOADED;
import static org.apache.ignite.events.EventType.EVT_CACHE_STARTED;
import static org.apache.ignite.events.EventType.EVT_CACHE_STOPPED;
/**
 * Cache event manager. Translates cache operations into {@link CacheEvent}s and
 * rebalancing operations into {@link CacheRebalancingEvent}s, recording them on
 * the grid event storage.
 */
public class GridCacheEventManager extends GridCacheManagerAdapter {
    /** Force keep binary flag. Will be set if event notification encountered exception during unmarshalling. */
    // NOTE(review): read and written without synchronization; presumably benign
    // (worst case is an extra unmarshalling attempt) — confirm against callers.
    private boolean forceKeepBinary;

    /**
     * Adds local event listener.
     *
     * @param lsnr Listener.
     * @param evts Types of events.
     */
    public void addListener(GridLocalEventListener lsnr, int... evts) {
        cctx.gridEvents().addLocalEventListener(lsnr, evts);
    }

    /**
     * Removes local event listener.
     *
     * @param lsnr Local event listener.
     */
    public void removeListener(GridLocalEventListener lsnr) {
        cctx.gridEvents().removeLocalEventListener(lsnr);
    }

    /**
     * Adds an event originated on the local node. Delegates to the full overload
     * with the local node ID.
     *
     * @param part Partition.
     * @param key Key for the event.
     * @param tx Possible surrounding transaction.
     * @param owner Possible surrounding lock.
     * @param type Event type.
     * @param newVal New value.
     * @param hasNewVal Whether new value is present or not.
     * @param oldVal Old value.
     * @param hasOldVal Whether old value is present or not.
     * @param subjId Subject ID.
     * @param cloClsName Closure class name.
     * @param taskName Task name.
     * @param keepBinary Keep binary flag.
     */
    public void addEvent(int part,
        KeyCacheObject key,
        IgniteInternalTx tx,
        @Nullable GridCacheMvccCandidate owner,
        int type,
        @Nullable CacheObject newVal,
        boolean hasNewVal,
        @Nullable CacheObject oldVal,
        boolean hasOldVal,
        UUID subjId,
        String cloClsName,
        String taskName,
        boolean keepBinary)
    {
        addEvent(part,
            key,
            cctx.localNodeId(),
            tx,
            owner,
            type,
            newVal,
            hasNewVal,
            oldVal,
            hasOldVal,
            subjId,
            cloClsName,
            taskName,
            keepBinary);
    }

    /**
     * Adds a keyless lifecycle event (cache started/stopped).
     *
     * @param type Event type (start or stop).
     */
    public void addEvent(int type) {
        addEvent(
            0,
            null,
            cctx.localNodeId(),
            (IgniteUuid)null, // Cast disambiguates between addEvent(...) overloads.
            null,
            type,
            null,
            false,
            null,
            false,
            null,
            null,
            null,
            false);
    }

    /**
     * Adds an event with an explicit originating node, resolving the transaction
     * and lock owner to their identifiers.
     *
     * @param part Partition.
     * @param key Key for the event.
     * @param nodeId Node ID.
     * @param tx Possible surrounding transaction.
     * @param owner Possible surrounding lock.
     * @param type Event type.
     * @param newVal New value.
     * @param hasNewVal Whether new value is present or not.
     * @param oldVal Old value.
     * @param hasOldVal Whether old value is present or not.
     * @param subjId Subject ID.
     * @param cloClsName Closure class name.
     * @param taskName Task name.
     * @param keepBinary Keep binary flag.
     */
    public void addEvent(int part,
        KeyCacheObject key,
        UUID nodeId,
        IgniteInternalTx tx,
        GridCacheMvccCandidate owner,
        int type,
        CacheObject newVal,
        boolean hasNewVal,
        CacheObject oldVal,
        boolean hasOldVal,
        UUID subjId,
        String cloClsName,
        String taskName,
        boolean keepBinary)
    {
        addEvent(part,
            key,
            nodeId, tx == null ? null : tx.xid(),
            owner == null ? null : owner.version(),
            type,
            newVal,
            hasNewVal,
            oldVal,
            hasOldVal,
            subjId,
            cloClsName,
            taskName,
            keepBinary);
    }

    /**
     * Adds an event, looking up the surrounding transaction from the lock owner.
     *
     * @param part Partition.
     * @param key Key for the event.
     * @param evtNodeId Node ID.
     * @param owner Possible surrounding lock.
     * @param type Event type.
     * @param newVal New value.
     * @param hasNewVal Whether new value is present or not.
     * @param oldVal Old value.
     * @param hasOldVal Whether old value is present or not.
     * @param subjId Subject ID.
     * @param cloClsName Closure class name.
     * @param taskName Task name.
     * @param keepBinary Keep binary flag.
     */
    public void addEvent(int part,
        KeyCacheObject key,
        UUID evtNodeId,
        @Nullable GridCacheMvccCandidate owner,
        int type,
        @Nullable CacheObject newVal,
        boolean hasNewVal,
        CacheObject oldVal,
        boolean hasOldVal,
        UUID subjId,
        String cloClsName,
        String taskName,
        boolean keepBinary)
    {
        // Resolve the transaction through the lock owner's version, if any.
        IgniteInternalTx tx = owner == null ? null : cctx.tm().tx(owner.version());

        addEvent(part,
            key,
            evtNodeId,
            tx == null ? null : tx.xid(),
            owner == null ? null : owner.version(),
            type,
            newVal,
            hasNewVal,
            oldVal,
            hasOldVal,
            subjId,
            cloClsName,
            taskName,
            keepBinary);
    }

    /**
     * Fully-specified overload that actually builds and records the {@link CacheEvent}.
     * All other {@code addEvent} overloads funnel into this one.
     *
     * @param part Partition.
     * @param key Key for the event.
     * @param evtNodeId Event node ID.
     * @param xid Transaction ID.
     * @param lockId Lock ID.
     * @param type Event type.
     * @param newVal New value.
     * @param hasNewVal Whether new value is present or not.
     * @param oldVal Old value.
     * @param hasOldVal Whether old value is present or not.
     * @param subjId Subject ID.
     * @param cloClsName Closure class name.
     * @param taskName Task class name.
     * @param keepBinary Keep binary flag.
     */
    public void addEvent(
        int part,
        KeyCacheObject key,
        UUID evtNodeId,
        @Nullable IgniteUuid xid,
        @Nullable Object lockId,
        int type,
        @Nullable CacheObject newVal,
        boolean hasNewVal,
        @Nullable CacheObject oldVal,
        boolean hasOldVal,
        UUID subjId,
        @Nullable String cloClsName,
        @Nullable String taskName,
        boolean keepBinary
    ) {
        // Only lifecycle events may legitimately have no key.
        assert key != null || type == EVT_CACHE_STARTED || type == EVT_CACHE_STOPPED;

        if (!cctx.events().isRecordable(type))
            LT.warn(log, null, "Added event without checking if event is recordable: " + U.gridEventName(type));

        // Events are not fired for internal entry.
        if (key == null || !key.internal()) {
            ClusterNode evtNode = cctx.discovery().node(evtNodeId);

            // The node may have already left the current topology; search history.
            if (evtNode == null)
                evtNode = findNodeInHistory(evtNodeId);

            if (evtNode == null)
                LT.warn(log, null, "Failed to find event node in grid topology history " +
                    "(try to increase topology history size configuration property of configured " +
                    "discovery SPI): " + evtNodeId);

            // Once an unmarshalling failure has occurred, all subsequent events
            // stay in binary format (see catch block below).
            keepBinary = keepBinary || forceKeepBinary;

            Object key0;
            Object val0;
            Object oldVal0;

            try {
                key0 = cctx.cacheObjectContext().unwrapBinaryIfNeeded(key, keepBinary, false);
                val0 = cctx.cacheObjectContext().unwrapBinaryIfNeeded(newVal, keepBinary, false);
                oldVal0 = cctx.cacheObjectContext().unwrapBinaryIfNeeded(oldVal, keepBinary, false);
            }
            catch (Exception e) {
                // Without binary support the failure is unrecoverable; rethrow.
                if (!cctx.cacheObjectContext().processor().isBinaryEnabled(cctx.config()))
                    throw e;

                if (log.isDebugEnabled())
                    log.debug("Failed to unmarshall cache object value for the event notification: " + e);

                if (!forceKeepBinary)
                    LT.warn(log, null, "Failed to unmarshall cache object value for the event notification " +
                        "(all further notifications will keep binary object format).");

                forceKeepBinary = true;

                // Retry in binary form, which cannot fail on unmarshalling.
                key0 = cctx.cacheObjectContext().unwrapBinaryIfNeeded(key, true, false);
                val0 = cctx.cacheObjectContext().unwrapBinaryIfNeeded(newVal, true, false);
                oldVal0 = cctx.cacheObjectContext().unwrapBinaryIfNeeded(oldVal, true, false);
            }

            cctx.gridEvents().record(new CacheEvent(cctx.name(),
                cctx.localNode(),
                evtNode,
                "Cache event.",
                type,
                part,
                cctx.isNear(),
                key0,
                xid,
                lockId,
                val0,
                hasNewVal,
                oldVal0,
                hasOldVal,
                subjId,
                cloClsName,
                taskName));
        }
    }

    /**
     * Tries to find node in history by specified ID.
     *
     * @param nodeId Node ID.
     * @return Found node or {@code null} if history doesn't contain this node.
     */
    @Nullable private ClusterNode findNodeInHistory(UUID nodeId) {
        // Walk topology versions backwards until history runs out.
        for (long topVer = cctx.discovery().topologyVersion() - 1; topVer > 0; topVer--) {
            Collection<ClusterNode> top = cctx.discovery().topology(topVer);

            if (top == null)
                break;

            for (ClusterNode node : top)
                if (F.eq(node.id(), nodeId))
                    return node;
        }

        return null;
    }

    /**
     * Adds preloading event.
     *
     * @param part Partition.
     * @param type Event type.
     * @param discoNode Discovery node.
     * @param discoType Discovery event type.
     * @param discoTs Discovery event timestamp.
     */
    public void addPreloadEvent(int part, int type, ClusterNode discoNode, int discoType, long discoTs) {
        assert discoNode != null;
        assert type > 0;
        assert discoType > 0;
        assert discoTs > 0;

        if (!cctx.events().isRecordable(type))
            LT.warn(log, null, "Added event without checking if event is recordable: " + U.gridEventName(type));

        cctx.gridEvents().record(new CacheRebalancingEvent(cctx.name(), cctx.localNode(),
            "Cache rebalancing event.", type, part, discoNode, discoType, discoTs));
    }

    /**
     * Adds partition unload event.
     *
     * @param part Partition.
     */
    public void addUnloadEvent(int part) {
        if (!cctx.events().isRecordable(EVT_CACHE_REBALANCE_PART_UNLOADED))
            LT.warn(log, null, "Added event without checking if event is recordable: " +
                U.gridEventName(EVT_CACHE_REBALANCE_PART_UNLOADED));

        cctx.gridEvents().record(new CacheRebalancingEvent(cctx.name(), cctx.localNode(),
            "Cache unloading event.", EVT_CACHE_REBALANCE_PART_UNLOADED, part, null, 0, 0));
    }

    /**
     * @param type Event type.
     * @return {@code True} if event is recordable.
     */
    public boolean isRecordable(int type) {
        // Snapshot the context reference in case of concurrent stop.
        GridCacheContext cctx0 = cctx;

        // Events for system/internal caches are never recorded.
        return cctx0 != null && cctx0.userCache() && cctx0.gridEvents().isRecordable(type);
    }

    /** {@inheritDoc} */
    @Override public void printMemoryStats() {
        X.println(">>> ");
        X.println(">>> Cache event manager memory stats [grid=" + cctx.gridName() + ", cache=" + cctx.name() +
            ", stats=" + "N/A" + ']');
    }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.connect.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/connect-2017-08-08/UpdateInstanceAttribute" target="_top">AWS
 *      API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class UpdateInstanceAttributeRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** The identifier of the Amazon Connect instance (from the instance ARN). */
    private String instanceId;

    /**
     * The type of attribute. Only allowlisted customers can consume
     * USE_CUSTOM_TTS_VOICES; contact Amazon Web Services Support for allowlisting.
     */
    private String attributeType;

    /** The value for the attribute. Maximum character limit is 100. */
    private String value;

    /**
     * Sets the instance identifier.
     *
     * @param instanceId
     *        The identifier of the Amazon Connect instance. You can find the instanceId in the ARN of the instance.
     */
    public void setInstanceId(String instanceId) {
        this.instanceId = instanceId;
    }

    /**
     * Returns the instance identifier.
     *
     * @return The identifier of the Amazon Connect instance. You can find the instanceId in the ARN of the instance.
     */
    public String getInstanceId() {
        return this.instanceId;
    }

    /**
     * Sets the instance identifier (fluent variant).
     *
     * @param instanceId
     *        The identifier of the Amazon Connect instance. You can find the instanceId in the ARN of the instance.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateInstanceAttributeRequest withInstanceId(String instanceId) {
        setInstanceId(instanceId);
        return this;
    }

    /**
     * Sets the attribute type. Only allowlisted customers can consume
     * USE_CUSTOM_TTS_VOICES; contact Amazon Web Services Support for allowlisting.
     *
     * @param attributeType
     *        The type of attribute.
     * @see InstanceAttributeType
     */
    public void setAttributeType(String attributeType) {
        this.attributeType = attributeType;
    }

    /**
     * Returns the attribute type. Only allowlisted customers can consume
     * USE_CUSTOM_TTS_VOICES; contact Amazon Web Services Support for allowlisting.
     *
     * @return The type of attribute.
     * @see InstanceAttributeType
     */
    public String getAttributeType() {
        return this.attributeType;
    }

    /**
     * Sets the attribute type (fluent variant). Only allowlisted customers can
     * consume USE_CUSTOM_TTS_VOICES; contact Amazon Web Services Support for
     * allowlisting.
     *
     * @param attributeType
     *        The type of attribute.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see InstanceAttributeType
     */
    public UpdateInstanceAttributeRequest withAttributeType(String attributeType) {
        setAttributeType(attributeType);
        return this;
    }

    /**
     * Sets the attribute type from the enum (fluent variant). Only allowlisted
     * customers can consume USE_CUSTOM_TTS_VOICES; contact Amazon Web Services
     * Support for allowlisting.
     *
     * @param attributeType
     *        The type of attribute.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see InstanceAttributeType
     */
    public UpdateInstanceAttributeRequest withAttributeType(InstanceAttributeType attributeType) {
        setAttributeType(attributeType.toString());
        return this;
    }

    /**
     * Sets the attribute value.
     *
     * @param value
     *        The value for the attribute. Maximum character limit is 100.
     */
    public void setValue(String value) {
        this.value = value;
    }

    /**
     * Returns the attribute value.
     *
     * @return The value for the attribute. Maximum character limit is 100.
     */
    public String getValue() {
        return this.value;
    }

    /**
     * Sets the attribute value (fluent variant).
     *
     * @param value
     *        The value for the attribute. Maximum character limit is 100.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateInstanceAttributeRequest withValue(String value) {
        setValue(value);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder builder = new StringBuilder("{");
        if (getInstanceId() != null) {
            builder.append("InstanceId: ").append(getInstanceId()).append(",");
        }
        if (getAttributeType() != null) {
            builder.append("AttributeType: ").append(getAttributeType()).append(",");
        }
        if (getValue() != null) {
            builder.append("Value: ").append(getValue());
        }
        return builder.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof UpdateInstanceAttributeRequest)) {
            return false;
        }
        UpdateInstanceAttributeRequest other = (UpdateInstanceAttributeRequest) obj;
        return java.util.Objects.equals(other.getInstanceId(), this.getInstanceId())
                && java.util.Objects.equals(other.getAttributeType(), this.getAttributeType())
                && java.util.Objects.equals(other.getValue(), this.getValue());
    }

    @Override
    public int hashCode() {
        // Same 31-based accumulation as the hand-rolled version.
        return java.util.Objects.hash(getInstanceId(), getAttributeType(), getValue());
    }

    @Override
    public UpdateInstanceAttributeRequest clone() {
        return (UpdateInstanceAttributeRequest) super.clone();
    }
}
| |
/*
Copyright 1995-2015 Esri
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
For additional information, contact:
Environmental Systems Research Institute, Inc.
Attn: Contracts Dept
380 New York Street
Redlands, California, USA 92373
email: contracts@esri.com
*/
package com.esri.core.geometry;
import com.esri.core.geometry.VertexDescription.Semantics;
import java.io.IOException;
import java.util.Map;
/**
 * Cursor that exports each {@link Geometry} produced by an input
 * {@code GeometryCursor} to its Esri JSON string representation.
 *
 * <p>Supported export property: {@code "numberOfDecimalsXY"} (a Number) fixes
 * the number of decimals written for x/y coordinates; when absent, coordinates
 * are written in free format.
 */
class OperatorExportToJsonCursor extends JsonCursor {

    GeometryCursor m_inputGeometryCursor;
    SpatialReference m_spatialReference;
    int m_index; // geometry ID of the most recently exported geometry, -1 before first next()

    /**
     * @param spatialReference spatial reference embedded in each exported JSON
     *                         object; may be null (no "spatialReference" member)
     * @param geometryCursor   source of geometries; must not be null
     */
    public OperatorExportToJsonCursor(SpatialReference spatialReference, GeometryCursor geometryCursor) {
        m_index = -1;
        if (geometryCursor == null) {
            throw new IllegalArgumentException();
        }
        m_inputGeometryCursor = geometryCursor;
        m_spatialReference = spatialReference;
    }

    @Override
    public int getID() {
        return m_index;
    }

    /** Returns the JSON for the next input geometry, or null when the input cursor is exhausted. */
    @Override
    public String next() {
        Geometry geometry;
        if ((geometry = m_inputGeometryCursor.next()) != null) {
            m_index = m_inputGeometryCursor.getGeometryID();
            return exportToString(geometry, m_spatialReference, null);
        }
        return null;
    }

    /**
     * Exports a single geometry to an Esri JSON string.
     *
     * @param exportProperties optional export settings; may be null
     */
    static String exportToString(Geometry geometry, SpatialReference spatialReference, Map<String, Object> exportProperties) {
        JsonWriter jsonWriter = new JsonStringWriter();
        exportToJson_(geometry, spatialReference, jsonWriter, exportProperties);
        return (String) jsonWriter.getJson();
    }

    /**
     * Dispatches to the per-type export routine.
     *
     * <p>FIX: the original wrapped this dispatch in an empty
     * {@code catch (Exception e) {}}, which silently produced truncated JSON on
     * any failure and made the deliberate "not implemented" error below
     * unreachable by callers. Exceptions now propagate: runtime exceptions
     * as-is, checked exceptions wrapped with their cause preserved.
     */
    private static void exportToJson_(Geometry geometry, SpatialReference spatialReference, JsonWriter jsonWriter, Map<String, Object> exportProperties) {
        try {
            int type = geometry.getType().value();
            switch (type) {
            case Geometry.GeometryType.Point:
                exportPointToJson((Point) geometry, spatialReference, jsonWriter, exportProperties);
                break;
            case Geometry.GeometryType.MultiPoint:
                exportMultiPointToJson((MultiPoint) geometry, spatialReference, jsonWriter, exportProperties);
                break;
            case Geometry.GeometryType.Polyline:
                exportPolylineToJson((Polyline) geometry, spatialReference, jsonWriter, exportProperties);
                break;
            case Geometry.GeometryType.Polygon:
                exportPolygonToJson((Polygon) geometry, spatialReference, jsonWriter, exportProperties);
                break;
            case Geometry.GeometryType.Envelope:
                exportEnvelopeToJson((Envelope) geometry, spatialReference, jsonWriter, exportProperties);
                break;
            default:
                throw new RuntimeException("not implemented for this geometry type");
            }
        } catch (RuntimeException e) {
            throw e;
        } catch (Exception e) {
            throw new RuntimeException("export to JSON failed", e);
        }
    }

    /**
     * Reads the optional {@code "numberOfDecimalsXY"} export property.
     *
     * @return the requested decimal count, or null when the property is absent
     *         or not a {@link Number} (callers then use free-format output)
     */
    private static Integer readDecimalsXY(Map<String, Object> exportProperties) {
        if (exportProperties != null) {
            Object numberOfDecimalsXY = exportProperties.get("numberOfDecimalsXY");
            if (numberOfDecimalsXY instanceof Number) {
                return ((Number) numberOfDecimalsXY).intValue();
            }
        }
        return null;
    }

    private static void exportPolygonToJson(Polygon pp, SpatialReference spatialReference, JsonWriter jsonWriter, Map<String, Object> exportProperties) {
        exportPolypathToJson(pp, "rings", spatialReference, jsonWriter, exportProperties);
    }

    private static void exportPolylineToJson(Polyline pp, SpatialReference spatialReference, JsonWriter jsonWriter, Map<String, Object> exportProperties) {
        exportPolypathToJson(pp, "paths", spatialReference, jsonWriter, exportProperties);
    }

    /**
     * Shared exporter for polygons ("rings") and polylines ("paths"). Closed
     * paths whose last written vertex differs from the first (in position, z
     * or m) get the start vertex repeated so the JSON ring/path is explicitly
     * closed.
     */
    private static void exportPolypathToJson(MultiPath pp, String name, SpatialReference spatialReference, JsonWriter jsonWriter, Map<String, Object> exportProperties) {
        boolean bExportZs = pp.hasAttribute(Semantics.Z);
        boolean bExportMs = pp.hasAttribute(Semantics.M);
        Integer decimalsXY = readDecimalsXY(exportProperties);
        boolean bPositionAsF = decimalsXY != null;
        int decimals = bPositionAsF ? decimalsXY.intValue() : 17;
        jsonWriter.startObject();
        if (bExportZs) {
            jsonWriter.addPairBoolean("hasZ", true);
        }
        if (bExportMs) {
            jsonWriter.addPairBoolean("hasM", true);
        }
        jsonWriter.addPairArray(name);
        if (!pp.isEmpty()) {
            int n = pp.getPathCount(); // rings or paths
            MultiPathImpl mpImpl = (MultiPathImpl) pp._getImpl(); // impl gives faster vertex access
            AttributeStreamOfDbl zs = null;
            AttributeStreamOfDbl ms = null;
            if (bExportZs) {
                zs = (AttributeStreamOfDbl) mpImpl.getAttributeStreamRef(Semantics.Z);
            }
            if (bExportMs) {
                ms = (AttributeStreamOfDbl) mpImpl.getAttributeStreamRef(Semantics.M);
            }
            Point2D pt = new Point2D();
            for (int i = 0; i < n; i++) {
                jsonWriter.addValueArray();
                int startindex = pp.getPathStart(i);
                int numVertices = pp.getPathSize(i);
                double startx = 0.0, starty = 0.0, startz = NumberUtils.NaN(), startm = NumberUtils.NaN();
                double z = NumberUtils.NaN(), m = NumberUtils.NaN();
                boolean bClosed = pp.isClosedPath(i);
                for (int j = startindex; j < startindex + numVertices; j++) {
                    pp.getXY(j, pt);
                    jsonWriter.addValueArray();
                    if (bPositionAsF) {
                        jsonWriter.addValueDouble(pt.x, decimals, true);
                        jsonWriter.addValueDouble(pt.y, decimals, true);
                    } else {
                        jsonWriter.addValueDouble(pt.x);
                        jsonWriter.addValueDouble(pt.y);
                    }
                    if (bExportZs) {
                        z = zs.get(j);
                        jsonWriter.addValueDouble(z);
                    }
                    if (bExportMs) {
                        m = ms.get(j);
                        jsonWriter.addValueDouble(m);
                    }
                    if (j == startindex && bClosed) {
                        // Remember the first vertex so we can close the ring/path below.
                        startx = pt.x;
                        starty = pt.y;
                        startz = z;
                        startm = m;
                    }
                    jsonWriter.endArray();
                }
                // Close the path/ring by writing the vertex at the start index again
                // when the last written vertex differs from it.
                if (bClosed && (startx != pt.x || starty != pt.y
                        || (bExportZs && !(NumberUtils.isNaN(startz) && NumberUtils.isNaN(z)) && startz != z)
                        || (bExportMs && !(NumberUtils.isNaN(startm) && NumberUtils.isNaN(m)) && startm != m))) {
                    pp.getXY(startindex, pt);
                    jsonWriter.addValueArray();
                    if (bPositionAsF) {
                        jsonWriter.addValueDouble(pt.x, decimals, true);
                        jsonWriter.addValueDouble(pt.y, decimals, true);
                    } else {
                        jsonWriter.addValueDouble(pt.x);
                        jsonWriter.addValueDouble(pt.y);
                    }
                    if (bExportZs) {
                        z = zs.get(startindex);
                        jsonWriter.addValueDouble(z);
                    }
                    if (bExportMs) {
                        m = ms.get(startindex);
                        jsonWriter.addValueDouble(m);
                    }
                    jsonWriter.endArray();
                }
                jsonWriter.endArray();
            }
        }
        jsonWriter.endArray();
        if (spatialReference != null) {
            writeSR(spatialReference, jsonWriter);
        }
        jsonWriter.endObject();
    }

    private static void exportMultiPointToJson(MultiPoint mpt, SpatialReference spatialReference, JsonWriter jsonWriter, Map<String, Object> exportProperties) {
        boolean bExportZs = mpt.hasAttribute(Semantics.Z);
        boolean bExportMs = mpt.hasAttribute(Semantics.M);
        Integer decimalsXY = readDecimalsXY(exportProperties);
        boolean bPositionAsF = decimalsXY != null;
        int decimals = bPositionAsF ? decimalsXY.intValue() : 17;
        jsonWriter.startObject();
        if (bExportZs) {
            jsonWriter.addPairBoolean("hasZ", true);
        }
        if (bExportMs) {
            jsonWriter.addPairBoolean("hasM", true);
        }
        jsonWriter.addPairArray("points");
        if (!mpt.isEmpty()) {
            MultiPointImpl mpImpl = (MultiPointImpl) mpt._getImpl(); // impl gives faster vertex access
            AttributeStreamOfDbl zs = null;
            AttributeStreamOfDbl ms = null;
            if (bExportZs) {
                zs = (AttributeStreamOfDbl) mpImpl.getAttributeStreamRef(Semantics.Z);
            }
            if (bExportMs) {
                ms = (AttributeStreamOfDbl) mpImpl.getAttributeStreamRef(Semantics.M);
            }
            Point2D pt = new Point2D();
            int n = mpt.getPointCount();
            for (int i = 0; i < n; i++) {
                mpt.getXY(i, pt);
                jsonWriter.addValueArray();
                if (bPositionAsF) {
                    jsonWriter.addValueDouble(pt.x, decimals, true);
                    jsonWriter.addValueDouble(pt.y, decimals, true);
                } else {
                    jsonWriter.addValueDouble(pt.x);
                    jsonWriter.addValueDouble(pt.y);
                }
                if (bExportZs) {
                    double z = zs.get(i);
                    jsonWriter.addValueDouble(z);
                }
                if (bExportMs) {
                    double m = ms.get(i);
                    jsonWriter.addValueDouble(m);
                }
                jsonWriter.endArray();
            }
        }
        jsonWriter.endArray();
        if (spatialReference != null) {
            writeSR(spatialReference, jsonWriter);
        }
        jsonWriter.endObject();
    }

    /** Exports a point; an empty point is written with null x/y (and z/m) members. */
    private static void exportPointToJson(Point pt, SpatialReference spatialReference, JsonWriter jsonWriter, Map<String, Object> exportProperties) {
        boolean bExportZs = pt.hasAttribute(Semantics.Z);
        boolean bExportMs = pt.hasAttribute(Semantics.M);
        Integer decimalsXY = readDecimalsXY(exportProperties);
        boolean bPositionAsF = decimalsXY != null;
        int decimals = bPositionAsF ? decimalsXY.intValue() : 17;
        jsonWriter.startObject();
        if (pt.isEmpty()) {
            jsonWriter.addPairNull("x");
            jsonWriter.addPairNull("y");
            if (bExportZs) {
                jsonWriter.addPairNull("z");
            }
            if (bExportMs) {
                jsonWriter.addPairNull("m");
            }
        } else {
            if (bPositionAsF) {
                jsonWriter.addPairDouble("x", pt.getX(), decimals, true);
                jsonWriter.addPairDouble("y", pt.getY(), decimals, true);
            } else {
                jsonWriter.addPairDouble("x", pt.getX());
                jsonWriter.addPairDouble("y", pt.getY());
            }
            if (bExportZs) {
                jsonWriter.addPairDouble("z", pt.getZ());
            }
            if (bExportMs) {
                jsonWriter.addPairDouble("m", pt.getM());
            }
        }
        if (spatialReference != null) {
            writeSR(spatialReference, jsonWriter);
        }
        jsonWriter.endObject();
    }

    /** Exports an envelope; an empty envelope is written with null min/max members. */
    private static void exportEnvelopeToJson(Envelope env, SpatialReference spatialReference, JsonWriter jsonWriter, Map<String, Object> exportProperties) {
        boolean bExportZs = env.hasAttribute(Semantics.Z);
        boolean bExportMs = env.hasAttribute(Semantics.M);
        Integer decimalsXY = readDecimalsXY(exportProperties);
        boolean bPositionAsF = decimalsXY != null;
        int decimals = bPositionAsF ? decimalsXY.intValue() : 17;
        jsonWriter.startObject();
        if (env.isEmpty()) {
            jsonWriter.addPairNull("xmin");
            jsonWriter.addPairNull("ymin");
            jsonWriter.addPairNull("xmax");
            jsonWriter.addPairNull("ymax");
            if (bExportZs) {
                jsonWriter.addPairNull("zmin");
                jsonWriter.addPairNull("zmax");
            }
            if (bExportMs) {
                jsonWriter.addPairNull("mmin");
                jsonWriter.addPairNull("mmax");
            }
        } else {
            if (bPositionAsF) {
                jsonWriter.addPairDouble("xmin", env.getXMin(), decimals, true);
                jsonWriter.addPairDouble("ymin", env.getYMin(), decimals, true);
                jsonWriter.addPairDouble("xmax", env.getXMax(), decimals, true);
                jsonWriter.addPairDouble("ymax", env.getYMax(), decimals, true);
            } else {
                jsonWriter.addPairDouble("xmin", env.getXMin());
                jsonWriter.addPairDouble("ymin", env.getYMin());
                jsonWriter.addPairDouble("xmax", env.getXMax());
                jsonWriter.addPairDouble("ymax", env.getYMax());
            }
            if (bExportZs) {
                Envelope1D z = env.queryInterval(Semantics.Z, 0);
                jsonWriter.addPairDouble("zmin", z.vmin);
                jsonWriter.addPairDouble("zmax", z.vmax);
            }
            if (bExportMs) {
                Envelope1D m = env.queryInterval(Semantics.M, 0);
                jsonWriter.addPairDouble("mmin", m.vmin);
                jsonWriter.addPairDouble("mmax", m.vmax);
            }
        }
        if (spatialReference != null) {
            writeSR(spatialReference, jsonWriter);
        }
        jsonWriter.endObject();
    }

    /**
     * Writes the "spatialReference" member: by well-known ID when available
     * (plus "latestWkid" when it differs), otherwise by WKT text when present.
     */
    private static void writeSR(SpatialReference spatialReference, JsonWriter jsonWriter) {
        int wkid = spatialReference.getOldID();
        if (wkid > 0) {
            jsonWriter.addPairObject("spatialReference");
            jsonWriter.addPairInt("wkid", wkid);
            int latest_wkid = spatialReference.getLatestID();
            if (latest_wkid > 0 && latest_wkid != wkid) {
                jsonWriter.addPairInt("latestWkid", latest_wkid);
            }
            jsonWriter.endObject();
        } else {
            String wkt = spatialReference.getText();
            if (wkt != null) {
                jsonWriter.addPairObject("spatialReference");
                jsonWriter.addPairString("wkt", wkt);
                jsonWriter.endObject();
            }
        }
    }
}
| |
package com.kns.adapter;
import java.util.ArrayList;
import java.util.List;
import android.app.Activity;
import android.content.Context;
import android.graphics.Bitmap;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
import com.kns.model.MultiSelectVideoModel;
import com.kns.util.ImageUtil;
import com.squareup.picasso.Picasso;
import com.squareup.picasso.Transformation;
import com.sunil.selectmutiple.R;
public class Muti_SelectVideoAdapter extends BaseAdapter{
private static final String TAG="Muti_SelectVideoAdapter";
private LayoutInflater mInflater=null;
private List<MultiSelectVideoModel> videolist=null;
private Context context=null;
private Bitmap bitmap=null;
private Button btn_update;
private boolean isActionMultiplePick;
public Muti_SelectVideoAdapter(Activity context, List<MultiSelectVideoModel> list){
mInflater = context.getLayoutInflater();
this.videolist=list;
this.context=context;
}
@Override
public int getCount() {
return videolist.size();
}
@Override
public Object getItem(int position) {
return null;
}
@Override
public long getItemId(int position) {
return 0;
}
public void setMultiplePick(boolean isMultiplePick) {
this.isActionMultiplePick = isMultiplePick;
}
public void selectAll(boolean selection) {
for (int i = 0; i < videolist.size(); i++) {
videolist.get(i).isSeleted = selection;
}
notifyDataSetChanged();
}
public void selectTencheck(boolean selection) {
if (videolist.size() > 10) {
for (int i = 0; i < 10; i++) {
videolist.get(i).isSeleted = selection;
}
notifyDataSetChanged();
}
else{
Toast.makeText(context, "There are less than 10 videos", Toast.LENGTH_LONG).show();
}
}
public void selectTwentycheck(boolean selection) {
if (videolist.size() > 20) {
for (int i = 0; i < 20; i++) {
videolist.get(i).isSeleted = selection;
}
notifyDataSetChanged();
}
else{
Toast.makeText(context, "There are less than 20 videos", Toast.LENGTH_LONG).show();
}
}
public void selectFivecheck(boolean selection) {
if (videolist.size() > 5) {
for (int i = 0; i < 5; i++) {
videolist.get(i).isSeleted = selection;
}
notifyDataSetChanged();
}
else{
Toast.makeText(context, "There are less than 5 videos", Toast.LENGTH_LONG).show();
}
}
public void unckeckedAll(boolean selection) {
for (int i = 0; i < videolist.size(); i++) {
videolist.get(i).isSeleted = selection;
}
notifyDataSetChanged();
}
public boolean isAllSelected() {
boolean isAllSelected = true;
for (int i = 0; i < videolist.size(); i++) {
if (!videolist.get(i).isSeleted) {
isAllSelected = false;
break;
}
}
return isAllSelected;
}
public boolean isAnySelected() {
boolean isAnySelected = false;
for (int i = 0; i < videolist.size(); i++) {
if (videolist.get(i).isSeleted) {
isAnySelected = true;
break;
}
}
return isAnySelected;
}
public ArrayList<MultiSelectVideoModel> getSelected() {
ArrayList<MultiSelectVideoModel> dataT = new ArrayList<MultiSelectVideoModel>();
for (int i = 0; i < videolist.size(); i++) {
if (videolist.get(i).isSeleted) {
dataT.add(videolist.get(i));
}
}
return dataT;
}
public void changeSelection(View v, int position) {
if (videolist.get(position).isSeleted) {
videolist.get(position).isSeleted = false;
} else {
videolist.get(position).isSeleted = true;
}
((ViewHolder) v.getTag()).imgQueueMultiSelected.setSelected(videolist.get(position).isSeleted);
}
@Override
public View getView(int position, View convertView, ViewGroup parent) {
final ViewHolder holder;
if (convertView == null ) {
holder = new ViewHolder();
convertView = mInflater.inflate(R.layout.multiselectvideo_itemrow, null);
holder.imgQueue = (ImageView) convertView.findViewById(R.id.imgQueue);
holder.aprovestatus = (TextView) convertView.findViewById(R.id.textView_videoapprove);
holder.videoname = (TextView) convertView.findViewById(R.id.textView_videoname);
holder.imgQueueMultiSelected = (ImageView) convertView.findViewById(R.id.imgQueueMultiSelected);;
if (isActionMultiplePick) {
holder.imgQueueMultiSelected.setVisibility(View.VISIBLE);
} else {
holder.imgQueueMultiSelected.setVisibility(View.GONE);
}
convertView.setTag(holder);
}
else {
holder = (ViewHolder) convertView.getTag();
}
holder.imgQueue.setTag(position);
MultiSelectVideoModel imagemodel=videolist.get(position);
//String imageurl=imagemodel.getVideothumburl0();
String videourl=imagemodel.getVideorealurl();
String updatedvideothumb=imagemodel.getUpdatedvideothumb();
//holder.image.setDefaultImageResId(R.drawable.no_video);
String AdminApprovedFlag=imagemodel.getAdminApprovedFlag();
String videoname=imagemodel.getVideoname();
if (videoname.trim().equalsIgnoreCase("")) {
holder.videoname.setText("Not yet named");
}
else{
holder.videoname.setText(videoname);
}
if (AdminApprovedFlag.trim().equalsIgnoreCase("1")) {
holder.aprovestatus.setText("Live");
}else{
holder.aprovestatus.setText("Pending");
}
//Log.v(TAG, "Updated thumb is: "+ updatedvideothumb);
ImageUtil.galleryLog(TAG, "Updated thumb is: "+ updatedvideothumb);
/*Transformation transformation = new Transformation() {
@Override public Bitmap transform(Bitmap source) {
int targetWidth = holder.imgQueue.getWidth();
double aspectRatio = (double) source.getHeight() / (double) source.getWidth();
int targetHeight = (int) (targetWidth * aspectRatio);
Bitmap result = Bitmap.createScaledBitmap(source, targetWidth, targetHeight, false);
if (result != source) {
// Same bitmap is returned if sizes are the same
source.recycle();
}
return result;
}
@Override public String key() {
return "transformation" + " desiredWidth";
}
};*/
if (updatedvideothumb!=null && !updatedvideothumb.isEmpty()) {
Picasso.with(context)
.load(updatedvideothumb)
.resize(150, 150)
.centerInside()
//.transform(transformation)
.placeholder(R.drawable.ic_dwnloadthumb)
.error(R.drawable.ic_nothumb)
.into(holder.imgQueue);
}
else{
holder.imgQueue.setImageResource(R.drawable.no_image);
}
if (isActionMultiplePick) {
holder.imgQueueMultiSelected.setSelected(videolist.get(position).isSeleted);
}
return convertView;
}
private static class ViewHolder {
ImageView imgQueue;
ImageView imgQueueMultiSelected;
TextView aprovestatus;
TextView videoname;
}
}
| |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package Export;
import com.itextpdf.text.BaseColor;
import com.itextpdf.text.Document;
import com.itextpdf.text.DocumentException;
import com.itextpdf.text.Element;
import com.itextpdf.text.Font;
import com.itextpdf.text.FontFactory;
import com.itextpdf.text.Image;
import com.itextpdf.text.PageSize;
import com.itextpdf.text.Phrase;
import com.itextpdf.text.pdf.BaseFont;
import com.itextpdf.text.pdf.PdfPCell;
import com.itextpdf.text.pdf.PdfPTable;
import com.itextpdf.text.pdf.PdfWriter;
import conexao.Call;
import dao.ViagemDao;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.sql.ResultSet;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.function.Consumer;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.primefaces.context.RequestContext;
/**
 * Builds the weekly travel-insurance report ("Mapa Viagem Semanal") as a PDF
 * under the user's document directory and asks the UI layer to open it.
 *
 * <p>FIXES: the {@code FileOutputStream} is now managed with
 * try-with-resources (the original leaked it when document building threw
 * before {@code close()}), and typos in user-facing report labels were
 * corrected ("AL AMOUNT" → "TOTAL AMOUNT", "RLELATORIO" → "RELATORIO",
 * "VENFICADO" → "VERIFICADO").
 *
 * @author ahmedjorge
 */
public class ExportViagemSemanaPdf {

    // Keys of the row map produced by dataViagem() / ViagemDao.relatorioTravel().
    static final String DATA = "DATA", APOLICE = "APOLICE", INICIO = "INICIO", FIM = "FIM", DIAS = "DIAS", CLIENTE = "CLIENTE", RECIBO = "RECIBO", PREMIO = "PREMIO",
            CONSUMO = "CONSUMO", SELO = "SELO", NETOUT = "NET OUT", TOTAL = "TOTAL", COMISSAO = "COMISAO", IMP_CONSUMO = "IMP_CONSUMO", IMP_SELO = "IMP_SELO",
            NUMEROAPOLICE = "NUMERO APOLICE", PAISDESTINO = "PAIS DESTINO", CIDADEDESTINO = "CIDADE DESTINO", ZONADESTINO = "ZONA DESTINO",
            DATANASCIMENTO = "DATA NASCIMENTO", TELEFONE = "TELEFONE", ENDERECO = "ENDERECO", LOCALNASCIMENTO = "LOCAL NASCIMENTO",
            NACIONALIDADE = "NACIONALIDADE";

    /**
     * Generates the weekly report PDF for the given period and triggers the
     * client-side viewer.
     *
     * @param dataInicio      period start (may be null: open start, omitted from the title)
     * @param dateFim         period end (may be null: open end, omitted from the title)
     * @param user            user folder name under the documents directory
     * @param nomeFuncinario  employee name printed in the footer
     */
    public static void criarDoc(Date dataInicio, Date dateFim, String user, String nomeFuncinario) {
        Font fontTableCorpo = FontFactory.getFont(ConfigDoc.Fontes.FONT, BaseFont.WINANSI, BaseFont.EMBEDDED, 6f);
        Font fontTableTitile = FontFactory.getFont(ConfigDoc.Fontes.FONTB, BaseFont.WINANSI, BaseFont.EMBEDDED, 6f);
        Font fontRoadape = FontFactory.getFont(ConfigDoc.Fontes.FONTB, BaseFont.WINANSI, BaseFont.EMBEDDED, 8f);
        Font fontRoadapeP = FontFactory.getFont(ConfigDoc.Fontes.FONTB, BaseFont.WINANSI, BaseFont.EMBEDDED, 6f);
        Font fontRoadapeB = FontFactory.getFont(ConfigDoc.Fontes.FONTB, BaseFont.WINANSI, BaseFont.EMBEDDED, 8f);
        Font fontRoadapeBU = FontFactory.getFont(ConfigDoc.Fontes.FONTB, BaseFont.WINANSI, BaseFont.EMBEDDED, 8f, Font.UNDEFINED);
        Font fontCabecalhoN = FontFactory.getFont(ConfigDoc.Fontes.FONTB, BaseFont.WINANSI, BaseFont.EMBEDDED, 9.5f);
        Font fontCorpoNG = FontFactory.getFont(ConfigDoc.Fontes.FONTB, BaseFont.WINANSI, BaseFont.EMBEDDED, 9.5f);
        Font fontCabecalhoNG = FontFactory.getFont(ConfigDoc.Fontes.FONTB, BaseFont.WINANSI, BaseFont.EMBEDDED, 16f, Font.UNDERLINE);
        try {
            SimpleDateFormat sdf = new SimpleDateFormat("dd-MM-yyyy hh.mm.ss");
            SimpleDateFormat sdfTitile = new SimpleDateFormat("dd-MM-yyyy");
            Document documento = new Document();
            documento.setPageSize(PageSize.A4.rotate());
            documento.setMargins(10f, 10f, 35f, 20f);
            File ff = new File(ConfigDoc.Fontes.getDiretorio() + "/" + user + "/Seguro Viagem/");
            ff.mkdirs();
            String Ddata = sdf.format(new Date());
            ff = new File(ff.getAbsoluteFile() + "/" + "Export Mapa Viagem Semanal " + Ddata + ".pdf");
            String reString = "../Documentos/" + user + "/Seguro Viagem/" + "Export Mapa Viagem Semanal " + Ddata + ".pdf";
            // try-with-resources guarantees the stream is closed even when PDF
            // building throws; the original leaked it on failure.
            try (OutputStream outputStraem = new FileOutputStream(ff)) {
                PdfWriter.getInstance(documento, outputStraem);

                // Data table: 13 columns (header produced by titileTable()).
                PdfPTable tableDados = new PdfPTable(new float[]{5f, 10.6f, 5f, 5f, 4.6f, 19f, 5.6f, 6f, 10.6f, 9.6f, 7f, 5.5f, 5.5f});
                tableDados.setWidthPercentage(100f);
                BaseColor colorCinza = new BaseColor(129, 138, 145);
                for (int j = 0; j < 13; j++) {
                    PdfPCell cellTitileTable = new PdfPCell(new Phrase(titileTable(j), fontTableTitile));
                    cellTitileTable.setHorizontalAlignment(PdfPCell.ALIGN_CENTER);
                    cellTitileTable.setVerticalAlignment(PdfPCell.ALIGN_MIDDLE);
                    cellTitileTable.setBackgroundColor(colorCinza);
                    tableDados.addCell(cellTitileTable);
                }
                dataViagem(dataInicio, dateFim);
                float premiototal = 0;
                // NOTE(review): header column 9 is "NACIONALIDADE" but the row
                // writes LOCALNASCIMENTO in that position — confirm the intended
                // column mapping with the report owner.
                for (HashMap<String, Object> data : hasList) {
                    tableDados.addCell(new Phrase(toString(data.get(DATA)), fontTableCorpo));
                    tableDados.addCell(ExportViagemSemanaPdf.cellEspecial(new PdfPCell(new Phrase(toString(data.get(NUMEROAPOLICE)), fontTableCorpo))));
                    tableDados.addCell(new Phrase(ConfigDoc.toFormat(toString(data.get(INICIO)), "dd-MM-yyyy", "yyyy-MM-dd"), fontTableCorpo));
                    tableDados.addCell(new Phrase(ConfigDoc.toFormat(toString(data.get(FIM)), "dd-MM-yyyy", "yyyy-MM-dd"), fontTableCorpo));
                    premiototal += toFloat(data.get(PREMIO));
                    PdfPCell cellRigh = new PdfPCell(new Phrase(ConfigDoc.toMoeda(toFloat(data.get(PREMIO)), ""), fontTableCorpo));
                    cellRigh.setHorizontalAlignment(PdfPCell.ALIGN_RIGHT);
                    tableDados.addCell(cellRigh);
                    tableDados.addCell(new Phrase(toString(data.get(CLIENTE)), fontTableCorpo));
                    tableDados.addCell(new Phrase(toString(data.get(DATANASCIMENTO)), fontTableCorpo));
                    tableDados.addCell(new Phrase(toString(data.get(TELEFONE)), fontTableCorpo));
                    tableDados.addCell(new Phrase(toString(data.get(ENDERECO)), fontTableCorpo));
                    tableDados.addCell(new Phrase(toString(data.get(LOCALNASCIMENTO)), fontTableCorpo));
                    tableDados.addCell(new Phrase(toString(data.get(PAISDESTINO)), fontTableCorpo));
                    tableDados.addCell(new Phrase(toString(data.get(CIDADEDESTINO)), fontTableCorpo));
                    tableDados.addCell(new Phrase(toString(data.get(ZONADESTINO)), fontTableCorpo));
                }
                // Totals row spanning the full width.
                PdfPCell cellTotal = new PdfPCell(ExportViagemSemanaPdf.cellEspecial(new PdfPCell(new Phrase("TOTAL AMOUNT..........................................", fontTableTitile))));
                cellTotal.setColspan(4);
                cellTotal.setPadding(1.5f);
                cellTotal.setBackgroundColor(colorCinza);
                tableDados.addCell(cellTotal);
                cellTotal = new PdfPCell(ExportViagemSemanaPdf.cellEspecial(new PdfPCell(new Phrase(ConfigDoc.toMoeda(premiototal, ""), fontTableTitile))));
                cellTotal.setPadding(1.5f);
                cellTotal.setBackgroundColor(colorCinza);
                tableDados.addCell(cellTotal);
                cellTotal = new PdfPCell(ExportViagemSemanaPdf.cellEspecial(new PdfPCell(new Phrase(" ", fontTableTitile))));
                cellTotal.setColspan(8);
                cellTotal.setPadding(1.5f);
                cellTotal.setBackgroundColor(colorCinza);
                tableDados.addCell(cellTotal);

                // Letterhead: company logo on the left, address block on the right.
                PdfPTable pTableEmpresaPricipal = new PdfPTable(new float[]{15f, 85f});
                PdfPTable pTableEmpresaInforImpres1 = new PdfPTable(1);
                PdfPTable pTableEmpresaInforImpres5 = new PdfPTable(1);
                PdfPCell pCellNomeEmpresa = new PdfPCell(new Phrase(ConfigDoc.Empresa.NOME, fontCabecalhoNG));
                pCellNomeEmpresa.setBorder(0);
                PdfPCell pCellNomeEndereco = new PdfPCell(new Phrase(ConfigDoc.Empresa.ENDERECO, fontCabecalhoN));
                pCellNomeEndereco.setBorder(0);
                PdfPCell pCellCaixaPostal = new PdfPCell(new Phrase(ConfigDoc.Empresa.CAIXAPOSTAL, fontCabecalhoN));
                pCellCaixaPostal.setBorder(0);
                PdfPCell pCellTeleFax = new PdfPCell(new Phrase(ConfigDoc.Empresa.TELEFAX + " " + ConfigDoc.Empresa.EMAIL, fontCabecalhoN));
                pCellTeleFax.setBorder(0);
                PdfPCell pCellSociedade = new PdfPCell(new Phrase(ConfigDoc.Empresa.SOCIEDADE, fontCabecalhoN));
                pCellSociedade.setBorder(0);
                Image imageEmpresa = Image.getInstance("logo.png");
                imageEmpresa.scaleToFit(120f, 85f);
                pTableEmpresaInforImpres1.addCell(pCellNomeEmpresa);
                pTableEmpresaInforImpres1.addCell(pCellNomeEndereco);
                pTableEmpresaInforImpres1.addCell(pCellCaixaPostal);
                pTableEmpresaInforImpres1.addCell(pCellTeleFax);
                pTableEmpresaInforImpres1.addCell(pCellSociedade);
                PdfPCell cellTabela3 = new PdfPCell(pTableEmpresaInforImpres1);
                cellTabela3.setBorder(0);
                pTableEmpresaInforImpres5.addCell(cellTabela3);
                PdfPCell cellTabela5 = new PdfPCell(pTableEmpresaInforImpres5);
                cellTabela5.setBorder(0);
                PdfPCell cellTabela6 = new PdfPCell(imageEmpresa);
                cellTabela6.setBorder(0);
                pTableEmpresaPricipal.setWidthPercentage(95);
                pTableEmpresaPricipal.addCell(cellTabela6);
                pTableEmpresaPricipal.addCell(cellTabela5);

                // Report title with the (optional) period range.
                PdfPTable pTableTitulo = new PdfPTable(1);
                pTableTitulo.setHorizontalAlignment(Element.ALIGN_CENTER);
                pTableTitulo.setWidthPercentage(100);
                PdfPCell cellTitulo = new PdfPCell(new Phrase("RELATORIO SEMANAL NO. " + "" + "\n" + ((dataInicio != null) ? sdfTitile.format(dataInicio) + " - " : "") + ((dateFim != null) ? sdfTitile.format(dateFim) : ""), fontCorpoNG));
                cellTitulo.setBorder(0);
                cellTitulo.setPaddingBottom(20f);
                cellTitulo.setPaddingTop(10f);
                cellTitulo.setHorizontalAlignment(PdfPCell.ALIGN_CENTER);
                pTableTitulo.addCell(cellTitulo);
                pTableEmpresaPricipal.setHorizontalAlignment(Element.ALIGN_CENTER);

                // Footer: signature lines for preparer and verifier.
                PdfPTable pTableRodape = new PdfPTable(new float[]{50f, 50f});
                pTableRodape.setWidthPercentage(90f);
                PdfPCell cellRodape = new PdfPCell(new Phrase("DEPARTAMENTO FINANCEIRO", fontRoadapeBU));
                cellRodape.setBorder(0);
                cellRodape.setColspan(2);
                cellRodape.setPaddingTop(20f);
                pTableRodape.addCell(cellRodape);
                cellRodape = new PdfPCell(new Phrase("QUEIRA POR FAVOR CONFERIR OS PAGAMENTOS", fontRoadape));
                cellRodape.setColspan(2);
                cellRodape.setBorder(0);
                pTableRodape.addCell(cellRodape);
                cellRodape = new PdfPCell(new Phrase("ELABORADO POR", fontRoadapeB));
                cellRodape.setBorder(0);
                pTableRodape.addCell(cellRodape);
                cellRodape = new PdfPCell(new Phrase("VERIFICADO POR", fontRoadapeB));
                cellRodape.setBorder(0);
                cellRodape.setHorizontalAlignment(PdfPCell.ALIGN_RIGHT);
                pTableRodape.addCell(cellRodape);
                cellRodape = new PdfPCell(new Phrase("................................................", fontRoadape));
                cellRodape.setBorder(0);
                cellRodape.setPaddingTop(30f);
                pTableRodape.addCell(cellRodape);
                cellRodape = new PdfPCell(new Phrase("................................................", fontRoadape));
                cellRodape.setHorizontalAlignment(PdfPCell.ALIGN_RIGHT);
                cellRodape.setBorder(0);
                cellRodape.setPaddingTop(30f);
                pTableRodape.addCell(cellRodape);
                cellRodape = new PdfPCell(new Phrase(nomeFuncinario, fontRoadapeP));
                cellRodape.setColspan(2);
                cellRodape.setBorder(0);
                pTableRodape.addCell(cellRodape);

                documento.open();
                documento.add(pTableEmpresaPricipal);
                documento.add(pTableTitulo);
                documento.add(tableDados);
                documento.add(pTableRodape);
                documento.close();
            }
            RequestContext.getCurrentInstance().execute("openAllDocument('" + reString + "')");
        } catch (FileNotFoundException | DocumentException ex) {
            Logger.getLogger(ExportViagemSemanaPdf.class.getName()).log(Level.SEVERE, null, ex);
        } catch (IOException ex) {
            Logger.getLogger(ExportViagemSemanaPdf.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    public static void main(String[] args) {
        ExportViagemSemanaPdf.criarDoc(null, null, "Ah", "Ahmed Ferreira");
    }

    /**
     * Header title for data-table column {@code i} (0..12); indices outside
     * that range fall through to the last column's title.
     */
    public static String titileTable(int i) {
        switch (i) {
            case 0:
                return "DATA";
            case 1:
                return "APOLICE";
            case 2:
                return "DATA INICIO";
            case 3:
                return "DATA FIM";
            case 4:
                return "PROPOSTA DE EUROP ASSIST.";
            case 5:
                return "NOME";
            case 6:
                return "DATA NASC";
            case 7:
                return "TELEFONE";
            case 8:
                return "MORADA";
            case 9:
                return "NACIONALIDADE";
            case 10:
                return "DESTINO-PAIS";
            case 11:
                return "DESTINO-CIDADE";
            default:
                return "DESTINO-ZONA";
        }
    }

    /** Wraps a cell in a one-column nested table with 3pt padding (layout helper). */
    private static PdfPTable cellEspecial(PdfPCell cellEspcial) {
        PdfPTable pTable = new PdfPTable(1);
        pTable.addCell(cellEspcial);
        cellEspcial.setPadding(3f);
        return pTable;
    }

    // Rows fetched for the current report run; repopulated by dataViagem().
    static ArrayList<HashMap<String, Object>> hasList = new ArrayList<>();

    /** Loads the report rows for the given period into {@link #hasList}. */
    static private void dataViagem(Date dataInicio, Date dataFim) {
        hasList = new ArrayList<>();
        ResultSet rs = ViagemDao.relatorioTravel(dataInicio, dataFim);
        Consumer<HashMap<String, Object>> act = (map) -> {
            hasList.add(new LinkedHashMap<>(map));
        };
        Call.forEchaResultSet(act, rs);
    }

    /**
     * Parses a monetary cell to float; null/empty yields 0.
     * NOTE(review): spaces are replaced with '0' before parsing — presumably to
     * cope with thousand-separator formatting from the DB; confirm intent.
     */
    static private Float toFloat(Object o) {
        return ((o != null && !o.toString().isEmpty()) ? Float.valueOf(o.toString().replace(",", ".").replace(" ", "0")) : 0f);
    }

    /** Null-safe string conversion; null becomes a single space for table cells. */
    static private String toString(Object o) {
        return ((o == null) ? " " : o.toString());
    }
}
| |
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.vcs.log.ui.frame;
import com.google.common.primitives.Ints;
import com.intellij.codeInspection.InspectionProfile;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.editor.colors.EditorColorsListener;
import com.intellij.openapi.editor.colors.EditorColorsScheme;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.progress.util.BackgroundTaskUtil;
import com.intellij.openapi.progress.util.ProgressWindow;
import com.intellij.openapi.roots.ui.componentsList.components.ScrollablePanel;
import com.intellij.openapi.ui.OnePixelDivider;
import com.intellij.openapi.ui.VerticalFlowLayout;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.Conditions;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vcs.ui.FontUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.profile.ProfileChangeAdapter;
import com.intellij.ui.SeparatorComponent;
import com.intellij.ui.components.JBLabel;
import com.intellij.ui.components.JBLoadingPanel;
import com.intellij.ui.components.JBScrollPane;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.MultiMap;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.StatusText;
import com.intellij.vcs.commit.CommitMessageInspectionProfile;
import com.intellij.vcs.log.CommitId;
import com.intellij.vcs.log.Hash;
import com.intellij.vcs.log.VcsCommitMetadata;
import com.intellij.vcs.log.VcsRef;
import com.intellij.vcs.log.data.VcsLogData;
import com.intellij.vcs.log.impl.HashImpl;
import com.intellij.vcs.log.ui.VcsLogColorManager;
import com.intellij.vcs.log.ui.frame.CommitPresentationUtil.CommitPresentation;
import com.intellij.vcs.log.ui.table.CommitSelectionListener;
import com.intellij.vcs.log.ui.table.VcsLogGraphTable;
import com.intellij.vcs.log.util.TroveUtil;
import com.intellij.vcs.log.util.VcsLogUtil;
import gnu.trove.TIntHashSet;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.util.List;
import java.util.*;
import static com.intellij.vcs.log.ui.frame.CommitPresentationUtil.buildPresentation;
/**
 * Panel showing detailed information (message, author, branches, references) for the commits
 * currently selected in the log table. For a multi-selection it stacks one {@link CommitPanel}
 * per commit (capped at {@link #MAX_ROWS}), separated by one-pixel separators.
 * <p>
 * Commit hashes mentioned in commit messages are resolved asynchronously on a pooled thread
 * (see {@link #resolveHashes}); stale results are discarded by comparing {@link #mySelection}
 * by identity.
 *
 * @author Kirill Likhodedov
 */
public class DetailsPanel extends JPanel implements EditorColorsListener, Disposable {
  // At most this many commit panels are built for a multi-selection; the rest is summarized in a label.
  private static final int MAX_ROWS = 50;
  // Lower bound (unscaled) for both dimensions returned by getMinimumSize().
  private static final int MIN_SIZE = 20;
  @NotNull private final VcsLogData myLogData;
  @NotNull private final JScrollPane myScrollPane;
  // Contains alternating CommitPanel and SeparatorComponent children; see getCommitPanel().
  @NotNull private final JPanel myMainContentPanel;
  @NotNull private final StatusText myEmptyText;
  @NotNull private final JBLoadingPanel myLoadingPanel;
  @NotNull private final VcsLogColorManager myColorManager;
  // Table rows currently displayed (at most MAX_ROWS). Replaced wholesale on every selection
  // change; async callbacks compare it by identity to detect that their result is stale.
  @NotNull private List<Integer> mySelection = ContainerUtil.emptyList();
  // Commit indexes of the current selection; used to decide whether to reset scroll position.
  @NotNull private TIntHashSet myCommitIds = new TIntHashSet();
  // Indicator of the running background hash-resolve task, or null when none is in flight.
  @Nullable private ProgressIndicator myResolveIndicator = null;
  public DetailsPanel(@NotNull VcsLogData logData,
                      @NotNull VcsLogColorManager colorManager,
                      @NotNull Disposable parent) {
    myLogData = logData;
    myColorManager = colorManager;
    myScrollPane = new JBScrollPane(ScrollPaneConstants.VERTICAL_SCROLLBAR_AS_NEEDED, ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER);
    myMainContentPanel = new MyMainContentPanel();
    myEmptyText = new StatusText(myMainContentPanel) {
      @Override
      protected boolean isStatusVisible() {
        // Empty text is shown whenever it is non-empty; there is no separate visibility flag.
        return StringUtil.isNotEmpty(getText());
      }
    };
    myMainContentPanel.setLayout(new VerticalFlowLayout(VerticalFlowLayout.TOP, 0, 0, true, false));
    myMainContentPanel.setOpaque(false);
    myScrollPane.setViewportView(myMainContentPanel);
    myScrollPane.setBorder(JBUI.Borders.empty());
    myScrollPane.setViewportBorder(JBUI.Borders.empty());
    myLoadingPanel = new JBLoadingPanel(new BorderLayout(), parent, ProgressWindow.DEFAULT_PROGRESS_DIALOG_POSTPONE_TIME_MILLIS) {
      @Override
      public Color getBackground() {
        return CommitPanel.getCommitDetailsBackground();
      }
    };
    myLoadingPanel.add(myScrollPane);
    setLayout(new BorderLayout());
    add(myLoadingPanel, BorderLayout.CENTER);
    // Re-render commit messages when the commit-message inspection profile changes.
    logData.getProject().getMessageBus().connect(this).subscribe(ProfileChangeAdapter.TOPIC, new ProfileChangeAdapter() {
      @Override
      public void profileChanged(@Nullable InspectionProfile profile) {
        if (CommitMessageInspectionProfile.getInstance(myLogData.getProject()).equals(profile)) {
          // only update after settings dialog is closed and settings are actually applied
          ApplicationManager.getApplication().invokeLater(DetailsPanel.this::update, ModalityState.NON_MODAL);
        }
      }
    });
    myEmptyText.setText("Commit details");
    Disposer.register(parent, this);
  }
  @Override
  public void globalSchemeChange(EditorColorsScheme scheme) {
    // Colors changed: repaint/refresh all visible commit panels.
    update();
  }
  /** Refreshes every currently shown commit panel. */
  private void update() {
    for (int i = 0; i < mySelection.size(); i++) {
      CommitPanel commitPanel = getCommitPanel(i);
      commitPanel.update();
    }
  }
  @Override
  public Color getBackground() {
    return CommitPanel.getCommitDetailsBackground();
  }
  public void installCommitSelectionListener(@NotNull VcsLogGraphTable graphTable) {
    graphTable.getSelectionModel().addListSelectionListener(new CommitSelectionListenerForDetails(graphTable));
  }
  /** Refreshes only the branch information of the shown commit panels. */
  public void branchesChanged() {
    for (int i = 0; i < mySelection.size(); i++) {
      CommitPanel commitPanel = getCommitPanel(i);
      commitPanel.updateBranches();
    }
  }
  // Hook for subclasses: navigate to the given commit (e.g. when a hash link is clicked).
  protected void navigate(@NotNull CommitId commitId) {
  }
  /**
   * (Re)builds the stack of commit panels to match the new selection, reusing existing
   * components where possible, and records the selection in {@link #mySelection}.
   */
  private void rebuildCommitPanels(int[] selection) {
    myEmptyText.setText("");
    int selectionLength = selection.length;
    // for each commit besides the first there are two components: Separator and CommitPanel
    int existingCount = (myMainContentPanel.getComponentCount() + 1) / 2;
    int requiredCount = Math.min(selectionLength, MAX_ROWS);
    for (int i = existingCount; i < requiredCount; i++) {
      if (i > 0) {
        myMainContentPanel.add(new SeparatorComponent(0, OnePixelDivider.BACKGROUND, null));
      }
      myMainContentPanel.add(new CommitPanel(myLogData, myColorManager, this::navigate));
    }
    // clear superfluous items
    while (myMainContentPanel.getComponentCount() > 2 * requiredCount - 1) {
      myMainContentPanel.remove(myMainContentPanel.getComponentCount() - 1);
    }
    if (selectionLength > MAX_ROWS) {
      // Selection is truncated: append a trailing note with the real count.
      myMainContentPanel.add(new SeparatorComponent(0, OnePixelDivider.BACKGROUND, null));
      JBLabel label = new JBLabel("(showing " + MAX_ROWS + " of " + selectionLength + " selected commits)");
      label.setFont(FontUtil.getCommitMetadataFont());
      label.setBorder(JBUI.Borders.emptyLeft(CommitPanel.SIDE_BORDER));
      myMainContentPanel.add(label);
    }
    // New list identity intentionally invalidates any pending async results for the old selection.
    mySelection = Ints.asList(Arrays.copyOf(selection, requiredCount));
    repaint();
  }
  /**
   * Resolves hash strings mentioned in commit messages to actual commits on a pooled thread,
   * then re-applies the resolved presentations on the EDT. The result is dropped if a newer
   * resolve task replaced this one or if {@code expired} says the data is stale.
   *
   * @param unResolvedHashes hashes not yet known; mutated by this method on the pooled thread
   */
  private void resolveHashes(@NotNull List<? extends CommitId> ids,
                             @NotNull List<? extends CommitPresentation> presentations,
                             @NotNull Set<String> unResolvedHashes,
                             @NotNull Condition<Object> expired) {
    if (!unResolvedHashes.isEmpty()) {
      myResolveIndicator = BackgroundTaskUtil.executeOnPooledThread(this, () -> {
        MultiMap<String, CommitId> resolvedHashes = MultiMap.createSmart();
        // Full-length hashes can be checked directly against the storage, per root.
        Set<String> fullHashes = new HashSet<>(ContainerUtil.filter(unResolvedHashes, h -> h.length() == VcsLogUtil.FULL_HASH_LENGTH));
        for (String fullHash : fullHashes) {
          Hash hash = HashImpl.build(fullHash);
          for (VirtualFile root : myLogData.getRoots()) {
            CommitId id = new CommitId(hash, root);
            if (myLogData.getStorage().containsCommit(id)) {
              resolvedHashes.putValue(fullHash, id);
            }
          }
        }
        unResolvedHashes.removeAll(fullHashes);
        if (!unResolvedHashes.isEmpty()) {
          // Abbreviated hashes require a prefix scan over all known commits.
          myLogData.getStorage().iterateCommits(commitId -> {
            for (String hashString : unResolvedHashes) {
              if (StringUtil.startsWithIgnoreCase(commitId.getHash().asString(), hashString)) {
                resolvedHashes.putValue(hashString, commitId);
              }
            }
            // NOTE(review): presumably false means "continue iterating" here — confirm
            // against VcsLogStorage.iterateCommits before relying on it.
            return false;
          });
        }
        List<CommitPresentation> resolvedPresentations = ContainerUtil.map2List(presentations,
                                                                                presentation -> presentation.resolve(resolvedHashes));
        ProgressIndicator indicator = ProgressManager.getInstance().getProgressIndicator();
        // Apply on the EDT unless this task was superseded (myResolveIndicator changed) or expired.
        ApplicationManager.getApplication().invokeLater(() -> {
                                                          myResolveIndicator = null;
                                                          setPresentations(ids, resolvedPresentations);
                                                        },
                                                        Conditions.or(o -> myResolveIndicator != indicator, expired));
      });
    }
  }
  /** Cancels the in-flight hash-resolve task, if any. */
  private void cancelResolve() {
    if (myResolveIndicator != null) {
      myResolveIndicator.cancel();
      myResolveIndicator = null;
    }
  }
  private void setPresentations(@NotNull List<? extends CommitId> ids,
                                @NotNull List<? extends CommitPresentation> presentations) {
    assert ids.size() == presentations.size();
    for (int i = 0; i < mySelection.size(); i++) {
      CommitPanel commitPanel = getCommitPanel(i);
      commitPanel.setCommit(ids.get(i), presentations.get(i));
    }
  }
  @NotNull
  private CommitPanel getCommitPanel(int index) {
    // Children alternate CommitPanel / Separator, so panel i sits at component index 2*i.
    return (CommitPanel)myMainContentPanel.getComponent(2 * index);
  }
  @Override
  public Dimension getMinimumSize() {
    Dimension minimumSize = super.getMinimumSize();
    return new Dimension(Math.max(minimumSize.width, JBUI.scale(MIN_SIZE)), Math.max(minimumSize.height, JBUI.scale(MIN_SIZE)));
  }
  @Override
  public void dispose() {
    cancelResolve();
  }
  /** Reacts to table selection changes by loading details and (re)building commit panels. */
  private class CommitSelectionListenerForDetails extends CommitSelectionListener<VcsCommitMetadata> {
    CommitSelectionListenerForDetails(VcsLogGraphTable graphTable) {
      super(graphTable, DetailsPanel.this.myLogData.getMiniDetailsGetter());
    }
    @Override
    protected void onDetailsLoaded(@NotNull List<? extends VcsCommitMetadata> detailsList) {
      List<CommitId> ids = ContainerUtil.map(detailsList,
                                             detail -> new CommitId(detail.getId(), detail.getRoot()));
      // buildPresentation collects hashes it could not resolve into unResolvedHashes.
      Set<String> unResolvedHashes = ContainerUtil.newHashSet();
      List<CommitPresentation> presentations = ContainerUtil.map(detailsList,
                                                                 detail -> buildPresentation(myLogData.getProject(), detail,
                                                                                             unResolvedHashes));
      setPresentations(ids, presentations);
      // Keep the scroll position if the new selection overlaps the old one; otherwise jump to top.
      TIntHashSet newCommitIds = TroveUtil.map2IntSet(detailsList, c -> myLogData.getStorage().getCommitIndex(c.getId(), c.getRoot()));
      if (!TroveUtil.intersects(myCommitIds, newCommitIds)) {
        myScrollPane.getVerticalScrollBar().setValue(0);
      }
      myCommitIds = newCommitIds;
      // Identity check makes resolve results expire as soon as the selection list is replaced.
      List<Integer> currentSelection = mySelection;
      resolveHashes(ids, presentations, unResolvedHashes, o -> currentSelection != mySelection);
    }
    @Override
    protected void onSelection(@NotNull int[] selection) {
      cancelResolve();
      rebuildCommitPanels(selection);
      List<Integer> currentSelection = mySelection;
      // Refs are gathered off the EDT, then applied only if the selection has not changed meanwhile.
      ApplicationManager.getApplication().executeOnPooledThread(() -> {
        List<Collection<VcsRef>> result = ContainerUtil.newArrayList();
        for (Integer row : currentSelection) {
          result.add(myGraphTable.getModel().getRefsAtRow(row));
        }
        ApplicationManager.getApplication().invokeLater(() -> {
          if (currentSelection == mySelection) {
            for (int i = 0; i < currentSelection.size(); i++) {
              CommitPanel commitPanel = getCommitPanel(i);
              commitPanel.setRefs(result.get(i));
            }
          }
        });
      });
    }
    @Override
    protected void onEmptySelection() {
      cancelResolve();
      setEmpty("No commits selected");
    }
    @NotNull
    @Override
    protected List<Integer> getSelectionToLoad() {
      return mySelection;
    }
    @Override
    protected void startLoading() {
      myLoadingPanel.startLoading();
    }
    @Override
    protected void stopLoading() {
      myLoadingPanel.stopLoading();
    }
    @Override
    protected void onError(@NotNull Throwable error) {
      setEmpty("Error loading commits");
    }
    /** Clears all panels and shows the given status text instead. */
    private void setEmpty(@NotNull String text) {
      myEmptyText.setText(text);
      myMainContentPanel.removeAll();
      mySelection = ContainerUtil.emptyList();
      myCommitIds = new TIntHashSet();
    }
  }
  private class MyMainContentPanel extends ScrollablePanel {
    @Override
    public Insets getInsets() {
      // to fight ViewBorder
      return JBUI.emptyInsets();
    }
    @Override
    public Color getBackground() {
      return CommitPanel.getCommitDetailsBackground();
    }
    @Override
    protected void paintChildren(Graphics g) {
      // When status text is present, it replaces the children entirely.
      if (StringUtil.isNotEmpty(myEmptyText.getText())) {
        myEmptyText.paint(this, g);
      }
      else {
        super.paintChildren(g);
      }
    }
  }
}
| |
package com.dukenlidb.nlidb.archive.model;
import java.util.List;
/**
 * Validates a natural-language-query parse tree against the grammar:
 * <pre>
 * Q                -> (SClause)(ComplexCondition)*
 * SClause          -> SELECT + GNP
 * ComplexCondition -> ON + (LeftSubTree*RightSubTree)
 * LeftSubTree      -> GNP
 * RightSubTree     -> GNP | VN | FN
 * GNP              -> (FN + GNP) | NP
 * NP               -> NN + (NN)*(Condition)*
 * Condition        -> VN | (ON + VN)
 * </pre>
 * where {@code +} is a parent-child relationship, {@code *} a sibling relationship and
 * {@code |} an alternative. Each node is judged only by looking at its own children
 * (and, where needed, its parent's type). Offending nodes are marked via
 * {@code Node.isInvalid} and counted.
 */
public class SyntacticEvaluator {
    int numOfInvalid;
    public SyntacticEvaluator() {
        numOfInvalid = 0;
    }
    /** Marks {@code node} invalid and reports a single violation. */
    private static int flag(Node node) {
        node.isInvalid = true;
        return 1;
    }
    /** True if the node's grammar type equals {@code type}. */
    private static boolean hasType(Node node, String type) {
        return node.getInfo().getType().equals(type);
    }
    /**
     * ROOT is invalid if it is childless, its first child is not SN, or (when the first
     * child is SN) any later child is not ON — each such later child counts separately.
     */
    private static int checkROOT(Node root) {
        List<Node> kids = root.getChildren();
        if (kids.isEmpty()) {
            return flag(root);
        }
        if (!hasType(kids.get(0), "SN")) {
            return flag(root);
        }
        int violations = 0;
        for (int idx = 1; idx < kids.size(); idx++) {
            if (!hasType(kids.get(idx), "ON")) {
                violations += flag(root);
            }
        }
        return violations;
    }
    /** SN must have exactly one child, and that child must be a GNP head (NN or FN). */
    private static int checkSN(Node sn) {
        List<Node> kids = sn.getChildren();
        if (kids.size() != 1) {
            return flag(sn);
        }
        Node only = kids.get(0);
        return (hasType(only, "NN") || hasType(only, "FN")) ? 0 : flag(sn);
    }
    /**
     * ON in a ComplexCondition (parent is ROOT) needs exactly two children: a GNP head
     * (NN/FN) on the left and anything but another ON on the right. ON in a Condition
     * (parent is NN) needs exactly one VN child. Other parents are not checked here.
     */
    private static int checkON(Node on) {
        String parentType = on.getParent().getInfo().getType();
        List<Node> kids = on.getChildren();
        if (parentType.equals("ROOT")) {
            if (kids.size() != 2) {
                return flag(on);
            }
            Node left = kids.get(0);
            if (!(hasType(left, "NN") || hasType(left, "FN"))) {
                return flag(on);
            }
            return hasType(kids.get(1), "ON") ? flag(on) : 0;
        }
        if (parentType.equals("NN")) {
            if (kids.size() != 1) {
                return flag(on);
            }
            return hasType(kids.get(0), "VN") ? 0 : flag(on);
        }
        return 0;
    }
    /**
     * The second NN of {@code NP = NN + NN*Condition} (parent is NN) must be a leaf.
     * The first NN (parent SN/FN/ON) may have any number of children, but each must be
     * NN, VN or ON; a single offender suffices to invalidate the node.
     */
    private static int checkNN(Node nn) {
        String parentType = nn.getParent().getInfo().getType();
        List<Node> kids = nn.getChildren();
        if (parentType.equals("NN")) {
            // Stricter than figure 7 (a) in the paper, but intentional: second NN is a leaf.
            return kids.isEmpty() ? 0 : flag(nn);
        }
        if (parentType.equals("SN") || parentType.equals("FN") || parentType.equals("ON")) {
            for (Node kid : kids) {
                boolean allowed = hasType(kid, "NN") || hasType(kid, "VN") || hasType(kid, "ON");
                if (!allowed) {
                    return flag(nn);
                }
            }
        }
        return 0;
    }
    /** VN is always a leaf; any child invalidates it. */
    private static int checkVN(Node vn) {
        return vn.getChildren().isEmpty() ? 0 : flag(vn);
    }
    /**
     * FN may be childless only under ON; with exactly one child it must sit under ON or SN
     * and that child must be NN; more than one child is always invalid.
     */
    private static int checkFN(Node fn) {
        String parentType = fn.getParent().getInfo().getType();
        List<Node> kids = fn.getChildren();
        switch (kids.size()) {
            case 0:
                return parentType.equals("ON") ? 0 : flag(fn);
            case 1:
                if (!(parentType.equals("ON") || parentType.equals("SN"))) {
                    return flag(fn);
                }
                return hasType(kids.get(0), "NN") ? 0 : flag(fn);
            default:
                return flag(fn);
        }
    }
    /**
     * Walks the whole tree and returns the total number of grammar violations found,
     * marking every offending node via {@code Node.isInvalid} as a side effect.
     *
     * @param T the parse tree to validate
     * @return total violation count
     */
    public static int numberOfInvalidNodes (ParseTree T){
        int total = 0;
        for (Node node : T) {
            String type = node.getInfo().getType();
            if (type.equals("ROOT")) {
                total += checkROOT(node);
            }
            if (type.equals("SN")) {
                total += checkSN(node);
            }
            else if (type.equals("ON")) {
                total += checkON(node);
            }
            else if (type.equals("NN")) {
                total += checkNN(node);
            }
            else if (type.equals("VN")) {
                total += checkVN(node);
            }
            else if (type.equals("FN")) {
                total += checkFN(node);
            }
        }
        return total;
    }
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.containerregistry.v2019_04_01;
import java.util.List;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonTypeName;
/**
 * The parameters for a docker quick build.
 * <p>
 * NOTE(review): this class is generated by AutoRest (see the file header); keep hand edits
 * to comments only, since regenerating the SDK will overwrite the file.
 */
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type")
@JsonTypeName("DockerBuildRequest")
public class DockerBuildRequest extends RunRequest {
    /**
     * The fully qualified image names including the repository and tag.
     */
    @JsonProperty(value = "imageNames")
    private List<String> imageNames;
    /**
     * The value of this property indicates whether the image built should be
     * pushed to the registry or not.
     */
    @JsonProperty(value = "isPushEnabled")
    private Boolean isPushEnabled;
    /**
     * The value of this property indicates whether the image cache is enabled
     * or not.
     */
    @JsonProperty(value = "noCache")
    private Boolean noCache;
    /**
     * The Docker file path relative to the source location.
     */
    @JsonProperty(value = "dockerFilePath", required = true)
    private String dockerFilePath;
    /**
     * The name of the target build stage for the docker build.
     */
    @JsonProperty(value = "target")
    private String target;
    /**
     * The collection of override arguments to be used when executing the run.
     */
    @JsonProperty(value = "arguments")
    private List<Argument> arguments;
    /**
     * Run timeout in seconds.
     */
    @JsonProperty(value = "timeout")
    private Integer timeout;
    /**
     * The platform properties against which the run has to happen.
     */
    @JsonProperty(value = "platform", required = true)
    private PlatformProperties platform;
    /**
     * The machine configuration of the run agent.
     */
    @JsonProperty(value = "agentConfiguration")
    private AgentProperties agentConfiguration;
    /**
     * The URL(absolute or relative) of the source context. It can be an URL to
     * a tar or git repository.
     * If it is relative URL, the relative path should be obtained from calling
     * listBuildSourceUploadUrl API.
     */
    @JsonProperty(value = "sourceLocation")
    private String sourceLocation;
    /**
     * The properties that describes a set of credentials that will be used
     * when this run is invoked.
     */
    @JsonProperty(value = "credentials")
    private Credentials credentials;
    /**
     * Get the fully qualified image names including the repository and tag.
     *
     * @return the imageNames value
     */
    public List<String> imageNames() {
        return this.imageNames;
    }
    /**
     * Set the fully qualified image names including the repository and tag.
     *
     * @param imageNames the imageNames value to set
     * @return the DockerBuildRequest object itself.
     */
    public DockerBuildRequest withImageNames(List<String> imageNames) {
        this.imageNames = imageNames;
        return this;
    }
    /**
     * Get the value of this property indicates whether the image built should be pushed to the registry or not.
     *
     * @return the isPushEnabled value
     */
    public Boolean isPushEnabled() {
        return this.isPushEnabled;
    }
    /**
     * Set the value of this property indicates whether the image built should be pushed to the registry or not.
     *
     * @param isPushEnabled the isPushEnabled value to set
     * @return the DockerBuildRequest object itself.
     */
    public DockerBuildRequest withIsPushEnabled(Boolean isPushEnabled) {
        this.isPushEnabled = isPushEnabled;
        return this;
    }
    /**
     * Get the value of this property indicates whether the image cache is enabled or not.
     *
     * @return the noCache value
     */
    public Boolean noCache() {
        return this.noCache;
    }
    /**
     * Set the value of this property indicates whether the image cache is enabled or not.
     *
     * @param noCache the noCache value to set
     * @return the DockerBuildRequest object itself.
     */
    public DockerBuildRequest withNoCache(Boolean noCache) {
        this.noCache = noCache;
        return this;
    }
    /**
     * Get the Docker file path relative to the source location.
     *
     * @return the dockerFilePath value
     */
    public String dockerFilePath() {
        return this.dockerFilePath;
    }
    /**
     * Set the Docker file path relative to the source location.
     *
     * @param dockerFilePath the dockerFilePath value to set
     * @return the DockerBuildRequest object itself.
     */
    public DockerBuildRequest withDockerFilePath(String dockerFilePath) {
        this.dockerFilePath = dockerFilePath;
        return this;
    }
    /**
     * Get the name of the target build stage for the docker build.
     *
     * @return the target value
     */
    public String target() {
        return this.target;
    }
    /**
     * Set the name of the target build stage for the docker build.
     *
     * @param target the target value to set
     * @return the DockerBuildRequest object itself.
     */
    public DockerBuildRequest withTarget(String target) {
        this.target = target;
        return this;
    }
    /**
     * Get the collection of override arguments to be used when executing the run.
     *
     * @return the arguments value
     */
    public List<Argument> arguments() {
        return this.arguments;
    }
    /**
     * Set the collection of override arguments to be used when executing the run.
     *
     * @param arguments the arguments value to set
     * @return the DockerBuildRequest object itself.
     */
    public DockerBuildRequest withArguments(List<Argument> arguments) {
        this.arguments = arguments;
        return this;
    }
    /**
     * Get run timeout in seconds.
     *
     * @return the timeout value
     */
    public Integer timeout() {
        return this.timeout;
    }
    /**
     * Set run timeout in seconds.
     *
     * @param timeout the timeout value to set
     * @return the DockerBuildRequest object itself.
     */
    public DockerBuildRequest withTimeout(Integer timeout) {
        this.timeout = timeout;
        return this;
    }
    /**
     * Get the platform properties against which the run has to happen.
     *
     * @return the platform value
     */
    public PlatformProperties platform() {
        return this.platform;
    }
    /**
     * Set the platform properties against which the run has to happen.
     *
     * @param platform the platform value to set
     * @return the DockerBuildRequest object itself.
     */
    public DockerBuildRequest withPlatform(PlatformProperties platform) {
        this.platform = platform;
        return this;
    }
    /**
     * Get the machine configuration of the run agent.
     *
     * @return the agentConfiguration value
     */
    public AgentProperties agentConfiguration() {
        return this.agentConfiguration;
    }
    /**
     * Set the machine configuration of the run agent.
     *
     * @param agentConfiguration the agentConfiguration value to set
     * @return the DockerBuildRequest object itself.
     */
    public DockerBuildRequest withAgentConfiguration(AgentProperties agentConfiguration) {
        this.agentConfiguration = agentConfiguration;
        return this;
    }
    /**
     * Get the URL(absolute or relative) of the source context. It can be an URL to a tar or git repository.
     If it is relative URL, the relative path should be obtained from calling listBuildSourceUploadUrl API.
     *
     * @return the sourceLocation value
     */
    public String sourceLocation() {
        return this.sourceLocation;
    }
    /**
     * Set the URL(absolute or relative) of the source context. It can be an URL to a tar or git repository.
     If it is relative URL, the relative path should be obtained from calling listBuildSourceUploadUrl API.
     *
     * @param sourceLocation the sourceLocation value to set
     * @return the DockerBuildRequest object itself.
     */
    public DockerBuildRequest withSourceLocation(String sourceLocation) {
        this.sourceLocation = sourceLocation;
        return this;
    }
    /**
     * Get the properties that describes a set of credentials that will be used when this run is invoked.
     *
     * @return the credentials value
     */
    public Credentials credentials() {
        return this.credentials;
    }
    /**
     * Set the properties that describes a set of credentials that will be used when this run is invoked.
     *
     * @param credentials the credentials value to set
     * @return the DockerBuildRequest object itself.
     */
    public DockerBuildRequest withCredentials(Credentials credentials) {
        this.credentials = credentials;
        return this;
    }
}
| |
/*
*
* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is Rhino code, released
* May 6, 1999.
*
* The Initial Developer of the Original Code is
* Netscape Communications Corporation.
* Portions created by the Initial Developer are Copyright (C) 1997-1999
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* Roger Lawrence
* Mike McCabe
* Igor Bukanov
* Milen Nankov
*
* Alternatively, the contents of this file may be used under the terms of
* the GNU General Public License Version 2 or later (the "GPL"), in which
* case the provisions of the GPL are applicable instead of those above. If
* you wish to allow use of your version of this file only under the terms of
* the GPL and not to allow others to use your version of this file under the
* MPL, indicate your decision by deleting the provisions above and replacing
* them with the notice and other provisions required by the GPL. If you do
* not delete the provisions above, a recipient may use your version of this
* file under either the MPL or the GPL.
*
* ***** END LICENSE BLOCK ***** */
package com.google.javascript.rhino;
/**
 * This class implements the JavaScript scanner.
 *
 * It is based on the C source files jsscan.c and jsscan.h
 * in the jsref package.
 *
 * NOTE(review): declaration order defines each constant's ordinal. Nothing in this file
 * reads ordinals, but add new constants at the end of their section anyway unless it is
 * confirmed that no serialization or table elsewhere depends on ordinal values.
 */
public enum Token {
RETURN,
BITOR,
BITXOR,
BITAND,
EQ,
NE,
LT,
LE,
GT,
GE,
LSH,
RSH,
URSH,
ADD,
SUB,
MUL,
DIV,
MOD,
EXPONENT,
NOT,
BITNOT,
POS,
NEG,
NEW,
DELPROP,
TYPEOF,
GETPROP,
GETELEM,
CALL,
NAME,
NUMBER,
STRING,
NULL,
THIS,
FALSE,
TRUE,
SHEQ, // shallow equality (===)
SHNE, // shallow inequality (!==)
REGEXP,
THROW,
IN,
INSTANCEOF,
ARRAYLIT, // array literal
OBJECTLIT, // object literal
TRY,
PARAM_LIST,
COMMA, // comma operator
ASSIGN, // simple assignment (=)
ASSIGN_BITOR, // |=
ASSIGN_BITXOR, // ^=
ASSIGN_BITAND, // &=
ASSIGN_LSH, // <<=
ASSIGN_RSH, // >>=
ASSIGN_URSH, // >>>=
ASSIGN_ADD, // +=
ASSIGN_SUB, // -=
ASSIGN_MUL, // *=
ASSIGN_DIV, // /=
ASSIGN_MOD, // %=
ASSIGN_EXPONENT, // **=
HOOK, // conditional (?:)
OR, // logical or (||)
AND, // logical and (&&)
INC, // increment (++)
DEC, // decrement (--)
FUNCTION, // function keyword
IF, // if keyword
SWITCH, // switch keyword
CASE, // case keyword
DEFAULT_CASE, // default keyword
WHILE, // while keyword
DO, // do keyword
FOR, // for(;;) statement
FOR_IN, // for-in
BREAK, // break keyword
CONTINUE, // continue keyword
VAR, // var keyword
WITH, // with keyword
CATCH, // catch keyword
VOID, // void keyword
EMPTY,
ROOT, // Used only for the 3 root nodes of the AST: externsRoot, jsRoot, and externsAndJsRoot
BLOCK, // statement block
LABEL, // label
EXPR_RESULT, // expression statement in scripts
SCRIPT, // top-level node for entire script
GETTER_DEF,
SETTER_DEF,
CONST, // JS 1.5 const keyword
DEBUGGER,
// JSCompiler introduced tokens
LABEL_NAME,
STRING_KEY, // object literal key
CAST,
// ES6
ARRAY_PATTERN, // destructuring patterns
OBJECT_PATTERN,
DESTRUCTURING_LHS, // The node inside a var/let/const with a destructuring LHS
CLASS, // classes
CLASS_MEMBERS, // class member container
MEMBER_FUNCTION_DEF,
SUPER,
LET, // block scoped vars
FOR_OF, // for-of
FOR_AWAIT_OF, // for-await-of
YIELD, // generators
AWAIT, // async functions
IMPORT, // modules
IMPORT_SPECS,
IMPORT_SPEC,
IMPORT_STAR, // "* as name", called NameSpaceImport in the spec.
EXPORT,
EXPORT_SPECS,
EXPORT_SPEC,
MODULE_BODY,
REST, // "..." in formal parameters, or an array pattern.
SPREAD, // "..." in a call expression, or an array literal.
COMPUTED_PROP,
TAGGED_TEMPLATELIT, // tagged template literal, e.g. foo`bar`
TEMPLATELIT, // template literal
TEMPLATELIT_SUB, // template literal substitution
DEFAULT_VALUE, // Formal parameter or destructuring element with a default value
NEW_TARGET, // new.target
// Used by type declaration ASTs
STRING_TYPE,
BOOLEAN_TYPE,
NUMBER_TYPE,
FUNCTION_TYPE,
PARAMETERIZED_TYPE,
UNION_TYPE,
ANY_TYPE,
NULLABLE_TYPE,
VOID_TYPE,
REST_PARAMETER_TYPE,
NAMED_TYPE,
OPTIONAL_PARAMETER,
RECORD_TYPE,
UNDEFINED_TYPE,
ARRAY_TYPE,
GENERIC_TYPE,
GENERIC_TYPE_LIST,
// JSDoc-only tokens
ANNOTATION,
PIPE,
STAR,
EOC,
QMARK, // type is nullable or unknown
ELLIPSIS,
BANG,
EQUALS,
LB, // left brackets
LC, // left curly braces
COLON,
// TypeScript
INTERFACE,
INTERFACE_EXTENDS,
INTERFACE_MEMBERS,
ENUM,
ENUM_MEMBERS,
IMPLEMENTS,
TYPE_ALIAS,
DECLARE,
MEMBER_VARIABLE_DEF,
INDEX_SIGNATURE,
CALL_SIGNATURE,
NAMESPACE,
NAMESPACE_ELEMENTS,
// Tokens to use for internal bookkeeping,
// an AST is invalid while these are present.
PLACEHOLDER1,
PLACEHOLDER2,
PLACEHOLDER3;
/**
 * Returns the fixed number of children an AST node with this token has: 0 for leaves,
 * 1/2/3 for unary/binary/ternary nodes.
 * If the arity isn't always the same, this function returns -1.
 *
 * @throws IllegalStateException for tokens with no defined arity (e.g. tokens that
 *         never appear as AST node types)
 */
public static int arity(Token token) {
switch (token) {
// Variable number of children:
case ANNOTATION:
case ARRAYLIT:
case BANG:
case BLOCK:
case ROOT:
case BREAK:
case CALL:
case COLON:
case CONST:
case CONTINUE:
case DEBUGGER:
case ELLIPSIS:
case EOC:
case EQUALS:
case FOR:
case IF:
case LB:
case LC:
case NEW:
case OBJECTLIT:
case PARAM_LIST:
case PIPE:
case QMARK:
case REGEXP:
case RETURN:
case SCRIPT:
case STAR:
case STRING_KEY:
case SWITCH:
case TEMPLATELIT:
case TRY:
case VAR:
case YIELD:
return -1;
// Leaves (no children):
case EMPTY:
case FALSE:
case IMPORT_STAR:
case LABEL_NAME:
case MEMBER_VARIABLE_DEF:
case NAME:
case NULL:
case NUMBER:
case STRING:
case THIS:
case TRUE:
return 0;
// Exactly one child:
case BITNOT:
case CALL_SIGNATURE:
case CAST:
case DEC:
case DEFAULT_CASE:
case DELPROP:
case EXPR_RESULT:
case GETTER_DEF:
case INC:
case INDEX_SIGNATURE:
case MEMBER_FUNCTION_DEF:
case NAMED_TYPE:
case NEG:
case NOT:
case POS:
case REST:
case SETTER_DEF:
case SPREAD:
case TEMPLATELIT_SUB:
case THROW:
case TYPEOF:
case TYPE_ALIAS:
case VOID:
return 1;
// Exactly two children:
case ADD:
case AND:
case ASSIGN:
case ASSIGN_ADD:
case ASSIGN_BITAND:
case ASSIGN_BITOR:
case ASSIGN_BITXOR:
case ASSIGN_DIV:
case ASSIGN_LSH:
case ASSIGN_MOD:
case ASSIGN_MUL:
case ASSIGN_EXPONENT:
case ASSIGN_RSH:
case ASSIGN_SUB:
case ASSIGN_URSH:
case BITAND:
case BITOR:
case BITXOR:
case CASE:
case CATCH:
case COMMA:
case COMPUTED_PROP:
case DEFAULT_VALUE:
case DIV:
case DO:
case ENUM:
case EQ:
case EXPONENT:
case GE:
case GETELEM:
case GETPROP:
case GT:
case IN:
case INSTANCEOF:
case LABEL:
case LE:
case LSH:
case LT:
case MOD:
case MUL:
case NAMESPACE:
case NE:
case OR:
case RSH:
case SHEQ:
case SHNE:
case SUB:
case TAGGED_TEMPLATELIT:
case URSH:
case WHILE:
case WITH:
return 2;
// Exactly three children:
case CLASS:
case FOR_IN:
case FOR_OF:
case FOR_AWAIT_OF:
case FUNCTION:
case HOOK:
case IMPORT:
case INTERFACE:
return 3;
default:
throw new IllegalStateException("No arity defined for " + token);
}
}
}
| |
package com.badlogic.gdx.graphics.g3d.particles.influencers;
import java.util.Iterator;
import com.badlogic.gdx.assets.AssetDescriptor;
import com.badlogic.gdx.assets.AssetManager;
import com.badlogic.gdx.graphics.g3d.particles.ParallelArray.ObjectChannel;
import com.badlogic.gdx.graphics.g3d.particles.ParticleChannels;
import com.badlogic.gdx.graphics.g3d.particles.ParticleController;
import com.badlogic.gdx.graphics.g3d.particles.ParticleEffect;
import com.badlogic.gdx.graphics.g3d.particles.ResourceData;
import com.badlogic.gdx.graphics.g3d.particles.ResourceData.SaveData;
import com.badlogic.gdx.utils.Array;
import com.badlogic.gdx.utils.Pool;
/** It's an {@link Influencer} which controls which {@link ParticleController} will be assigned to a particle.
* @author Inferno */
public abstract class ParticleControllerInfluencer extends Influencer {

	/** Assigns the first controller of {@link ParticleControllerInfluencer#templates} to the particles. */
	public static class Single extends ParticleControllerInfluencer {

		public Single (ParticleController... templates) {
			super(templates);
		}

		public Single () {
			super();
		}

		public Single (Single particleControllerSingle) {
			super(particleControllerSingle);
		}

		@Override
		public void init () {
			// Every particle slot receives its own initialized copy of the first template.
			ParticleController first = templates.first();
			for (int i = 0, c = controller.particles.capacity; i < c; ++i) {
				ParticleController copy = first.copy();
				copy.init();
				particleControllerChannel.data[i] = copy;
			}
		}

		@Override
		public void activateParticles (int startIndex, int count) {
			for (int i = startIndex, c = startIndex + count; i < c; ++i) {
				particleControllerChannel.data[i].start();
			}
		}

		@Override
		public void killParticles (int startIndex, int count) {
			for (int i = startIndex, c = startIndex + count; i < c; ++i) {
				particleControllerChannel.data[i].end();
			}
		}

		@Override
		public Single copy () {
			return new Single(this);
		}
	}

	/** Assigns a random controller of {@link ParticleControllerInfluencer#templates} to the particles. */
	public static class Random extends ParticleControllerInfluencer {

		/** Pool whose new objects are initialized copies of a randomly chosen template. */
		private class ParticleControllerPool extends Pool<ParticleController> {
			public ParticleControllerPool () {
			}

			@Override
			public ParticleController newObject () {
				ParticleController controller = templates.random().copy();
				controller.init();
				return controller;
			}

			@Override
			public void clear () {
				// Dispose every pooled instance because the templates may have changed.
				for (int i = 0, free = pool.getFree(); i < free; ++i) {
					pool.obtain().dispose();
				}
				super.clear();
			}
		}

		ParticleControllerPool pool;

		public Random () {
			super();
			pool = new ParticleControllerPool();
		}

		public Random (ParticleController... templates) {
			super(templates);
			pool = new ParticleControllerPool();
		}

		public Random (Random particleControllerRandom) {
			super(particleControllerRandom);
			pool = new ParticleControllerPool();
		}

		@Override
		public void init () {
			pool.clear();
			// Pre-allocate one controller per possible particle.
			for (int i = 0; i < controller.emitter.maxParticleCount; ++i) {
				pool.free(pool.newObject());
			}
		}

		@Override
		public void dispose () {
			pool.clear();
			super.dispose();
		}

		@Override
		public void activateParticles (int startIndex, int count) {
			for (int i = startIndex, c = startIndex + count; i < c; ++i) {
				ParticleController controller = pool.obtain();
				controller.start();
				particleControllerChannel.data[i] = controller;
			}
		}

		@Override
		public void killParticles (int startIndex, int count) {
			for (int i = startIndex, c = startIndex + count; i < c; ++i) {
				ParticleController controller = particleControllerChannel.data[i];
				controller.end();
				pool.free(controller);
				particleControllerChannel.data[i] = null;
			}
		}

		@Override
		public Random copy () {
			return new Random(this);
		}
	}

	/** The controller templates a particle may be assigned. */
	public Array<ParticleController> templates;
	ObjectChannel<ParticleController> particleControllerChannel;

	public ParticleControllerInfluencer () {
		this.templates = new Array<ParticleController>(true, 1, ParticleController.class);
	}

	public ParticleControllerInfluencer (ParticleController... templates) {
		this.templates = new Array<ParticleController>(templates);
	}

	/** Copy constructor. Copies only the live template references: the source's raw backing array
	 * ({@code templates.items}) may be longer than {@code templates.size} and contain stale/null
	 * trailing slots, so it must not be copied wholesale. */
	public ParticleControllerInfluencer (ParticleControllerInfluencer influencer) {
		this();
		templates.addAll(influencer.templates);
	}

	@Override
	public void allocateChannels () {
		particleControllerChannel = controller.particles.addChannel(ParticleChannels.ParticleController);
	}

	@Override
	public void end () {
		for (int i = 0; i < controller.particles.size; ++i) {
			particleControllerChannel.data[i].end();
		}
	}

	@Override
	public void dispose () {
		if (controller != null) {
			for (int i = 0; i < controller.particles.size; ++i) {
				// Note: this local intentionally shadows the influencer's own 'controller' field.
				ParticleController controller = particleControllerChannel.data[i];
				if (controller != null) {
					controller.dispose();
					particleControllerChannel.data[i] = null;
				}
			}
		}
	}

	@Override
	public void save (AssetManager manager, ResourceData resources) {
		SaveData data = resources.createSaveData();
		Array<ParticleEffect> effects = manager.getAll(ParticleEffect.class, new Array<ParticleEffect>());
		// For each loaded effect, record which of our templates it owns (by index within the effect).
		Array<ParticleController> controllers = new Array<ParticleController>(templates);
		Array<Array<Integer>> effectsIndices = new Array<Array<Integer>>();
		for (int i = 0; i < effects.size && controllers.size > 0; ++i) {
			ParticleEffect effect = effects.get(i);
			Array<ParticleController> effectControllers = effect.getControllers();
			Iterator<ParticleController> iterator = controllers.iterator();
			Array<Integer> indices = null;
			while (iterator.hasNext()) {
				ParticleController controller = iterator.next();
				int index = -1;
				if ((index = effectControllers.indexOf(controller, true)) > -1) {
					if (indices == null) {
						indices = new Array<Integer>();
					}
					iterator.remove();
					indices.add(index);
				}
			}
			if (indices != null) {
				data.saveAsset(manager.getAssetFileName(effect), ParticleEffect.class);
				effectsIndices.add(indices);
			}
		}
		data.save("indices", effectsIndices);
	}

	@Override
	public void load (AssetManager manager, ResourceData resources) {
		SaveData data = resources.getSaveData();
		Array<Array<Integer>> effectsIndices = data.load("indices");
		AssetDescriptor descriptor;
		Iterator<Array<Integer>> iterator = effectsIndices.iterator();
		// Asset descriptors and index arrays were saved pairwise; walk them in lockstep.
		while ((descriptor = data.loadAsset()) != null) {
			ParticleEffect effect = (ParticleEffect)manager.get(descriptor);
			if (effect == null) throw new RuntimeException("Template is null");
			Array<ParticleController> effectControllers = effect.getControllers();
			Array<Integer> effectIndices = iterator.next();
			for (Integer index : effectIndices) {
				templates.add(effectControllers.get(index));
			}
		}
	}
}
| |
/*
* Copyright 2015 Adaptris Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.adaptris.core.services.metadata;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import javax.validation.constraints.NotBlank;
import com.adaptris.annotation.AdapterComponent;
import com.adaptris.annotation.ComponentProfile;
import com.adaptris.annotation.DisplayOrder;
import com.adaptris.annotation.InputFieldDefault;
import com.adaptris.core.AdaptrisMessage;
import com.adaptris.core.BranchingServiceImp;
import com.adaptris.core.CoreException;
import com.adaptris.core.ServiceException;
import com.adaptris.core.util.Args;
import com.adaptris.core.util.ExceptionHelper;
import com.thoughtworks.xstream.annotations.XStreamAlias;
/**
* <p>
* Branching <code>Service</code> implementation which checks the value stored against a configured metadata key against a list of
* previously received values.
* <p>
* The service obeys the following rules when checking the metadata key
* <ul>
* <li>If the looked-up value is null or empty, a {@link ServiceException} is thrown.</li>
* <li>If the value is set and has previously been received, the configured <code>nextServiceIdIfDuplicate</code> is set on the
* message.</li>
 * <li>If the looked-up value is not contained in the store of previous values <code>nextServiceIdIfUnique</code> is set and the
 * value is added to this store for future checking.</li>
 * </ul>
* <p>
* The store of previous values has a configurable maximum size. After a new value is added, if the store exceeds the maximum size
* the oldest value is removed. The store is then persisted to the configured store file.
* </p>
*
* @config check-unique-metadata-value-service
*
*
*/
@XStreamAlias("check-unique-metadata-value-service")
@AdapterComponent
@ComponentProfile(
summary = "Perform a branch by checking a metadata key and comparing it against a list of previously received values",
tag = "service,branching", branchSelector = true)
@DisplayOrder(order = {"metadataKeyToCheck", "nextServiceIdIfUnique", "nextServiceIdIfDuplicate", "storeFileUrl",
"numberOfPreviousValuesToStore"})
public class CheckUniqueMetadataValueService extends BranchingServiceImp {
  /**
   * <p>
   * Default next Service ID to set if the message metadata value does not
   * appear in the store of previously received values.
   * </p>
   */
  public static final String DEFAULT_SERVICE_ID_UNIQUE = "001";
  /**
   * <p>
   * Default next Service ID to set if the message metadata value <em>does</em>
   * appear in the store of previously received values.
   * </p>
   */
  public static final String DEFAULT_SERVICE_ID_DUPLICATE = "002";
  // Metadata key whose value is compared against the history of previous values.
  @NotBlank
  private String metadataKeyToCheck;
  // file:// URL of the file the history is serialized to / deserialized from.
  @NotBlank
  private String storeFileUrl;
  @NotBlank
  private String nextServiceIdIfDuplicate;
  @NotBlank
  private String nextServiceIdIfUnique;
  @InputFieldDefault(value = "1000")
  private int numberOfPreviousValuesToStore;
  // not marshalled
  private transient List<Object> previousValuesStore;
  private transient File store;
  /**
   * <p>
   * Creates a new instance. Default history size is 1000.
   * </p>
   */
  public CheckUniqueMetadataValueService() {
    this.setNumberOfPreviousValuesToStore(1000);
  }
  @Override
  protected void initService() throws CoreException {
    try {
      Args.notBlank(getMetadataKeyToCheck(), "metadataKeyToCheck");
      Args.notBlank(getStoreFileUrl(), "storeFileUrl");
      this.store = new File(new URL(getStoreFileUrl()).getFile());
      this.loadPreviouslyReceivedValues();
      // Start with an empty history if the store file doesn't exist yet.
      if (previousValuesStore == null) {
        previousValuesStore = new ArrayList<Object>();
      }
    } catch (Exception e) {
      throw ExceptionHelper.wrapCoreException(e);
    }
  }
  @Override
  protected void closeService() {
  }
  /**
   * Branches based on whether the configured metadata value has been seen before.
   * A blank/missing metadata value results in a {@link ServiceException}.
   *
   * @see com.adaptris.core.Service
   * #doService(com.adaptris.core.AdaptrisMessage)
   */
  @Override
  public void doService(AdaptrisMessage msg) throws ServiceException {
    String value = msg.getMetadataValue(this.getMetadataKeyToCheck());
    try {
      Args.notBlank(value, "metadataValue");
      if (previousValuesStore.contains(value)) {
        this.handleDuplicate(msg, value);
      } else {
        this.handleNewValue(msg, value);
      }
    } catch (Exception e) {
      throw ExceptionHelper.wrapServiceException(e);
    }
  }
  // Duplicate seen: warn and route to the "duplicate" branch; the store is unchanged.
  private void handleDuplicate(AdaptrisMessage msg, String value) throws ServiceException {
    String errorMessage = this.createErrorMessage(value);
    log.warn(errorMessage);
    msg.setNextServiceId(this.getNextServiceIdIfDuplicate());
  }
  private String createErrorMessage(String value) {
    StringBuffer result = new StringBuffer();
    result.append("value [");
    result.append(value);
    result.append("] stored against key [");
    result.append(this.getMetadataKeyToCheck());
    result.append("] exists in list of previously stored values");
    return result.toString();
  }
  // New value: route to the "unique" branch, record the value, evict oldest entries
  // beyond the configured history size, then persist the store to disk.
  private void handleNewValue(AdaptrisMessage msg, String value) throws Exception {
    msg.setNextServiceId(this.getNextServiceIdIfUnique());
    previousValuesStore.add(value);
    while (previousValuesStore.size() > this.getNumberOfPreviousValuesToStore()) {
      previousValuesStore.remove(0);
    }
    this.storePreviouslyReceivedValues();
  }
  // Serializes the in-memory history to the store file (overwrites it).
  private void storePreviouslyReceivedValues() throws Exception {
    try (FileOutputStream out = new FileOutputStream(store);
        ObjectOutputStream o = new ObjectOutputStream(out)) {
      o.writeObject(previousValuesStore);
    }
  }
  // NOTE(review): deserializes the store file via ObjectInputStream; this is only safe
  // if the file location is trusted (native Java deserialization of untrusted data is
  // a known attack vector) -- confirm the deployment assumption.
  private void loadPreviouslyReceivedValues() throws Exception {
    if (store.exists()) {
      try (FileInputStream in = new FileInputStream(store);
          ObjectInputStream o = new ObjectInputStream(in)) {
        previousValuesStore = (ArrayList<Object>) o.readObject();
      }
    }
  }
  // package-private accessor, presumably for unit tests.
  int storeSize() {
    return previousValuesStore.size();
  }
  // properties...
  /**
   * <p>
   * Returns the metadata key whose value should be checked.
   * </p>
   *
   * @return metadataKey the metadata key whose value should be checked
   */
  public String getMetadataKeyToCheck() {
    return this.metadataKeyToCheck;
  }
  /**
   * <p>
   * Sets the metadata key whose value should be checked. May not be null.
   * </p>
   *
   * @param s the metadata key whose value should be checked
   */
  public void setMetadataKeyToCheck(String s) {
    this.metadataKeyToCheck = Args.notBlank(s, "metadataKeyToCheck");
  }
  /**
   * <p>
   * Returns the number of previous values to keep.
   * </p>
   *
   * @return the number of previous values to keep
   */
  public int getNumberOfPreviousValuesToStore() {
    return this.numberOfPreviousValuesToStore;
  }
  /**
   * <p>
   * Sets the number of previous values to keep. Must be greater than 0.
   * </p>
   *
   * @param i the number of previous values to keep
   */
  public void setNumberOfPreviousValuesToStore(int i) {
    if (i < 1) {
      throw new IllegalArgumentException("history size is 0 or negative");
    }
    this.numberOfPreviousValuesToStore = i;
  }
  /**
   * <p>
   * Returns the persistent store for previously received values in the form of
   * a file URL. E.g. <code>file:////Users/adaptris/store.dat</code>.
   * </p>
   *
   * @return the persistent store for previously received values in the form of
   *         a file URL
   */
  public String getStoreFileUrl() {
    return this.storeFileUrl;
  }
  /**
   * <p>
   * Sets the persistent store for previously received values in the form of a
   * file URL. E.g. <code>file:////Users/adaptris/store.dat</code>. May not be
   * null or empty.
   * </p>
   *
   * @param s the persistent store for previously received values in the form of
   *          a file URL
   */
  public void setStoreFileUrl(String s) {
    this.storeFileUrl = Args.notBlank(s, "storeFileUrl");
  }
  /**
   * <p>
   * Returns the ID of the next <code>Service</code> to apply if the metadata
   * exists in the store of previous values.
   * </p>
   *
   * @return the ID of the next <code>Service</code> to apply if the metadata
   *         exists in the store of previous values
   */
  public String getNextServiceIdIfDuplicate() {
    return this.nextServiceIdIfDuplicate;
  }
  /**
   * <p>
   * Sets the ID of the next <code>Service</code> to apply if the metadata
   * exists in the store of previous values. May not be null or empty.
   * </p>
   *
   * @param s the ID of the next <code>Service</code> to apply if the metadata
   *          exists in the store of previous values
   */
  public void setNextServiceIdIfDuplicate(String s) {
    this.nextServiceIdIfDuplicate = Args.notBlank(s, "nextServiceIdIfDuplicate");
  }
  /**
   * <p>
   * Returns the ID of the next <code>Service</code> to apply if the metadata
   * does not exist in the store of previous values.
   * </p>
   *
   * @return the ID of the next <code>Service</code> to apply if the metadata
   *         does not exist in the store of previous values
   */
  public String getNextServiceIdIfUnique() {
    return this.nextServiceIdIfUnique;
  }
  /**
   * <p>
   * Sets the ID of the next <code>Service</code> to apply if the metadata does
   * not exist in the store of previous values. May not be null or empty.
   * </p>
   *
   * @param s the ID of the next <code>Service</code> to apply if the metadata
   *          does not exist in the store of previous values
   */
  public void setNextServiceIdIfUnique(String s) {
    this.nextServiceIdIfUnique = Args.notBlank(s, "nextServiceIdIfUnique");
  }
  @Override
  public void prepare() throws CoreException {}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.orc;
import com.facebook.presto.orc.metadata.CompressionKind;
import com.facebook.presto.orc.metadata.Footer;
import com.facebook.presto.orc.metadata.statistics.IntegerStatistics;
import com.facebook.presto.spi.block.Block;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import io.airlift.slice.Slice;
import io.airlift.units.DataSize;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
import org.apache.hadoop.hive.ql.io.orc.NullMemoryManager;
import org.apache.hadoop.hive.ql.io.orc.OrcFile;
import org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat;
import org.apache.hadoop.hive.ql.io.orc.OrcSerde;
import org.apache.hadoop.hive.ql.io.orc.OrcWriterOptions;
import org.apache.hadoop.hive.ql.io.orc.Writer;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.Serializer;
import org.apache.hadoop.hive.serde2.objectinspector.SettableStructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.io.Writable;
import org.testng.annotations.Test;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Field;
import java.nio.ByteBuffer;
import java.util.Map;
import static com.facebook.presto.orc.OrcEncoding.ORC;
import static com.facebook.presto.orc.OrcReader.MAX_BATCH_SIZE;
import static com.facebook.presto.orc.OrcTester.Format.ORC_12;
import static com.facebook.presto.orc.OrcTester.MAX_BLOCK_SIZE;
import static com.facebook.presto.orc.OrcTester.createCustomOrcRecordReader;
import static com.facebook.presto.orc.OrcTester.createOrcRecordWriter;
import static com.facebook.presto.orc.OrcTester.createSettableStructObjectInspector;
import static com.facebook.presto.spi.type.BigintType.BIGINT;
import static com.facebook.presto.spi.type.VarcharType.VARCHAR;
import static io.airlift.units.DataSize.Unit.MEGABYTE;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.apache.hadoop.hive.ql.io.orc.CompressionKind.SNAPPY;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;
/**
 * Tests that {@code OrcRecordReader} reports correct file/reader row counts and
 * positions when reading entire files, skipping stripes, skipping row groups,
 * and sizing batches for fixed- and variable-width columns.
 */
public class TestOrcReaderPositions
{
    @Test
    public void testEntireFile()
            throws Exception
    {
        try (TempFile tempFile = new TempFile()) {
            // 5 stripes of 20 rows each (see createMultiStripeFile)
            createMultiStripeFile(tempFile.getFile());
            try (OrcRecordReader reader = createCustomOrcRecordReader(tempFile, ORC, OrcPredicate.TRUE, BIGINT)) {
                assertEquals(reader.getReaderRowCount(), 100);
                assertEquals(reader.getReaderPosition(), 0);
                assertEquals(reader.getFileRowCount(), reader.getReaderRowCount());
                assertEquals(reader.getFilePosition(), reader.getReaderPosition());
                for (int i = 0; i < 5; i++) {
                    assertEquals(reader.nextBatch(), 20);
                    assertEquals(reader.getReaderPosition(), i * 20L);
                    assertEquals(reader.getFilePosition(), reader.getReaderPosition());
                    assertCurrentBatch(reader, i);
                }
                assertEquals(reader.nextBatch(), -1);
                assertEquals(reader.getReaderPosition(), 100);
                assertEquals(reader.getFilePosition(), reader.getReaderPosition());
            }
        }
    }

    @Test
    public void testStripeSkipping()
            throws Exception
    {
        try (TempFile tempFile = new TempFile()) {
            createMultiStripeFile(tempFile.getFile());

            // test reading second and fourth stripes
            // (min/max pairs below match the stripe value ranges written by createMultiStripeFile)
            OrcPredicate predicate = (numberOfRows, statisticsByColumnIndex) -> {
                if (numberOfRows == 100) {
                    return true;
                }
                IntegerStatistics stats = statisticsByColumnIndex.get(0).getIntegerStatistics();
                return ((stats.getMin() == 60) && (stats.getMax() == 117)) ||
                        ((stats.getMin() == 180) && (stats.getMax() == 237));
            };

            try (OrcRecordReader reader = createCustomOrcRecordReader(tempFile, ORC, predicate, BIGINT)) {
                assertEquals(reader.getFileRowCount(), 100);
                assertEquals(reader.getReaderRowCount(), 40);
                assertEquals(reader.getFilePosition(), 0);
                assertEquals(reader.getReaderPosition(), 0);

                // second stripe
                assertEquals(reader.nextBatch(), 20);
                assertEquals(reader.getReaderPosition(), 0);
                assertEquals(reader.getFilePosition(), 20);
                assertCurrentBatch(reader, 1);

                // fourth stripe
                assertEquals(reader.nextBatch(), 20);
                assertEquals(reader.getReaderPosition(), 20);
                assertEquals(reader.getFilePosition(), 60);
                assertCurrentBatch(reader, 3);

                assertEquals(reader.nextBatch(), -1);
                assertEquals(reader.getReaderPosition(), 40);
                assertEquals(reader.getFilePosition(), 100);
            }
        }
    }

    @Test
    public void testRowGroupSkipping()
            throws Exception
    {
        try (TempFile tempFile = new TempFile()) {
            // create single strip file with multiple row groups
            int rowCount = 142_000;
            createSequentialFile(tempFile.getFile(), rowCount);

            // test reading two row groups from middle of file
            OrcPredicate predicate = (numberOfRows, statisticsByColumnIndex) -> {
                if (numberOfRows == rowCount) {
                    return true;
                }
                IntegerStatistics stats = statisticsByColumnIndex.get(0).getIntegerStatistics();
                return (stats.getMin() == 50_000) || (stats.getMin() == 60_000);
            };

            try (OrcRecordReader reader = createCustomOrcRecordReader(tempFile, ORC, predicate, BIGINT)) {
                assertEquals(reader.getFileRowCount(), rowCount);
                assertEquals(reader.getReaderRowCount(), rowCount);
                assertEquals(reader.getFilePosition(), 0);
                assertEquals(reader.getReaderPosition(), 0);

                // values are sequential, so file position doubles as the expected cell value
                long position = 50_000;
                while (true) {
                    int batchSize = reader.nextBatch();
                    if (batchSize == -1) {
                        break;
                    }

                    Block block = reader.readBlock(BIGINT, 0);
                    for (int i = 0; i < batchSize; i++) {
                        assertEquals(BIGINT.getLong(block, i), position + i);
                    }

                    assertEquals(reader.getFilePosition(), position);
                    assertEquals(reader.getReaderPosition(), position);
                    position += batchSize;
                }

                assertEquals(position, 70_000);
                assertEquals(reader.getFilePosition(), rowCount);
                assertEquals(reader.getReaderPosition(), rowCount);
            }
        }
    }

    @Test
    public void testBatchSizesForVariableWidth()
            throws Exception
    {
        // the test creates a table with one column and 10 row groups (i.e., 100K rows)
        // the 1st row group has strings with each of length 300,
        // the 2nd row group has strings with each of length 600,
        // the 3rd row group has strings with each of length 900, and so on
        // the test is to show when loading those strings,
        // we are first bounded by MAX_BATCH_SIZE = 1024 rows because 1024 X 900B < 1MB
        // then bounded by MAX_BLOCK_SIZE = 1MB because 1024 X 1200B > 1MB
        try (TempFile tempFile = new TempFile()) {
            // create single strip file with multiple row groups
            int rowsInRowGroup = 10_000;
            int rowGroupCounts = 10;
            int baseStringBytes = 300;
            int rowCount = rowsInRowGroup * rowGroupCounts;
            createGrowingSequentialFile(tempFile.getFile(), rowCount, rowsInRowGroup, baseStringBytes);

            try (OrcRecordReader reader = createCustomOrcRecordReader(tempFile, ORC, OrcPredicate.TRUE, VARCHAR)) {
                assertEquals(reader.getFileRowCount(), rowCount);
                assertEquals(reader.getReaderRowCount(), rowCount);
                assertEquals(reader.getFilePosition(), 0);
                assertEquals(reader.getReaderPosition(), 0);

                // each value's length = original value length + 4 bytes to denote offset + 1 byte to denote if null
                int currentStringBytes = baseStringBytes + Integer.BYTES + Byte.BYTES;
                int rowCountsInCurrentRowGroup = 0;
                while (true) {
                    int batchSize = reader.nextBatch();
                    if (batchSize == -1) {
                        break;
                    }

                    rowCountsInCurrentRowGroup += batchSize;

                    Block block = reader.readBlock(VARCHAR, 0);
                    if (MAX_BATCH_SIZE * currentStringBytes <= MAX_BLOCK_SIZE.toBytes()) {
                        // Either we are bounded by 1024 rows per batch, or it is the last batch in the row group
                        // For the first 3 row groups, the strings are of length 300, 600, and 900 respectively
                        // So the loaded data is bounded by MAX_BATCH_SIZE
                        assertTrue(block.getPositionCount() == MAX_BATCH_SIZE || rowCountsInCurrentRowGroup == rowsInRowGroup);
                    }
                    else {
                        // Either we are bounded by 1MB per batch, or it is the last batch in the row group
                        // From the 4th row group, the strings are have length > 1200
                        // So the loaded data is bounded by MAX_BLOCK_SIZE
                        assertTrue(block.getPositionCount() == MAX_BLOCK_SIZE.toBytes() / currentStringBytes || rowCountsInCurrentRowGroup == rowsInRowGroup);
                    }

                    if (rowCountsInCurrentRowGroup == rowsInRowGroup) {
                        rowCountsInCurrentRowGroup = 0;
                        currentStringBytes += baseStringBytes;
                    }
                    else if (rowCountsInCurrentRowGroup > rowsInRowGroup) {
                        assertTrue(false, "read more rows in the current row group");
                    }
                }
            }
        }
    }

    @Test
    public void testBatchSizesForFixedWidth()
            throws Exception
    {
        // the test creates a table with one column and 10 row groups
        // the each row group has bigints of length 8 in bytes,
        // the test is to show that the loaded data is always bounded by MAX_BATCH_SIZE because 1024 X 8B < 1MB
        try (TempFile tempFile = new TempFile()) {
            // create single strip file with multiple row groups
            int rowsInRowGroup = 10_000;
            int rowGroupCounts = 10;
            int rowCount = rowsInRowGroup * rowGroupCounts;
            createSequentialFile(tempFile.getFile(), rowCount);

            try (OrcRecordReader reader = createCustomOrcRecordReader(tempFile, ORC, OrcPredicate.TRUE, BIGINT)) {
                assertEquals(reader.getFileRowCount(), rowCount);
                assertEquals(reader.getReaderRowCount(), rowCount);
                assertEquals(reader.getFilePosition(), 0);
                assertEquals(reader.getReaderPosition(), 0);

                int rowCountsInCurrentRowGroup = 0;
                while (true) {
                    int batchSize = reader.nextBatch();
                    if (batchSize == -1) {
                        break;
                    }

                    rowCountsInCurrentRowGroup += batchSize;

                    Block block = reader.readBlock(BIGINT, 0);
                    // 8 bytes per row; 1024 row at most given 1024 X 8B < 1MB
                    assertTrue(block.getPositionCount() == MAX_BATCH_SIZE || rowCountsInCurrentRowGroup == rowsInRowGroup);

                    if (rowCountsInCurrentRowGroup == rowsInRowGroup) {
                        rowCountsInCurrentRowGroup = 0;
                    }
                    else if (rowCountsInCurrentRowGroup > rowsInRowGroup) {
                        assertTrue(false, "read more rows in the current row group");
                    }
                }
            }
        }
    }

    @Test
    public void testReadUserMetadata()
            throws Exception
    {
        try (TempFile tempFile = new TempFile()) {
            Map<String, String> metadata = ImmutableMap.of(
                    "a", "ala",
                    "b", "ma",
                    "c", "kota");
            createFileWithOnlyUserMetadata(tempFile.getFile(), metadata);

            OrcDataSource orcDataSource = new FileOrcDataSource(tempFile.getFile(), new DataSize(1, MEGABYTE), new DataSize(1, MEGABYTE), new DataSize(1, MEGABYTE), true);
            OrcReader orcReader = new OrcReader(orcDataSource, ORC, new DataSize(1, MEGABYTE), new DataSize(1, MEGABYTE), new DataSize(1, MEGABYTE), new DataSize(1, MEGABYTE));
            Footer footer = orcReader.getFooter();
            Map<String, String> readMetadata = Maps.transformValues(footer.getUserMetadata(), Slice::toStringAscii);
            assertEquals(readMetadata, metadata);
        }
    }

    // Verifies the 20 values in the current batch match the arithmetic sequence written
    // for the given stripe by createMultiStripeFile.
    private static void assertCurrentBatch(OrcRecordReader reader, int stripe)
            throws IOException
    {
        Block block = reader.readBlock(BIGINT, 0);
        for (int i = 0; i < 20; i++) {
            assertEquals(BIGINT.getLong(block, i), ((stripe * 20L) + i) * 3);
        }
    }

    // write 5 stripes of 20 values each: (0,3,6,..,57), (60,..,117), .., (..297)
    private static void createMultiStripeFile(File file)
            throws IOException, ReflectiveOperationException, SerDeException
    {
        FileSinkOperator.RecordWriter writer = createOrcRecordWriter(file, ORC_12, CompressionKind.NONE, BIGINT);

        @SuppressWarnings("deprecation") Serializer serde = new OrcSerde();
        SettableStructObjectInspector objectInspector = createSettableStructObjectInspector("test", BIGINT);
        Object row = objectInspector.create();
        StructField field = objectInspector.getAllStructFieldRefs().get(0);

        for (int i = 0; i < 300; i += 3) {
            // flush the writer every 20 rows to force a new stripe
            if ((i > 0) && (i % 60 == 0)) {
                flushWriter(writer);
            }
            objectInspector.setStructFieldData(row, field, (long) i);
            Writable record = serde.serialize(row, objectInspector);
            writer.write(record);
        }

        writer.close(false);
    }

    private static void createFileWithOnlyUserMetadata(File file, Map<String, String> metadata)
            throws IOException
    {
        Configuration conf = new Configuration();
        OrcFile.WriterOptions writerOptions = new OrcWriterOptions(conf)
                .memory(new NullMemoryManager(conf))
                .inspector(createSettableStructObjectInspector("test", BIGINT))
                .compress(SNAPPY);
        Writer writer = OrcFile.createWriter(new Path(file.toURI()), writerOptions);
        for (Map.Entry<String, String> entry : metadata.entrySet()) {
            writer.addUserMetadata(entry.getKey(), ByteBuffer.wrap(entry.getValue().getBytes(UTF_8)));
        }
        writer.close();
    }

    // Forces the ORC writer to end the current stripe by reflectively reaching the
    // internal Writer and writing an intermediate footer (no public API for this).
    private static void flushWriter(FileSinkOperator.RecordWriter writer)
            throws IOException, ReflectiveOperationException
    {
        Field field = OrcOutputFormat.class.getClassLoader()
                .loadClass(OrcOutputFormat.class.getName() + "$OrcRecordWriter")
                .getDeclaredField("writer");
        field.setAccessible(true);
        ((Writer) field.get(writer)).writeIntermediateFooter();
    }

    // Writes a single-column BIGINT file containing the values 0..count-1.
    private static void createSequentialFile(File file, int count)
            throws IOException, SerDeException
    {
        FileSinkOperator.RecordWriter writer = createOrcRecordWriter(file, ORC_12, CompressionKind.NONE, BIGINT);

        @SuppressWarnings("deprecation") Serializer serde = new OrcSerde();
        SettableStructObjectInspector objectInspector = createSettableStructObjectInspector("test", BIGINT);
        Object row = objectInspector.create();
        StructField field = objectInspector.getAllStructFieldRefs().get(0);

        for (int i = 0; i < count; i++) {
            objectInspector.setStructFieldData(row, field, (long) i);
            Writable record = serde.serialize(row, objectInspector);
            writer.write(record);
        }

        writer.close(false);
    }

    // Writes a single-column VARCHAR file where the string length grows by
    // initialLength every 'step' rows (row group), starting at initialLength.
    private static void createGrowingSequentialFile(File file, int count, int step, int initialLength)
            throws IOException, SerDeException
    {
        FileSinkOperator.RecordWriter writer = createOrcRecordWriter(file, ORC_12, CompressionKind.NONE, VARCHAR);

        @SuppressWarnings("deprecation") Serializer serde = new OrcSerde();
        SettableStructObjectInspector objectInspector = createSettableStructObjectInspector("test", VARCHAR);
        Object row = objectInspector.create();
        StructField field = objectInspector.getAllStructFieldRefs().get(0);

        StringBuilder builder = new StringBuilder();
        for (int i = 0; i < initialLength; i++) {
            builder.append("0");
        }
        String seedString = builder.toString();

        // gradually grow the length of a cell
        int previousLength = initialLength;
        for (int i = 0; i < count; i++) {
            if ((i / step + 1) * initialLength > previousLength) {
                previousLength = (i / step + 1) * initialLength;
                builder.append(seedString);
            }
            objectInspector.setStructFieldData(row, field, builder.toString());
            Writable record = serde.serialize(row, objectInspector);
            writer.write(record);
        }

        writer.close(false);
    }
}
| |
package gr.ntua.cslab.celar.slipstreamClient;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.security.SecureRandom;
import java.security.cert.X509Certificate;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Map.Entry;
import java.util.StringTokenizer;
import javax.net.ssl.*;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.io.IOUtils;
import org.apache.log4j.Logger;
import com.sixsq.slipstream.exceptions.ValidationException;
import com.sixsq.slipstream.persistence.Authz;
import com.sixsq.slipstream.persistence.CloudImageIdentifier;
import com.sixsq.slipstream.persistence.ImageModule;
import com.sixsq.slipstream.persistence.Module;
import com.sixsq.slipstream.persistence.ModuleParameter;
import com.sixsq.slipstream.persistence.ProjectModule;
import com.sixsq.slipstream.persistence.Target;
import com.sixsq.slipstream.persistence.User;
import com.sixsq.slipstream.statemachine.States;
import com.sixsq.slipstream.util.SerializationUtil;
import static gr.ntua.cslab.celar.slipstreamClient.SSXMLParser.parse;
public class SlipStreamSSService {
// Credentials and endpoint for the SlipStream server; cookieFile is written by
// writeCookie() when cookie-based authentication is used.
private String user, password, url, cookie, cookieFile, connectorName;
// true -> authenticate CLI/HTTP calls with a session cookie instead of user/password.
private boolean cookieAuth;
public Logger logger = Logger.getLogger(SlipStreamSSService.class);
// Cache: image name -> SlipStream module reference (filled by getImageReference()).
public HashMap<String,String> baseImageReferences; //imageName-reference
// image name -> (cloud connector name -> cloud-specific image id); seeded in init().
public HashMap<String,HashMap<String,String>> baseImages; //imageName-cloud-flavorID
// Extra module parameters attached to every base image module created here.
public List<ModuleParameter> baseParameters;
// Shell template executed before vertical scaling / horizontal down-scaling.
// The $remove/$resize/$disk_attach/$disk_detach placeholders are substituted
// (or blanked) by generateTargetScripts().
private static String preScaleScript=
"#!/bin/bash\n"
+ "set -e\n\n"
+ "# Pre-scale: intended to be ran before any vertical scaling and horizontal downscaling action. \n\n"
+ "function before_vm_remove() { echo \"Before VM remove\";\n $remove \n}\n"
+ "function before_vm_resize() { echo \"Before VM resize\";\n $resize \n}\n"
+ "function before_disk_attach() { echo \"Before disk attach\";\n $disk_attach \n}\n"
+ "function before_disk_detach() { echo \"Before disk detach\";\n $disk_detach \n}\n\n"
+ "case $SLIPSTREAM_SCALING_ACTION in\n"
+ "vm_remove)\n"
+ "before_vm_remove ;;\n"
+ "vm_resize)\n"
+ "before_vm_resize ;;\n"
+ "disk_attach)\n"
+ "before_disk_attach ;;\n"
+ "disk_detach)\n"
+ "before_disk_detach ;;\n"
+ "esac\n";
// Shell template executed after a vertical scaling action; same placeholder
// scheme as preScaleScript (no $remove hook here).
private static String postScaleScript=
"#!/bin/bash\n"
+ "set -e\n\n"
+ "# Post-Scale: intended to be ran after vertical scaling action. \n\n"
+ "function after_vm_resize() { echo \"After VM resize\";\n $resize \n}\n"
+ "function after_disk_attach() { echo \"After disk attach\";\n $disk_attach \n}\n"
+ "function after_disk_detach() { echo \"After disk detach\";\n $disk_detach \n}\n\n"
+ "case $SLIPSTREAM_SCALING_ACTION in\n"
+ "vm_resize)\n"
+ "after_vm_resize ;;\n"
+ "disk_attach)\n"
+ "after_disk_attach ;;\n"
+ "disk_detach)\n"
+ "after_disk_detach ;;\n"
+ "esac\n";
/*public Module getModule(String name) throws Exception{
String[] command = new String[] {"ss-module-get", "-u", user, "-p", password, "--endpoint", url, name};
String ret = executeCommand(command);
if(ret.startsWith("<?xml")){
System.out.println(ret);
Module m = (Module)SerializationUtil.fromXml(ret, ImageModule.class);
//String xml = SerializationUtil.toXmlString(m);
System.out.println(m.getName());
//m.
return m;
}
return null;
}*/
/**
 * Writes the given script to a fixed temp file and returns its path.
 * NOTE(review): the path is constant, so concurrent callers overwrite each
 * other's script — confirm callers are single-threaded.
 *
 * @param script script body to persist
 * @return path of the written file ("/tmp/script.sh")
 * @throws IOException if the file cannot be created or written
 */
public String writeToFile(String script) throws IOException{
    logger.info("writing file:" + script);
    String file = "/tmp/script.sh";
    // try-with-resources guarantees the writer is closed; a failed write now
    // propagates to the caller (previously the IOException was silently
    // swallowed and the path returned as if the write had succeeded).
    try (BufferedWriter writer = new BufferedWriter(new FileWriter(file))) {
        writer.write(script);
    }
    return file;
}
/**
 * Dumps an XML payload to a fixed temp file for consumption by the ss-* CLIs.
 * NOTE(review): fixed path — concurrent callers overwrite each other's XML.
 *
 * @param xml serialized module/user XML
 * @return path of the written file ("/tmp/test.xml")
 * @throws IOException if the file cannot be created or written
 */
private String writeXML(String xml) throws IOException{
    logger.debug(xml);
    String xmlfile = "/tmp/test.xml";
    // try-with-resources closes the writer; write failures now propagate
    // (the old code swallowed IOException and returned the path regardless).
    try (BufferedWriter writer = new BufferedWriter(new FileWriter(xmlfile))) {
        writer.write(xml);
    }
    return xmlfile;
}
/**
 * Persists the session cookie in the "cookie = ..." format expected by the
 * ss-* CLI tools and records the file path in {@code this.cookieFile}.
 * The timestamped name keeps concurrent service instances from clobbering
 * each other's cookie files.
 *
 * @param cookie raw cookie string received from the server
 * @throws IOException if the cookie file cannot be created or written
 */
private void writeCookie(String cookie) throws IOException{
    logger.debug("Writing cookie");
    this.cookieFile = "/tmp/slipstream-"+System.currentTimeMillis()+".cookie";
    // try-with-resources closes the writer; failures now propagate (previously
    // IOException from write() was swallowed, leaving a truncated/empty file).
    try (BufferedWriter writer = new BufferedWriter(new FileWriter(this.cookieFile))) {
        writer.write("cookie = ");
        writer.write(cookie);
    }
}
/**
 * Uploads a user definition to the SlipStream server via the ss-user-put CLI.
 *
 * @param user user entity to serialize and push
 * @return true on success
 * @throws Exception if the CLI reports anything on stderr
 */
public boolean putUser(User user) throws Exception{
    logger.info("Putting user: "+ user.getName());
    // Serialize to XML, stage it in a temp file, then hand it to the CLI.
    String xmlfile = writeXML(SerializationUtil.toXmlString(user));
    String[] command = cookieAuth
            ? new String[] {"ss-user-put", "-u", this.user, "--cookie="+cookieFile, "--endpoint", url, xmlfile}
            : new String[] {"ss-user-put", "-u", this.user, "-p", password, "--endpoint", url, xmlfile};
    Map<String, String> result = executeCommand(command);
    String error = result.get("error");
    // Any stderr output from the CLI is treated as a failure.
    if(!error.equals("")){
        throw new Exception(error);
    }
    return true;
}
/**
 * Uploads a module (project or image) definition via the ss-module-put CLI.
 *
 * @param module module entity to serialize and push
 * @return true on success
 * @throws Exception if the CLI reports anything on stderr
 */
public boolean putModule(Module module) throws Exception{
    logger.info("Putting "+module.getClass() +" module: "+ module.getName());
    // Serialize to XML, stage it in a temp file, then hand it to the CLI.
    String xml = SerializationUtil.toXmlString(module);
    logger.debug(xml);
    String xmlfile = writeXML(xml);
    String[] command = cookieAuth
            ? new String[] {"ss-module-put", "-u", user, "--cookie="+cookieFile, "--endpoint", url, xmlfile}
            : new String[] {"ss-module-put", "-u", user, "-p", password, "--endpoint", url, xmlfile};
    Map<String, String> result = executeCommand(command);
    String error = result.get("error");
    // Any stderr output from the CLI is treated as a failure.
    if(!error.equals("")){
        throw new Exception(error);
    }
    return true;
}
/**
 * Best-effort termination of a deployment via HTTP DELETE on /run/{id}.
 * Errors from curl are deliberately ignored (see the commented-out check
 * below), so this always returns true.
 *
 * @param deploymentID run id to terminate
 * @return always true
 */
public boolean terminateApplication(String deploymentID) throws Exception{
logger.info("Terminating deployment: "+deploymentID);
String[] command;
if(cookieAuth){
// -k disables TLS certificate verification — the endpoint presumably uses a
// self-signed certificate; confirm before using in production.
command = new String[] {"curl", url+"/run/"+deploymentID, "--cookie", cookie, "-X", "DELETE", "-k"};
}
else{
command = new String[] {"curl", url+"/run/"+deploymentID, "--user", user+":"+password, "-X", "DELETE", "-k"};
}
Map<String, String> ret = executeCommand(command);
/*if(!ret.get("error").equals("")){
throw new Exception(ret.get("error"));
}*/
return true;
}
/**
 * Launches an application with ss-execute (mutable run) and returns the new
 * deployment id, extracted from the run URL the CLI prints.
 *
 * @param name application/module name to execute
 * @param deploymentParameters optional "key=value" parameters; may be empty
 * @return the deployment (run) id
 * @throws Exception if the CLI reports anything on stderr
 */
public String launchApplication(String name, Map<String,String> deploymentParameters) throws Exception{
    String[] command;
    if(deploymentParameters.isEmpty()){
        logger.info("Launching application: "+name+" without parameters");
        if(cookieAuth){
            command = new String[] {"ss-execute", "-u", user, "--cookie="+cookieFile, "--endpoint", url, "--mutable-run", name};
        }
        else{
            command = new String[] {"ss-execute", "-u", user, "-p", password, "--endpoint", url, "--mutable-run", name};
        }
    }
    else{
        // Render the map as "k1=v1,k2=v2,..." for --parameters. StringBuilder
        // replaces the previous String += accumulation in a loop.
        StringBuilder paramBuilder = new StringBuilder();
        for(Entry<String, String> e : deploymentParameters.entrySet()){
            if(paramBuilder.length() > 0){
                paramBuilder.append(',');
            }
            paramBuilder.append(e.getKey()).append('=').append(e.getValue());
        }
        String params = paramBuilder.toString();
        logger.info("Launching application: "+name+" with parameters: "+params);
        if(cookieAuth){
            command = new String[] {"ss-execute", "-u", user, "--cookie="+cookieFile, "--endpoint", url, "--mutable-run", "--parameters", params, name};
        }
        else{
            command = new String[] {"ss-execute", "-u", user, "-p", password, "--endpoint", url, "--mutable-run", "--parameters", params, name};
        }
    }
    Map<String, String> ret = executeCommand(command);
    if(!ret.get("error").equals("")){
        throw new Exception(ret.get("error"));
    }
    // ss-execute prints the run URL; the deployment id is its last path
    // segment, stripped of any stray whitespace control characters.
    String r = ret.get("output");
    String deploymentId = r.substring(r.lastIndexOf("/")+1);
    deploymentId = deploymentId.replaceAll("(\\r|\\n|\\t)", "");
    logger.info("deploymentId: "+deploymentId);
    return deploymentId;
}
/**
 * Fetches the current run state by GETting /run/{id}?media=xml and
 * SAX-parsing the response.
 *
 * @param deploymentID run id to query
 * @return the parsed state, or States.Unknown when the server answers with an
 *         HTML page instead of run XML (e.g. unauthorized or missing run)
 */
public States getDeploymentState(String deploymentID) throws Exception{
logger.info("Getting deployment state for deploymentID: "+deploymentID);
//String[] command = new String[] {"ss-run-get", "--endpoint", url, "-u", user, "-p", password, deploymentID};
//String[] command = new String[] {"curl", url+"/run/"+deploymentID, "--user", user+":"+password, "-k"};
//String ret = executeCommand(command);
String ret = httpsGet(url+"/run/"+deploymentID+"?media=xml");
// HTML response means we did not get the run XML back.
if(ret.startsWith("<!DOCTYPE html>")){
return States.Unknown;
}
else{
// NOTE(review): default SAX configuration accepts DTDs/external entities;
// the XML comes from the configured server, but consider XXE hardening.
SAXParserFactory parserFactor = SAXParserFactory.newInstance();
SAXParser parser = parserFactor.newSAXParser();
SAXStateHandler handler = new SAXStateHandler();
parser.parse(new ByteArrayInputStream(ret.getBytes(StandardCharsets.UTF_8)),handler);
return handler.state;
}
}
/**
 * Fetches the run XML and extracts per-VM address information via
 * SAXStateHandler.getIps(). Keys appear to be runtime-parameter names of the
 * form "&lt;type&gt;.&lt;index&gt;:hostname" (see translateIPs) — confirm in
 * SAXStateHandler.
 *
 * @param deploymentID run id to query
 * @return map of runtime-parameter key to value, or an empty map when the
 *         server answers with HTML instead of run XML
 */
public HashMap<String,String> getDeploymentIPs(String deploymentID) throws Exception{
logger.info("Getting deployment ips for deploymentID: "+deploymentID);
logger.info("URL: "+url+"/run/"+deploymentID+"?media=xml");
String ret = httpsGet(url+"/run/"+deploymentID+"?media=xml");
if(ret.startsWith("<!DOCTYPE html>")){
return new HashMap<>();
}
else{
SAXParserFactory parserFactor = SAXParserFactory.newInstance();
SAXParser parser = parserFactor.newSAXParser();
SAXStateHandler handler = new SAXStateHandler();
parser.parse(new ByteArrayInputStream(ret.getBytes(StandardCharsets.UTF_8)),handler);
logger.info(handler.getIps());
return handler.getIps();
}
}
/**
 * Performs an HTTPS GET against the SlipStream server and returns the body
 * as a UTF-8 string. Uses cookie auth when enabled, Basic auth otherwise.
 *
 * WARNING(security): installs a trust-all TrustManager and hostname verifier
 * (process-wide defaults!) — acceptable only for self-signed test endpoints.
 *
 * @param urlLink absolute URL to fetch
 * @return response body decoded as UTF-8
 * @throws Exception on connection, TLS or read failure
 */
private String httpsGet(String urlLink) throws Exception {
    TrustManager[] trustAllCerts = new TrustManager[] {
        new X509TrustManager() {
            public X509Certificate[] getAcceptedIssuers() {
                return new X509Certificate[0];
            }
            public void checkClientTrusted(X509Certificate[] certs, String authType) {}
            public void checkServerTrusted(X509Certificate[] certs, String authType) {}
        }};
    // Ignore differences between given hostname and certificate hostname
    HostnameVerifier hv = new HostnameVerifier() {
        public boolean verify(String hostname, SSLSession session) { return true; }
    };
    // Install the all-trusting trust manager
    SSLContext sc = SSLContext.getInstance("SSL");
    sc.init(null, trustAllCerts, new SecureRandom());
    HttpsURLConnection.setDefaultSSLSocketFactory(sc.getSocketFactory());
    HttpsURLConnection.setDefaultHostnameVerifier(hv);
    SSLSocketFactory sslsocketfactory = sc.getSocketFactory();
    URL url1 = new URL(urlLink);
    HttpsURLConnection conn = (HttpsURLConnection)url1.openConnection();
    conn.setSSLSocketFactory(sslsocketfactory);
    if(cookieAuth){
        // BUGFIX: split(";", 1) never splits (limit 1 returns the whole string),
        // so the full cookie — attributes included — was sent. Limit 2 yields
        // the first "name=value" pair, which is what a Cookie header expects.
        conn.addRequestProperty("Cookie", cookie.split(";", 2)[0]);
    }
    else{
        String userpass = user + ":" + password;
        String basicAuth = "Basic " + new String(new Base64().encode(userpass.getBytes()));
        conn.setRequestProperty ("Authorization", basicAuth);
    }
    // Close the stream so the underlying connection can be released/reused.
    InputStream inputStream = conn.getInputStream();
    try {
        return IOUtils.toString(inputStream, "UTF-8");
    } finally {
        inputStream.close();
    }
}
/**
 * Attaches a disk of {@code gb} GB to VM "{type}.{id}" via the ss-scale-disk CLI.
 * Best-effort: CLI errors are deliberately ignored (commented-out check below).
 * NOTE(review): the cookie-auth branch builds an EMPTY command — unimplemented;
 * executing it will fail inside executeCommand/ProcessBuilder.
 *
 * @param deploymnetId run id
 * @param type node type
 * @param id node instance index
 * @param gb disk size in gigabytes
 */
public void attachDisk(String deploymnetId, String type, String id, Integer gb) throws Exception {
logger.info("Attaching disk vm: "+type+"."+id+" from deployment: "+deploymnetId+" disk: "+ gb+"GB");
String[] command;
if(cookieAuth){
command = new String[] {};
}
else{
//command = new String[] {"curl", url+"/run/"+deploymnetId+"/"+type, "-d", "ids="+ids, "--user", user+":"+password,"-X", "DELETE", "-k", "-D", "-"};
command = new String[] {"ss-scale-disk", "--endpoint", url, "-u", user, "-p", password, "--attach", gb+"", deploymnetId, type, id};
}
Map<String, String> ret = executeCommand(command);
/*if(!ret.get("error").equals("")){
throw new Exception(ret.get("error"));
}*/
}
/**
 * Detaches disk {@code diskId} from VM "{type}.{id}" via the ss-scale-disk CLI.
 * Best-effort: CLI errors are deliberately ignored (commented-out check below).
 * NOTE(review): the cookie-auth branch builds an EMPTY command — unimplemented.
 *
 * @param deploymnetId run id
 * @param type node type
 * @param id node instance index
 * @param diskId identifier of the disk to detach
 */
public void detachDisk(String deploymnetId, String type, String id, String diskId) throws Exception {
logger.info("Detaching disk vm: "+type+"."+id+" from deployment: "+deploymnetId+" disk id: "+ diskId);
String[] command;
if(cookieAuth){
command = new String[] {};
}
else{
//command = new String[] {"curl", url+"/run/"+deploymnetId+"/"+type, "-d", "ids="+ids, "--user", user+":"+password,"-X", "DELETE", "-k", "-D", "-"};
command = new String[] {"ss-scale-disk", "--endpoint", url, "-u", user, "-p", password, "--detach", diskId+"", deploymnetId, type, id};
}
Map<String, String> ret = executeCommand(command);
/*if(!ret.get("error").equals("")){
throw new Exception(ret.get("error"));
}*/
}
/**
 * Vertically scales VM "{type}.{id}" to the given cpu/ram via ss-scale-resize.
 * Best-effort: CLI errors are deliberately ignored (commented-out check below).
 * NOTE(review): the cookie-auth branch builds an EMPTY command — unimplemented.
 *
 * @param deploymnetId run id
 * @param type node type
 * @param id node instance index
 * @param cpu target vCPU count
 * @param ram target RAM
 */
public void scaleVM(String deploymnetId, String type, String id, Integer cpu, Integer ram) throws Exception {
logger.info("Scaling vm: "+type+"."+id+" from deployment: "+deploymnetId+" new cpu: "+ cpu+" ram: "+ram);
String[] command;
if(cookieAuth){
command = new String[] {};
}
else{
//command = new String[] {"curl", url+"/run/"+deploymnetId+"/"+type, "-d", "ids="+ids, "--user", user+":"+password,"-X", "DELETE", "-k", "-D", "-"};
command = new String[] {"ss-scale-resize", "--endpoint", url, "-u", user, "-p", password, "--cpu", cpu+"","--ram",ram+"", deploymnetId, type, id};
}
Map<String, String> ret = executeCommand(command);
/*if(!ret.get("error").equals("")){
throw new Exception(ret.get("error"));
}*/
}
/**
 * Vertically scales VM "{type}.{id}" to a named instance type (flavor) via
 * ss-scale-resize --instance-type.
 * Best-effort: CLI errors are deliberately ignored (commented-out check below).
 * NOTE(review): the cookie-auth branch builds an EMPTY command — unimplemented.
 *
 * @param deploymnetId run id
 * @param type node type
 * @param id node instance index
 * @param flavor cloud-specific instance type name
 */
public void scaleVM(String deploymnetId, String type, String id, String flavor) throws Exception {
logger.info("Scaling vm: "+type+"."+id+" from deployment: "+deploymnetId+" new flavor: "+flavor);
String[] command;
if(cookieAuth){
command = new String[] {};
}
else{
//command = new String[] {"curl", url+"/run/"+deploymnetId+"/"+type, "-d", "ids="+ids, "--user", user+":"+password,"-X", "DELETE", "-k", "-D", "-"};
command = new String[] {"ss-scale-resize", "--endpoint", url, "-u", user, "-p", password, "--instance-type", flavor, deploymnetId, type, id};
}
Map<String, String> ret = executeCommand(command);
/*if(!ret.get("error").equals("")){
throw new Exception(ret.get("error"));
}*/
}
/**
 * Adds {@code number} VMs of {@code type} with explicit flavor runtime
 * parameters, using ss-node-add.
 * NOTE(review): the assembled command string is tokenized with split(" "),
 * which breaks if any argument ever contains a space — confirm inputs.
 * Errors are only logged, never thrown.
 *
 * @param deploymentId run id
 * @param type node type
 * @param number how many VMs to add
 * @param cores vCPU count per VM
 * @param ram RAM per VM
 * @param disk disk size per VM
 */
public void addVM(String deploymentId, String type, Integer number, Integer cores, Integer ram, Integer disk) throws Exception {
logger.info(String.format("Adding %d VMs of type %s to deployment %s of flavor (%d cores, %d RAM, %d disk)",
number,
type,
deploymentId,
cores,
ram,
disk));
String command;
if(cookieAuth) {
command=String.format("ss-node-add --cookie %s --endpoint %s %s %s %d %s",
cookieFile,
url,
deploymentId,
type,
number,
calculateFlavorParameter(cores, ram, disk));
} else {
command=String.format("ss-node-add -u %s -p %s --endpoint %s %s %s %d %s",
user,
password,
url,
deploymentId,
type,
number,
calculateFlavorParameter(cores, ram, disk));
}
Map<String, String> ret = executeCommand(command.split(" "));
logger.info("Returned: "+ret.toString());
}
/**
 * Builds the --runtime-parameter flavor arguments to be appended AS IS to an
 * ss-node-add command line (tokens separated by single spaces).
 * okeanos connectors additionally require an explicit instance-type string
 * and the result keeps a trailing blank, exactly as before.
 */
private String calculateFlavorParameter(Integer cores, Integer ram, Integer disk) {
    String connector = getConnectorName();
    String cpuParam = "--runtime-parameter=" + connector + ".cpu:" + cores;
    String ramParam = "--runtime-parameter=" + connector + ".ram:" + ram;
    if (connector.contains("okeanos")) {
        String typeParam = "--runtime-parameter=" + connector
                + ".instance.type:C" + cores + "R" + ram + "D" + disk + "drbd";
        return typeParam + " " + cpuParam + " " + ramParam + " ";
    }
    return cpuParam + " " + ramParam;
}
// ss-node-add -u celar -p celar2015 --endpoint https://83.212.102.166 0e54569b-6b66-420d-b7c8-bcf247837445 Worker 1 --runtime-parameter okeanos.instance.type:C2R4096D20drbd
/**
 * Adds {@code number} VMs of {@code type} with the node's default flavor by
 * POSTing n={number} to /run/{id}/{type} with curl (-k: no TLS verification).
 * Best-effort: errors are deliberately ignored (commented-out check below).
 *
 * @return raw curl output, response headers included (-D -)
 */
public String addVM(String deploymnetId, String type, Integer number) throws Exception {
logger.info("Adding "+number+" vms: "+type+" to deployment: "+deploymnetId);
String[] command;
if(cookieAuth){
command = new String[] {"curl", url+"/run/"+deploymnetId+"/"+type, "-d", "n="+number, "--cookie", cookie,"-X", "POST", "-H", "Content-Type: text/plain", "-k", "-D", "-"};
}
else{
command = new String[] {"curl", url+"/run/"+deploymnetId+"/"+type, "-d", "n="+number, "--user", user+":"+password,"-X", "POST", "-H", "Content-Type: text/plain", "-k", "-D", "-"};
}
Map<String, String> ret = executeCommand(command);
/*if(!ret.get("error").equals("")){
throw new Exception(ret.get("error"));
}*/
return ret.get("output");
}
/**
 * Removes specific VM instances of a node type. With cookie auth this issues
 * a curl DELETE on /run/{id}/{type}; otherwise it uses the ss-node-remove CLI.
 * Best-effort: errors are deliberately ignored (commented-out check below).
 *
 * @param deploymnetId run id
 * @param type node type
 * @param ids instance indices; space-separated for ss-node-remove (see
 *        removeVM(String,String,int)) — confirm the curl branch expects the
 *        same separator
 */
public void removeVM(String deploymnetId, String type, String ids) throws Exception {
logger.info("Removing vm: "+type+"."+ids+" from deployment: "+deploymnetId);
String[] command;
if(cookieAuth){
command = new String[] {"curl", url+"/run/"+deploymnetId+"/"+type, "-d", "ids="+ids, "--cookie", cookie,"-X", "DELETE", "-k", "-D", "-"};
}
else{
//command = new String[] {"curl", url+"/run/"+deploymnetId+"/"+type, "-d", "ids="+ids, "--user", user+":"+password,"-X", "DELETE", "-k", "-D", "-"};
command = new String[] {"ss-node-remove", "--endpoint", url, "-u", user, "-p", password, deploymnetId, type, ids};
}
Map<String, String> ret = executeCommand(command);
/*if(!ret.get("error").equals("")){
throw new Exception(ret.get("error"));
}*/
}
/**
 * Remove a number of VMs from a specific node type
 * @param deploymnetId the unique Id of the deployment
 * @param type the node type
 * @param number the number of the VMs to be removed
 * @throws Exception
 */
public void removeVM(String deploymnetId, String type, int number) throws Exception {
    // Pick up to `number` runtime-parameter keys belonging to this node type.
    HashMap<String,String> ips = this.getDeploymentIPs(deploymnetId);
    List<String> vmsToBeDeleted = new ArrayList<>(number);
    for(String vm : ips.keySet()) {
        if(vm.startsWith(type) && vmsToBeDeleted.size()<number) {
            vmsToBeDeleted.add(vm.split(":")[0]); // keeping only the identifier, no the "hostname" keyword
        }
    }
    // Build the space-separated id list. BUGFIX: iterate the actual list size
    // instead of 0..number — the old loop threw IndexOutOfBoundsException when
    // fewer than `number` matching VMs existed. Also log via logger instead of
    // System.out, matching removeVMswithIDs.
    StringBuilder ids = new StringBuilder();
    for(int i=0;i<vmsToBeDeleted.size();i++) {
        String id = vmsToBeDeleted.get(i);
        String s = id.substring(id.indexOf('.')+1); // strip the "<type>." prefix
        logger.info("ID: "+s);
        if(i>0) {
            ids.append(' ');
        }
        ids.append(s);
    }
    this.removeVM(deploymnetId, type, ids.toString());
}
/**
 * Selects up to {@code number} VM identifiers of the given node type from the
 * deployment's runtime parameters, without removing anything.
 *
 * @return list of identifiers ("&lt;type&gt;.&lt;index&gt;", the part before ':')
 */
public List<String> removeVMIDs(String deploymnetId, String type, int number) throws Exception {
    HashMap<String,String> ips = this.getDeploymentIPs(deploymnetId);
    List<String> selected = new ArrayList<>(number);
    for (String key : ips.keySet()) {
        if (selected.size() >= number) {
            break; // already collected enough candidates
        }
        if (key.startsWith(type)) {
            // keep only the identifier, not the "hostname" keyword after ':'
            selected.add(key.split(":")[0]);
        }
    }
    return selected;
}
/**
 * Removes the given VM instances: strips the "&lt;type&gt;." prefix from each
 * identifier and delegates to removeVM(deploymnetId, type, ids) with a
 * space-separated index list.
 *
 * @param deploymnetId run id
 * @param vmsToBeDeleted identifiers of the form "&lt;type&gt;.&lt;index&gt;"
 * @param type node type
 */
public void removeVMswithIDs(String deploymnetId, List<String> vmsToBeDeleted, String type) throws Exception {
    // StringBuilder replaces the previous String += accumulation in a loop.
    StringBuilder ids = new StringBuilder();
    for(int i=0;i<vmsToBeDeleted.size();i++) {
        String id = vmsToBeDeleted.get(i);
        String s = id.substring(id.indexOf('.')+1);
        logger.info("ID: "+s);
        if(i>0) {
            ids.append(' ');
        }
        ids.append(s);
    }
    this.removeVM(deploymnetId, type, ids.toString());
}
/**
 * Resolves each VM identifier to its hostname by looking up the
 * "&lt;id&gt;:hostname" runtime parameter of the deployment.
 *
 * @return hostnames in the same order as {@code ids} (null entries for
 *         identifiers without a hostname parameter)
 */
public List<String> translateIPs(String deploymnetId, List<String> ids) throws Exception {
    HashMap<String,String> runtimeParams = this.getDeploymentIPs(deploymnetId);
    List<String> hostnames = new ArrayList<String>();
    for (String id : ids) {
        hostnames.add(runtimeParams.get(id + ":hostname"));
    }
    return hostnames;
}
/**
 * Blocks until the deployment reaches the Ready state, polling every 10 s.
 * NOTE(review): no timeout — this loops forever if the run never becomes
 * Ready (e.g. it fails or stays Unknown); consider adding a deadline.
 *
 * @param deploymnetId run id to wait on
 */
public void waitForReadyState(String deploymnetId) throws Exception {
logger.info("Waiting for ready state deploymentID: "+deploymnetId);
while(true){
States state = getDeploymentState(deploymnetId);
logger.info("Current State: "+state);
if(state.equals(States.Ready))
break;
Thread.sleep(10000);
}
}
/**
 * Creates (or overwrites) a SlipStream project module for the application and
 * grants the current user authorization on it.
 *
 * @param appName project/module name to create
 * @param appVersion currently unused — TODO confirm whether versioning was
 *        meant to be encoded in the module name
 * @return the application name that was created
 */
public String createApplication(String appName, String appVersion) throws Exception {
ProjectModule project = new ProjectModule(appName);
Authz auth = new Authz(getUser(), project);
project.setAuthz(auth);
putModule(project);
return appName;
}
/**
 * Returns (creating if necessary) the SlipStream module reference for a base
 * image. On a cache miss this creates a "&lt;user&gt;_images" project, builds a
 * base ImageModule with per-cloud image ids and the shared baseParameters,
 * uploads it, and caches the "module/&lt;project&gt;/&lt;image&gt;" reference.
 *
 * @param imageName logical image name or cloud image id
 * @return module reference of the form "module/&lt;user&gt;_images/&lt;imageName&gt;"
 */
public String getImageReference(String imageName) throws Exception {
logger.info("Getting image reference: "+imageName);
// Fast path: already created/uploaded during this service's lifetime.
String reference = baseImageReferences.get(imageName);
if(reference!=null)
return reference;
// Ensure the per-user container project exists (putModule is idempotent here
// in the sense that it simply re-uploads the project definition).
String projectName = user+"_images";
ProjectModule project = new ProjectModule(projectName);
Authz auth = new Authz(getUser(), project);
project.setAuthz(auth);
putModule(project);
reference = projectName+"/"+imageName;
ImageModule module = new ImageModule(reference);
module.setIsBase(true);
// Login user / platform depend on the target cloud; the two hard-coded ids
// are specific CentOS images used by Dataplay.
if(getConnectorName().equals("okeanos")){
module.setLoginUser("root");
}
else{ //specific image used by Dataplay
if(imageName.equals("af5194cf-f4e8-3213-baee-bd36c1c1c60b") || imageName.equals("092ffc8f-f494-35eb-96d5-c8035ba61b1f")) {
module.setLoginUser("centos");
module.setPlatform("centos");
} else {
module.setLoginUser("ubuntu");
module.setPlatform("ubuntu");
}
}
module.setDescription("Baseline Image "+imageName);
auth = new Authz(getUser(), module);
module.setAuthz(auth);
// Per-cloud image ids; when unknown, assume imageName is itself a cloud
// image id and register it for the configured connector and okeanos.
HashMap<String, String> imageIds = baseImages.get(imageName);
if(imageIds==null){
imageIds = new HashMap<String, String>();
imageIds.put(connectorName, imageName);
imageIds.put("okeanos", imageName);
//logger.error("No imageIDs for image with name: "+imageName);
//throw new Exception("No imageIDs for image with name: "+imageName);
}
Set<CloudImageIdentifier> cloudImageIdentifiers = new HashSet<CloudImageIdentifier>();
for(Entry<String, String> e : imageIds.entrySet()){
CloudImageIdentifier ident = new CloudImageIdentifier(module, e.getKey(), e.getValue());
cloudImageIdentifiers.add(ident);
}
module.setCloudImageIdentifiers(cloudImageIdentifiers );
// Attach the shared base parameters (currently empty; see init()).
for(ModuleParameter p : baseParameters){
module.setParameter(p);
}
putModule(module);
// Cache and return the fully-qualified module reference.
String ref = "module/"+reference;
baseImageReferences.put(imageName, ref);
return ref;
}
/**
 * Runs an external command and captures its stdout/stderr.
 *
 * @param command argv-style command line
 * @return map with keys "output" (stdout) and "error" (stderr), each
 *         newline-terminated per line
 * @throws IOException if the process cannot be started or read
 * @throws InterruptedException if interrupted while waiting for exit
 */
public Map<String,String> executeCommand(String[] command) throws IOException, InterruptedException {
    // Log the full command line (space-separated, as before).
    StringBuilder c = new StringBuilder("Executing command: ");
    for (int i = 0; i < command.length; i++) {
        c.append(command[i]).append(' ');
    }
    logger.info(c.toString());
    ProcessBuilder builder = new ProcessBuilder(command);
    Process process = builder.start();
    Map<String,String> ret = new HashMap<String, String>();
    // BUGFIX: drain stdout/stderr BEFORE waitFor(). Waiting first can deadlock:
    // the child blocks once its pipe buffer fills and never exits. (A very
    // chatty stderr could still fill while stdout is drained — acceptable for
    // the short-output CLI tools used here.)
    ret.put("output", drain(process.getInputStream()));
    ret.put("error", drain(process.getErrorStream()));
    process.waitFor();
    logger.info("Command Output: "+ret.get("output"));
    logger.info("Command Error: "+ret.get("error"));
    return ret;
}
// Reads a stream line by line into a newline-terminated string and closes it.
private String drain(InputStream in) throws IOException {
    StringBuilder output = new StringBuilder();
    BufferedReader reader = new BufferedReader(new InputStreamReader(in));
    try {
        String line;
        while ((line = reader.readLine()) != null) {
            output.append(line).append("\n");
        }
    } finally {
        reader.close();
    }
    return output.toString();
}
/**
 * Creates a service using user/password authentication against the default
 * "Flexiant" connector.
 *
 * @param user SlipStream user name
 * @param password SlipStream password
 * @param url SlipStream endpoint URL
 */
public SlipStreamSSService(String user, String password, String url) throws ValidationException {
    super();
    // SECURITY fix: never log the password in clear text.
    logger.info("Init ssService user: "+user+" url: "+url);
    this.user = user;
    this.password = password;
    this.url = url;
    this.connectorName="Flexiant";
    init();
}
/**
 * Creates a service using user/password authentication against an explicit
 * cloud connector.
 *
 * @param user SlipStream user name
 * @param password SlipStream password
 * @param url SlipStream endpoint URL
 * @param connectorName cloud connector name (e.g. "okeanos")
 */
public SlipStreamSSService(String user, String password, String url, String connectorName) throws ValidationException {
    super();
    // SECURITY fix: never log the password in clear text.
    logger.info("Init ssService user: "+user+" url: "+url);
    this.user = user;
    this.password = password;
    this.url = url;
    this.connectorName=connectorName;
    init();
}
/**
 * Creates a service using cookie authentication. The cookie is persisted to a
 * temp file for the ss-* CLI tools.
 * NOTE(review): the {@code cookieAuth} parameter is ignored — cookie auth is
 * always enabled; the Boolean presumably only disambiguates this overload
 * from the (user, password, url, connectorName) constructor. Also note that
 * connectorName is left null here — confirm callers set it before use.
 */
public SlipStreamSSService(String user, String cookie, String url, Boolean cookieAuth) throws Exception {
super();
this.user = user;
this.cookie = cookie;
writeCookie(cookie);
this.cookieAuth = true;
this.url = url;
init();
}
/**
 * Initializes the image caches: registers the known cloud image ids of the
 * Ubuntu 12.04 base image (for the configured connector, okeanos and
 * stratuslab) under several alias names, and creates an (currently empty)
 * list of shared base-image parameters. The large commented-out section is
 * legacy per-connector parameter configuration kept for reference —
 * consider deleting it.
 */
private void init() throws ValidationException{
baseImageReferences = new HashMap<String,String>();
baseImages = new HashMap<>();
// Same id map registered under three spellings of the image name.
HashMap<String,String> temp = new HashMap<String, String>();
temp.put(connectorName, "81aef2d3-0291-38ef-b53a-22fcd5418e60");
temp.put("okeanos", "fe31fced-a3cf-49c6-b43b-f58f5235ba45");
temp.put("stratuslab", "HZTKYZgX7XzSokCHMB60lS0wsiv");
baseImages.put("ubuntu-12.04", temp);
baseImages.put("Ubuntu 12.04.1 LTS", temp);
baseImages.put("Ubuntu12.04.1LTS", temp);
baseParameters = new ArrayList<ModuleParameter>();
/*String parameterName = "Flexiant.ram";
String description = "ram";
String value = "2048";
ModuleParameter parameter = new ModuleParameter(parameterName, value, description);
parameter.setCategory("Flexiant");
parameter.setDefaultValue("2048");
baseParameters.add(parameter);
parameterName = "Flexiant.cpu";
description = "cpu";
value = "2";
parameter = new ModuleParameter(parameterName, value, description);
parameter.setCategory("Flexiant");
parameter.setDefaultValue("2");
baseParameters.add(parameter);
parameterName = "okeanos.instance.type";
description = "Flavor";
value = "C2R2048D10ext_vlmc";
parameter = new ModuleParameter(parameterName, value, description);
parameter.setCategory("okeanos");
parameter.setDefaultValue("C2R2048D10ext_vlmc");
baseParameters.add(parameter);
parameterName = "okeanos.security.groups";
description = "Security Groups (comma separated list)";
value = "default";
parameter = new ModuleParameter(parameterName, value, description);
parameter.setCategory("okeanos");
parameter.setDefaultValue("default");
baseParameters.add(parameter);
parameterName = "ready";
description = "Server ready";
parameter = new ModuleParameter(parameterName, "", description);
parameter.setCategory("Output");
baseParameters.add(parameter);
parameterName = "loaded";
description = "Data loaded";
parameter = new ModuleParameter(parameterName, "", description);
parameter.setCategory("Output");
baseParameters.add(parameter);*/
}
/**
 * Prepends a JCatascopia monitoring-agent bootstrap script to the given
 * execute script: the bootstrap registers the node's hostname, installs a
 * JRE (apt or yum depending on distro), downloads the agent from the CELAR
 * Nexus repository, points it at the orchestrator and starts it.
 * NOTE(review): the repository URL and artifact coordinates are hard-coded.
 *
 * @param script original node execute script
 * @return bootstrap script followed by the original script
 */
public String patchExecuteScript(String script){
String jcatascopiaInit = "#!/bin/bash \n"
+ "ip=$(ss-get hostname) \n"
+ "hostname=$(hostname) \n"
+ "echo $ip $hostname >> /etc/hosts \n"
+ "SERVER_IP=$(ss-get orchestrator-"+connectorName+":hostname) \n"
+ "CELAR_REPO=http://snf-175960.vm.okeanos.grnet.gr \n"
+ "JC_VERSION=LATEST \n"
+ "JC_ARTIFACT=JCatascopia-Agent \n"
+ "JC_GROUP=eu.celarcloud.cloud-ms \n"
+ "JC_TYPE=tar.gz \n"
+ "DISTRO=$(eval cat /etc/*release) \n"
+ "if [[ \"$DISTRO\" == *Ubuntu* ]]; then \n"
+ " apt-get update -y \n"
+ " #download and install java \n"
+ " apt-get install -y openjdk-7-jre-headless \n"
+ "fi \n"
+ "if [[ \"$DISTRO\" == *CentOS* ]]; then \n"
+ " yum -y update \n"
+ " yum install -y wget \n"
+ " #download and install java \n"
+ " yum -y install java-1.7.0-openjdk \n"
+ "fi \n"
+ "#download,install and start jcatascopia agent... \n"
+ "URL=\"$CELAR_REPO/nexus/service/local/artifact/maven/redirect?r=snapshots&g=$JC_GROUP&a=$JC_ARTIFACT&v=$JC_VERSION&p=$JC_TYPE\" \n"
+ "wget -O JCatascopia-Agent.tar.gz $URL \n"
+ "tar xvfz JCatascopia-Agent.tar.gz \n"
+ "eval \"sed -i 's/server_ip=.*/server_ip=$SERVER_IP/g' JCatascopia-Agent-*/JCatascopiaAgentDir/resources/agent.properties\" \n"
+ "cd JCatascopia-Agent-* \n"
+ "./installer.sh \n"
+ "cd .. \n"
+ "/etc/init.d/JCatascopia-Agent restart \n";
return jcatascopiaInit+script;
}
/**
 * Derives the module's output parameters from a script: every
 * "ss-set &lt;name&gt; ..." occurrence declares one output parameter named
 * after the token that follows "ss-set ".
 *
 * @param script shell script to scan
 * @return one "Output"-category ModuleParameter per ss-set occurrence
 */
public List<ModuleParameter> getOutputParamsFromScript(String script) throws ValidationException{
    List<ModuleParameter> params = new ArrayList<ModuleParameter>();
    String[] chunks = script.split("ss-set ");
    // chunks[0] is everything before the first "ss-set "; skip it.
    for (int i = 1; i < chunks.length; i++) {
        String chunk = chunks[i];
        String name = chunk.substring(0, chunk.indexOf(" "));
        ModuleParameter parameter = new ModuleParameter(name, "", name);
        parameter.setCategory("Output");
        params.add(parameter);
    }
    return params;
}
/**
 * Converts a generic flavor description ("vcpus:N ram:M disk:K", space
 * separated, any order) into the module parameters expected by okeanos
 * (instance type + security groups) and by the configured connector
 * (separate cpu/ram parameters).
 *
 * @param flavor space-separated "key:value" flavor spec
 * @return four ModuleParameters: okeanos.instance.type,
 *         okeanos.security.groups, &lt;connector&gt;.ram, &lt;connector&gt;.cpu
 */
public List<ModuleParameter> createFlavorParameters(String flavor)throws ValidationException{
List<ModuleParameter> ret = new ArrayList<ModuleParameter>();
// Parse the "key:value" tokens; unrecognized keys are silently ignored.
String cpu="", ram="", disk="";
String[] fl = flavor.split(" ");
for (int i = 0; i < fl.length; i++) {
String[] f = fl[i].split(":");
switch (f[0]) {
case "vcpus":
cpu=f[1];
break;
case "ram":
ram=f[1];
break;
case "disk":
disk=f[1];
break;
default:
break;
}
}
// okeanos encodes the whole flavor in a single "C<cpu>R<ram>D<disk>drbd" id.
String okeanosFlavor = "C"+cpu+"R"+ram+"D"+disk+"drbd";
logger.info("Okeanos flavor: "+okeanosFlavor);
String parameterName = "okeanos.instance.type";
String description = "Flavor";
String value = okeanosFlavor;
ModuleParameter parameter = new ModuleParameter(parameterName, value, description);
parameter.setCategory("okeanos");
parameter.setDefaultValue(okeanosFlavor);
ret.add(parameter);
parameterName = "okeanos.security.groups";
description = "Security Groups (comma separated list)";
value = "default";
parameter = new ModuleParameter(parameterName, value, description);
parameter.setCategory("okeanos");
parameter.setDefaultValue("default");
ret.add(parameter);
// The configured connector takes ram and cpu as individual parameters.
parameterName = connectorName+".ram";
description = "ram";
value = ram;
parameter = new ModuleParameter(parameterName, value, description);
parameter.setCategory(connectorName);
parameter.setDefaultValue(ram);
ret.add(parameter);
parameterName = connectorName+".cpu";
description = "cpu";
value = cpu;
parameter = new ModuleParameter(parameterName, value, description);
parameter.setCategory(connectorName);
parameter.setDefaultValue(cpu);
ret.add(parameter);
return ret;
}
// --- plain accessors for credentials, endpoint and connector name ---
public String getUser() {
return user;
}
public void setUser(String user) {
this.user = user;
}
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password;
}
public String getUrl() {
return url;
}
public void setUrl(String url) {
this.url = url;
}
public String getConnectorName() {
return connectorName;
}
public void setConnectorName(String connectorName) {
this.connectorName = connectorName;
}
/**
 * Fetches the run XML and parses ALL runtime parameters of the deployment
 * via SSXMLParser.parse (not just the address-related ones).
 *
 * @param deploymentId run id to query
 * @return runtime-parameter name to value
 */
public Map<String,String> getAllRuntimeParams(String deploymentId) throws Exception{
String ret= httpsGet(getUrl()+"/run/"+deploymentId+"?media=xml");
return parse(ret);
}
/**
 * Translates CELAR lifecycle scripts into SlipStream deployment targets.
 * Script-map keys are matched by substring ("scaleOut"/"scaleIn"/"vmResize"/
 * "attachDisk"/"detachDisk" combined with "Pre"/"Post"/"Lifecycle") and their
 * bodies are substituted into the pre/post-scale shell templates' placeholders
 * ($remove/$resize/$disk_attach/$disk_detach). scaleOut Post/Lifecycle and
 * scaleIn Post/Lifecycle become dedicated on-VM-add / on-VM-remove targets.
 * Unused placeholders are blanked before the four Targets are added.
 *
 * @param scripts lifecycle script name -> script body
 * @param targets output collection the four generated Targets are added to
 */
public void generateTargetScripts(HashMap<String, String> scripts,
Set<Target> targets) {
// Work on copies of the static templates so they stay reusable.
String preScale = new String(preScaleScript);
String postScale = new String(postScaleScript);
String postOnVmAdd = "";
String postOnVmRemove = "";
for(Entry<String, String> script: scripts.entrySet()){
if(script.getKey().contains("scaleOut") && script.getKey().contains("Pre")){
// NOTE(review): intentionally empty — the pre-scale-out hook is disabled
// (substitution left commented out). Confirm this is still wanted.
//preScale = preScale.replace("$remove", script.getValue());
}
else if(script.getKey().contains("scaleOut") && (script.getKey().contains("Post") || script.getKey().contains("Lifecycle"))){
postOnVmAdd = script.getValue();
}
else if(script.getKey().contains("scaleIn") && script.getKey().contains("Pre")){
preScale = preScale.replace("$remove", script.getValue());
}
else if(script.getKey().contains("scaleIn") && (script.getKey().contains("Post") || script.getKey().contains("Lifecycle"))){
postOnVmRemove = script.getValue();
}
else if(script.getKey().contains("vmResize") && script.getKey().contains("Pre")){
preScale = preScale.replace("$resize", script.getValue());
}
else if(script.getKey().contains("vmResize") && (script.getKey().contains("Post") || script.getKey().contains("Lifecycle"))){
postScale = postScale.replace("$resize", script.getValue());
}
else if(script.getKey().contains("attachDisk") && script.getKey().contains("Pre")){
preScale = preScale.replace("$disk_attach", script.getValue());
}
else if(script.getKey().contains("attachDisk") && (script.getKey().contains("Post") || script.getKey().contains("Lifecycle"))){
postScale = postScale.replace("$disk_attach", script.getValue());
}
else if(script.getKey().contains("detachDisk") && script.getKey().contains("Pre")){
preScale = preScale.replace("$disk_detach", script.getValue());
}
else if(script.getKey().contains("detachDisk") && (script.getKey().contains("Post") || script.getKey().contains("Lifecycle"))){
postScale = postScale.replace("$disk_detach", script.getValue());
}
}
// Blank any placeholder no script was supplied for, so the generated shell
// functions degrade to simple echo statements.
postScale = postScale.replace("$resize", "");
postScale = postScale.replace("$disk_attach", "");
postScale = postScale.replace("$disk_detach", "");
preScale = preScale.replace("$remove", "");
preScale = preScale.replace("$resize", "");
preScale = preScale.replace("$disk_attach", "");
preScale = preScale.replace("$disk_detach", "");
logger.debug("---------------------------------------------preScale---------------------------------------------");
logger.debug(preScale);
logger.debug("---------------------------------------------postScale---------------------------------------------");
logger.debug(postScale);
logger.debug("---------------------------------------------postOnVmAdd---------------------------------------------");
logger.debug(postOnVmAdd);
logger.debug("---------------------------------------------postOnVmRemove---------------------------------------------");
logger.debug(postOnVmRemove);
Target postOnVmAddt = new Target(Target.ONVMADD_TARGET, postOnVmAdd);
targets.add(postOnVmAddt);
Target postOnVmRemovet = new Target(Target.ONVMREMOVE_TARGET, postOnVmRemove);
targets.add(postOnVmRemovet);
Target postScalet = new Target(Target.POSTSCALE_TARGET, postScale);
targets.add(postScalet);
Target preScalet = new Target(Target.PRESCALE_TARGET, preScale);
targets.add(preScalet);
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
package com.azure.resourcemanager.compute;
import com.azure.core.http.HttpPipeline;
import com.azure.resourcemanager.compute.models.CachingTypes;
import com.azure.resourcemanager.compute.models.Disk;
import com.azure.resourcemanager.compute.models.KnownLinuxVirtualMachineImage;
import com.azure.resourcemanager.compute.models.ProximityPlacementGroupType;
import com.azure.resourcemanager.compute.models.VirtualMachine;
import com.azure.resourcemanager.compute.models.VirtualMachineDataDisk;
import com.azure.resourcemanager.compute.models.VirtualMachineSizeTypes;
import com.azure.resourcemanager.network.models.LoadBalancer;
import com.azure.resourcemanager.network.models.LoadBalancerFrontend;
import com.azure.resourcemanager.network.models.LoadBalancerPublicFrontend;
import com.azure.resourcemanager.network.models.LoadBalancerSkuType;
import com.azure.resourcemanager.network.models.LoadBalancingRule;
import com.azure.resourcemanager.network.models.Network;
import com.azure.resourcemanager.network.models.PublicIpAddress;
import com.azure.resourcemanager.network.models.PublicIPSkuType;
import com.azure.resourcemanager.network.models.Subnet;
import com.azure.resourcemanager.network.models.TransportProtocol;
import com.azure.resourcemanager.resources.models.ResourceGroup;
import com.azure.resourcemanager.resources.fluentcore.arm.AvailabilityZoneId;
import com.azure.core.management.Region;
import com.azure.resourcemanager.resources.fluentcore.model.Creatable;
import com.azure.resourcemanager.resources.fluentcore.model.CreatedResources;
import java.util.Iterator;
import java.util.Map;
import com.azure.core.management.profile.AzureProfile;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
/**
 * Integration tests for availability-zone related virtual machine operations:
 * creating zoned VMs, verifying the zone placement of implicitly and explicitly
 * created related resources (public IP addresses, managed OS/data disks), and
 * associating VMs with zone-resilient (STANDARD SKU) load balancers.
 */
public class VirtualMachineAvailabilityZoneOperationsTests extends ComputeManagementTest {
    // Resource group holding every resource a test creates; regenerated per run in initializeClients().
    private String rgName = "";
    // Region for all resources; locationOrDefault presumably substitutes a fallback region — TODO confirm in base class.
    private final Region region = locationOrDefault(Region.US_EAST2);
    // Name shared by the virtual machine each test creates.
    private final String vmName = "javavm";

    /** Picks a fresh random resource-group name, then delegates client setup to the base class. */
    @Override
    protected void initializeClients(HttpPipeline httpPipeline, AzureProfile profile) {
        rgName = generateRandomResourceName("javacsmrg", 15);
        super.initializeClients(httpPipeline, profile);
    }

    /** Begins (without awaiting) deletion of the test resource group and everything in it. */
    @Override
    protected void cleanUpResources() {
        resourceManager.resourceGroups().beginDeleteByName(rgName);
    }

    /**
     * Creates a VM pinned to zone 1 with an implicitly created public IP, OS disk and a
     * proximity placement group, then verifies that the VM, the (BASIC) public IP and the
     * managed OS disk all carry zone 1, and that the PPG references the VM.
     */
    @Test
    public void canCreateZonedVirtualMachineWithImplicitZoneForRelatedResources() throws Exception {
        final String pipDnsLabel = generateRandomResourceName("pip", 10);
        final String proxyGroupName = "plg1Test";
        // Create a zoned virtual machine
        //
        VirtualMachine virtualMachine =
            computeManager
                .virtualMachines()
                .define(vmName)
                .withRegion(region)
                .withNewResourceGroup(rgName)
                .withNewPrimaryNetwork("10.0.0.0/28")
                .withPrimaryPrivateIPAddressDynamic()
                .withNewPrimaryPublicIPAddress(pipDnsLabel)
                .withNewProximityPlacementGroup(proxyGroupName, ProximityPlacementGroupType.STANDARD)
                .withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS)
                .withRootUsername("Foo12")
                .withSsh(sshPublicKey())
                // Optionals
                .withAvailabilityZone(AvailabilityZoneId.ZONE_1)
                .withSize(VirtualMachineSizeTypes.fromString("Standard_D2a_v4"))
                .withOSDiskCaching(CachingTypes.READ_WRITE)
                // Create VM
                .create();
        // Checks the zone assigned to the virtual machine
        //
        Assertions.assertNotNull(virtualMachine.availabilityZones());
        Assertions.assertFalse(virtualMachine.availabilityZones().isEmpty());
        Assertions.assertTrue(virtualMachine.availabilityZones().contains(AvailabilityZoneId.ZONE_1));
        // Check the proximity placement group information
        Assertions.assertNotNull(virtualMachine.proximityPlacementGroup());
        Assertions
            .assertEquals(
                ProximityPlacementGroupType.STANDARD,
                virtualMachine.proximityPlacementGroup().proximityPlacementGroupType());
        Assertions.assertNotNull(virtualMachine.proximityPlacementGroup().virtualMachineIds());
        Assertions
            .assertTrue(
                virtualMachine
                    .id()
                    .equalsIgnoreCase(virtualMachine.proximityPlacementGroup().virtualMachineIds().get(0)));
        // Checks the zone assigned to the implicitly created public IP address.
        // Implicitly created PIP will be BASIC
        //
        PublicIpAddress publicIPAddress = virtualMachine.getPrimaryPublicIPAddress();
        Assertions.assertNotNull(publicIPAddress.availabilityZones());
        Assertions.assertFalse(publicIPAddress.availabilityZones().isEmpty());
        Assertions.assertTrue(publicIPAddress.availabilityZones().contains(AvailabilityZoneId.ZONE_1));
        // Checks the zone assigned to the implicitly created managed OS disk.
        //
        String osDiskId = virtualMachine.osDiskId(); // Only VM based on managed disk can have zone assigned
        Assertions.assertNotNull(osDiskId);
        Assertions.assertFalse(osDiskId.isEmpty());
        Disk osDisk = computeManager.disks().getById(osDiskId);
        Assertions.assertNotNull(osDisk);
        // Checks the zone assigned to the implicitly created managed OS disk.
        //
        Assertions.assertNotNull(osDisk.availabilityZones());
        Assertions.assertFalse(osDisk.availabilityZones().isEmpty());
        Assertions.assertTrue(osDisk.availabilityZones().contains(AvailabilityZoneId.ZONE_1));
    }

    /**
     * Creates the public IP (BASIC SKU, explicitly zoned) and a data disk in zone 1 up
     * front, attaches both to a zone-1 VM, then verifies the zone of the VM, the public
     * IP, the attached data disk and the implicitly created OS disk.
     */
    @Test
    public void canCreateZonedVirtualMachineWithExplicitZoneForRelatedResources() throws Exception {
        // Create zoned public IP for the virtual machine
        //
        final String pipDnsLabel = generateRandomResourceName("pip", 10);
        PublicIpAddress publicIPAddress =
            networkManager
                .publicIpAddresses()
                .define(pipDnsLabel)
                .withRegion(region)
                .withNewResourceGroup(rgName)
                .withStaticIP()
                // Optionals
                .withAvailabilityZone(
                    AvailabilityZoneId.ZONE_1) // since the SKU is BASIC and VM is zoned, PIP must be zoned
                .withSku(
                    PublicIPSkuType
                        .BASIC) // Basic sku is never zone resilient, so if you want it zoned, specify explicitly as
                                // above.
                // Create PIP
                .create();
        // Create a zoned data disk for the virtual machine
        //
        final String diskName = generateRandomResourceName("dsk", 10);
        Disk dataDisk =
            computeManager
                .disks()
                .define(diskName)
                .withRegion(region)
                .withExistingResourceGroup(rgName)
                .withData()
                .withSizeInGB(100)
                // Optionals
                .withAvailabilityZone(AvailabilityZoneId.ZONE_1)
                // Create Disk
                .create();
        // Create a zoned virtual machine
        //
        VirtualMachine virtualMachine =
            computeManager
                .virtualMachines()
                .define(vmName)
                .withRegion(region)
                .withNewResourceGroup(rgName)
                .withNewPrimaryNetwork("10.0.0.0/28")
                .withPrimaryPrivateIPAddressDynamic()
                .withExistingPrimaryPublicIPAddress(publicIPAddress)
                .withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS)
                .withRootUsername("Foo12")
                .withSsh(sshPublicKey())
                // Optionals
                .withAvailabilityZone(AvailabilityZoneId.ZONE_1)
                .withExistingDataDisk(dataDisk)
                .withSize(VirtualMachineSizeTypes.fromString("Standard_D2a_v4"))
                // Create VM
                .create();
        // Checks the zone assigned to the virtual machine
        //
        Assertions.assertNotNull(virtualMachine.availabilityZones());
        Assertions.assertFalse(virtualMachine.availabilityZones().isEmpty());
        Assertions.assertTrue(virtualMachine.availabilityZones().contains(AvailabilityZoneId.ZONE_1));
        // Checks the zone assigned to the explicitly created public IP address.
        //
        publicIPAddress = virtualMachine.getPrimaryPublicIPAddress();
        Assertions.assertNotNull(publicIPAddress.sku());
        Assertions.assertTrue(publicIPAddress.sku().equals(PublicIPSkuType.BASIC));
        Assertions.assertNotNull(publicIPAddress.availabilityZones());
        Assertions.assertFalse(publicIPAddress.availabilityZones().isEmpty());
        Assertions.assertTrue(publicIPAddress.availabilityZones().contains(AvailabilityZoneId.ZONE_1));
        // Check the zone assigned to the explicitly created data disk
        //
        Map<Integer, VirtualMachineDataDisk> dataDisks = virtualMachine.dataDisks();
        Assertions.assertNotNull(dataDisks);
        Assertions.assertFalse(dataDisks.isEmpty());
        VirtualMachineDataDisk dataDisk1 = dataDisks.values().iterator().next();
        Assertions.assertNotNull(dataDisk1.id());
        // Re-fetch the disk resource to read its zone information (dataDisk is reassigned here).
        dataDisk = computeManager.disks().getById(dataDisk1.id());
        Assertions.assertNotNull(dataDisk);
        Assertions.assertNotNull(dataDisk.availabilityZones());
        Assertions.assertFalse(dataDisk.availabilityZones().isEmpty());
        Assertions.assertTrue(dataDisk.availabilityZones().contains(AvailabilityZoneId.ZONE_1));
        // Checks the zone assigned to the implicitly created managed OS disk.
        //
        String osDiskId = virtualMachine.osDiskId(); // Only VM based on managed disk can have zone assigned
        Assertions.assertNotNull(osDiskId);
        Assertions.assertFalse(osDiskId.isEmpty());
        Disk osDisk = computeManager.disks().getById(osDiskId);
        Assertions.assertNotNull(osDisk);
        // Checks the zone assigned to the implicitly created managed OS disk.
        //
        Assertions.assertNotNull(osDisk.availabilityZones());
        Assertions.assertFalse(osDisk.availabilityZones().isEmpty());
        Assertions.assertTrue(osDisk.availabilityZones().contains(AvailabilityZoneId.ZONE_1));
    }

    /**
     * Creates a zone-1 VM attached to a STANDARD-SKU (zone-resilient) public IP and
     * verifies that the VM carries the zone while the zone-resilient PIP reports no
     * explicit zones of its own.
     */
    @Test
    public void canCreateZonedVirtualMachineWithZoneResilientPublicIP() throws Exception {
        // Create zone resilient public IP for the virtual machine
        //
        final String pipDnsLabel = generateRandomResourceName("pip", 10);
        PublicIpAddress publicIPAddress =
            networkManager
                .publicIpAddresses()
                .define(pipDnsLabel)
                .withRegion(region)
                .withNewResourceGroup(rgName)
                .withStaticIP()
                // Optionals
                .withSku(
                    PublicIPSkuType
                        .STANDARD) // No zone selected, STANDARD SKU is zone resilient [zone resilient: resources
                                   // deployed in all zones by the service and it will be served by all AZs all the
                                   // time]
                // Create PIP
                .create();
        // Create a zoned virtual machine
        //
        VirtualMachine virtualMachine =
            computeManager
                .virtualMachines()
                .define(vmName)
                .withRegion(region)
                .withNewResourceGroup(rgName)
                .withNewPrimaryNetwork("10.0.0.0/28")
                .withPrimaryPrivateIPAddressDynamic()
                .withExistingPrimaryPublicIPAddress(publicIPAddress)
                .withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS)
                .withRootUsername("Foo12")
                .withSsh(sshPublicKey())
                // Optionals
                .withAvailabilityZone(AvailabilityZoneId.ZONE_1)
                .withSize(VirtualMachineSizeTypes.fromString("Standard_D2a_v4"))
                // Create VM
                .create();
        // Checks the zone assigned to the virtual machine
        //
        Assertions.assertNotNull(virtualMachine.availabilityZones());
        Assertions.assertFalse(virtualMachine.availabilityZones().isEmpty());
        Assertions.assertTrue(virtualMachine.availabilityZones().contains(AvailabilityZoneId.ZONE_1));
        // Check the zone resilient PIP
        //
        publicIPAddress = virtualMachine.getPrimaryPublicIPAddress();
        Assertions.assertNotNull(publicIPAddress.sku());
        Assertions.assertTrue(publicIPAddress.sku().equals(PublicIPSkuType.STANDARD));
        Assertions
            .assertNotNull(
                publicIPAddress
                    .availabilityZones()); // Though zone-resilient, this property won't be populated by the service.
        Assertions.assertTrue(publicIPAddress.availabilityZones().isEmpty());
    }

    /**
     * Creates two regional (non-zoned, no availability set) VMs in different subnets and
     * fronts them with a single backend pool of a STANDARD (zone-resilient) load balancer,
     * then verifies the LB's frontends, backends, probes and rules.
     * Disabled pending a network-service fix; the commented-out power-off/deallocate and
     * start calls are the documented workaround to restore once the fix ships.
     */
    @Test
    @Disabled("Though valid scenario, ignoring it due to network service bug")
    @SuppressWarnings("unchecked")
    public void
        canCreateRegionalNonAvailSetVirtualMachinesAndAssociateThemWithSingleBackendPoolOfZoneResilientLoadBalancer()
        throws Exception {
        final String networkName = generateRandomResourceName("net", 10);
        Network network =
            networkManager
                .networks()
                .define(networkName)
                .withRegion(region)
                .withNewResourceGroup(rgName)
                .withAddressSpace("10.0.0.0/28")
                .withSubnet("subnet1", "10.0.0.0/29")
                .withSubnet("subnet2", "10.0.0.8/29")
                .create();
        // create two regional virtual machine, which does not belongs to any availability set
        //
        Iterator<Subnet> subnets = network.subnets().values().iterator();
        // Define first regional virtual machine
        //
        Creatable<VirtualMachine> creatableVM1 =
            computeManager
                .virtualMachines()
                .define(generateRandomResourceName("vm1", 10))
                .withRegion(region)
                .withExistingResourceGroup(rgName)
                .withExistingPrimaryNetwork(network)
                .withSubnet(subnets.next().name()) // Put VM in first subnet
                .withPrimaryPrivateIPAddressDynamic()
                .withoutPrimaryPublicIPAddress()
                .withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS)
                .withRootUsername("Foo12")
                .withSsh(sshPublicKey())
                // Optionals
                .withSize(VirtualMachineSizeTypes.fromString("Standard_D2a_v4"));
        // Define second regional virtual machine
        //
        Creatable<VirtualMachine> creatableVM2 =
            computeManager
                .virtualMachines()
                .define(generateRandomResourceName("vm2", 10))
                .withRegion(region)
                .withExistingResourceGroup(rgName)
                .withExistingPrimaryNetwork(network)
                .withSubnet(subnets.next().name()) // Put VM in second subnet
                .withPrimaryPrivateIPAddressDynamic()
                .withoutPrimaryPublicIPAddress()
                .withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS)
                .withRootUsername("Foo12")
                .withSsh(sshPublicKey())
                // Optionals
                .withSize(VirtualMachineSizeTypes.fromString("Standard_D2a_v4"));
        // Create both VMs in a single batch request.
        CreatedResources<VirtualMachine> createdVMs =
            computeManager.virtualMachines().create(creatableVM1, creatableVM2);
        VirtualMachine firstVirtualMachine = createdVMs.get(creatableVM1.key());
        VirtualMachine secondVirtualMachine = createdVMs.get(creatableVM2.key());
        // Work around bug in the network service
        // Once the fix is deployed remove below code to powerOff and deallocate VMs
        //
        //        Completable completable1 =
        // firstVirtualMachine.powerOffAsync().concatWith(firstVirtualMachine.deallocateAsync());
        //        Completable completable2 =
        // secondVirtualMachine.powerOffAsync().concatWith(secondVirtualMachine.deallocateAsync());
        //        Completable.merge(completable1, completable2).await();
        // Creates a public IP address for the internet-facing load-balancer
        //
        final String pipDnsLabel = generateRandomResourceName("pip", 10);
        PublicIpAddress publicIPAddress =
            networkManager
                .publicIpAddresses()
                .define(pipDnsLabel)
                .withRegion(region)
                .withExistingResourceGroup(rgName)
                .withStaticIP()
                // Optionals
                .withSku(PublicIPSkuType.STANDARD) // STANDARD LB requires STANDARD PIP
                // Create PIP
                .create();
        // Creates a Internet-Facing LoadBalancer with one front-end IP configuration and
        // two backend pool associated with this IP Config
        //
        final String lbName = generateRandomResourceName("lb", 10);
        LoadBalancer lb =
            this
                .networkManager
                .loadBalancers()
                .define(lbName)
                .withRegion(region)
                .withExistingResourceGroup(rgName)
                .defineLoadBalancingRule("rule-1")
                    .withProtocol(TransportProtocol.TCP)
                    .fromFrontend("front-end-1")
                    .fromFrontendPort(80)
                    .toExistingVirtualMachines(firstVirtualMachine, secondVirtualMachine)
                    .withProbe("tcpProbe-1")
                    .attach()
                .definePublicFrontend("front-end-1") // Define the frontend IP configuration used by the LB rule
                    .withExistingPublicIpAddress(publicIPAddress)
                    .attach()
                .defineTcpProbe("tcpProbe-1") // Define the Probe used by the LB rule
                    .withPort(25)
                    .withIntervalInSeconds(15)
                    .withNumberOfProbes(5)
                    .attach()
                .withSku(
                    LoadBalancerSkuType
                        .STANDARD) // "zone-resilient LB" which don't have the constraint that all VMs needs to be in
                                   // the same availability set
                .create();
        // Zone resilient LB does not care VMs are zoned or regional, in the above cases VMs are regional.
        //
        //        rx.Completable.merge(firstVirtualMachine.startAsync(), secondVirtualMachine.startAsync()).await();
        // Verify frontends
        Assertions.assertEquals(1, lb.frontends().size());
        Assertions.assertEquals(1, lb.publicFrontends().size());
        Assertions.assertEquals(0, lb.privateFrontends().size());
        LoadBalancerFrontend frontend = lb.frontends().values().iterator().next();
        Assertions.assertTrue(frontend.isPublic());
        LoadBalancerPublicFrontend publicFrontend = (LoadBalancerPublicFrontend) frontend;
        Assertions.assertTrue(publicIPAddress.id().equalsIgnoreCase(publicFrontend.publicIpAddressId()));
        // Verify backends
        Assertions.assertEquals(1, lb.backends().size());
        // Verify probes
        Assertions.assertEquals(1, lb.tcpProbes().size());
        Assertions.assertTrue(lb.tcpProbes().containsKey("tcpProbe-1"));
        // Verify rules
        Assertions.assertEquals(1, lb.loadBalancingRules().size());
        Assertions.assertTrue(lb.loadBalancingRules().containsKey("rule-1"));
        LoadBalancingRule rule = lb.loadBalancingRules().get("rule-1");
        Assertions.assertNotNull(rule.backend());
        Assertions.assertTrue(rule.probe().name().equalsIgnoreCase("tcpProbe-1"));
        // Note that above configuration is not possible for BASIC LB, BASIC LB has following limitation
        // It supports VMs only in a single availability Set in a backend pool, though multiple backend pool
        // can be associated with VMs in the single availability set, you cannot create a set of VMs in another
        // availability set and put it in a different backend pool.
        // Start the VM [This can be removed once the fix is deployed]
        //        rx.Completable.merge(firstVirtualMachine.startAsync(), secondVirtualMachine.startAsync()).await();
    }

    /**
     * Creates two zone-1 VMs in different subnets and associates each with a separate
     * load-balancing rule of a STANDARD (zone-resilient) load balancer.
     * Disabled pending a network-service fix. NOTE(review): {@code lbPip} is created but
     * never wired into the LB (the frontend uses {@code publicIPAddress}) — looks like
     * leftover setup; verify intent before removing, since creating it has a service-side effect.
     */
    @Test
    @Disabled("Though valid scenario, ignoring it due to network service bug")
    @SuppressWarnings("unchecked")
    public void canCreateZonedVirtualMachinesAndAssociateThemWithSingleBackendPoolOfZoneResilientLoadBalancer()
        throws Exception {
        final String networkName = generateRandomResourceName("net", 10);
        Network network =
            networkManager
                .networks()
                .define(networkName)
                .withRegion(region)
                .withNewResourceGroup(rgName)
                .withAddressSpace("10.0.0.0/28")
                .withSubnet("subnet1", "10.0.0.0/29")
                .withSubnet("subnet2", "10.0.0.8/29")
                .create();
        // create two regional virtual machine, which does not belongs to any availability set
        //
        Iterator<Subnet> subnets = network.subnets().values().iterator();
        // Define first regional virtual machine
        //
        Creatable<VirtualMachine> creatableVM1 =
            computeManager
                .virtualMachines()
                .define(generateRandomResourceName("vm1", 10))
                .withRegion(region)
                .withExistingResourceGroup(rgName)
                .withExistingPrimaryNetwork(network)
                .withSubnet(subnets.next().name()) // Put VM in first subnet
                .withPrimaryPrivateIPAddressDynamic()
                .withoutPrimaryPublicIPAddress()
                .withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS)
                .withRootUsername("Foo12")
                .withSsh(sshPublicKey())
                .withAvailabilityZone(AvailabilityZoneId.ZONE_1)
                // Optionals
                .withSize(VirtualMachineSizeTypes.fromString("Standard_D2a_v4"));
        // Define second regional virtual machine
        //
        Creatable<VirtualMachine> creatableVM2 =
            computeManager
                .virtualMachines()
                .define(generateRandomResourceName("vm2", 10))
                .withRegion(region)
                .withExistingResourceGroup(rgName)
                .withExistingPrimaryNetwork(network)
                .withSubnet(subnets.next().name()) // Put VM in second subnet
                .withPrimaryPrivateIPAddressDynamic()
                .withoutPrimaryPublicIPAddress()
                .withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS)
                .withRootUsername("Foo12")
                .withSsh(sshPublicKey())
                .withAvailabilityZone(AvailabilityZoneId.ZONE_1)
                // Optionals
                .withSize(VirtualMachineSizeTypes.fromString("Standard_D2a_v4"));
        // Create both VMs in a single batch request.
        CreatedResources<VirtualMachine> createdVMs =
            computeManager.virtualMachines().create(creatableVM1, creatableVM2);
        // Creates a public IP address for the internet-facing load-balancer
        //
        final String pipDnsLabel = generateRandomResourceName("pip", 10);
        PublicIpAddress publicIPAddress =
            networkManager
                .publicIpAddresses()
                .define(pipDnsLabel)
                .withRegion(region)
                .withExistingResourceGroup(rgName)
                .withStaticIP()
                // Optionals
                .withSku(PublicIPSkuType.STANDARD) // STANDARD LB requires STANDARD PIP
                // Create PIP
                .create();
        ResourceGroup resourceGroup = this.resourceManager.resourceGroups().getByName(network.resourceGroupName());
        // Creates a Internet-Facing LoadBalancer with one front-end IP configuration and
        // two backend pool associated with this IP Config
        //
        final String loadBalancerName = generateRandomResourceName("extlb" + "1" + "-", 18);
        final String publicIPName = "pip-" + loadBalancerName;
        final String frontendName = loadBalancerName + "-FE1";
        // Sku of PublicIP and LoadBalancer must match
        //
        PublicIpAddress lbPip =
            this
                .networkManager
                .publicIpAddresses()
                .define(publicIPName)
                .withRegion(region)
                .withExistingResourceGroup(resourceGroup)
                .withLeafDomainLabel(publicIPName)
                // Optionals
                .withStaticIP()
                .withSku(PublicIPSkuType.STANDARD)
                // Create
                .create();
        LoadBalancer loadBalancer =
            this
                .networkManager
                .loadBalancers()
                .define(loadBalancerName)
                .withRegion(region)
                .withExistingResourceGroup(resourceGroup)
                // Add two rules that uses above backend and probe
                .defineLoadBalancingRule("httpRule")
                    .withProtocol(TransportProtocol.TCP)
                    .fromFrontend(frontendName)
                    .fromFrontendPort(80)
                    .toExistingVirtualMachines(createdVMs.get(creatableVM1.key()))
                    .withProbe("httpProbe")
                    .attach()
                .defineLoadBalancingRule("httpsRule")
                    .withProtocol(TransportProtocol.TCP)
                    .fromFrontend(frontendName)
                    .fromFrontendPort(443)
                    .toExistingVirtualMachines(createdVMs.get(creatableVM2.key()))
                    .withProbe("httpsProbe")
                    .attach()
                // Explicitly define the frontend
                .definePublicFrontend(frontendName)
                    .withExistingPublicIpAddress(publicIPAddress) // Frontend with PIP means internet-facing load-balancer
                    .attach()
                // Add two probes one per rule
                .defineHttpProbe("httpProbe")
                    .withRequestPath("/")
                    .attach()
                .defineHttpProbe("httpsProbe")
                    .withRequestPath("/")
                    .attach()
                .withSku(LoadBalancerSkuType.STANDARD)
                .create();
        // Zone resilient LB does not care VMs are zoned or regional, in the above cases VMs are zoned.
        //
        // Note that above configuration is not possible for BASIC LB, BASIC LB has following limitation
        // It supports VMs only in a single availability Set in a backend pool, though multiple backend pool
        // can be associated with VMs in the single availability set, you cannot create a set of VMs in another
        // availability set and put it in a different backend pool.
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.operators.sort;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.flink.api.common.functions.FlatJoinFunction;
import org.apache.flink.api.common.typeutils.TypeComparator;
import org.apache.flink.api.common.typeutils.TypePairComparator;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.core.memory.MemorySegment;
import org.apache.flink.runtime.io.disk.iomanager.IOManager;
import org.apache.flink.runtime.jobgraph.tasks.AbstractInvokable;
import org.apache.flink.runtime.memorymanager.MemoryAllocationException;
import org.apache.flink.runtime.memorymanager.MemoryManager;
import org.apache.flink.runtime.operators.resettable.BlockResettableIterator;
import org.apache.flink.runtime.operators.resettable.SpillingResettableIterator;
import org.apache.flink.runtime.operators.util.JoinTaskIterator;
import org.apache.flink.runtime.util.KeyGroupedIterator;
import org.apache.flink.util.Collector;
import org.apache.flink.util.MutableObjectIterator;
/**
 * An implementation of the {@link JoinTaskIterator} that realizes the
 * matching through a sort-merge join strategy: both inputs arrive sorted by key,
 * the iterator zig-zags between them to find common keys and emits the cross
 * product of the values for each common key.
 */
public class MergeMatchIterator<T1, T2, O> implements JoinTaskIterator<T1, T2, O> {

	/**
	 * The log used by this iterator to log messages.
	 */
	private static final Logger LOG = LoggerFactory.getLogger(MergeMatchIterator.class);

	// --------------------------------------------------------------------------------------------

	// Compares records across the two inputs; the current key of input 1 is set as the reference.
	private TypePairComparator<T1, T2> comp;

	// The two inputs, each wrapped so records are grouped by key.
	private KeyGroupedIterator<T1> iterator1;

	private KeyGroupedIterator<T2> iterator2;

	private final TypeSerializer<T1> serializer1;

	private final TypeSerializer<T2> serializer2;

	// Reusable copy target for input-1 records (records are copied before being handed to user code).
	private T1 copy1;

	// Copy of the head record of the spilling (input 1) side, kept across block replays.
	private T1 spillHeadCopy;

	// Reusable copy target for input-2 records.
	private T2 copy2;

	// Copy of the head record of the blocking (input 2) side, kept across spill replays.
	private T2 blockHeadCopy;

	private final BlockResettableIterator<T2> blockIt;	// for N:M cross products with same key

	// Pages reserved for the spilling iterator; returned to the memory manager in close().
	private final List<MemorySegment> memoryForSpillingIterator;

	private final MemoryManager memoryManager;

	private final IOManager ioManager;

	// --------------------------------------------------------------------------------------------

	/**
	 * Creates a sort-merge match iterator over two key-sorted inputs.
	 *
	 * @param numMemoryPages Pages of memory available to this iterator. Must be at least 2:
	 *        one or two pages (two when more than 20 are available) are reserved for the
	 *        spilling iterator, the remainder backs the block-resettable iterator.
	 * @throws MemoryAllocationException Thrown if the memory manager cannot provide the pages.
	 */
	public MergeMatchIterator(MutableObjectIterator<T1> input1, MutableObjectIterator<T2> input2,
			TypeSerializer<T1> serializer1, TypeComparator<T1> comparator1,
			TypeSerializer<T2> serializer2, TypeComparator<T2> comparator2, TypePairComparator<T1, T2> pairComparator,
			MemoryManager memoryManager, IOManager ioManager, int numMemoryPages, AbstractInvokable parentTask)
	throws MemoryAllocationException
	{
		if (numMemoryPages < 2) {
			throw new IllegalArgumentException("Merger needs at least 2 memory pages.");
		}
		this.comp = pairComparator;
		this.serializer1 = serializer1;
		this.serializer2 = serializer2;
		// Pre-create the reusable record instances for the mutable-object code paths.
		this.copy1 = serializer1.createInstance();
		this.spillHeadCopy = serializer1.createInstance();
		this.copy2 = serializer2.createInstance();
		this.blockHeadCopy = serializer2.createInstance();
		this.memoryManager = memoryManager;
		this.ioManager = ioManager;
		this.iterator1 = new KeyGroupedIterator<T1>(input1, this.serializer1, comparator1.duplicate());
		this.iterator2 = new KeyGroupedIterator<T2>(input2, this.serializer2, comparator2.duplicate());
		// Split the memory budget between the block iterator and the (lazily created) spilling iterator.
		final int numPagesForSpiller = numMemoryPages > 20 ? 2 : 1;
		this.blockIt = new BlockResettableIterator<T2>(this.memoryManager, this.serializer2,
			(numMemoryPages - numPagesForSpiller), parentTask);
		this.memoryForSpillingIterator = memoryManager.allocatePages(parentTask, numPagesForSpiller);
	}

	/** No setup work is required; both inputs are ready after construction. */
	@Override
	public void open() throws IOException {}

	/**
	 * Closes the block iterator (logging, but not propagating, any failure) and
	 * returns the spilling iterator's reserved pages to the memory manager.
	 */
	@Override
	public void close() {
		if (this.blockIt != null) {
			try {
				this.blockIt.close();
			}
			catch (Throwable t) {
				LOG.error("Error closing block memory iterator: " + t.getMessage(), t);
			}
		}
		this.memoryManager.release(this.memoryForSpillingIterator);
	}

	/** Aborting simply releases all resources via {@link #close()}. */
	@Override
	public void abort() {
		close();
	}

	/**
	 * Calls the <code>JoinFunction#match()</code> method for all two key-value pairs that share the same key and come
	 * from different inputs. The output of the <code>match()</code> method is forwarded.
	 * <p>
	 * This method first zig-zags between the two sorted inputs in order to find a common
	 * key, and then calls the match stub with the cross product of the values.
	 *
	 * @return True if a common key was found and matched, false if either input is exhausted.
	 *
	 * @throws Exception Forwards all exceptions from the user code and the I/O system.
	 *
	 * @see org.apache.flink.runtime.operators.util.JoinTaskIterator#callWithNextKey(FlatJoinFunction, Collector)
	 */
	@Override
	public boolean callWithNextKey(final FlatJoinFunction<T1, T2, O> matchFunction, final Collector<O> collector)
	throws Exception
	{
		if (!this.iterator1.nextKey() || !this.iterator2.nextKey()) {
			// consume all remaining keys (hack to prevent remaining inputs during iterations, lets get rid of this soon)
			while (this.iterator1.nextKey());
			while (this.iterator2.nextKey());
			return false;
		}
		final TypePairComparator<T1, T2> comparator = this.comp;
		comparator.setReference(this.iterator1.getCurrent());
		T2 current2 = this.iterator2.getCurrent();
		// zig zag: advance whichever side has the smaller key until the keys are equal
		while (true) {
			// determine the relation between the (possibly composite) keys
			final int comp = comparator.compareToReference(current2);
			if (comp == 0) {
				break;
			}
			if (comp < 0) {
				// input 2's key is behind; advance input 2
				if (!this.iterator2.nextKey()) {
					return false;
				}
				current2 = this.iterator2.getCurrent();
			}
			else {
				// input 1's key is behind; advance input 1 and re-install it as the reference
				if (!this.iterator1.nextKey()) {
					return false;
				}
				comparator.setReference(this.iterator1.getCurrent());
			}
		}
		// here, we have a common key! call the match function with the cross product of the
		// values
		final KeyGroupedIterator<T1>.ValuesIterator values1 = this.iterator1.getValues();
		final KeyGroupedIterator<T2>.ValuesIterator values2 = this.iterator2.getValues();
		final T1 firstV1 = values1.next();
		final T2 firstV2 = values2.next();
		final boolean v1HasNext = values1.hasNext();
		final boolean v2HasNext = values2.hasNext();
		// check if one side is already empty
		// this check could be omitted if we put this in MatchTask.
		// then we can derive the local strategy (with build side).
		if (v1HasNext) {
			if (v2HasNext) {
				// both sides contain more than one value
				// TODO: Decide which side to spill and which to block!
				crossMwithNValues(firstV1, values1, firstV2, values2, matchFunction, collector);
			} else {
				// input 2 has a single value: N:1 case
				crossSecond1withNValues(firstV2, firstV1, values1, matchFunction, collector);
			}
		} else {
			if (v2HasNext) {
				// input 1 has a single value: 1:N case
				crossFirst1withNValues(firstV1, firstV2, values2, matchFunction, collector);
			} else {
				// both sides contain only one value
				matchFunction.join(firstV1, firstV2, collector);
			}
		}
		return true;
	}

	/**
	 * Crosses a single value from the first input with N values, all sharing a common key.
	 * Effectively realizes a <i>1:N</i> match (join).
	 *
	 * @param val1 The value from the <i>1</i> side.
	 * @param firstValN The first of the values from the <i>N</i> side.
	 * @param valsN Iterator over remaining <i>N</i> side values.
	 * @param matchFunction The user join function invoked for each pair.
	 * @param collector The collector receiving the join function's output.
	 *
	 * @throws Exception Forwards all exceptions thrown by the stub.
	 */
	private void crossFirst1withNValues(final T1 val1, final T2 firstValN,
			final Iterator<T2> valsN, final FlatJoinFunction<T1, T2, O> matchFunction, final Collector<O> collector)
	throws Exception
	{
		// a fresh copy of val1 is handed to the user function for every pairing,
		// except the last one, which may receive the original instance
		this.copy1 = this.serializer1.copy(val1, this.copy1);
		matchFunction.join(this.copy1, firstValN, collector);
		// set copy and match first element
		boolean more = true;
		do {
			final T2 nRec = valsN.next();
			if (valsN.hasNext()) {
				this.copy1 = this.serializer1.copy(val1, this.copy1);
				matchFunction.join(this.copy1, nRec, collector);
			} else {
				// last pairing: the original val1 may be passed without copying
				matchFunction.join(val1, nRec, collector);
				more = false;
			}
		}
		while (more);
	}

	/**
	 * Crosses a single value from the second side with N values, all sharing a common key.
	 * Effectively realizes a <i>N:1</i> match (join).
	 *
	 * @param val1 The value from the <i>1</i> side (second input).
	 * @param firstValN The first of the values from the <i>N</i> side (first input).
	 * @param valsN Iterator over remaining <i>N</i> side values.
	 * @param matchFunction The user join function invoked for each pair.
	 * @param collector The collector receiving the join function's output.
	 *
	 * @throws Exception Forwards all exceptions thrown by the stub.
	 */
	private void crossSecond1withNValues(T2 val1, T1 firstValN,
			Iterator<T1> valsN, FlatJoinFunction<T1, T2, O> matchFunction, Collector<O> collector)
	throws Exception
	{
		// mirror image of crossFirst1withNValues: copy the single input-2 value per pairing
		this.copy2 = this.serializer2.copy(val1, this.copy2);
		matchFunction.join(firstValN, this.copy2, collector);
		// set copy and match first element
		boolean more = true;
		do {
			final T1 nRec = valsN.next();
			if (valsN.hasNext()) {
				this.copy2 = this.serializer2.copy(val1, this.copy2);
				matchFunction.join(nRec,this.copy2,collector);
			} else {
				// last pairing: the original val1 may be passed without copying
				matchFunction.join(nRec, val1, collector);
				more = false;
			}
		}
		while (more);
	}

	/**
	 * Crosses M values from the first input with N values from the second input, all
	 * sharing a common key (an <i>M:N</i> match). Input 2 is consumed block-wise into
	 * memory (the "blocking" side); input 1 is spilled to disk only if input 2 does not
	 * fit into a single block (the "spilling" side). See the numbered steps in the body.
	 *
	 * @param firstV1 The head value of the spilling (input 1) side.
	 * @param spillVals Iterator over the remaining input-1 values of this key group.
	 * @param firstV2 The head value of the blocking (input 2) side.
	 * @param blockVals Iterator over the remaining input-2 values of this key group.
	 * @param matchFunction The user join function invoked for each pair.
	 * @param collector The collector receiving the join function's output.
	 * @throws Exception Forwards all exceptions from user code and the I/O system.
	 */
	private void crossMwithNValues(final T1 firstV1, Iterator<T1> spillVals,
			final T2 firstV2, final Iterator<T2> blockVals,
			final FlatJoinFunction<T1, T2, O> matchFunction, final Collector<O> collector)
	throws Exception
	{
		// ==================================================
		// We have one first (head) element from both inputs (firstV1 and firstV2)
		// We have an iterator for both inputs.
		// we make the V1 side the spilling side and the V2 side the blocking side.
		// In order to get the full cross product without unnecessary spilling, we do the
		// following:
		// 1) cross the heads
		// 2) cross the head of the spilling side against the first block of the blocking side
		// 3) cross the iterator of the spilling side with the head of the block side
		// 4) cross the iterator of the spilling side with the first block
		// ---------------------------------------------------
		// If the blocking side has more than one block, we really need to make the spilling side fully
		// resettable. For each further block on the block side, we do:
		// 5) cross the head of the spilling side with the next block
		// 6) cross the spilling iterator with the next block.
		// match the first values first
		this.copy1 = this.serializer1.copy(firstV1, this.copy1);
		this.blockHeadCopy = this.serializer2.copy(firstV2, this.blockHeadCopy);
		// --------------- 1) Cross the heads -------------------
		matchFunction.join(this.copy1, firstV2, collector);
		// for the remaining values, we do a block-nested-loops join
		SpillingResettableIterator<T1> spillIt = null;
		try {
			// create block iterator on the second input
			this.blockIt.reopen(blockVals);
			// ------------- 2) cross the head of the spilling side with the first block ------------------
			while (this.blockIt.hasNext()) {
				final T2 nextBlockRec = this.blockIt.next();
				this.copy1 = this.serializer1.copy(firstV1, this.copy1);
				matchFunction.join(this.copy1, nextBlockRec, collector);
			}
			this.blockIt.reset();
			// spilling is required if the blocked input has data beyond the current block.
			// in that case, create the spilling iterator
			final Iterator<T1> leftSideIter;
			final boolean spillingRequired = this.blockIt.hasFurtherInput();
			if (spillingRequired)
			{
				// more data than would fit into one block. we need to wrap the other side in a spilling iterator
				// create spilling iterator on first input
				spillIt = new SpillingResettableIterator<T1>(spillVals, this.serializer1,
						this.memoryManager, this.ioManager, this.memoryForSpillingIterator);
				leftSideIter = spillIt;
				spillIt.open();
				// remember the head of the spilling side; it must be re-crossed with every later block
				this.spillHeadCopy = this.serializer1.copy(firstV1, this.spillHeadCopy);
			}
			else {
				// everything of the blocking side fits into memory: iterate input 1 directly
				leftSideIter = spillVals;
			}
			// cross the values in the v1 iterator against the current block
			while (leftSideIter.hasNext()) {
				final T1 nextSpillVal = leftSideIter.next();
				this.copy1 = this.serializer1.copy(nextSpillVal, this.copy1);
				// -------- 3) cross the iterator of the spilling side with the head of the block side --------
				this.copy2 = this.serializer2.copy(this.blockHeadCopy, this.copy2);
				matchFunction.join(this.copy1, this.copy2, collector);
				// -------- 4) cross the iterator of the spilling side with the first block --------
				while (this.blockIt.hasNext()) {
					T2 nextBlockRec = this.blockIt.next();
					// get instances of key and block value
					this.copy1 = this.serializer1.copy(nextSpillVal, this.copy1);
					matchFunction.join(this.copy1, nextBlockRec, collector);
				}
				// reset block iterator
				this.blockIt.reset();
			}
			// if everything from the block-side fit into a single block, we are done.
			// note that in this special case, we did not create a spilling iterator at all
			if (!spillingRequired) {
				return;
			}
			// here we are, because we have more blocks on the block side
			// loop as long as there are blocks from the blocked input
			while (this.blockIt.nextBlock())
			{
				// rewind the spilling iterator
				spillIt.reset();
				// ------------- 5) cross the head of the spilling side with the next block ------------
				while (this.blockIt.hasNext()) {
					this.copy1 = this.serializer1.copy(this.spillHeadCopy, this.copy1);
					final T2 nextBlockVal = blockIt.next();
					matchFunction.join(this.copy1, nextBlockVal, collector);
				}
				this.blockIt.reset();
				// -------- 6) cross the spilling iterator with the next block. ------------------
				while (spillIt.hasNext())
				{
					// get value from resettable iterator
					final T1 nextSpillVal = spillIt.next();
					// cross value with block values
					while (this.blockIt.hasNext()) {
						// get instances of key and block value
						final T2 nextBlockVal = this.blockIt.next();
						this.copy1 = this.serializer1.copy(nextSpillVal, this.copy1);
						matchFunction.join(this.copy1, nextBlockVal, collector);
					}
					// reset block iterator
					this.blockIt.reset();
				}
				// reset v1 iterator
				spillIt.reset();
			}
		}
		finally {
			// reclaim the spilling iterator's pages so close() can release them to the memory manager
			if (spillIt != null) {
				this.memoryForSpillingIterator.addAll(spillIt.close());
			}
		}
	}
}
| |
/*
* Copyright 2014 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License, version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package io.netty.handler.codec.http2;
import static io.netty.handler.codec.http2.Http2CodecUtil.CONNECTION_STREAM_ID;
import static io.netty.handler.codec.http2.Http2CodecUtil.DEFAULT_PRIORITY_WEIGHT;
import static io.netty.handler.codec.http2.Http2CodecUtil.DEFAULT_WINDOW_SIZE;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelPromise;
import io.netty.handler.codec.http2.Http2FrameWriter.Configuration;
import io.netty.util.collection.IntObjectHashMap;
import io.netty.util.collection.IntObjectMap;
import java.util.Arrays;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;
/**
* Tests for {@link DefaultHttp2RemoteFlowController}.
*/
public class DefaultHttp2RemoteFlowControllerTest {
// Client-initiated stream IDs used throughout the tests (odd-numbered, per HTTP/2 convention).
private static final int STREAM_A = 1;
private static final int STREAM_B = 3;
private static final int STREAM_C = 5;
private static final int STREAM_D = 7;
private static final int STREAM_E = 9;
// Controller under test; recreated for every test in setup().
private DefaultHttp2RemoteFlowController controller;
// Mocks below are instantiated by MockitoAnnotations.initMocks(this) in setup().
@Mock
private ByteBuf buffer;
@Mock
private Http2FrameWriter frameWriter;
@Mock
private Http2FrameSizePolicy frameWriterSizePolicy;
@Mock
private Configuration frameWriterConfiguration;
@Mock
private ChannelHandlerContext ctx;
@Mock
private ChannelPromise promise;
private DefaultHttp2Connection connection;
@Before
public void setup() throws Http2Exception {
// Builds the default priority tree used by most tests:
//        0 (connection)
//       / \
//      A   B
//     / \
//    C   D
// A and B hang off the connection stream; C and D are re-parented under A below.
MockitoAnnotations.initMocks(this);
when(ctx.newPromise()).thenReturn(promise);
// 'false' creates a client-side connection, so locally-created streams get odd IDs.
connection = new DefaultHttp2Connection(false);
controller = new DefaultHttp2RemoteFlowController(connection, frameWriter);
connection.local().createStream(STREAM_A).open(false);
connection.local().createStream(STREAM_B).open(false);
Http2Stream streamC = connection.local().createStream(STREAM_C).open(false);
Http2Stream streamD = connection.local().createStream(STREAM_D).open(false);
streamC.setPriority(STREAM_A, DEFAULT_PRIORITY_WEIGHT, false);
streamD.setPriority(STREAM_A, DEFAULT_PRIORITY_WEIGHT, false);
// Clear interactions recorded during stream creation so tests start from a clean mock.
resetFrameWriter();
}
@Test
public void initialWindowSizeShouldOnlyChangeStreams() throws Http2Exception {
    // Collapsing the initial window size must affect every stream window but
    // leave the connection-level window at its default.
    controller.initialWindowSize(0);
    assertEquals(DEFAULT_WINDOW_SIZE, window(CONNECTION_STREAM_ID));
    for (int streamId : new int[] { STREAM_A, STREAM_B, STREAM_C, STREAM_D }) {
        assertEquals(0, window(streamId));
    }
}
@Test
public void windowUpdateShouldChangeConnectionWindow() throws Http2Exception {
    // A WINDOW_UPDATE on stream 0 grows only the connection window;
    // individual stream windows must be untouched.
    incrementWindowSize(CONNECTION_STREAM_ID, 100);
    assertEquals(DEFAULT_WINDOW_SIZE + 100, window(CONNECTION_STREAM_ID));
    for (int streamId : new int[] { STREAM_A, STREAM_B, STREAM_C, STREAM_D }) {
        assertEquals(DEFAULT_WINDOW_SIZE, window(streamId));
    }
}
@Test
public void windowUpdateShouldChangeStreamWindow() throws Http2Exception {
    // A WINDOW_UPDATE on a single stream grows only that stream's window;
    // the connection window and sibling streams must be untouched.
    incrementWindowSize(STREAM_A, 100);
    assertEquals(DEFAULT_WINDOW_SIZE, window(CONNECTION_STREAM_ID));
    assertEquals(DEFAULT_WINDOW_SIZE + 100, window(STREAM_A));
    for (int streamId : new int[] { STREAM_B, STREAM_C, STREAM_D }) {
        assertEquals(DEFAULT_WINDOW_SIZE, window(streamId));
    }
}
@Test
public void frameShouldBeSentImmediately() throws Http2Exception {
// With the full default window available, data should pass straight through to the frame writer.
final ByteBuf data = dummyData(5, 5);
try {
send(STREAM_A, data.slice(0, 5), 5);
verifyWrite(STREAM_A, data.slice(0, 5), 5);
// refCnt of 1 shows the controller did not retain the buffer after writing it out.
assertEquals(1, data.refCnt());
} finally {
manualSafeRelease(data);
}
}
@Test
public void lastWriteFutureShouldBeSaved() throws Http2Exception {
// The controller must track the future of the most recent flow-controlled write per stream.
ChannelPromise promise2 = Mockito.mock(ChannelPromise.class);
final ByteBuf data = dummyData(5, 5);
try {
// Write one frame.
Http2Stream stream = stream(STREAM_A);
ChannelFuture future1 = controller.sendFlowControlledFrame(ctx, stream, data, 0, false, promise);
assertEquals(future1, controller.lastFlowControlledFrameSent(stream));
// Now write another and verify that the last write is updated.
ChannelFuture future2 = controller.sendFlowControlledFrame(ctx, stream, data, 0, false, promise2);
assertNotSame(future1, future2);
assertEquals(future2, controller.lastFlowControlledFrameSent(stream));
} finally {
manualSafeRelease(data);
}
}
@Test
public void frameLargerThanMaxFrameSizeShouldBeSplit() throws Http2Exception {
// A 5-byte frame with max frame size 3 must be written as a 3-byte chunk plus a 2-byte chunk.
when(frameWriterSizePolicy.maxFrameSize()).thenReturn(3);
final ByteBuf data = dummyData(5, 0);
try {
send(STREAM_A, data.copy(), 5);
verifyWrite(STREAM_A, data.slice(0, 3), 0);
verifyWrite(STREAM_A, data.slice(3, 2), 1);
// NOTE(review): the trailing empty-buffer writes presumably carry the remaining
// padding (3 then 1) — confirm against the verifyWrite helper defined later in this file.
verifyWrite(STREAM_A, Unpooled.EMPTY_BUFFER, 3);
verifyWrite(STREAM_A, Unpooled.EMPTY_BUFFER, 1);
} finally {
manualSafeRelease(data);
}
}
@Test
public void emptyFrameShouldBeSentImmediately() throws Http2Exception {
    // A zero-length frame consumes no flow-control window and is written at once.
    final ByteBuf empty = Unpooled.EMPTY_BUFFER;
    send(STREAM_A, empty, 0);
    verifyWrite(STREAM_A, empty, 0);
}
@Test
public void frameShouldSplitForMaxFrameSize() throws Http2Exception {
// A 10-byte frame with max frame size 5 must be split into two 5-byte writes.
when(frameWriterSizePolicy.maxFrameSize()).thenReturn(5);
final ByteBuf data = dummyData(10, 0);
try {
ByteBuf slice1 = data.slice(0, 5);
ByteBuf slice2 = data.slice(5, 5);
send(STREAM_A, data.slice(), 0);
verifyWrite(STREAM_A, slice1, 0);
verifyWrite(STREAM_A, slice2, 0);
// refCnt of 2: original reference plus one retained by the written slices.
assertEquals(2, data.refCnt());
} finally {
manualSafeRelease(data);
}
}
@Test
public void stalledStreamShouldQueueFrame() throws Http2Exception {
// With a zero window, the frame must be queued rather than written.
controller.initialWindowSize(0);
final ByteBuf data = dummyData(10, 5);
try {
send(STREAM_A, data.slice(0, 10), 5);
verifyNoWrite(STREAM_A);
// The queued frame must not add extra references to the buffer.
assertEquals(1, data.refCnt());
} finally {
manualSafeRelease(data);
}
}
@Test
public void frameShouldSplit() throws Http2Exception {
// With a 5-byte window and 5 data bytes + 5 padding, only the data portion fits;
// the written frame must contain the data and none of the padding.
controller.initialWindowSize(5);
final ByteBuf data = dummyData(5, 5);
try {
send(STREAM_A, data.slice(0, 5), 5);
// Verify that a partial frame of 5 was sent.
ArgumentCaptor<ByteBuf> argument = ArgumentCaptor.forClass(ByteBuf.class);
// None of the padding should be sent in the frame.
captureWrite(STREAM_A, argument, 0, false);
final ByteBuf writtenBuf = argument.getValue();
assertEquals(5, writtenBuf.readableBytes());
assertEquals(data.slice(0, 5), writtenBuf);
// refCnt of 2 on both: the split retained an extra reference.
assertEquals(2, writtenBuf.refCnt());
assertEquals(2, data.refCnt());
} finally {
manualSafeRelease(data);
}
}
@Test
public void frameShouldSplitPadding() throws Http2Exception {
// With a 5-byte window and 3 data bytes + 7 padding, the first write carries
// all 3 data bytes plus 2 bytes of padding (5 total).
controller.initialWindowSize(5);
final ByteBuf data = dummyData(3, 7);
try {
send(STREAM_A, data.slice(0, 3), 7);
// Verify that a partial frame of 5 was sent.
ArgumentCaptor<ByteBuf> argument = ArgumentCaptor.forClass(ByteBuf.class);
captureWrite(STREAM_A, argument, 2, false);
final ByteBuf writtenBuf = argument.getValue();
assertEquals(3, writtenBuf.readableBytes());
assertEquals(data.slice(0, 3), writtenBuf);
assertEquals(2, writtenBuf.refCnt());
assertEquals(2, data.refCnt());
} finally {
manualSafeRelease(data);
}
}
@Test
public void emptyFrameShouldSplitPadding() throws Http2Exception {
// A data-less frame with 10 bytes of padding and a 5-byte window must be split:
// the first write carries 0 data bytes and 5 bytes of padding.
controller.initialWindowSize(5);
final ByteBuf data = dummyData(0, 10);
try {
send(STREAM_A, data.slice(0, 0), 10);
// Verify that a partial frame of 5 was sent.
ArgumentCaptor<ByteBuf> argument = ArgumentCaptor.forClass(ByteBuf.class);
captureWrite(STREAM_A, argument, 5, false);
final ByteBuf writtenBuf = argument.getValue();
assertEquals(0, writtenBuf.readableBytes());
// No retain needed for an empty payload.
assertEquals(1, writtenBuf.refCnt());
assertEquals(1, data.refCnt());
} finally {
manualSafeRelease(data);
}
}
@Test
public void windowUpdateShouldSendFrame() throws Http2Exception {
// First write drains the 10-byte window sending the data; the subsequent
// WINDOW_UPDATE must flush the remaining 10 bytes of padding.
controller.initialWindowSize(10);
final ByteBuf data = dummyData(10, 10);
try {
send(STREAM_A, data.slice(0, 10), 10);
verifyWrite(STREAM_A, data.slice(0, 10), 0);
// Update the window and verify that the rest of the frame is written.
incrementWindowSize(STREAM_A, 10);
verifyWrite(STREAM_A, Unpooled.EMPTY_BUFFER, 10);
assertEquals(DEFAULT_WINDOW_SIZE - data.readableBytes(), window(CONNECTION_STREAM_ID));
assertEquals(0, window(STREAM_A));
// Sibling streams keep the 10-byte initial window unchanged.
assertEquals(10, window(STREAM_B));
assertEquals(10, window(STREAM_C));
assertEquals(10, window(STREAM_D));
} finally {
manualSafeRelease(data);
}
}
@Test
public void initialWindowUpdateShouldSendFrame() throws Http2Exception {
// A frame queued against a zero window must be released in full when the
// initial window grows enough to cover it.
controller.initialWindowSize(0);
final ByteBuf data = dummyData(10, 0);
try {
send(STREAM_A, data.slice(), 0);
verifyNoWrite(STREAM_A);
// Verify that the entire frame was sent.
controller.initialWindowSize(10);
ArgumentCaptor<ByteBuf> argument = ArgumentCaptor.forClass(ByteBuf.class);
captureWrite(STREAM_A, argument, 0, false);
final ByteBuf writtenBuf = argument.getValue();
assertEquals(data, writtenBuf);
assertEquals(1, data.refCnt());
} finally {
manualSafeRelease(data);
}
}
@Test
public void successiveSendsShouldNotInteract() throws Http2Exception {
// Two sends on different streams must account their windows independently:
// releasing connection window for B must not disturb A's bookkeeping.
// Collapse the connection window to force queueing.
incrementWindowSize(CONNECTION_STREAM_ID, -window(CONNECTION_STREAM_ID));
assertEquals(0, window(CONNECTION_STREAM_ID));
ByteBuf data = dummyData(5, 5);
ByteBuf dataOnly = data.slice(0, 5);
try {
// Queue data for stream A and allow most of it to be written.
send(STREAM_A, dataOnly.slice(), 5);
verifyNoWrite(STREAM_A);
verifyNoWrite(STREAM_B);
// 8 bytes of connection window: 5 data + 3 of the 5 padding bytes go out.
incrementWindowSize(CONNECTION_STREAM_ID, 8);
ArgumentCaptor<ByteBuf> argument = ArgumentCaptor.forClass(ByteBuf.class);
captureWrite(STREAM_A, argument, 3, false);
ByteBuf writtenBuf = argument.getValue();
assertEquals(dataOnly, writtenBuf);
// 65535 (default) - 8 written = 65527.
assertEquals(65527, window(STREAM_A));
assertEquals(0, window(CONNECTION_STREAM_ID));
resetFrameWriter();
// Queue data for stream B and allow the rest of A and all of B to be written.
send(STREAM_B, dataOnly.slice(), 5);
verifyNoWrite(STREAM_A);
verifyNoWrite(STREAM_B);
// 12 bytes: 2 remaining padding for A + 10 (5 data + 5 padding) for B.
incrementWindowSize(CONNECTION_STREAM_ID, 12);
assertEquals(0, window(CONNECTION_STREAM_ID));
// Verify the rest of A is written.
captureWrite(STREAM_A, argument, 2, false);
writtenBuf = argument.getValue();
assertEquals(Unpooled.EMPTY_BUFFER, writtenBuf);
assertEquals(65525, window(STREAM_A));
argument = ArgumentCaptor.forClass(ByteBuf.class);
captureWrite(STREAM_B, argument, 5, false);
writtenBuf = argument.getValue();
assertEquals(dataOnly, writtenBuf);
assertEquals(65525, window(STREAM_B));
} finally {
manualSafeRelease(data);
}
}
@Test
public void negativeWindowShouldNotThrowException() throws Http2Exception {
// Shrinking the initial window below a stream's consumed amount drives the
// stream window negative; the controller must tolerate this and only resume
// writing once the window climbs back above zero.
final int initWindow = 20;
final int secondWindowSize = 10;
controller.initialWindowSize(initWindow);
Http2Stream streamA = connection.stream(STREAM_A);
final ByteBuf data = dummyData(initWindow, 0);
final ByteBuf data2 = dummyData(5, 0);
try {
// Deplete the stream A window to 0
send(STREAM_A, data.slice(0, initWindow), 0);
verifyWrite(STREAM_A, data.slice(0, initWindow), 0);
// Make the window size for stream A negative
controller.initialWindowSize(initWindow - secondWindowSize);
assertEquals(-secondWindowSize, controller.windowSize(streamA));
// Queue up a write. It should not be written now because the window is negative
resetFrameWriter();
send(STREAM_A, data2.slice(), 0);
verifyNoWrite(STREAM_A);
// Open the window size back up a bit (no send should happen)
incrementWindowSize(STREAM_A, 5);
assertEquals(-5, controller.windowSize(streamA));
verifyNoWrite(STREAM_A);
// Open the window size back up a bit (no send should happen)
incrementWindowSize(STREAM_A, 5);
assertEquals(0, controller.windowSize(streamA));
verifyNoWrite(STREAM_A);
// Open the window size back up and allow the write to happen
incrementWindowSize(STREAM_A, 5);
// Window is back to 0 because the queued 5-byte frame consumed the increment.
assertEquals(0, controller.windowSize(streamA));
// Verify that the entire frame was sent.
ArgumentCaptor<ByteBuf> argument = ArgumentCaptor.forClass(ByteBuf.class);
captureWrite(STREAM_A, argument, 0, false);
final ByteBuf writtenBuf = argument.getValue();
assertEquals(data2, writtenBuf);
assertEquals(1, data2.refCnt());
} finally {
manualSafeRelease(data);
manualSafeRelease(data2);
}
}
@Test
public void initialWindowUpdateShouldSendEmptyFrame() throws Http2Exception {
// An empty frame queued behind a buffered data frame must be flushed in order
// once the window opens.
controller.initialWindowSize(0);
// First send a frame that will get buffered.
final ByteBuf data = dummyData(10, 0);
try {
send(STREAM_A, data.slice(), 0);
verifyNoWrite(STREAM_A);
// Now send an empty frame on the same stream and verify that it's also buffered.
send(STREAM_A, Unpooled.EMPTY_BUFFER, 0);
verifyNoWrite(STREAM_A);
// Re-expand the window and verify that both frames were sent.
controller.initialWindowSize(10);
verifyWrite(STREAM_A, data.slice(), 0);
verifyWrite(STREAM_A, Unpooled.EMPTY_BUFFER, 0);
} finally {
manualSafeRelease(data);
}
}
@Test
public void initialWindowUpdateShouldSendPartialFrame() throws Http2Exception {
// Growing the initial window to half the queued frame size must release
// exactly that many bytes.
controller.initialWindowSize(0);
final ByteBuf data = dummyData(10, 0);
try {
send(STREAM_A, data, 0);
verifyNoWrite(STREAM_A);
// Verify that a partial frame of 5 was sent.
controller.initialWindowSize(5);
ArgumentCaptor<ByteBuf> argument = ArgumentCaptor.forClass(ByteBuf.class);
captureWrite(STREAM_A, argument, 0, false);
ByteBuf writtenBuf = argument.getValue();
assertEquals(5, writtenBuf.readableBytes());
assertEquals(data.slice(0, 5), writtenBuf);
assertEquals(2, writtenBuf.refCnt());
assertEquals(2, data.refCnt());
} finally {
manualSafeRelease(data);
}
}
@Test
public void connectionWindowUpdateShouldSendFrame() throws Http2Exception {
// A frame stalled on the connection window (stream window untouched) must be
// fully released by a connection-level WINDOW_UPDATE.
// Set the connection window size to zero.
exhaustStreamWindow(CONNECTION_STREAM_ID);
final ByteBuf data = dummyData(10, 0);
try {
send(STREAM_A, data.slice(), 0);
verifyNoWrite(STREAM_A);
// Verify that the entire frame was sent.
incrementWindowSize(CONNECTION_STREAM_ID, 10);
assertEquals(0, window(CONNECTION_STREAM_ID));
assertEquals(DEFAULT_WINDOW_SIZE - data.readableBytes(), window(STREAM_A));
assertEquals(DEFAULT_WINDOW_SIZE, window(STREAM_B));
assertEquals(DEFAULT_WINDOW_SIZE, window(STREAM_C));
assertEquals(DEFAULT_WINDOW_SIZE, window(STREAM_D));
ArgumentCaptor<ByteBuf> argument = ArgumentCaptor.forClass(ByteBuf.class);
captureWrite(STREAM_A, argument, 0, false);
final ByteBuf writtenBuf = argument.getValue();
assertEquals(data, writtenBuf);
assertEquals(1, data.refCnt());
} finally {
manualSafeRelease(data);
}
}
@Test
public void connectionWindowUpdateShouldSendPartialFrame() throws Http2Exception {
// A connection-level WINDOW_UPDATE smaller than the queued frame must release
// only that many bytes.
// Set the connection window size to zero.
exhaustStreamWindow(CONNECTION_STREAM_ID);
final ByteBuf data = dummyData(10, 0);
try {
send(STREAM_A, data, 0);
verifyNoWrite(STREAM_A);
// Verify that a partial frame of 5 was sent.
incrementWindowSize(CONNECTION_STREAM_ID, 5);
assertEquals(0, window(CONNECTION_STREAM_ID));
assertEquals(DEFAULT_WINDOW_SIZE - data.readableBytes(), window(STREAM_A));
assertEquals(DEFAULT_WINDOW_SIZE, window(STREAM_B));
assertEquals(DEFAULT_WINDOW_SIZE, window(STREAM_C));
assertEquals(DEFAULT_WINDOW_SIZE, window(STREAM_D));
ArgumentCaptor<ByteBuf> argument = ArgumentCaptor.forClass(ByteBuf.class);
captureWrite(STREAM_A, argument, 0, false);
final ByteBuf writtenBuf = argument.getValue();
assertEquals(5, writtenBuf.readableBytes());
assertEquals(data.slice(0, 5), writtenBuf);
assertEquals(2, writtenBuf.refCnt());
assertEquals(2, data.refCnt());
} finally {
manualSafeRelease(data);
}
}
@Test
public void streamWindowUpdateShouldSendFrame() throws Http2Exception {
// A frame stalled on the stream window (connection window open) must be fully
// released by a stream-level WINDOW_UPDATE.
// Set the stream window size to zero.
exhaustStreamWindow(STREAM_A);
final ByteBuf data = dummyData(10, 0);
try {
send(STREAM_A, data.slice(), 0);
verifyNoWrite(STREAM_A);
// Verify that the entire frame was sent.
incrementWindowSize(STREAM_A, 10);
assertEquals(DEFAULT_WINDOW_SIZE - data.readableBytes(), window(CONNECTION_STREAM_ID));
assertEquals(0, window(STREAM_A));
assertEquals(DEFAULT_WINDOW_SIZE, window(STREAM_B));
assertEquals(DEFAULT_WINDOW_SIZE, window(STREAM_C));
assertEquals(DEFAULT_WINDOW_SIZE, window(STREAM_D));
ArgumentCaptor<ByteBuf> argument = ArgumentCaptor.forClass(ByteBuf.class);
captureWrite(STREAM_A, argument, 0, false);
final ByteBuf writtenBuf = argument.getValue();
assertEquals(data, writtenBuf);
assertEquals(1, data.refCnt());
} finally {
manualSafeRelease(data);
}
}
@Test
public void streamWindowUpdateShouldSendPartialFrame() throws Http2Exception {
// A stream-level WINDOW_UPDATE smaller than the queued frame must release
// only that many bytes.
// Set the stream window size to zero.
exhaustStreamWindow(STREAM_A);
final ByteBuf data = dummyData(10, 0);
try {
send(STREAM_A, data, 0);
verifyNoWrite(STREAM_A);
// Verify that a partial frame of 5 was sent.
incrementWindowSize(STREAM_A, 5);
assertEquals(DEFAULT_WINDOW_SIZE - data.readableBytes(), window(CONNECTION_STREAM_ID));
assertEquals(0, window(STREAM_A));
assertEquals(DEFAULT_WINDOW_SIZE, window(STREAM_B));
assertEquals(DEFAULT_WINDOW_SIZE, window(STREAM_C));
assertEquals(DEFAULT_WINDOW_SIZE, window(STREAM_D));
ArgumentCaptor<ByteBuf> argument = ArgumentCaptor.forClass(ByteBuf.class);
captureWrite(STREAM_A, argument, 0, false);
ByteBuf writtenBuf = argument.getValue();
assertEquals(5, writtenBuf.readableBytes());
assertEquals(2, writtenBuf.refCnt());
assertEquals(2, data.refCnt());
} finally {
manualSafeRelease(data);
}
}
/**
 * In this test, we give stream A padding and verify that it's padding is properly split.
 *
 * <pre>
 * 0
 * / \
 * A B
 * </pre>
 */
@Test
public void multipleStreamsShouldSplitPadding() throws Http2Exception {
// Block the connection
exhaustStreamWindow(CONNECTION_STREAM_ID);
// Try sending 10 bytes on each stream. They will be pending until we free up the
// connection.
final ByteBuf[] bufs = { dummyData(3, 0), dummyData(10, 0) };
try {
send(STREAM_A, bufs[0], 7);
send(STREAM_B, bufs[1], 0);
verifyNoWrite(STREAM_A);
verifyNoWrite(STREAM_B);
// Open up the connection window.
incrementWindowSize(CONNECTION_STREAM_ID, 10);
assertEquals(0, window(CONNECTION_STREAM_ID));
// Each stream used 5 of the 10 connection-window bytes.
assertEquals(DEFAULT_WINDOW_SIZE - 5, window(STREAM_A));
assertEquals(DEFAULT_WINDOW_SIZE - 5, window(STREAM_B));
assertEquals(DEFAULT_WINDOW_SIZE, window(STREAM_C));
assertEquals(DEFAULT_WINDOW_SIZE, window(STREAM_D));
final ArgumentCaptor<ByteBuf> captor = ArgumentCaptor.forClass(ByteBuf.class);
// Verify that 5 bytes from A were written: 3 from data and 2 from padding.
captureWrite(STREAM_A, captor, 2, false);
assertEquals(3, captor.getValue().readableBytes());
captureWrite(STREAM_B, captor, 0, false);
assertEquals(5, captor.getValue().readableBytes());
} finally {
manualSafeRelease(bufs);
}
}
/**
 * In this test, we block A which allows bytes to be written by C and D. Here's a view of the tree (stream A is
 * blocked).
 *
 * <pre>
 * 0
 * / \
 * [A] B
 * / \
 * C D
 * </pre>
 */
@Test
public void blockedStreamShouldSpreadDataToChildren() throws Http2Exception {
// Block stream A
exhaustStreamWindow(STREAM_A);
// Block the connection
exhaustStreamWindow(CONNECTION_STREAM_ID);
// Try sending 10 bytes on each stream. They will be pending until we free up the
// connection.
final ByteBuf[] bufs = { dummyData(10, 0), dummyData(10, 0), dummyData(10, 0), dummyData(10, 0) };
try {
send(STREAM_A, bufs[0], 0);
send(STREAM_B, bufs[1], 0);
send(STREAM_C, bufs[2], 0);
send(STREAM_D, bufs[3], 0);
verifyNoWrite(STREAM_A);
verifyNoWrite(STREAM_B);
verifyNoWrite(STREAM_C);
verifyNoWrite(STREAM_D);
// Verify that the entire frame was sent.
incrementWindowSize(CONNECTION_STREAM_ID, 10);
assertEquals(0, window(CONNECTION_STREAM_ID));
assertEquals(0, window(STREAM_A));
// NOTE(review): the trailing int arguments below are tolerances — these resolve to
// the assertEquals(double, double, double) overload, allowing slack in the split.
assertEquals(DEFAULT_WINDOW_SIZE - 5, window(STREAM_B), 2);
assertEquals(2 * DEFAULT_WINDOW_SIZE - 5, window(STREAM_C) + window(STREAM_D), 5);
final ArgumentCaptor<ByteBuf> captor = ArgumentCaptor.forClass(ByteBuf.class);
// Verify that no write was done for A, since it's blocked.
verifyNoWrite(STREAM_A);
captureWrite(STREAM_B, captor, 0, false);
assertEquals(5, captor.getValue().readableBytes(), 2);
// Verify that C and D each shared half of A's allowance. Since A's allowance (5) cannot
// be split evenly, one will get 3 and one will get 2.
captureWrite(STREAM_C, captor, 0, false);
int c = captor.getValue().readableBytes();
captureWrite(STREAM_D, captor, 0, false);
int d = captor.getValue().readableBytes();
assertEquals(5, c + d, 4);
assertEquals(1, Math.abs(c - d));
} finally {
manualSafeRelease(bufs);
}
}
/**
 * In this test, we block B which allows all bytes to be written by A. A should not share the data with its children
 * since it's not blocked.
 *
 * <pre>
 * 0
 * / \
 * A [B]
 * / \
 * C D
 * </pre>
 */
@Test
public void childrenShouldNotSendDataUntilParentBlocked() throws Http2Exception {
// Block stream B
exhaustStreamWindow(STREAM_B);
// Block the connection
exhaustStreamWindow(CONNECTION_STREAM_ID);
// Send 10 bytes to each.
final ByteBuf[] bufs = { dummyData(10, 0), dummyData(10, 0), dummyData(10, 0), dummyData(10, 0) };
try {
send(STREAM_A, bufs[0], 0);
send(STREAM_B, bufs[1], 0);
send(STREAM_C, bufs[2], 0);
send(STREAM_D, bufs[3], 0);
verifyNoWrite(STREAM_A);
verifyNoWrite(STREAM_B);
verifyNoWrite(STREAM_C);
verifyNoWrite(STREAM_D);
// Verify that the entire frame was sent.
incrementWindowSize(CONNECTION_STREAM_ID, 10);
assertEquals(0, window(CONNECTION_STREAM_ID));
// A absorbed the full 10 bytes; its children C and D got nothing.
assertEquals(DEFAULT_WINDOW_SIZE - 10, window(STREAM_A));
assertEquals(0, window(STREAM_B));
assertEquals(DEFAULT_WINDOW_SIZE, window(STREAM_C));
assertEquals(DEFAULT_WINDOW_SIZE, window(STREAM_D));
final ArgumentCaptor<ByteBuf> captor = ArgumentCaptor.forClass(ByteBuf.class);
// Verify that A received all the bytes.
captureWrite(STREAM_A, captor, 0, false);
assertEquals(10, captor.getValue().readableBytes());
verifyNoWrite(STREAM_B);
verifyNoWrite(STREAM_C);
verifyNoWrite(STREAM_D);
} finally {
manualSafeRelease(bufs);
}
}
/**
 * In this test, we block B which allows all bytes to be written by A. Once A is blocked, it will spill over the
 * remaining of its portion to its children.
 *
 * <pre>
 * 0
 * / \
 * A [B]
 * / \
 * C D
 * </pre>
 */
@Test
public void parentShouldWaterFallDataToChildren() throws Http2Exception {
// Block stream B
exhaustStreamWindow(STREAM_B);
// Block the connection
exhaustStreamWindow(CONNECTION_STREAM_ID);
// Only send 5 to A so that it will allow data from its children.
final ByteBuf[] bufs = { dummyData(5, 0), dummyData(10, 0), dummyData(10, 0), dummyData(10, 0) };
try {
send(STREAM_A, bufs[0], 0);
send(STREAM_B, bufs[1], 0);
send(STREAM_C, bufs[2], 0);
send(STREAM_D, bufs[3], 0);
verifyNoWrite(STREAM_A);
verifyNoWrite(STREAM_B);
verifyNoWrite(STREAM_C);
verifyNoWrite(STREAM_D);
// Verify that the entire frame was sent.
incrementWindowSize(CONNECTION_STREAM_ID, 10);
assertEquals(0, window(CONNECTION_STREAM_ID));
// A used 5 bytes for itself; the other 5 spilled down to C and D combined.
assertEquals(DEFAULT_WINDOW_SIZE - 5, window(STREAM_A));
assertEquals(0, window(STREAM_B));
assertEquals(2 * DEFAULT_WINDOW_SIZE - 5, window(STREAM_C) + window(STREAM_D));
final ArgumentCaptor<ByteBuf> captor = ArgumentCaptor.forClass(ByteBuf.class);
// Verify that no write was done for B, since it's blocked.
verifyNoWrite(STREAM_B);
captureWrite(STREAM_A, captor, 0, false);
assertEquals(5, captor.getValue().readableBytes());
// Verify that C and D each shared half of A's allowance. Since A's allowance (5) cannot
// be split evenly, one will get 3 and one will get 2.
captureWrite(STREAM_C, captor, 0, false);
int c = captor.getValue().readableBytes();
captureWrite(STREAM_D, captor, 0, false);
int d = captor.getValue().readableBytes();
assertEquals(5, c + d);
assertEquals(1, Math.abs(c - d));
} finally {
manualSafeRelease(bufs);
}
}
/**
 * In this test, we verify re-prioritizing a stream. We start out with B blocked:
 *
 * <pre>
 * 0
 * / \
 * A [B]
 * / \
 * C D
 * </pre>
 *
 * We then re-prioritize D so that it's directly off of the connection and verify that A and D split the written
 * bytes between them.
 *
 * <pre>
 * 0
 * /|\
 * / | \
 * A [B] D
 * /
 * C
 * </pre>
 */
@Test
public void reprioritizeShouldAdjustOutboundFlow() throws Http2Exception {
// Block stream B
exhaustStreamWindow(STREAM_B);
// Block the connection
exhaustStreamWindow(CONNECTION_STREAM_ID);
// Send 10 bytes to each.
final ByteBuf[] bufs = { dummyData(10, 0), dummyData(10, 0), dummyData(10, 0), dummyData(10, 0) };
try {
send(STREAM_A, bufs[0], 0);
send(STREAM_B, bufs[1], 0);
send(STREAM_C, bufs[2], 0);
send(STREAM_D, bufs[3], 0);
verifyNoWrite(STREAM_A);
verifyNoWrite(STREAM_B);
verifyNoWrite(STREAM_C);
verifyNoWrite(STREAM_D);
// Re-prioritize D as a direct child of the connection.
setPriority(STREAM_D, 0, DEFAULT_PRIORITY_WEIGHT, false);
// Verify that the entire frame was sent.
incrementWindowSize(CONNECTION_STREAM_ID, 10);
assertEquals(0, window(CONNECTION_STREAM_ID));
// A and D each take roughly half (tolerance 2) of the 10 released bytes.
assertEquals(DEFAULT_WINDOW_SIZE - 5, window(STREAM_A), 2);
assertEquals(0, window(STREAM_B));
assertEquals(DEFAULT_WINDOW_SIZE, window(STREAM_C));
assertEquals(DEFAULT_WINDOW_SIZE - 5, window(STREAM_D), 2);
final ArgumentCaptor<ByteBuf> captor = ArgumentCaptor.forClass(ByteBuf.class);
// Verify that A received all the bytes.
captureWrite(STREAM_A, captor, 0, false);
assertEquals(5, captor.getValue().readableBytes(), 2);
captureWrite(STREAM_D, captor, 0, false);
assertEquals(5, captor.getValue().readableBytes(), 2);
verifyNoWrite(STREAM_B);
verifyNoWrite(STREAM_C);
} finally {
manualSafeRelease(bufs);
}
}
/**
 * In this test, we root all streams at the connection, and then verify that data is split appropriately based on
 * weight (all available data is the same).
 *
 * <pre>
 * 0
 * / / \ \
 * A B C D
 * </pre>
 */
@Test
public void writeShouldPreferHighestWeight() throws Http2Exception {
// Block the connection
exhaustStreamWindow(CONNECTION_STREAM_ID);
// Root the streams at the connection and assign weights.
setPriority(STREAM_A, 0, (short) 50, false);
setPriority(STREAM_B, 0, (short) 200, false);
setPriority(STREAM_C, 0, (short) 100, false);
setPriority(STREAM_D, 0, (short) 100, false);
// Send a bunch of data on each stream.
final ByteBuf[] bufs = { dummyData(1000, 0), dummyData(1000, 0), dummyData(1000, 0), dummyData(1000, 0) };
try {
send(STREAM_A, bufs[0], 0);
send(STREAM_B, bufs[1], 0);
send(STREAM_C, bufs[2], 0);
send(STREAM_D, bufs[3], 0);
verifyNoWrite(STREAM_A);
verifyNoWrite(STREAM_B);
verifyNoWrite(STREAM_C);
verifyNoWrite(STREAM_D);
// Allow 1000 bytes to be sent.
incrementWindowSize(CONNECTION_STREAM_ID, 1000);
final ArgumentCaptor<ByteBuf> captor = ArgumentCaptor.forClass(ByteBuf.class);
captureWrite(STREAM_A, captor, 0, false);
int aWritten = captor.getValue().readableBytes();
int min = aWritten;
int max = aWritten;
captureWrite(STREAM_B, captor, 0, false);
int bWritten = captor.getValue().readableBytes();
min = Math.min(min, bWritten);
max = Math.max(max, bWritten);
captureWrite(STREAM_C, captor, 0, false);
int cWritten = captor.getValue().readableBytes();
min = Math.min(min, cWritten);
max = Math.max(max, cWritten);
captureWrite(STREAM_D, captor, 0, false);
int dWritten = captor.getValue().readableBytes();
min = Math.min(min, dWritten);
max = Math.max(max, dWritten);
// All 1000 bytes are distributed; ordering must follow the weights:
// A (50) < C = D (100) < B (200).
assertEquals(1000, aWritten + bWritten + cWritten + dWritten);
assertEquals(aWritten, min);
assertEquals(bWritten, max);
assertTrue(aWritten < cWritten);
assertEquals(cWritten, dWritten, 1);
assertTrue(cWritten < bWritten);
assertEquals(0, window(CONNECTION_STREAM_ID));
assertEquals(DEFAULT_WINDOW_SIZE - aWritten, window(STREAM_A));
assertEquals(DEFAULT_WINDOW_SIZE - bWritten, window(STREAM_B));
assertEquals(DEFAULT_WINDOW_SIZE - cWritten, window(STREAM_C));
assertEquals(DEFAULT_WINDOW_SIZE - dWritten, window(STREAM_D));
} finally {
manualSafeRelease(bufs);
}
}
/**
 * In this test, we root all streams at the connection, and then verify that data is split equally among the stream,
 * since they all have the same weight.
 *
 * <pre>
 * 0
 * / / \ \
 * A B C D
 * </pre>
 */
@Test
public void samePriorityShouldDistributeBasedOnData() throws Http2Exception {
// Block the connection
exhaustStreamWindow(CONNECTION_STREAM_ID);
// Root the streams at the connection with the same weights.
setPriority(STREAM_A, 0, DEFAULT_PRIORITY_WEIGHT, false);
setPriority(STREAM_B, 0, DEFAULT_PRIORITY_WEIGHT, false);
setPriority(STREAM_C, 0, DEFAULT_PRIORITY_WEIGHT, false);
setPriority(STREAM_D, 0, DEFAULT_PRIORITY_WEIGHT, false);
// Send a bunch of data on each stream. C sends an empty frame only.
final ByteBuf[] bufs = { dummyData(400, 0), dummyData(500, 0), dummyData(0, 0), dummyData(700, 0) };
try {
send(STREAM_A, bufs[0], 0);
send(STREAM_B, bufs[1], 0);
send(STREAM_C, bufs[2], 0);
send(STREAM_D, bufs[3], 0);
verifyNoWrite(STREAM_A);
verifyNoWrite(STREAM_B);
verifyNoWrite(STREAM_D);
// The write will occur on C, because it's an empty frame.
final ArgumentCaptor<ByteBuf> captor = ArgumentCaptor.forClass(ByteBuf.class);
captureWrite(STREAM_C, captor, 0, false);
assertEquals(0, captor.getValue().readableBytes());
// Allow 1000 bytes to be sent.
incrementWindowSize(CONNECTION_STREAM_ID, 999);
assertEquals(0, window(CONNECTION_STREAM_ID));
// With equal weights, A, B and D each get ~333 bytes (tolerance 50).
assertEquals(DEFAULT_WINDOW_SIZE - 333, window(STREAM_A), 50);
assertEquals(DEFAULT_WINDOW_SIZE - 333, window(STREAM_B), 50);
assertEquals(DEFAULT_WINDOW_SIZE, window(STREAM_C));
assertEquals(DEFAULT_WINDOW_SIZE - 333, window(STREAM_D), 50);
captureWrite(STREAM_A, captor, 0, false);
int aWritten = captor.getValue().readableBytes();
captureWrite(STREAM_B, captor, 0, false);
int bWritten = captor.getValue().readableBytes();
captureWrite(STREAM_D, captor, 0, false);
int dWritten = captor.getValue().readableBytes();
assertEquals(999, aWritten + bWritten + dWritten);
assertEquals(333, aWritten, 50);
assertEquals(333, bWritten, 50);
assertEquals(333, dWritten, 50);
} finally {
manualSafeRelease(bufs);
}
}
/**
 * In this test, we block all streams and verify the priority bytes for each sub tree at each node are correct
 *
 * <pre>
 * [0]
 * / \
 * A B
 * / \
 * C D
 * </pre>
 */
@Test
public void subTreeBytesShouldBeCorrect() throws Http2Exception {
// Block the connection
exhaustStreamWindow(CONNECTION_STREAM_ID);
Http2Stream stream0 = connection.connectionStream();
Http2Stream streamA = connection.stream(STREAM_A);
Http2Stream streamB = connection.stream(STREAM_B);
Http2Stream streamC = connection.stream(STREAM_C);
Http2Stream streamD = connection.stream(STREAM_D);
// Send a bunch of data on each stream.
final IntObjectMap<Integer> streamSizes = new IntObjectHashMap<Integer>(4);
streamSizes.put(STREAM_A, 400);
streamSizes.put(STREAM_B, 500);
streamSizes.put(STREAM_C, 600);
streamSizes.put(STREAM_D, 700);
final ByteBuf[] bufs = { dummyData(streamSizes.get(STREAM_A), 0), dummyData(streamSizes.get(STREAM_B), 0),
dummyData(streamSizes.get(STREAM_C), 0), dummyData(streamSizes.get(STREAM_D), 0) };
try {
send(STREAM_A, bufs[0], 0);
send(STREAM_B, bufs[1], 0);
send(STREAM_C, bufs[2], 0);
send(STREAM_D, bufs[3], 0);
verifyNoWrite(STREAM_A);
verifyNoWrite(STREAM_B);
verifyNoWrite(STREAM_C);
verifyNoWrite(STREAM_D);
// Each node's streamable-bytes count must equal the sum of the pending bytes
// of every stream in its subtree (A's subtree includes its children C and D).
assertEquals(calculateStreamSizeSum(streamSizes,
Arrays.asList(STREAM_A, STREAM_B, STREAM_C, STREAM_D)),
streamableBytesForTree(stream0));
assertEquals(calculateStreamSizeSum(streamSizes, Arrays.asList(STREAM_A, STREAM_C, STREAM_D)),
streamableBytesForTree(streamA));
assertEquals(calculateStreamSizeSum(streamSizes, Arrays.asList(STREAM_B)),
streamableBytesForTree(streamB));
assertEquals(calculateStreamSizeSum(streamSizes, Arrays.asList(STREAM_C)),
streamableBytesForTree(streamC));
assertEquals(calculateStreamSizeSum(streamSizes, Arrays.asList(STREAM_D)),
streamableBytesForTree(streamD));
} finally {
manualSafeRelease(bufs);
}
}
/**
 * In this test, we block all streams, shift the priority tree and verify the priority bytes for
 * each subtree are correct
 *
 * <pre>
 *         [0]
 *        /   \
 *       A     B
 *      / \
 *     C   D
 * </pre>
 *
 * After the tree shift:
 *
 * <pre>
 *         [0]
 *          |
 *          A
 *          |
 *          B
 *         / \
 *        C   D
 * </pre>
 */
@Test
public void subTreeBytesShouldBeCorrectWithRestructure() throws Http2Exception {
    // Block the connection so all queued bytes stay pending.
    exhaustStreamWindow(CONNECTION_STREAM_ID);
    Http2Stream stream0 = connection.connectionStream();
    Http2Stream streamA = connection.stream(STREAM_A);
    Http2Stream streamB = connection.stream(STREAM_B);
    Http2Stream streamC = connection.stream(STREAM_C);
    Http2Stream streamD = connection.stream(STREAM_D);
    // Send a bunch of data on each stream.
    final IntObjectMap<Integer> streamSizes = new IntObjectHashMap<Integer>(4);
    streamSizes.put(STREAM_A, 400);
    streamSizes.put(STREAM_B, 500);
    streamSizes.put(STREAM_C, 600);
    streamSizes.put(STREAM_D, 700);
    final ByteBuf[] bufs = { dummyData(streamSizes.get(STREAM_A), 0), dummyData(streamSizes.get(STREAM_B), 0),
            dummyData(streamSizes.get(STREAM_C), 0), dummyData(streamSizes.get(STREAM_D), 0) };
    try {
        send(STREAM_A, bufs[0], 0);
        send(STREAM_B, bufs[1], 0);
        send(STREAM_C, bufs[2], 0);
        send(STREAM_D, bufs[3], 0);
        verifyNoWrite(STREAM_A);
        verifyNoWrite(STREAM_B);
        verifyNoWrite(STREAM_C);
        verifyNoWrite(STREAM_D);
        // Re-parent B exclusively under A: B adopts A's previous children C and D.
        streamB.setPriority(STREAM_A, DEFAULT_PRIORITY_WEIGHT, true);
        // Subtree sums must reflect the new, linear A -> B -> {C, D} shape.
        assertEquals(calculateStreamSizeSum(streamSizes,
                Arrays.asList(STREAM_A, STREAM_B, STREAM_C, STREAM_D)),
                streamableBytesForTree(stream0));
        assertEquals(calculateStreamSizeSum(streamSizes,
                Arrays.asList(STREAM_A, STREAM_B, STREAM_C, STREAM_D)),
                streamableBytesForTree(streamA));
        assertEquals(calculateStreamSizeSum(streamSizes, Arrays.asList(STREAM_B, STREAM_C, STREAM_D)),
                streamableBytesForTree(streamB));
        assertEquals(calculateStreamSizeSum(streamSizes, Arrays.asList(STREAM_C)),
                streamableBytesForTree(streamC));
        assertEquals(calculateStreamSizeSum(streamSizes, Arrays.asList(STREAM_D)),
                streamableBytesForTree(streamD));
    } finally {
        // Release buffers even if an assertion above fails.
        manualSafeRelease(bufs);
    }
}
/**
 * In this test, we block all streams and add a node to the priority tree and verify
 *
 * <pre>
 *         [0]
 *        /   \
 *       A     B
 *      / \
 *     C   D
 * </pre>
 *
 * After the tree shift:
 *
 * <pre>
 *         [0]
 *        /   \
 *       A     B
 *       |
 *       E
 *      / \
 *     C   D
 * </pre>
 */
@Test
public void subTreeBytesShouldBeCorrectWithAddition() throws Http2Exception {
    // Block the connection so all queued bytes stay pending.
    exhaustStreamWindow(CONNECTION_STREAM_ID);
    Http2Stream stream0 = connection.connectionStream();
    Http2Stream streamA = connection.stream(STREAM_A);
    Http2Stream streamB = connection.stream(STREAM_B);
    Http2Stream streamC = connection.stream(STREAM_C);
    Http2Stream streamD = connection.stream(STREAM_D);
    // Create E and insert it exclusively under A: E adopts A's children C and D.
    Http2Stream streamE = connection.local().createStream(STREAM_E).open(false);
    streamE.setPriority(STREAM_A, DEFAULT_PRIORITY_WEIGHT, true);
    // Send a bunch of data on each stream.
    // (The initial capacity of 4 is only a sizing hint; the map holds 5 entries.)
    final IntObjectMap<Integer> streamSizes = new IntObjectHashMap<Integer>(4);
    streamSizes.put(STREAM_A, 400);
    streamSizes.put(STREAM_B, 500);
    streamSizes.put(STREAM_C, 600);
    streamSizes.put(STREAM_D, 700);
    streamSizes.put(STREAM_E, 900);
    final ByteBuf[] bufs = { dummyData(streamSizes.get(STREAM_A), 0), dummyData(streamSizes.get(STREAM_B), 0),
            dummyData(streamSizes.get(STREAM_C), 0), dummyData(streamSizes.get(STREAM_D), 0),
            dummyData(streamSizes.get(STREAM_E), 0) };
    try {
        send(STREAM_A, bufs[0], 0);
        send(STREAM_B, bufs[1], 0);
        send(STREAM_C, bufs[2], 0);
        send(STREAM_D, bufs[3], 0);
        send(STREAM_E, bufs[4], 0);
        verifyNoWrite(STREAM_A);
        verifyNoWrite(STREAM_B);
        verifyNoWrite(STREAM_C);
        verifyNoWrite(STREAM_D);
        verifyNoWrite(STREAM_E);
        // Each node's streamable bytes must equal the sum over its own subtree.
        assertEquals(calculateStreamSizeSum(streamSizes,
                Arrays.asList(STREAM_A, STREAM_B, STREAM_C, STREAM_D, STREAM_E)),
                streamableBytesForTree(stream0));
        assertEquals(calculateStreamSizeSum(streamSizes,
                Arrays.asList(STREAM_A, STREAM_E, STREAM_C, STREAM_D)),
                streamableBytesForTree(streamA));
        assertEquals(calculateStreamSizeSum(streamSizes, Arrays.asList(STREAM_B)),
                streamableBytesForTree(streamB));
        assertEquals(calculateStreamSizeSum(streamSizes, Arrays.asList(STREAM_C)),
                streamableBytesForTree(streamC));
        assertEquals(calculateStreamSizeSum(streamSizes, Arrays.asList(STREAM_D)),
                streamableBytesForTree(streamD));
        assertEquals(calculateStreamSizeSum(streamSizes, Arrays.asList(STREAM_E, STREAM_C, STREAM_D)),
                streamableBytesForTree(streamE));
    } finally {
        // Release buffers even if an assertion above fails.
        manualSafeRelease(bufs);
    }
}
/**
 * In this test, we block all streams and remove a node from the priority tree and verify
 *
 * <pre>
 *         [0]
 *        /   \
 *       A     B
 *      / \
 *     C   D
 * </pre>
 *
 * After the tree shift:
 *
 * <pre>
 *         [0]
 *        / | \
 *       C  D  B
 * </pre>
 */
@Test
public void subTreeBytesShouldBeCorrectWithRemoval() throws Http2Exception {
    // Block the connection so all queued bytes stay pending.
    exhaustStreamWindow(CONNECTION_STREAM_ID);
    Http2Stream stream0 = connection.connectionStream();
    Http2Stream streamA = connection.stream(STREAM_A);
    Http2Stream streamB = connection.stream(STREAM_B);
    Http2Stream streamC = connection.stream(STREAM_C);
    Http2Stream streamD = connection.stream(STREAM_D);
    // Send a bunch of data on each stream.
    final IntObjectMap<Integer> streamSizes = new IntObjectHashMap<Integer>(4);
    streamSizes.put(STREAM_A, 400);
    streamSizes.put(STREAM_B, 500);
    streamSizes.put(STREAM_C, 600);
    streamSizes.put(STREAM_D, 700);
    final ByteBuf[] bufs = { dummyData(streamSizes.get(STREAM_A), 0), dummyData(streamSizes.get(STREAM_B), 0),
            dummyData(streamSizes.get(STREAM_C), 0), dummyData(streamSizes.get(STREAM_D), 0) };
    try {
        send(STREAM_A, bufs[0], 0);
        send(STREAM_B, bufs[1], 0);
        send(STREAM_C, bufs[2], 0);
        send(STREAM_D, bufs[3], 0);
        verifyNoWrite(STREAM_A);
        verifyNoWrite(STREAM_B);
        verifyNoWrite(STREAM_C);
        verifyNoWrite(STREAM_D);
        // Close A: it drops out of the tree and its bytes no longer count anywhere.
        streamA.close();
        assertEquals(calculateStreamSizeSum(streamSizes, Arrays.asList(STREAM_B, STREAM_C, STREAM_D)),
                streamableBytesForTree(stream0));
        // A closed stream contributes nothing.
        assertEquals(0, streamableBytesForTree(streamA));
        assertEquals(calculateStreamSizeSum(streamSizes, Arrays.asList(STREAM_B)),
                streamableBytesForTree(streamB));
        assertEquals(calculateStreamSizeSum(streamSizes, Arrays.asList(STREAM_C)),
                streamableBytesForTree(streamC));
        assertEquals(calculateStreamSizeSum(streamSizes, Arrays.asList(STREAM_D)),
                streamableBytesForTree(streamD));
    } finally {
        // Release buffers even if an assertion above fails.
        manualSafeRelease(bufs);
    }
}
/**
 * Sums the recorded sizes for the given stream ids. Ids with no recorded size
 * (e.g. the connection stream) contribute zero.
 *
 * @param streamSizes map from stream id to the number of bytes queued on it
 * @param streamIds ids whose sizes should be accumulated
 * @return the total queued bytes across {@code streamIds}
 */
private static int calculateStreamSizeSum(IntObjectMap<Integer> streamSizes, List<Integer> streamIds) {
    int sum = 0;
    // Enhanced-for is idiomatic here and avoids List.get(i), which may be O(n)
    // per call for non-RandomAccess lists.
    for (Integer streamId : streamIds) {
        Integer streamSize = streamSizes.get(streamId);
        if (streamSize != null) {
            sum += streamSize;
        }
    }
    return sum;
}
/**
 * Queues a flow-controlled DATA frame on {@code streamId} and asserts that the
 * controller tracks it as the stream's most recently sent pending frame.
 */
private void send(int streamId, ByteBuf data, int padding) throws Http2Exception {
    Http2Stream stream = stream(streamId);
    ChannelFuture future = controller.sendFlowControlledFrame(ctx, stream, data, padding, false, promise);
    assertEquals(future, controller.lastFlowControlledFrameSent(stream));
}
/** Verifies that exactly this DATA frame (same payload and padding) was written. */
private void verifyWrite(int streamId, ByteBuf data, int padding) {
    verify(frameWriter).writeData(eq(ctx), eq(streamId), eq(data), eq(padding), eq(false), eq(promise));
}
/** Verifies that no DATA frame at all was written for {@code streamId}. */
private void verifyNoWrite(int streamId) {
    verify(frameWriter, never()).writeData(eq(ctx), eq(streamId), any(ByteBuf.class), anyInt(), anyBoolean(),
            eq(promise));
}
/** Captures the payload written for {@code streamId} into {@code captor} for inspection. */
private void captureWrite(int streamId, ArgumentCaptor<ByteBuf> captor, int padding,
        boolean endStream) {
    verify(frameWriter).writeData(eq(ctx), eq(streamId), captor.capture(), eq(padding),
            eq(endStream), eq(promise));
}
/** Re-prioritizes {@code stream} under {@code parent} with the given weight. */
private void setPriority(int stream, int parent, int weight, boolean exclusive) throws Http2Exception {
    connection.stream(stream).setPriority(parent, (short) weight, exclusive);
}
/** Shrinks the stream's flow-control window to exactly zero so writes must queue. */
private void exhaustStreamWindow(int streamId) throws Http2Exception {
    incrementWindowSize(streamId, -window(streamId));
}
/** Resets the frame-writer mock and re-stubs its configuration chain (unlimited frame size). */
private void resetFrameWriter() {
    Mockito.reset(frameWriter);
    when(frameWriter.configuration()).thenReturn(frameWriterConfiguration);
    when(frameWriterConfiguration.frameSizePolicy()).thenReturn(frameWriterSizePolicy);
    when(frameWriterSizePolicy.maxFrameSize()).thenReturn(Integer.MAX_VALUE);
}
/** Current flow-control window size for the given stream. */
private int window(int streamId) throws Http2Exception {
    return controller.windowSize(stream(streamId));
}
/** Applies a window update of {@code delta} (may be negative) to the given stream. */
private void incrementWindowSize(int streamId, int delta) throws Http2Exception {
    controller.incrementWindowSize(ctx, stream(streamId), delta);
}
/** Total streamable bytes queued in the priority subtree rooted at {@code stream}. */
private int streamableBytesForTree(Http2Stream stream) throws Http2Exception {
    return controller.streamableBytesForTree(stream);
}
/** Looks up a stream by id, failing the test if it does not exist. */
private Http2Stream stream(int streamId) throws Http2Exception {
    return connection.requireStream(streamId);
}
/**
 * Builds a buffer containing {@code size} payload bytes that cycle through the
 * characters '0'..'9', followed by {@code padding} zero bytes.
 */
private static ByteBuf dummyData(int size, int padding) {
    final ByteBuf buffer = Unpooled.buffer(size + padding);
    for (int i = 0; i < size; i++) {
        // '0' + (i % 10) is exactly the byte "0123456789".charAt(i % 10) would yield.
        buffer.writeByte('0' + i % 10);
    }
    buffer.writeZero(padding);
    return buffer;
}
/**
 * Releases {@code data} until its reference count reaches zero. Called from
 * {@code finally} blocks so a failing test cannot leak a pooled buffer.
 */
private static void manualSafeRelease(ByteBuf data) {
    while (data.refCnt() > 0) { // Manually release just to be safe if the test fails
        data.release();
    }
}
/** Fully releases every buffer in {@code bufs}. */
private static void manualSafeRelease(ByteBuf[] bufs) {
    // Enhanced-for: the index served no purpose beyond element access.
    for (ByteBuf buf : bufs) {
        manualSafeRelease(buf);
    }
}
}
| |
/**
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.service.dreams;
import java.io.FileDescriptor;
import java.io.PrintWriter;
import android.annotation.SdkConstant;
import android.annotation.SdkConstant.SdkConstantType;
import android.app.AlarmManager;
import android.app.Service;
import android.content.Intent;
import android.graphics.PixelFormat;
import android.graphics.drawable.ColorDrawable;
import android.os.Handler;
import android.os.IBinder;
import android.os.RemoteException;
import android.os.ServiceManager;
import android.util.Slog;
import android.view.ActionMode;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.view.Window;
import android.view.WindowManager;
import android.view.WindowManagerGlobal;
import android.view.WindowManager.LayoutParams;
import android.view.accessibility.AccessibilityEvent;
import com.android.internal.policy.PolicyManager;
import com.android.internal.util.DumpUtils;
import com.android.internal.util.DumpUtils.Dump;
/**
* Extend this class to implement a custom dream (available to the user as a "Daydream").
*
* <p>Dreams are interactive screensavers launched when a charging device is idle, or docked in a
* desk dock. Dreams provide another modality for apps to express themselves, tailored for
* an exhibition/lean-back experience.</p>
*
* <p>The {@code DreamService} lifecycle is as follows:</p>
* <ol>
* <li>{@link #onAttachedToWindow}
* <p>Use this for initial setup, such as calling {@link #setContentView setContentView()}.</li>
* <li>{@link #onDreamingStarted}
* <p>Your dream has started, so you should begin animations or other behaviors here.</li>
* <li>{@link #onDreamingStopped}
* <p>Use this to stop the things you started in {@link #onDreamingStarted}.</li>
* <li>{@link #onDetachedFromWindow}
* <p>Use this to dismantle resources (for example, detach from handlers
* and listeners).</li>
* </ol>
*
* <p>In addition, onCreate and onDestroy (from the Service interface) will also be called, but
* initialization and teardown should be done by overriding the hooks above.</p>
*
* <p>To be available to the system, your {@code DreamService} should be declared in the
* manifest as follows:</p>
* <pre>
* <service
* android:name=".MyDream"
* android:exported="true"
* android:icon="@drawable/my_icon"
* android:label="@string/my_dream_label" >
*
* <intent-filter>
* <action android:name="android.service.dreams.DreamService" />
* <category android:name="android.intent.category.DEFAULT" />
* </intent-filter>
*
* <!-- Point to additional information for this dream (optional) -->
* <meta-data
* android:name="android.service.dream"
* android:resource="@xml/my_dream" />
* </service>
* </pre>
*
* <p>If specified with the {@code <meta-data>} element,
* additional information for the dream is defined using the
* {@link android.R.styleable#Dream <dream>} element in a separate XML file.
 * Currently, the only additional
* information you can provide is for a settings activity that allows the user to configure
* the dream behavior. For example:</p>
* <p class="code-caption">res/xml/my_dream.xml</p>
* <pre>
* <dream xmlns:android="http://schemas.android.com/apk/res/android"
* android:settingsActivity="com.example.app/.MyDreamSettingsActivity" />
* </pre>
* <p>This makes a Settings button available alongside your dream's listing in the
* system settings, which when pressed opens the specified activity.</p>
*
*
* <p>To specify your dream layout, call {@link #setContentView}, typically during the
* {@link #onAttachedToWindow} callback. For example:</p>
* <pre>
* public class MyDream extends DreamService {
*
* @Override
* public void onAttachedToWindow() {
* super.onAttachedToWindow();
*
* // Exit dream upon user touch
* setInteractive(false);
* // Hide system UI
* setFullscreen(true);
* // Set the dream layout
* setContentView(R.layout.dream);
* }
* }
* </pre>
*/
public class DreamService extends Service implements Window.Callback {
// Tag includes the concrete subclass name so log lines identify the actual dream.
private final String TAG = DreamService.class.getSimpleName() + "[" + getClass().getSimpleName() + "]";
/**
 * The name of the dream manager service.
 * @hide
 */
public static final String DREAM_SERVICE = "dreams";
/**
 * The {@link Intent} that must be declared as handled by the service.
 */
@SdkConstant(SdkConstantType.SERVICE_ACTION)
public static final String SERVICE_INTERFACE =
        "android.service.dreams.DreamService";
/**
 * Name under which a Dream publishes information about itself.
 * This meta-data must reference an XML resource containing
 * a <code><{@link android.R.styleable#Dream dream}></code>
 * tag.
 */
public static final String DREAM_META_DATA = "android.service.dream";
// Binder proxy to the system dream manager; looked up once in the constructor.
private final IDreamManager mSandman;
// Handler bound to the thread that constructed this service instance.
private final Handler mHandler = new Handler();
// Window-manager token for our window; non-null only while attached.
private IBinder mWindowToken;
// The dream's window; created in attach(), torn down in detach().
private Window mWindow;
// True if the dream handles input itself instead of finishing on first input event.
private boolean mInteractive;
// SYSTEM_UI_FLAG_LOW_PROFILE state; defaults to true (dimmed system UI).
private boolean mLowProfile = true;
// FLAG_FULLSCREEN state; defaults to false.
private boolean mFullscreen;
// FLAG_KEEP_SCREEN_ON state; note the default is true.
private boolean mScreenBright = true;
// Lifecycle: mStarted is true between onDreamingStarted() and onDreamingStopped();
// mFinished is latched once the dream has finished.
private boolean mStarted;
private boolean mFinished;
// Doze capability/state, supplied by the dream manager when the dream is attached.
private boolean mCanDoze;
private boolean mDozing;
private DozeHardware mDozeHardware;
// Verbose lifecycle logging toggle; see setDebug().
private boolean mDebug = false;
/**
 * Creates the service and looks up the system dream manager binder.
 * Runs before {@link #onCreate}; no window exists yet at this point.
 */
public DreamService() {
    mSandman = IDreamManager.Stub.asInterface(ServiceManager.getService(DREAM_SERVICE));
}
/**
 * Enables verbose lifecycle logging for this dream instance.
 * @hide
 */
public void setDebug(boolean dbg) {
    mDebug = dbg;
}
// begin Window.Callback methods
/** {@inheritDoc} */
@Override
public boolean dispatchKeyEvent(KeyEvent event) {
    // TODO: create more flexible version of mInteractive that allows use of KEYCODE_BACK
    // A non-interactive dream ends on any key; an interactive one still ends on BACK.
    final boolean nonInteractive = !mInteractive;
    if (nonInteractive || event.getKeyCode() == KeyEvent.KEYCODE_BACK) {
        if (mDebug) {
            Slog.v(TAG, nonInteractive ? "Finishing on keyEvent" : "Finishing on back key");
        }
        safelyFinish();
        return true;
    }
    return mWindow.superDispatchKeyEvent(event);
}
/** {@inheritDoc} */
@Override
public boolean dispatchKeyShortcutEvent(KeyEvent event) {
    // Interactive dreams get the event; non-interactive dreams dismiss on it.
    if (mInteractive) {
        return mWindow.superDispatchKeyShortcutEvent(event);
    }
    if (mDebug) Slog.v(TAG, "Finishing on keyShortcutEvent");
    safelyFinish();
    return true;
}
/** {@inheritDoc} */
@Override
public boolean dispatchTouchEvent(MotionEvent event) {
    // TODO: create more flexible version of mInteractive that allows clicks
    // but finish()es on any other kind of activity
    if (mInteractive) {
        return mWindow.superDispatchTouchEvent(event);
    }
    if (mDebug) Slog.v(TAG, "Finishing on touchEvent");
    safelyFinish();
    return true;
}
/** {@inheritDoc} */
@Override
public boolean dispatchTrackballEvent(MotionEvent event) {
    // Non-interactive dreams dismiss on any trackball motion.
    if (!mInteractive) {
        if (mDebug) Slog.v(TAG, "Finishing on trackballEvent");
        safelyFinish();
        return true;
    }
    return mWindow.superDispatchTrackballEvent(event);
}
/** {@inheritDoc} */
@Override
public boolean dispatchGenericMotionEvent(MotionEvent event) {
    // Non-interactive dreams dismiss on any generic motion (joystick, hover, etc.).
    if (!mInteractive) {
        if (mDebug) Slog.v(TAG, "Finishing on genericMotionEvent");
        safelyFinish();
        return true;
    }
    return mWindow.superDispatchGenericMotionEvent(event);
}
/** {@inheritDoc} */
@Override
public boolean dispatchPopulateAccessibilityEvent(AccessibilityEvent event) {
    // Dreams do not contribute to accessibility event population.
    return false;
}
// The remaining Window.Callback methods are deliberate no-ops: a dream has no
// panels, menus, action modes or search UI to manage.
/** {@inheritDoc} */
@Override
public View onCreatePanelView(int featureId) {
    return null;
}
/** {@inheritDoc} */
@Override
public boolean onCreatePanelMenu(int featureId, Menu menu) {
    return false;
}
/** {@inheritDoc} */
@Override
public boolean onPreparePanel(int featureId, View view, Menu menu) {
    return false;
}
/** {@inheritDoc} */
@Override
public boolean onMenuOpened(int featureId, Menu menu) {
    return false;
}
/** {@inheritDoc} */
@Override
public boolean onMenuItemSelected(int featureId, MenuItem item) {
    return false;
}
/** {@inheritDoc} */
@Override
public void onWindowAttributesChanged(LayoutParams attrs) {
}
/** {@inheritDoc} */
@Override
public void onContentChanged() {
}
/** {@inheritDoc} */
@Override
public void onWindowFocusChanged(boolean hasFocus) {
}
/** {@inheritDoc} */
@Override
public void onAttachedToWindow() {
}
/** {@inheritDoc} */
@Override
public void onDetachedFromWindow() {
}
/** {@inheritDoc} */
@Override
public void onPanelClosed(int featureId, Menu menu) {
}
/** {@inheritDoc} */
@Override
public boolean onSearchRequested() {
    return false;
}
/** {@inheritDoc} */
@Override
public ActionMode onWindowStartingActionMode(android.view.ActionMode.Callback callback) {
    return null;
}
/** {@inheritDoc} */
@Override
public void onActionModeStarted(ActionMode mode) {
}
/** {@inheritDoc} */
@Override
public void onActionModeFinished(ActionMode mode) {
}
// end Window.Callback methods
// begin public api
/**
 * Retrieves the current {@link android.view.WindowManager} for the dream.
 * Behaves similarly to {@link android.app.Activity#getWindowManager()}.
 *
 * @return The current window manager, or null if the dream is not started.
 */
public WindowManager getWindowManager() {
    final Window window = mWindow;
    if (window == null) {
        return null;
    }
    return window.getWindowManager();
}
/**
 * Retrieves the current {@link android.view.Window} for the dream.
 * Behaves similarly to {@link android.app.Activity#getWindow()}.
 *
 * @return The current window, or null if the dream is not started
 *         (the window only exists between attach and detach).
 */
public Window getWindow() {
    return mWindow;
}
/**
 * Inflates a layout resource and set it to be the content view for this Dream.
 * Behaves similarly to {@link android.app.Activity#setContentView(int)}.
 *
 * <p>Note: Requires a window, do not call before {@link #onAttachedToWindow()}</p>
 *
 * @param layoutResID Resource ID to be inflated.
 *
 * @see #setContentView(android.view.View)
 * @see #setContentView(android.view.View, android.view.ViewGroup.LayoutParams)
 */
public void setContentView(int layoutResID) {
    // Pure delegation; will NPE if called before the window exists (see note above).
    getWindow().setContentView(layoutResID);
}
/**
 * Sets a view to be the content view for this Dream.
 * Behaves similarly to {@link android.app.Activity#setContentView(android.view.View)} in an activity,
 * including using {@link ViewGroup.LayoutParams#MATCH_PARENT} as the layout height and width of the view.
 *
 * <p>Note: This requires a window, so you should usually call it during
 * {@link #onAttachedToWindow()} and never earlier (you <strong>cannot</strong> call it
 * during {@link #onCreate}).</p>
 *
 * @see #setContentView(int)
 * @see #setContentView(android.view.View, android.view.ViewGroup.LayoutParams)
 */
public void setContentView(View view) {
    getWindow().setContentView(view);
}
/**
 * Sets a view to be the content view for this Dream.
 * Behaves similarly to
 * {@link android.app.Activity#setContentView(android.view.View, android.view.ViewGroup.LayoutParams)}
 * in an activity.
 *
 * <p>Note: This requires a window, so you should usually call it during
 * {@link #onAttachedToWindow()} and never earlier (you <strong>cannot</strong> call it
 * during {@link #onCreate}).</p>
 *
 * @param view The desired content to display.
 * @param params Layout parameters for the view.
 *
 * @see #setContentView(android.view.View)
 * @see #setContentView(int)
 */
public void setContentView(View view, ViewGroup.LayoutParams params) {
    getWindow().setContentView(view, params);
}
/**
 * Adds a view to the Dream's window, leaving other content views in place.
 *
 * <p>Note: Requires a window, do not call before {@link #onAttachedToWindow()}</p>
 *
 * @param view The desired content to display.
 * @param params Layout parameters for the view.
 */
public void addContentView(View view, ViewGroup.LayoutParams params) {
    getWindow().addContentView(view, params);
}
/**
 * Finds a view that was identified by the id attribute from the XML that
 * was processed in {@link #onCreate}.
 *
 * <p>Note: Requires a window, do not call before {@link #onAttachedToWindow()}</p>
 * <p>NOTE(review): content is typically installed via {@link #setContentView}, not
 * {@link #onCreate} — the wording above looks inherited from Activity; verify.</p>
 *
 * @return The view if found or null otherwise.
 */
public View findViewById(int id) {
    return getWindow().findViewById(id);
}
/**
 * Marks this dream as interactive to receive input events.
 *
 * <p>Non-interactive dreams (default) will dismiss on the first input event.</p>
 *
 * <p>Interactive dreams should call {@link #finish()} to dismiss themselves.</p>
 *
 * @param interactive True if this dream will handle input events.
 */
public void setInteractive(boolean interactive) {
    mInteractive = interactive;
}
/**
 * Returns whether or not this dream is interactive. Defaults to false.
 *
 * @see #setInteractive(boolean)
 */
public boolean isInteractive() {
    return mInteractive;
}
/**
 * Sets View.SYSTEM_UI_FLAG_LOW_PROFILE on the content view.
 *
 * @param lowProfile True to set View.SYSTEM_UI_FLAG_LOW_PROFILE
 * @hide There is no reason to have this -- dreams can set this flag
 * on their own content view, and from there can actually do the
 * correct interactions with it (seeing when it is cleared etc).
 */
public void setLowProfile(boolean lowProfile) {
    // Only touch the view flags when the cached state actually changes.
    if (mLowProfile != lowProfile) {
        mLowProfile = lowProfile;
        int flag = View.SYSTEM_UI_FLAG_LOW_PROFILE;
        applySystemUiVisibilityFlags(mLowProfile ? flag : 0, flag);
    }
}
/**
 * Returns whether or not this dream is in low profile mode. Defaults to true.
 *
 * @see #setLowProfile(boolean)
 * @hide
 */
public boolean isLowProfile() {
    // Reads the live view flag when available, falling back to the cached value.
    return getSystemUiVisibilityFlagValue(View.SYSTEM_UI_FLAG_LOW_PROFILE, mLowProfile);
}
/**
 * Controls {@link android.view.WindowManager.LayoutParams#FLAG_FULLSCREEN}
 * on the dream's window.
 *
 * @param fullscreen If true, the fullscreen flag will be set; else it
 * will be cleared.
 */
public void setFullscreen(boolean fullscreen) {
    if (mFullscreen != fullscreen) {
        mFullscreen = fullscreen;
        int flag = WindowManager.LayoutParams.FLAG_FULLSCREEN;
        applyWindowFlags(mFullscreen ? flag : 0, flag);
    }
}
/**
 * Returns whether or not this dream is in fullscreen mode. Defaults to false.
 *
 * @see #setFullscreen(boolean)
 */
public boolean isFullscreen() {
    return mFullscreen;
}
/**
 * Marks this dream as keeping the screen bright while dreaming.
 *
 * @param screenBright True to keep the screen bright while dreaming.
 */
public void setScreenBright(boolean screenBright) {
    if (mScreenBright != screenBright) {
        mScreenBright = screenBright;
        int flag = WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON;
        applyWindowFlags(mScreenBright ? flag : 0, flag);
    }
}
/**
 * Returns whether or not this dream keeps the screen bright while dreaming.
 * Defaults to true (the field is initialized to true); clear it with
 * {@link #setScreenBright} to allow the screen to dim if necessary.
 *
 * @see #setScreenBright(boolean)
 */
public boolean isScreenBright() {
    return getWindowFlagValue(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON, mScreenBright);
}
/**
 * Returns true if this dream is allowed to doze.
 * <p>
 * The value returned by this method is only meaningful when the dream has started
 * (it is supplied by the dream manager when the dream is attached).
 * </p>
 *
 * @return True if this dream can doze.
 * @see #startDozing
 * @hide experimental
 */
public boolean canDoze() {
    return mCanDoze;
}
/**
 * Starts dozing, entering a deep dreamy sleep.
 * <p>
 * Dozing lets the system conserve power while the user is not interacting with the
 * device: the display stays on in a low-power state showing its previous contents,
 * while the application processor and other components may suspend. While suspended,
 * the dream may stop executing for long periods; schedule {@link AlarmManager} alarms
 * or hold a {@link android.os.PowerManager#PARTIAL_WAKE_LOCK partial wake lock} if
 * periodic work is needed — sparingly, since the whole point is to save power.
 * </p><p>
 * Call this after the entry animation completes and all setup work is done, as the
 * processor may suspend at any moment afterwards unless other wake locks are held.
 * Call {@link #stopDozing} or {@link #finish} to stop dozing.
 * </p>
 *
 * @see #stopDozing
 * @hide experimental
 */
public void startDozing() {
    // Guard clauses: dozing must be permitted and not already in progress.
    if (!mCanDoze || mDozing) {
        return;
    }
    mDozing = true;
    try {
        mSandman.startDozing(mWindowToken);
    } catch (RemoteException ex) {
        // system server died
    }
}
/**
 * Stops dozing, returns to active dreaming.
 * <p>
 * Reverses the effect of {@link #startDozing}: from this moment on the application
 * processor is kept awake as long as the dream runs or until dozing starts again.
 * </p>
 *
 * @see #startDozing
 * @hide experimental
 */
public void stopDozing() {
    // No-op unless we are currently dozing.
    if (!mDozing) {
        return;
    }
    mDozing = false;
    try {
        mSandman.stopDozing(mWindowToken);
    } catch (RemoteException ex) {
        // system server died
    }
}
/**
 * Returns true if the dream will allow the system to enter a low-power state while
 * it is running without actually turning off the screen. Defaults to false,
 * keeping the application processor awake while the dream is running.
 *
 * @return True if the dream is dozing.
 *
 * @see #startDozing
 * @see #stopDozing
 * @hide experimental
 */
public boolean isDozing() {
    return mDozing;
}
/**
 * Gets an object that may be used to access low-level hardware features that a
 * dream may use to provide a richer user experience while dozing.
 *
 * <p>Lazily created on first successful call; requires doze support and an
 * attached window token.</p>
 *
 * @return An instance of {@link DozeHardware} or null if this device does not offer
 * hardware support for dozing.
 *
 * @hide experimental
 */
public DozeHardware getDozeHardware() {
    // Only attempt creation once per attachment; cache the wrapper on success.
    if (mCanDoze && mDozeHardware == null && mWindowToken != null) {
        try {
            IDozeHardware hardware = mSandman.getDozeHardware(mWindowToken);
            if (hardware != null) {
                mDozeHardware = new DozeHardware(hardware);
            }
        } catch (RemoteException ex) {
            // system server died
        }
    }
    return mDozeHardware;
}
/**
 * Called when this Dream is constructed.
 * Prefer {@link #onAttachedToWindow} for setup that needs the window.
 */
@Override
public void onCreate() {
    if (mDebug) Slog.v(TAG, "onCreate() on thread " + Thread.currentThread().getId());
    super.onCreate();
}
/**
 * Called when the dream's window has been created and is visible and animation may now begin.
 */
public void onDreamingStarted() {
    if (mDebug) Slog.v(TAG, "onDreamingStarted()");
    // hook for subclasses
}
/**
 * Called when this Dream is stopped, either by external request or by calling finish(),
 * before the window has been removed.
 */
public void onDreamingStopped() {
    if (mDebug) Slog.v(TAG, "onDreamingStopped()");
    // hook for subclasses
}
/** {@inheritDoc} */
@Override
public final IBinder onBind(Intent intent) {
    if (mDebug) Slog.v(TAG, "onBind() intent = " + intent);
    // Final: the dream manager talks to us exclusively through this wrapper.
    return new DreamServiceWrapper();
}
/**
 * Stops the dream and detaches from the window.
 * <p>
 * When the dream ends, the system will be allowed to go to sleep fully unless there
 * is a reason for it to be awake such as recent user activity or wake locks being held.
 * </p>
 */
public final void finish() {
    if (mDebug) Slog.v(TAG, "finish()");
    finishInternal();
}
/** {@inheritDoc} */
@Override
public void onDestroy() {
    if (mDebug) Slog.v(TAG, "onDestroy()");
    // hook for subclasses
    // Just in case destroy came in before detach, let's take care of that now
    detach();
    super.onDestroy();
}
// end public api
/**
 * Called by DreamController.stopDream() when the Dream is about to be unbound and destroyed.
 *
 * Idempotent: each step guards on its own state, so a second call (e.g. from
 * onDestroy()) is a no-op.
 *
 * Must run on mHandler.
 */
private final void detach() {
    if (mStarted) {
        if (mDebug) Slog.v(TAG, "detach(): Calling onDreamingStopped()");
        mStarted = false;
        // Let the subclass stop its animations before the window disappears.
        onDreamingStopped();
    }
    if (mWindow != null) {
        // force our window to be removed synchronously
        if (mDebug) Slog.v(TAG, "detach(): Removing window from window manager");
        mWindow.getWindowManager().removeViewImmediate(mWindow.getDecorView());
        mWindow = null;
    }
    if (mWindowToken != null) {
        // the following will print a log message if it finds any other leaked windows
        WindowManagerGlobal.getInstance().closeAll(mWindowToken,
                this.getClass().getName(), "Dream");
        mWindowToken = null;
    }
}
/**
 * Called when the Dream is ready to be shown.
 *
 * Must run on mHandler.
 *
 * Creates and configures the dream window (TYPE_DREAM, fullscreen/bright flags per
 * the service's settings), adds it to the window manager, and schedules the
 * onDreamingStarted() callback. Bails out early if already attached, already
 * finished, or if the token was revoked before the window could be added.
 *
 * @param windowToken A window token that will allow a window to be created in the correct layer.
 * @param canDoze whether dozing is permitted for this dream; stored in mCanDoze.
 */
private final void attach(IBinder windowToken, boolean canDoze) {
    // Guard: attach may only happen once per dream instance.
    if (mWindowToken != null) {
        Slog.e(TAG, "attach() called when already attached with token=" + mWindowToken);
        return;
    }
    // Guard: if finish() already ran, tell the dream manager to wrap up instead of attaching.
    if (mFinished) {
        Slog.w(TAG, "attach() called after dream already finished");
        try {
            mSandman.finishSelf(windowToken);
        } catch (RemoteException ex) {
            // system server died
        }
        return;
    }
    if (mDebug) Slog.v(TAG, "Attached on thread " + Thread.currentThread().getId());
    mWindowToken = windowToken;
    mCanDoze = canDoze;
    // Build a phone-style window owned by this service; dreams are opaque and untitled.
    mWindow = PolicyManager.makeNewWindow(this);
    mWindow.setCallback(this);
    mWindow.requestFeature(Window.FEATURE_NO_TITLE);
    mWindow.setBackgroundDrawable(new ColorDrawable(0xFF000000));
    mWindow.setFormat(PixelFormat.OPAQUE);
    if (mDebug) Slog.v(TAG, String.format("Attaching window token: %s to window of type %s",
            windowToken, WindowManager.LayoutParams.TYPE_DREAM));
    WindowManager.LayoutParams lp = mWindow.getAttributes();
    lp.type = WindowManager.LayoutParams.TYPE_DREAM;
    lp.token = windowToken;
    lp.windowAnimations = com.android.internal.R.style.Animation_Dream;
    // Fullscreen and keep-screen-on are opt-in via the service's configuration flags.
    lp.flags |= ( WindowManager.LayoutParams.FLAG_LAYOUT_IN_SCREEN
                | WindowManager.LayoutParams.FLAG_LAYOUT_INSET_DECOR
                | WindowManager.LayoutParams.FLAG_SHOW_WHEN_LOCKED
                | WindowManager.LayoutParams.FLAG_DISMISS_KEYGUARD
                | WindowManager.LayoutParams.FLAG_ALLOW_LOCK_WHILE_SCREEN_ON
                | (mFullscreen ? WindowManager.LayoutParams.FLAG_FULLSCREEN : 0)
                | (mScreenBright ? WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON : 0)
                );
    mWindow.setAttributes(lp);
    mWindow.setWindowManager(null, windowToken, "dream", true);
    applySystemUiVisibilityFlags(
            (mLowProfile ? View.SYSTEM_UI_FLAG_LOW_PROFILE : 0),
            View.SYSTEM_UI_FLAG_LOW_PROFILE);
    try {
        getWindowManager().addView(mWindow.getDecorView(), mWindow.getAttributes());
    } catch (WindowManager.BadTokenException ex) {
        // This can happen because the dream manager service will remove the token
        // immediately without necessarily waiting for the dream to start.
        // We should receive a finish message soon.
        Slog.i(TAG, "attach() called after window token already removed, dream will "
                + "finish soon");
        mWindow = null;
        return;
    }
    // We need to defer calling onDreamingStarted until after onWindowAttached,
    // which is posted to the handler by addView, so we post onDreamingStarted
    // to the handler also. Need to watch out here in case detach occurs before
    // this callback is invoked.
    mHandler.post(new Runnable() {
        @Override
        public void run() {
            // mWindow == null means detach() already ran; skip the started callback.
            if (mWindow != null) {
                if (mDebug) Slog.v(TAG, "Calling onDreamingStarted()");
                mStarted = true;
                onDreamingStarted();
            }
        }
    });
}
/**
 * Invokes {@link #finish()} and then verifies that the finish actually happened,
 * forcing the internal teardown if a subclass overrode finish() without chaining up.
 */
private void safelyFinish() {
    if (mDebug) {
        Slog.v(TAG, "safelyFinish()");
    }
    finish();
    if (mFinished) {
        return;
    }
    // The subclass swallowed finish(); force the teardown ourselves.
    Slog.w(TAG, "Bad dream, did not call super.finish()");
    finishInternal();
}
/**
 * Marks the dream finished (at most once), notifies the dream manager via
 * {@code mSandman.finishSelf}, and stops the service.
 */
private void finishInternal() {
    if (mDebug) {
        Slog.v(TAG, "finishInternal() mFinished = " + mFinished);
    }
    if (mFinished) {
        // Already finished; keep this idempotent.
        return;
    }
    mFinished = true;
    if (mWindowToken == null) {
        Slog.w(TAG, "Finish was called before the dream was attached.");
    } else {
        try {
            mSandman.finishSelf(mWindowToken);
        } catch (RemoteException ex) {
            // system server died
        }
    }
    stopSelf(); // if launched via any other means
}
/**
 * Reports whether the given window flag is currently set, or the supplied
 * default when no window has been created yet.
 */
private boolean getWindowFlagValue(int flag, boolean defaultValue) {
    if (mWindow == null) {
        return defaultValue;
    }
    return (mWindow.getAttributes().flags & flag) != 0;
}
/**
 * Merges the masked window flags into the dream window's layout params and
 * pushes the updated layout to the window manager. No-op before attach.
 */
private void applyWindowFlags(int flags, int mask) {
    if (mWindow == null) {
        return;
    }
    final WindowManager.LayoutParams attrs = mWindow.getAttributes();
    attrs.flags = applyFlags(attrs.flags, flags, mask);
    mWindow.setAttributes(attrs);
    mWindow.getWindowManager().updateViewLayout(mWindow.getDecorView(), attrs);
}
/**
 * Reports whether the given system-UI visibility flag is set on the decor view,
 * or the supplied default when no window/decor view exists yet.
 */
private boolean getSystemUiVisibilityFlagValue(int flag, boolean defaultValue) {
    final View decor = (mWindow == null) ? null : mWindow.getDecorView();
    if (decor == null) {
        return defaultValue;
    }
    return (decor.getSystemUiVisibility() & flag) != 0;
}
/**
 * Merges the masked flags into the decor view's system-UI visibility.
 * No-op when the window or its decor view is not available yet.
 */
private void applySystemUiVisibilityFlags(int flags, int mask) {
    final View decor = (mWindow == null) ? null : mWindow.getDecorView();
    if (decor == null) {
        return;
    }
    decor.setSystemUiVisibility(applyFlags(decor.getSystemUiVisibility(), flags, mask));
}
/**
 * Replaces the bits selected by {@code mask} in {@code oldFlags} with the
 * corresponding bits of {@code flags}; bits outside the mask are preserved.
 */
private int applyFlags(int oldFlags, int flags, int mask) {
    final int preserved = oldFlags & ~mask;
    final int replaced = flags & mask;
    return preserved | replaced;
}
/**
 * Dumps the dream's state (running/stopped, window, active flags) for debugging.
 * The dump body runs asynchronously on mHandler (via DumpUtils.dumpAsync) so it
 * reads state on the same thread that mutates it; 1000 is the wait timeout in ms.
 */
@Override
protected void dump(FileDescriptor fd, PrintWriter pw, String[] args) {
    DumpUtils.dumpAsync(mHandler, new Dump() {
        @Override
        public void dump(PrintWriter pw) {
            pw.print(TAG + ": ");
            // A non-null token is the signal that the dream is currently attached.
            if (mWindowToken == null) {
                pw.println("stopped");
            } else {
                pw.println("running (token=" + mWindowToken + ")");
            }
            pw.println("  window: " + mWindow);
            pw.print("  flags:");
            if (isInteractive()) pw.print(" interactive");
            if (isLowProfile()) pw.print(" lowprofile");
            if (isFullscreen()) pw.print(" fullscreen");
            if (isScreenBright()) pw.print(" bright");
            if (isDozing()) pw.print(" dozing");
            pw.println();
        }
    }, pw, 1000);
}
/**
 * Binder stub handed to the dream manager service via onBind(). Each incoming
 * call is posted to mHandler so the outer DreamService's state is only ever
 * touched on its own handler thread (attach()/detach() document this contract).
 */
private final class DreamServiceWrapper extends IDreamService.Stub {
    @Override
    public void attach(final IBinder windowToken, final boolean canDoze) {
        // Hop from the binder thread to the service's handler thread.
        mHandler.post(new Runnable() {
            @Override
            public void run() {
                DreamService.this.attach(windowToken, canDoze);
            }
        });
    }
    @Override
    public void detach() {
        // Same thread-hop as attach().
        mHandler.post(new Runnable() {
            @Override
            public void run() {
                DreamService.this.detach();
            }
        });
    }
}
}
| |
/**
* Copyright 2011-2019 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.dag.compiler.planner;
import java.text.MessageFormat;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Deque;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.asakusafw.dag.compiler.model.plan.InputSpec;
import com.asakusafw.dag.compiler.model.plan.OutputSpec;
import com.asakusafw.dag.compiler.model.plan.VertexSpec;
import com.asakusafw.dag.compiler.planner.PlanningContext.Option;
import com.asakusafw.lang.compiler.api.CompilerOptions;
import com.asakusafw.lang.compiler.api.JobflowProcessor;
import com.asakusafw.lang.compiler.common.AttributeContainer;
import com.asakusafw.lang.compiler.common.BasicDiagnostic;
import com.asakusafw.lang.compiler.common.Diagnostic;
import com.asakusafw.lang.compiler.common.DiagnosticException;
import com.asakusafw.lang.compiler.model.description.TypeDescription;
import com.asakusafw.lang.compiler.model.graph.CoreOperator;
import com.asakusafw.lang.compiler.model.graph.CoreOperator.CoreOperatorKind;
import com.asakusafw.lang.compiler.model.graph.ExternalInput;
import com.asakusafw.lang.compiler.model.graph.ExternalOutput;
import com.asakusafw.lang.compiler.model.graph.Group;
import com.asakusafw.lang.compiler.model.graph.Jobflow;
import com.asakusafw.lang.compiler.model.graph.MarkerOperator;
import com.asakusafw.lang.compiler.model.graph.Operator;
import com.asakusafw.lang.compiler.model.graph.Operator.OperatorKind;
import com.asakusafw.lang.compiler.model.graph.OperatorArgument;
import com.asakusafw.lang.compiler.model.graph.OperatorGraph;
import com.asakusafw.lang.compiler.model.graph.OperatorInput;
import com.asakusafw.lang.compiler.model.graph.OperatorInput.InputUnit;
import com.asakusafw.lang.compiler.model.graph.OperatorOutput;
import com.asakusafw.lang.compiler.model.graph.OperatorProperty;
import com.asakusafw.lang.compiler.model.graph.Operators;
import com.asakusafw.lang.compiler.model.graph.UserOperator;
import com.asakusafw.lang.compiler.model.info.ExternalOutputInfo;
import com.asakusafw.lang.compiler.model.info.JobflowInfo;
import com.asakusafw.lang.compiler.optimizer.OperatorCharacterizers;
import com.asakusafw.lang.compiler.optimizer.OperatorRewriters;
import com.asakusafw.lang.compiler.optimizer.adapter.OptimizerContextAdapter;
import com.asakusafw.lang.compiler.optimizer.basic.OperatorClass;
import com.asakusafw.lang.compiler.optimizer.basic.OperatorClass.InputType;
import com.asakusafw.lang.compiler.planning.OperatorEquivalence;
import com.asakusafw.lang.compiler.planning.Plan;
import com.asakusafw.lang.compiler.planning.PlanAssembler;
import com.asakusafw.lang.compiler.planning.PlanDetail;
import com.asakusafw.lang.compiler.planning.PlanMarker;
import com.asakusafw.lang.compiler.planning.PlanMarkers;
import com.asakusafw.lang.compiler.planning.Planning;
import com.asakusafw.lang.compiler.planning.SubPlan;
import com.asakusafw.lang.compiler.planning.util.GraphStatistics;
import com.asakusafw.lang.utils.common.Invariants;
import com.asakusafw.lang.utils.common.Optionals;
import com.asakusafw.utils.graph.Graph;
import com.asakusafw.utils.graph.Graphs;
/**
 * Utilities for execution planning on Asakusa DAG compiler.
 * The elements in the created plan will have the following {@link AttributeContainer#getAttribute(Class) attributes}:
 * <ul>
 * <li> {@link VertexSpec} -
 *     for {@link SubPlan}
 * </li>
 * <li> {@link InputSpec} -
 *     for {@link com.asakusafw.lang.compiler.planning.SubPlan.Input SubPlan.Input}
 * </li>
 * <li> {@link OutputSpec} -
 *     for {@link com.asakusafw.lang.compiler.planning.SubPlan.Output SubPlan.Output}
 * </li>
 * </ul>
 * @since 0.4.0
 * @version 0.5.2
 */
public final class DagPlanning {

    static final Logger LOG = LoggerFactory.getLogger(DagPlanning.class);

    // Upper bound on iterative reduction passes (graph optimization and cyclic
    // broadcast removal); guards against rewriters that never converge.
    private static final int REDUCTION_STEP_LIMIT = 1_000;

    /**
     * The compiler property key prefix of planning options.
     * @see Option
     */
    public static final String KEY_OPTION_PREFIX = "dag.planning.option."; //$NON-NLS-1$

    // utility class: no instances
    private DagPlanning() {
        return;
    }

    /**
     * Builds an execution plan for the target jobflow.
     * Note that, this does not modify the operator graph in the target jobflow
     * (it works on a copy).
     * @param parent the current jobflow processing context
     * @param jobflow the target jobflow
     * @return the detail of created plan
     */
    public static PlanDetail plan(JobflowProcessor.Context parent, Jobflow jobflow) {
        return plan(parent, jobflow, jobflow.getOperatorGraph().copy());
    }

    /**
     * Builds an execution plan for the target operator graph.
     * Note that, the target operator graph will be modified in this invocation.
     * @param parent the current jobflow processing context
     * @param jobflow the target jobflow information
     * @param operators the target operator graph
     * @return the detail of created plan
     */
    public static PlanDetail plan(JobflowProcessor.Context parent, JobflowInfo jobflow, OperatorGraph operators) {
        PlanningContext context = createContext(parent, jobflow);
        return plan(context, operators);
    }

    /**
     * Creates a new planner context, deriving the planning options from the
     * parent's compiler options.
     * @param parent the current jobflow processing context
     * @param jobflow the target jobflow information
     * @return the created context
     */
    public static PlanningContext createContext(JobflowProcessor.Context parent, JobflowInfo jobflow) {
        Set<Option> options = getPlanningOptions(parent.getOptions());
        return createContext(parent, jobflow, options);
    }

    /**
     * Creates a new planner context.
     * @param parent the current jobflow processing context
     * @param jobflow the target jobflow information
     * @param options the planning options
     * @return the created context
     */
    public static PlanningContext createContext(
            JobflowProcessor.Context parent,
            JobflowInfo jobflow,
            Collection<PlanningContext.Option> options) {
        PlanningContext context = new PlanningContext(
                new OptimizerContextAdapter(parent, jobflow.getFlowId(), DagOptimizerToolkit.INSTANCE),
                options);
        return context;
    }

    // Collects every Option whose compiler property (or default) says it is enabled.
    private static Set<Option> getPlanningOptions(CompilerOptions options) {
        Set<Option> results = EnumSet.noneOf(Option.class);
        for (Option option : Option.values()) {
            if (isEnabled(options, option)) {
                results.add(option);
            }
        }
        return results;
    }

    // Resolves a single option from "dag.planning.option.<symbol>", falling back
    // to the option's built-in default when the property is absent.
    private static boolean isEnabled(CompilerOptions options, Option option) {
        String key = KEY_OPTION_PREFIX + option.getSymbol();
        boolean enabled = options.get(key, option.isDefaultEnabled());
        if (LOG.isTraceEnabled()) {
            LOG.trace("{}={}", key, options.get(key, null)); //$NON-NLS-1$
        }
        LOG.debug("planning option: {}={}", option, enabled); //$NON-NLS-1$
        return enabled;
    }

    /**
     * Builds an execution plan for the target operator graph.
     * Note that, the target operator graph will be modified in this invocation.
     * @param context the current context
     * @param operators the target operator graph
     * @return the detail of created plan
     */
    public static PlanDetail plan(PlanningContext context, OperatorGraph operators) {
        prepareOperatorGraph(context, operators);
        PlanDetail result = createPlan(context, operators);
        return result;
    }

    /**
     * Makes the target operator graph suitable for execution planning.
     * Pipeline order matters: normalize, optimize (rewriters), fix input units,
     * insert plan markers, simplify terminators, then validate acyclicity.
     * @param context the current context
     * @param graph the target operator graph
     */
    static void prepareOperatorGraph(PlanningContext context, OperatorGraph graph) {
        Planning.normalize(graph);
        optimize(context, graph);
        fixOperatorGraph(context, graph);
        insertPlanMarkers(context, graph);
        Planning.simplifyTerminators(graph);
        validate(graph);
    }

    // Fails with a DiagnosticException (one diagnostic per cycle) if the operator
    // graph contains any circular dependency.
    private static void validate(OperatorGraph graph) {
        Graph<Operator> dependencies = Planning.toDependencyGraph(graph);
        Set<Set<Operator>> circuits = Graphs.findCircuit(dependencies);
        if (circuits.isEmpty() == false) {
            List<Diagnostic> diagnostics = new ArrayList<>();
            for (Set<Operator> loop : circuits) {
                diagnostics.add(new BasicDiagnostic(Diagnostic.Level.ERROR, MessageFormat.format(
                        "operator graph must be acyclic: {0}",
                        loop)).with(graph));
            }
            throw new DiagnosticException(diagnostics);
        }
    }

    // Rebuilds user operators whose declared input units disagree with what the
    // classifier computed, replacing them in the graph with corrected copies.
    private static void fixOperatorGraph(PlanningContext context, OperatorGraph graph) {
        Map<Operator, OperatorClass> characteristics = OperatorCharacterizers.apply(
                context.getOptimizerContext(),
                context.getEstimator(),
                context.getClassifier(),
                graph.getOperators(false));
        graph.getOperators(false).stream()
                .filter(it -> it.getOperatorKind() == OperatorKind.USER)
                .sorted(Planning.OPERATOR_ORDER) // deterministic processing order
                .map(characteristics::get)
                .filter(it -> isFixTarget(it))
                .forEach(info -> {
                    LOG.debug("fixing operator: {}", info.getOperator());
                    Operator replacement = fixOperator(info);
                    Operators.replace(info.getOperator(), replacement);
                    graph.add(replacement);
                    graph.remove(info.getOperator());
                });
        graph.rebuild();
    }

    // An operator needs fixing when any of its inputs carries an input unit that
    // differs from the one derived from its OperatorClass.
    private static boolean isFixTarget(OperatorClass info) {
        for (OperatorInput port : info.getOperator().getInputs()) {
            InputUnit adjust = computeInputUnit(info, port);
            if (adjust != port.getInputUnit()) {
                return true;
            }
        }
        return false;
    }

    // Derives the effective input unit for a port: secondary inputs consume the
    // WHOLE dataset; primary inputs follow the operator's input type
    // (RECORD/GROUP); otherwise the port's declared unit is kept.
    private static InputUnit computeInputUnit(OperatorClass info, OperatorInput port) {
        if (info.getSecondaryInputs().contains(port)) {
            return InputUnit.WHOLE;
        } else if (info.getPrimaryInputType() == InputType.RECORD) {
            return InputUnit.RECORD;
        } else if (info.getPrimaryInputType() == InputType.GROUP) {
            return InputUnit.GROUP;
        } else {
            return port.getInputUnit(); // don't care
        }
    }

    // Clones a user operator, copying all properties/attributes/constraints but
    // rewriting each input's unit to the computed value.
    private static Operator fixOperator(OperatorClass info) {
        UserOperator operator = (UserOperator) info.getOperator();
        UserOperator.Builder builder = UserOperator
                .builder(operator.getAnnotation(), operator.getMethod(), operator.getImplementationClass());
        for (OperatorProperty property : operator.getProperties()) {
            switch (property.getPropertyKind()) {
            case INPUT: {
                OperatorInput port = (OperatorInput) property;
                builder.input(port, c -> c.unit(computeInputUnit(info, port)));
                break;
            }
            case OUTPUT:
                builder.output((OperatorOutput) property);
                break;
            case ARGUMENT:
                builder.argument((OperatorArgument) property);
                break;
            default:
                throw new AssertionError(property);
            }
        }
        operator.getAttributeEntries().forEach(builder::attribute);
        builder.constraint(operator.getConstraints());
        return builder.build();
    }

    // Applies the registered operator rewriters repeatedly (removing dead flow
    // before each pass) until the graph snapshot stops changing, or the step
    // limit is reached (then logs a warning and gives up).
    private static void optimize(PlanningContext context, OperatorGraph graph) {
        int step = 0;
        boolean changed;
        do {
            step++;
            LOG.debug("optimize step#{}", step);
            Planning.removeDeadFlow(graph);
            if (step > REDUCTION_STEP_LIMIT) {
                LOG.warn(MessageFormat.format(
                        "the number of optimization steps exceeded limit: {0}",
                        REDUCTION_STEP_LIMIT));
                break;
            }
            OperatorGraph.Snapshot before = graph.getSnapshot();
            OperatorRewriters.apply(
                    context.getOptimizerContext(),
                    context.getEstimator(),
                    context.getRewriter(),
                    graph);
            OperatorGraph.Snapshot after = graph.getSnapshot();
            changed = before.equals(after) == false;
        } while (changed);
    }

    // Inserts plan markers (GATHER/BROADCAST/CHECKPOINT) around operators so the
    // planner can split the graph into sub-plans at the right boundaries.
    static void insertPlanMarkers(PlanningContext context, OperatorGraph graph) {
        rewriteCheckpointOperators(graph);
        Map<Operator, OperatorClass> characteristics = OperatorCharacterizers.apply(
                context.getOptimizerContext(),
                context.getEstimator(),
                context.getClassifier(),
                graph.getOperators(false));
        for (OperatorClass info : characteristics.values()) {
            insertPlanMarkerForPreparingGroup(info);
            insertPlanMarkerForPreparingBroadcast(info);
            if (context.getOptions().contains(Option.CHECKPOINT_AFTER_EXTERNAL_INPUTS)) {
                insertPlanMarkerForEnsuringExternalInput(info);
            }
            insertPlanMarkerForPreparingExternalOutput(info);
        }
        if (context.getOptions().contains(Option.REMOVE_CYCLIC_BROADCASTS)) {
            removeCyclicBroadcast(graph);
        }
        graph.rebuild();
    }

    // Replaces each core CHECKPOINT operator with a CHECKPOINT plan marker on its
    // (single) input, removing the original operator from the graph.
    private static void rewriteCheckpointOperators(OperatorGraph graph) {
        for (Operator operator : graph.getOperators(true)) {
            if (operator.getOperatorKind() != OperatorKind.CORE
                    || ((CoreOperator) operator).getCoreOperatorKind() != CoreOperatorKind.CHECKPOINT) {
                continue;
            }
            PlanMarkers.insert(PlanMarker.CHECKPOINT, operator.getInput(0));
            Operators.remove(operator);
            graph.remove(operator);
        }
    }

    // For group-wise operators, puts a GATHER marker (carrying EdgeInfo with the
    // grouping key, and the aggregator operator when the input is AGGREATE) in
    // front of each non-empty primary input.
    private static void insertPlanMarkerForPreparingGroup(OperatorClass info) {
        if (info.getPrimaryInputType() != OperatorClass.InputType.GROUP) {
            return;
        }
        for (OperatorInput port : info.getPrimaryInputs()) {
            if (isEmpty(port) == false) {
                assert port.getInputUnit() == InputUnit.GROUP;
                boolean aggregate = info.getAttributes(port).contains(OperatorClass.InputAttribute.AGGREATE);
                EdgeInfo edge = new EdgeInfo(
                        port.getDataType(),
                        port.getGroup(),
                        aggregate ? info.getOperator() : null);
                Operators.insert(MarkerOperator.builder(port.getDataType())
                        .attribute(PlanMarker.class, PlanMarker.GATHER)
                        .attribute(EdgeInfo.class, edge)
                        .build(), port);
            }
        }
    }

    // Puts a BROADCAST marker (with EdgeInfo, no aggregation) in front of each
    // non-empty secondary input.
    private static void insertPlanMarkerForPreparingBroadcast(OperatorClass info) {
        for (OperatorInput port : info.getSecondaryInputs()) {
            if (isEmpty(port) == false) {
                assert port.getInputUnit() == InputUnit.WHOLE;
                EdgeInfo edge = new EdgeInfo(port.getDataType(), port.getGroup(), null);
                Operators.insert(MarkerOperator.builder(port.getDataType())
                        .attribute(PlanMarker.class, PlanMarker.BROADCAST)
                        .attribute(EdgeInfo.class, edge)
                        .build(), port);
            }
        }
    }

    // An input is considered empty when every upstream operator is a BEGIN plan
    // marker (i.e. no real data can arrive on this port).
    private static boolean isEmpty(OperatorInput port) {
        for (Operator upstream : Operators.getPredecessors(Collections.singleton(port))) {
            PlanMarker marker = PlanMarkers.get(upstream);
            if (marker != PlanMarker.BEGIN) {
                return false;
            }
        }
        return true;
    }

    // Adds a CHECKPOINT right after an external input (only active when the
    // CHECKPOINT_AFTER_EXTERNAL_INPUTS option is on).
    private static void insertPlanMarkerForEnsuringExternalInput(OperatorClass info) {
        if (info.getOperator().getOperatorKind() != OperatorKind.INPUT) {
            return;
        }
        ExternalInput input = (ExternalInput) info.getOperator();
        PlanMarkers.insert(PlanMarker.CHECKPOINT, input.getOperatorPort());
    }

    // Adds a CHECKPOINT just before an external output.
    private static void insertPlanMarkerForPreparingExternalOutput(OperatorClass info) {
        if (info.getOperator().getOperatorKind() != OperatorKind.OUTPUT) {
            return;
        }
        ExternalOutput output = (ExternalOutput) info.getOperator();
        PlanMarkers.insert(PlanMarker.CHECKPOINT, output.getOperatorPort());
    }

    // Repeatedly breaks potentially cyclic broadcast dependencies by inserting
    // CHECKPOINT markers on the broadcast consumers' unmarked inputs, until no
    // candidate remains or the step limit is exceeded.
    private static void removeCyclicBroadcast(OperatorGraph graph) {
        int step = 0;
        while (true) {
            step++;
            if (step > REDUCTION_STEP_LIMIT) {
                LOG.warn(MessageFormat.format(
                        "removing cyclic broadcast step was exceeded: {0}",
                        REDUCTION_STEP_LIMIT));
                break;
            }
            graph.rebuild();
            MarkerOperator target = Planning.findPotentiallyCyclicBroadcast(graph.getOperators(false));
            if (target == null) {
                break;
            }
            // collect successor inputs that have no plan marker upstream yet
            List<OperatorInput> targets = Operators.getSuccessors(target).stream()
                    .flatMap(it -> it.getInputs().stream())
                    .filter(it -> Operators.getPredecessors(Collections.singleton(it)).stream()
                            .noneMatch(PlanMarkers::exists))
                    .collect(Collectors.toList());
            Invariants.require(targets.isEmpty() == false);
            LOG.debug("resolving cyclic broadcast dependencies: {}", targets);
            targets.forEach(it -> PlanMarkers.insert(PlanMarker.CHECKPOINT, it));
        }
    }

    // Builds the final plan: primitive plan -> unify sub-plans -> attach
    // VertexSpec/InputSpec/OutputSpec (and optional statistics) attributes.
    // Each stage is validated for acyclicity.
    static PlanDetail createPlan(PlanningContext context, OperatorGraph normalized) {
        PlanDetail primitive = createPrimitivePlan(context, normalized);
        validate(primitive);
        PlanDetail unified = unifySubPlans(context, primitive);
        validate(unified);
        SubPlanAnalyzer analyzer = SubPlanAnalyzer.newInstance(context, unified, normalized);
        decoratePlan(context, unified.getPlan(), analyzer);
        return unified;
    }

    // Fails with a DiagnosticException if the sub-plan dependency graph has cycles.
    private static void validate(PlanDetail detail) {
        Graph<SubPlan> dependencies = Planning.toDependencyGraph(detail.getPlan());
        Set<Set<SubPlan>> circuits = Graphs.findCircuit(dependencies);
        if (circuits.isEmpty() == false) {
            List<Diagnostic> diagnostics = new ArrayList<>();
            for (Set<SubPlan> loop : circuits) {
                diagnostics.add(new BasicDiagnostic(Diagnostic.Level.ERROR, MessageFormat.format(
                        "plan must be acyclic: {0}",
                        loop)).with(dependencies));
            }
            throw new DiagnosticException(diagnostics);
        }
    }

    // Thin wrapper around Planning.createPrimitivePlan (context currently unused).
    private static PlanDetail createPrimitivePlan(PlanningContext context, OperatorGraph graph) {
        PlanDetail primitive = Planning.createPrimitivePlan(graph);
        return primitive;
    }

    // Assembles the primitive plan into a reduced plan: groups sub-plans that may
    // be merged, applies standard eliminations/push-downs, and (optionally, via
    // the UNIFY_SUBPLAN_IO option) a custom I/O equivalence.
    private static PlanDetail unifySubPlans(PlanningContext context, PlanDetail primitive) {
        PlanAssembler assembler = Planning.startAssemblePlan(primitive)
                .withTrivialOutputElimination(true)
                .withRedundantOutputElimination(true)
                .withDuplicateCheckpointElimination(true)
                .withUnionPushDown(true)
                .withSortResult(true);
        OperatorEquivalence equivalence = PlanAssembler.DEFAULT_EQUIVALENCE;
        if (context.getOptions().contains(Option.UNIFY_SUBPLAN_IO)) {
            equivalence = new CustomEquivalence();
        }
        assembler.withCustomEquivalence(equivalence);
        Collection<SubPlanGroup> groups = classify(primitive);
        for (SubPlanGroup group : groups) {
            assembler.add(group.elements);
        }
        PlanDetail plan = assembler.build();
        return plan;
    }

    // Partitions sub-plans into merge candidates: first one group per sub-plan,
    // then combine groups sharing the same non-broadcast sources, then split any
    // group whose members depend on each other (merging those would form cycles).
    private static Collection<SubPlanGroup> classify(PlanDetail primitive) {
        List<SubPlanGroup> groups = new ArrayList<>();
        for (SubPlan subplan : primitive.getPlan().getElements()) {
            groups.add(SubPlanGroup.of(primitive, subplan));
        }
        groups = combineGroups(groups);
        Graph<SubPlan> dependencies = Planning.toDependencyGraph(primitive.getPlan());
        groups = splitGroups(dependencies, groups);
        return groups;
    }

    // Merges groups that share an identical common-source set.
    private static List<SubPlanGroup> combineGroups(List<SubPlanGroup> groups) {
        Map<Set<Operator>, SubPlanGroup> map = new LinkedHashMap<>();
        for (SubPlanGroup group : groups) {
            SubPlanGroup buddy = map.get(group.commonSources);
            if (buddy == null) {
                map.put(group.commonSources, group);
            } else {
                buddy.elements.addAll(group.elements);
            }
        }
        return new ArrayList<>(map.values());
    }

    // Repeatedly applies splitGroup until no group contains members that block
    // (transitively precede) other members of the same group.
    private static List<SubPlanGroup> splitGroups(Graph<SubPlan> dependencies, List<SubPlanGroup> groups) {
        List<SubPlanGroup> results = new ArrayList<>(groups);
        List<SubPlanGroup> purged = new ArrayList<>();
        while (true) {
            for (SubPlanGroup group : results) {
                SubPlanGroup g = splitGroup(dependencies, group);
                if (g != null) {
                    purged.add(g);
                }
            }
            if (purged.isEmpty()) {
                break;
            } else {
                results.addAll(purged);
                purged.clear();
            }
        }
        return results;
    }

    // Moves members that are transitive blockers of the group into a new group
    // (same common sources); returns null when nothing had to be split off.
    private static SubPlanGroup splitGroup(Graph<SubPlan> dependencies, SubPlanGroup group) {
        assert group.elements.isEmpty() == false;
        if (group.elements.size() <= 1) {
            return null;
        }
        Set<SubPlan> blockers = computeBlockers(dependencies, group);
        List<SubPlan> purged = new ArrayList<>();
        for (Iterator<SubPlan> iter = group.elements.iterator(); iter.hasNext();) {
            SubPlan element = iter.next();
            if (blockers.contains(element)) {
                purged.add(element);
                iter.remove();
            }
        }
        assert group.elements.isEmpty() == false;
        if (purged.isEmpty()) {
            return null;
        }
        SubPlanGroup result = new SubPlanGroup(group.commonSources);
        result.elements.addAll(purged);
        return result;
    }

    // Computes the transitive closure (BFS) of dependency-connected sub-plans
    // reachable from the group's members.
    private static Set<SubPlan> computeBlockers(Graph<SubPlan> dependencies, SubPlanGroup group) {
        Set<SubPlan> saw = new HashSet<>();
        Deque<SubPlan> work = new ArrayDeque<>(group.elements);
        while (work.isEmpty() == false) {
            SubPlan first = work.removeFirst();
            for (SubPlan blocker : dependencies.getConnected(first)) {
                if (saw.contains(blocker)) {
                    continue;
                }
                saw.add(blocker);
                work.add(blocker);
            }
        }
        return saw;
    }

    // Attaches the spec attributes, plus graph statistics when enabled.
    private static void decoratePlan(PlanningContext context, Plan plan, SubPlanAnalyzer analyzer) {
        attachCoreInfo(plan, analyzer);
        if (context.getOptions().contains(Option.GRAPH_STATISTICS)) {
            attachGraphStatistics(plan);
        }
    }

    // Attaches VertexSpec to each sub-plan first, then InputSpec/OutputSpec to
    // each of its ports (vertex analysis may be required before port analysis).
    private static void attachCoreInfo(Plan plan, SubPlanAnalyzer analyzer) {
        for (SubPlan sub : plan.getElements()) {
            VertexSpec info = analyzer.analyze(sub);
            assert info.getOrigin() == sub;
            sub.putAttribute(VertexSpec.class, info);
        }
        for (SubPlan sub : plan.getElements()) {
            for (SubPlan.Input input : sub.getInputs()) {
                InputSpec info = analyzer.analyze(input);
                assert info.getOrigin() == input;
                input.putAttribute(InputSpec.class, info);
            }
            for (SubPlan.Output output : sub.getOutputs()) {
                OutputSpec info = analyzer.analyze(output);
                assert info.getOrigin() == output;
                output.putAttribute(OutputSpec.class, info);
            }
        }
    }

    // Attaches GraphStatistics to the whole plan and to each sub-plan.
    private static void attachGraphStatistics(Plan plan) {
        plan.putAttribute(GraphStatistics.class, GraphStatistics.of(Planning.toDependencyGraph(plan)));
        for (SubPlan sub : plan.getElements()) {
            sub.putAttribute(GraphStatistics.class, GraphStatistics.of(Planning.toDependencyGraph(sub)));
        }
    }

    /**
     * A set of sub-plans that share the same non-broadcast source operators and
     * are therefore candidates to be assembled into one vertex.
     */
    private static class SubPlanGroup {

        // source operators shared by all members (broadcast inputs excluded)
        final Set<Operator> commonSources;

        // member sub-plans of this group
        final List<SubPlan> elements = new LinkedList<>();

        SubPlanGroup(Set<Operator> commonSources) {
            this.commonSources = Collections.unmodifiableSet(commonSources);
        }

        // Creates a singleton group for the sub-plan, keyed by the source
        // operators of its non-BROADCAST inputs.
        public static SubPlanGroup of(PlanDetail detail, SubPlan source) {
            Set<Operator> sources = new LinkedHashSet<>();
            for (SubPlan.Input input : source.getInputs()) {
                PlanMarker marker = PlanMarkers.get(input.getOperator());
                if (marker != PlanMarker.BROADCAST) {
                    sources.add(detail.getSource(input.getOperator()));
                }
            }
            assert sources.isEmpty() == false;
            SubPlanGroup group = new SubPlanGroup(sources);
            group.elements.add(source);
            return group;
        }
    }

    /**
     * Operator equivalence that unifies sub-plan inputs/outputs more aggressively
     * than the default: ports become interchangeable when their marker kind and
     * edge characteristics (data type, group, aggregation) match.
     */
    private static class CustomEquivalence implements OperatorEquivalence {

        CustomEquivalence() {
            return;
        }

        @Override
        public Object extract(SubPlan owner, Operator operator) {
            // dispatch on whether the operator is one of the owner's ports
            SubPlan.Input input = owner.findInput(operator);
            if (input != null) {
                return extract(input);
            }
            SubPlan.Output output = owner.findOutput(operator);
            if (output != null) {
                return extract(output);
            }
            return PlanAssembler.DEFAULT_EQUIVALENCE.extract(owner, operator);
        }

        // Equivalence key for a sub-plan input: data type for checkpoints,
        // EdgeInfo for broadcasts, otherwise the default key.
        private static Object extract(SubPlan.Input port) {
            MarkerOperator operator = port.getOperator();
            PlanMarker marker = PlanMarkers.get(operator);
            assert marker != null;
            switch (marker) {
            case CHECKPOINT:
                return operator.getDataType();
            case BROADCAST:
                assert operator.getAttribute(EdgeInfo.class) != null;
                return operator.getAttribute(EdgeInfo.class);
            default:
                return PlanAssembler.DEFAULT_EQUIVALENCE.extract(port.getOwner(), operator);
            }
        }

        // Equivalence key for a sub-plan output: for checkpoints, prefer the
        // opposite external output's contents (so outputs feeding the same
        // external output unify); otherwise data type / EdgeInfo as for inputs.
        private static Object extract(SubPlan.Output port) {
            MarkerOperator operator = port.getOperator();
            PlanMarker marker = PlanMarkers.get(operator);
            assert marker != null;
            switch (marker) {
            case CHECKPOINT:
                return findOppositeExternalOutput(port)
                        .orElseGet(operator::getDataType);
            case BROADCAST:
                return operator.getDataType();
            case GATHER:
                assert operator.getAttribute(EdgeInfo.class) != null;
                return operator.getAttribute(EdgeInfo.class);
            default:
                return PlanAssembler.DEFAULT_EQUIVALENCE.extract(port.getOwner(), operator);
            }
        }

        // Returns the contents description of the single external output fed by
        // this port, or empty when there is no consumer, more than one consumer,
        // a non-output consumer, or the output is not truly external.
        private static Optional<Object> findOppositeExternalOutput(SubPlan.Output port) {
            ExternalOutput found = null;
            for (SubPlan.Input opposite : port.getOpposites()) {
                for (OperatorInput input : opposite.getOperator().getOutput().getOpposites()) {
                    if (found == null && input.getOwner().getOperatorKind() == OperatorKind.OUTPUT) {
                        found = (ExternalOutput) input.getOwner();
                    } else {
                        return Optionals.empty();
                    }
                }
            }
            return Optionals.of(found)
                    .filter(ExternalOutput::isExternal)
                    .map(ExternalOutput::getInfo)
                    .map(ExternalOutputInfo::getContents)
                    .filter(it -> it != null)
                    .map(Function.identity());
        }
    }

    /**
     * Value object describing an exchange edge: the data type, the partitioning
     * group, and (for aggregating gathers) the aggregator operator's original
     * serial number. Used as an equivalence key, hence equals/hashCode.
     */
    private static class EdgeInfo {

        private final TypeDescription type;

        // partitioning/grouping key; may be null
        private final Group partition;

        // original serial number of the aggregating operator, or null when the
        // edge does not pre-aggregate
        private final Long aggregation;

        EdgeInfo(TypeDescription type, Group partition, Operator aggregation) {
            assert type != null;
            this.type = type;
            this.partition = partition;
            this.aggregation = aggregation == null ? null : aggregation.getOriginalSerialNumber();
        }

        @Override
        public int hashCode() {
            final int prime = 31;
            int result = 1;
            result = prime * result + Objects.hashCode(type);
            result = prime * result + Objects.hashCode(partition);
            result = prime * result + Objects.hashCode(aggregation);
            return result;
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj) {
                return true;
            }
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            EdgeInfo other = (EdgeInfo) obj;
            if (!Objects.equals(type, other.type)) {
                return false;
            }
            if (!Objects.equals(partition, other.partition)) {
                return false;
            }
            if (!Objects.equals(aggregation, other.aggregation)) {
                return false;
            }
            return true;
        }

        @Override
        public String toString() {
            return MessageFormat.format(
                    "EdgeInfo(group={0}, aggregation={1})", //$NON-NLS-1$
                    partition,
                    aggregation != null);
        }
    }
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v9/services/keyword_plan_service.proto
package com.google.ads.googleads.v9.services;
/**
* <pre>
* Forecast metrics.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v9.services.ForecastMetrics}
*/
public final class ForecastMetrics extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v9.services.ForecastMetrics)
ForecastMetricsOrBuilder {
private static final long serialVersionUID = 0L;
// Use ForecastMetrics.newBuilder() to construct.
private ForecastMetrics(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ForecastMetrics() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new ForecastMetrics();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private ForecastMetrics(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 57: {
bitField0_ |= 0x00000001;
impressions_ = input.readDouble();
break;
}
case 65: {
bitField0_ |= 0x00000002;
ctr_ = input.readDouble();
break;
}
case 72: {
bitField0_ |= 0x00000004;
averageCpc_ = input.readInt64();
break;
}
case 81: {
bitField0_ |= 0x00000008;
clicks_ = input.readDouble();
break;
}
case 88: {
bitField0_ |= 0x00000010;
costMicros_ = input.readInt64();
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
  // Descriptor accessors wired to the containing proto file's descriptor tables.
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v9.services.KeywordPlanServiceProto.internal_static_google_ads_googleads_v9_services_ForecastMetrics_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v9.services.KeywordPlanServiceProto.internal_static_google_ads_googleads_v9_services_ForecastMetrics_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v9.services.ForecastMetrics.class, com.google.ads.googleads.v9.services.ForecastMetrics.Builder.class);
  }
  // Presence bits for the optional scalar fields:
  // 0x1 impressions, 0x2 ctr, 0x4 averageCpc, 0x8 clicks, 0x10 costMicros.
  private int bitField0_;
  public static final int IMPRESSIONS_FIELD_NUMBER = 7;
  private double impressions_;
  /**
   * <pre>
   * Impressions
   * </pre>
   *
   * <code>optional double impressions = 7;</code>
   * @return Whether the impressions field is set.
   */
  @java.lang.Override
  public boolean hasImpressions() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   * <pre>
   * Impressions
   * </pre>
   *
   * <code>optional double impressions = 7;</code>
   * @return The impressions.
   */
  @java.lang.Override
  public double getImpressions() {
    return impressions_;
  }
  public static final int CTR_FIELD_NUMBER = 8;
  private double ctr_;
  /**
   * <pre>
   * Ctr
   * </pre>
   *
   * <code>optional double ctr = 8;</code>
   * @return Whether the ctr field is set.
   */
  @java.lang.Override
  public boolean hasCtr() {
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   * <pre>
   * Ctr
   * </pre>
   *
   * <code>optional double ctr = 8;</code>
   * @return The ctr.
   */
  @java.lang.Override
  public double getCtr() {
    return ctr_;
  }
  public static final int AVERAGE_CPC_FIELD_NUMBER = 9;
  private long averageCpc_;
  /**
   * <pre>
   * AVG cpc
   * </pre>
   *
   * <code>optional int64 average_cpc = 9;</code>
   * @return Whether the averageCpc field is set.
   */
  @java.lang.Override
  public boolean hasAverageCpc() {
    return ((bitField0_ & 0x00000004) != 0);
  }
  /**
   * <pre>
   * AVG cpc
   * </pre>
   *
   * <code>optional int64 average_cpc = 9;</code>
   * @return The averageCpc.
   */
  @java.lang.Override
  public long getAverageCpc() {
    return averageCpc_;
  }
  public static final int CLICKS_FIELD_NUMBER = 10;
  private double clicks_;
  /**
   * <pre>
   * Clicks
   * </pre>
   *
   * <code>optional double clicks = 10;</code>
   * @return Whether the clicks field is set.
   */
  @java.lang.Override
  public boolean hasClicks() {
    return ((bitField0_ & 0x00000008) != 0);
  }
  /**
   * <pre>
   * Clicks
   * </pre>
   *
   * <code>optional double clicks = 10;</code>
   * @return The clicks.
   */
  @java.lang.Override
  public double getClicks() {
    return clicks_;
  }
  public static final int COST_MICROS_FIELD_NUMBER = 11;
  private long costMicros_;
  /**
   * <pre>
   * Cost
   * </pre>
   *
   * <code>optional int64 cost_micros = 11;</code>
   * @return Whether the costMicros field is set.
   */
  @java.lang.Override
  public boolean hasCostMicros() {
    return ((bitField0_ & 0x00000010) != 0);
  }
  /**
   * <pre>
   * Cost
   * </pre>
   *
   * <code>optional int64 cost_micros = 11;</code>
   * @return The costMicros.
   */
  @java.lang.Override
  public long getCostMicros() {
    return costMicros_;
  }
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    // Emit only fields whose presence bit is set, in ascending field-number order,
    // followed by any preserved unknown fields.
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeDouble(7, impressions_);
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeDouble(8, ctr_);
    }
    if (((bitField0_ & 0x00000004) != 0)) {
      output.writeInt64(9, averageCpc_);
    }
    if (((bitField0_ & 0x00000008) != 0)) {
      output.writeDouble(10, clicks_);
    }
    if (((bitField0_ & 0x00000010) != 0)) {
      output.writeInt64(11, costMicros_);
    }
    unknownFields.writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Computed lazily and cached in memoizedSize (-1 means not yet computed).
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream
        .computeDoubleSize(7, impressions_);
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream
        .computeDoubleSize(8, ctr_);
    }
    if (((bitField0_ & 0x00000004) != 0)) {
      size += com.google.protobuf.CodedOutputStream
        .computeInt64Size(9, averageCpc_);
    }
    if (((bitField0_ & 0x00000008) != 0)) {
      size += com.google.protobuf.CodedOutputStream
        .computeDoubleSize(10, clicks_);
    }
    if (((bitField0_ & 0x00000010) != 0)) {
      size += com.google.protobuf.CodedOutputStream
        .computeInt64Size(11, costMicros_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    // Two messages are equal when presence flags, field values (doubles compared
    // via their raw bit patterns) and unknown fields all match.
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v9.services.ForecastMetrics)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v9.services.ForecastMetrics other = (com.google.ads.googleads.v9.services.ForecastMetrics) obj;

    if (hasImpressions() != other.hasImpressions()) return false;
    if (hasImpressions()) {
      if (java.lang.Double.doubleToLongBits(getImpressions())
          != java.lang.Double.doubleToLongBits(
              other.getImpressions())) return false;
    }
    if (hasCtr() != other.hasCtr()) return false;
    if (hasCtr()) {
      if (java.lang.Double.doubleToLongBits(getCtr())
          != java.lang.Double.doubleToLongBits(
              other.getCtr())) return false;
    }
    if (hasAverageCpc() != other.hasAverageCpc()) return false;
    if (hasAverageCpc()) {
      if (getAverageCpc()
          != other.getAverageCpc()) return false;
    }
    if (hasClicks() != other.hasClicks()) return false;
    if (hasClicks()) {
      if (java.lang.Double.doubleToLongBits(getClicks())
          != java.lang.Double.doubleToLongBits(
              other.getClicks())) return false;
    }
    if (hasCostMicros() != other.hasCostMicros()) return false;
    if (hasCostMicros()) {
      if (getCostMicros()
          != other.getCostMicros()) return false;
    }
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Lazily computed and cached; only fields whose presence bit is set contribute,
    // so the hash stays consistent with equals().
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasImpressions()) {
      hash = (37 * hash) + IMPRESSIONS_FIELD_NUMBER;
      hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
          java.lang.Double.doubleToLongBits(getImpressions()));
    }
    if (hasCtr()) {
      hash = (37 * hash) + CTR_FIELD_NUMBER;
      hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
          java.lang.Double.doubleToLongBits(getCtr()));
    }
    if (hasAverageCpc()) {
      hash = (37 * hash) + AVERAGE_CPC_FIELD_NUMBER;
      hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
          getAverageCpc());
    }
    if (hasClicks()) {
      hash = (37 * hash) + CLICKS_FIELD_NUMBER;
      hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
          java.lang.Double.doubleToLongBits(getClicks()));
    }
    if (hasCostMicros()) {
      hash = (37 * hash) + COST_MICROS_FIELD_NUMBER;
      hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
          getCostMicros());
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard protobuf parse entry points for every supported input representation;
  // all delegate to the shared PARSER instance.
  public static com.google.ads.googleads.v9.services.ForecastMetrics parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v9.services.ForecastMetrics parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v9.services.ForecastMetrics parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v9.services.ForecastMetrics parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v9.services.ForecastMetrics parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v9.services.ForecastMetrics parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v9.services.ForecastMetrics parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v9.services.ForecastMetrics parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v9.services.ForecastMetrics parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v9.services.ForecastMetrics parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v9.services.ForecastMetrics parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v9.services.ForecastMetrics parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  // Builder factory methods.
  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Returns a builder pre-populated with the given prototype's field values.
  public static Builder newBuilder(com.google.ads.googleads.v9.services.ForecastMetrics prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields a fresh empty builder; anything else is copied.
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
* <pre>
* Forecast metrics.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v9.services.ForecastMetrics}
*/
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v9.services.ForecastMetrics)
      com.google.ads.googleads.v9.services.ForecastMetricsOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v9.services.KeywordPlanServiceProto.internal_static_google_ads_googleads_v9_services_ForecastMetrics_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v9.services.KeywordPlanServiceProto.internal_static_google_ads_googleads_v9_services_ForecastMetrics_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v9.services.ForecastMetrics.class, com.google.ads.googleads.v9.services.ForecastMetrics.Builder.class);
    }

    // Construct using com.google.ads.googleads.v9.services.ForecastMetrics.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
      }
    }
    // Resets every field to its default value and clears all presence bits.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      impressions_ = 0D;
      bitField0_ = (bitField0_ & ~0x00000001);
      ctr_ = 0D;
      bitField0_ = (bitField0_ & ~0x00000002);
      averageCpc_ = 0L;
      bitField0_ = (bitField0_ & ~0x00000004);
      clicks_ = 0D;
      bitField0_ = (bitField0_ & ~0x00000008);
      costMicros_ = 0L;
      bitField0_ = (bitField0_ & ~0x00000010);
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v9.services.KeywordPlanServiceProto.internal_static_google_ads_googleads_v9_services_ForecastMetrics_descriptor;
    }

    @java.lang.Override
    public com.google.ads.googleads.v9.services.ForecastMetrics getDefaultInstanceForType() {
      return com.google.ads.googleads.v9.services.ForecastMetrics.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.ads.googleads.v9.services.ForecastMetrics build() {
      com.google.ads.googleads.v9.services.ForecastMetrics result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    // Copies set field values (and their presence bits) into a new message instance.
    @java.lang.Override
    public com.google.ads.googleads.v9.services.ForecastMetrics buildPartial() {
      com.google.ads.googleads.v9.services.ForecastMetrics result = new com.google.ads.googleads.v9.services.ForecastMetrics(this);
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.impressions_ = impressions_;
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.ctr_ = ctr_;
        to_bitField0_ |= 0x00000002;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.averageCpc_ = averageCpc_;
        to_bitField0_ |= 0x00000004;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.clicks_ = clicks_;
        to_bitField0_ |= 0x00000008;
      }
      if (((from_bitField0_ & 0x00000010) != 0)) {
        result.costMicros_ = costMicros_;
        to_bitField0_ |= 0x00000010;
      }
      result.bitField0_ = to_bitField0_;
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v9.services.ForecastMetrics) {
        return mergeFrom((com.google.ads.googleads.v9.services.ForecastMetrics)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Field-wise merge: only fields set on 'other' overwrite this builder's values.
    public Builder mergeFrom(com.google.ads.googleads.v9.services.ForecastMetrics other) {
      if (other == com.google.ads.googleads.v9.services.ForecastMetrics.getDefaultInstance()) return this;
      if (other.hasImpressions()) {
        setImpressions(other.getImpressions());
      }
      if (other.hasCtr()) {
        setCtr(other.getCtr());
      }
      if (other.hasAverageCpc()) {
        setAverageCpc(other.getAverageCpc());
      }
      if (other.hasClicks()) {
        setClicks(other.getClicks());
      }
      if (other.hasCostMicros()) {
        setCostMicros(other.getCostMicros());
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Parses from a stream and merges the result; on parse failure the partially
    // parsed message is still merged before the exception propagates.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.ads.googleads.v9.services.ForecastMetrics parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (com.google.ads.googleads.v9.services.ForecastMetrics) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    // Presence bits, same layout as the message class:
    // 0x1 impressions, 0x2 ctr, 0x4 averageCpc, 0x8 clicks, 0x10 costMicros.
    private int bitField0_;

    private double impressions_ ;
    /**
     * <pre>
     * Impressions
     * </pre>
     *
     * <code>optional double impressions = 7;</code>
     * @return Whether the impressions field is set.
     */
    @java.lang.Override
    public boolean hasImpressions() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <pre>
     * Impressions
     * </pre>
     *
     * <code>optional double impressions = 7;</code>
     * @return The impressions.
     */
    @java.lang.Override
    public double getImpressions() {
      return impressions_;
    }
    /**
     * <pre>
     * Impressions
     * </pre>
     *
     * <code>optional double impressions = 7;</code>
     * @param value The impressions to set.
     * @return This builder for chaining.
     */
    public Builder setImpressions(double value) {
      bitField0_ |= 0x00000001;
      impressions_ = value;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Impressions
     * </pre>
     *
     * <code>optional double impressions = 7;</code>
     * @return This builder for chaining.
     */
    public Builder clearImpressions() {
      bitField0_ = (bitField0_ & ~0x00000001);
      impressions_ = 0D;
      onChanged();
      return this;
    }

    private double ctr_ ;
    /**
     * <pre>
     * Ctr
     * </pre>
     *
     * <code>optional double ctr = 8;</code>
     * @return Whether the ctr field is set.
     */
    @java.lang.Override
    public boolean hasCtr() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <pre>
     * Ctr
     * </pre>
     *
     * <code>optional double ctr = 8;</code>
     * @return The ctr.
     */
    @java.lang.Override
    public double getCtr() {
      return ctr_;
    }
    /**
     * <pre>
     * Ctr
     * </pre>
     *
     * <code>optional double ctr = 8;</code>
     * @param value The ctr to set.
     * @return This builder for chaining.
     */
    public Builder setCtr(double value) {
      bitField0_ |= 0x00000002;
      ctr_ = value;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Ctr
     * </pre>
     *
     * <code>optional double ctr = 8;</code>
     * @return This builder for chaining.
     */
    public Builder clearCtr() {
      bitField0_ = (bitField0_ & ~0x00000002);
      ctr_ = 0D;
      onChanged();
      return this;
    }

    private long averageCpc_ ;
    /**
     * <pre>
     * AVG cpc
     * </pre>
     *
     * <code>optional int64 average_cpc = 9;</code>
     * @return Whether the averageCpc field is set.
     */
    @java.lang.Override
    public boolean hasAverageCpc() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <pre>
     * AVG cpc
     * </pre>
     *
     * <code>optional int64 average_cpc = 9;</code>
     * @return The averageCpc.
     */
    @java.lang.Override
    public long getAverageCpc() {
      return averageCpc_;
    }
    /**
     * <pre>
     * AVG cpc
     * </pre>
     *
     * <code>optional int64 average_cpc = 9;</code>
     * @param value The averageCpc to set.
     * @return This builder for chaining.
     */
    public Builder setAverageCpc(long value) {
      bitField0_ |= 0x00000004;
      averageCpc_ = value;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * AVG cpc
     * </pre>
     *
     * <code>optional int64 average_cpc = 9;</code>
     * @return This builder for chaining.
     */
    public Builder clearAverageCpc() {
      bitField0_ = (bitField0_ & ~0x00000004);
      averageCpc_ = 0L;
      onChanged();
      return this;
    }

    private double clicks_ ;
    /**
     * <pre>
     * Clicks
     * </pre>
     *
     * <code>optional double clicks = 10;</code>
     * @return Whether the clicks field is set.
     */
    @java.lang.Override
    public boolean hasClicks() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * <pre>
     * Clicks
     * </pre>
     *
     * <code>optional double clicks = 10;</code>
     * @return The clicks.
     */
    @java.lang.Override
    public double getClicks() {
      return clicks_;
    }
    /**
     * <pre>
     * Clicks
     * </pre>
     *
     * <code>optional double clicks = 10;</code>
     * @param value The clicks to set.
     * @return This builder for chaining.
     */
    public Builder setClicks(double value) {
      bitField0_ |= 0x00000008;
      clicks_ = value;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Clicks
     * </pre>
     *
     * <code>optional double clicks = 10;</code>
     * @return This builder for chaining.
     */
    public Builder clearClicks() {
      bitField0_ = (bitField0_ & ~0x00000008);
      clicks_ = 0D;
      onChanged();
      return this;
    }

    private long costMicros_ ;
    /**
     * <pre>
     * Cost
     * </pre>
     *
     * <code>optional int64 cost_micros = 11;</code>
     * @return Whether the costMicros field is set.
     */
    @java.lang.Override
    public boolean hasCostMicros() {
      return ((bitField0_ & 0x00000010) != 0);
    }
    /**
     * <pre>
     * Cost
     * </pre>
     *
     * <code>optional int64 cost_micros = 11;</code>
     * @return The costMicros.
     */
    @java.lang.Override
    public long getCostMicros() {
      return costMicros_;
    }
    /**
     * <pre>
     * Cost
     * </pre>
     *
     * <code>optional int64 cost_micros = 11;</code>
     * @param value The costMicros to set.
     * @return This builder for chaining.
     */
    public Builder setCostMicros(long value) {
      bitField0_ |= 0x00000010;
      costMicros_ = value;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Cost
     * </pre>
     *
     * <code>optional int64 cost_micros = 11;</code>
     * @return This builder for chaining.
     */
    public Builder clearCostMicros() {
      bitField0_ = (bitField0_ & ~0x00000010);
      costMicros_ = 0L;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }


    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v9.services.ForecastMetrics)
  }
// @@protoc_insertion_point(class_scope:google.ads.googleads.v9.services.ForecastMetrics)
  // Singleton default instance; shared by getDefaultInstance()/getDefaultInstanceForType().
  private static final com.google.ads.googleads.v9.services.ForecastMetrics DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v9.services.ForecastMetrics();
  }

  public static com.google.ads.googleads.v9.services.ForecastMetrics getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Shared parser backing all parseFrom(...) overloads.
  private static final com.google.protobuf.Parser<ForecastMetrics>
      PARSER = new com.google.protobuf.AbstractParser<ForecastMetrics>() {
    @java.lang.Override
    public ForecastMetrics parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new ForecastMetrics(input, extensionRegistry);
    }
  };

  public static com.google.protobuf.Parser<ForecastMetrics> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ForecastMetrics> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.ads.googleads.v9.services.ForecastMetrics getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.cluster;
import java.io.Externalizable;
import java.io.File;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.io.ObjectStreamException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteCluster;
import org.apache.ignite.IgniteException;
import org.apache.ignite.cluster.ClusterGroup;
import org.apache.ignite.cluster.ClusterGroupEmptyException;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.cluster.ClusterStartNodeResult;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.internal.GridKernalContext;
import org.apache.ignite.internal.IgniteComponentType;
import org.apache.ignite.internal.IgniteInternalFuture;
import org.apache.ignite.internal.util.future.GridCompoundFuture;
import org.apache.ignite.internal.util.future.GridFinishedFuture;
import org.apache.ignite.internal.util.future.IgniteFutureImpl;
import org.apache.ignite.internal.util.nodestart.IgniteRemoteStartSpecification;
import org.apache.ignite.internal.util.nodestart.IgniteSshHelper;
import org.apache.ignite.internal.util.nodestart.StartNodeCallable;
import org.apache.ignite.internal.util.tostring.GridToStringExclude;
import org.apache.ignite.internal.util.typedef.CI1;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.internal.A;
import org.apache.ignite.internal.util.typedef.internal.CU;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteBiTuple;
import org.apache.ignite.lang.IgniteFuture;
import org.apache.ignite.lang.IgnitePredicate;
import org.jetbrains.annotations.Nullable;
import static org.apache.ignite.internal.IgniteNodeAttributes.ATTR_IPS;
import static org.apache.ignite.internal.IgniteNodeAttributes.ATTR_MACS;
import static org.apache.ignite.internal.util.nodestart.IgniteNodeStartUtils.parseFile;
import static org.apache.ignite.internal.util.nodestart.IgniteNodeStartUtils.specifications;
/**
*
*/
public class IgniteClusterImpl extends ClusterGroupAdapter implements IgniteClusterEx, Externalizable {
    /** Serialization version. */
    private static final long serialVersionUID = 0L;
    /** Ignite configuration this cluster was started with. */
    private IgniteConfiguration cfg;
    /** Node local store. */
    @GridToStringExclude
    private ConcurrentMap nodeLoc;
    /** Client reconnect future. */
    // NOTE(review): field name looks like a typo for "reconnectFut"; it may be
    // referenced elsewhere in this file, so it is intentionally left unchanged.
    private IgniteFuture<?> reconnecFut;
    /**
     * No-arg constructor required by {@link Externalizable}; leaves the instance uninitialized.
     */
    public IgniteClusterImpl() {
        // No-op.
    }
    /**
     * Creates a cluster facade over the given kernal context with no node predicate
     * (i.e. spanning all nodes).
     *
     * @param ctx Kernal context.
     */
    public IgniteClusterImpl(GridKernalContext ctx) {
        super(ctx, null, (IgnitePredicate<ClusterNode>)null);
        cfg = ctx.config();
        nodeLoc = new ClusterNodeLocalMapImpl(ctx);
    }
/** {@inheritDoc} */
@Override public ClusterGroup forLocal() {
guard();
try {
return new ClusterGroupAdapter(ctx, null, Collections.singleton(cfg.getNodeId()));
}
finally {
unguard();
}
}
/** {@inheritDoc} */
@Override public ClusterNode localNode() {
guard();
try {
ClusterNode node = ctx.discovery().localNode();
assert node != null;
return node;
}
finally {
unguard();
}
}
    /** {@inheritDoc} */
    @SuppressWarnings("unchecked")
    @Override public <K, V> ConcurrentMap<K, V> nodeLocalMap() {
        // Backing field is a raw ConcurrentMap, hence the unchecked conversion to <K, V>.
        return nodeLoc;
    }
/** {@inheritDoc} */
@Override public boolean pingNode(UUID nodeId) {
A.notNull(nodeId, "nodeId");
guard();
try {
return ctx.discovery().pingNode(nodeId);
}
catch (IgniteCheckedException e) {
throw U.convertException(e);
}
finally {
unguard();
}
}
    /** {@inheritDoc} */
    @Override public long topologyVersion() {
        guard();
        try {
            // Current discovery topology version.
            return ctx.discovery().topologyVersion();
        }
        finally {
            unguard();
        }
    }
    /** {@inheritDoc} */
    @Override public Collection<ClusterNode> topology(long topVer) throws UnsupportedOperationException {
        guard();
        try {
            // Snapshot of nodes as of the given historical topology version.
            return ctx.discovery().topology(topVer);
        }
        finally {
            unguard();
        }
    }
/** {@inheritDoc} */
@Override public <K> Map<ClusterNode, Collection<K>> mapKeysToNodes(@Nullable String cacheName,
@Nullable Collection<? extends K> keys)
throws IgniteException
{
if (F.isEmpty(keys))
return Collections.emptyMap();
guard();
try {
return ctx.affinity().mapKeysToNodes(cacheName, keys);
}
catch (IgniteCheckedException e) {
throw U.convertException(e);
}
finally {
unguard();
}
}
    /** {@inheritDoc} */
    @Override public <K> ClusterNode mapKeyToNode(@Nullable String cacheName, K key) throws IgniteException {
        A.notNull(key, "key");
        guard();
        try {
            // Resolve the primary node for the key via cache affinity.
            return ctx.affinity().mapKeyToNode(cacheName, key);
        }
        catch (IgniteCheckedException e) {
            throw U.convertException(e);
        }
        finally {
            unguard();
        }
    }
    /** {@inheritDoc} */
    @Override public Collection<ClusterStartNodeResult> startNodes(File file,
        boolean restart,
        int timeout,
        int maxConn)
        throws IgniteException
    {
        // Synchronous variant: blocks until the async start operation completes.
        try {
            return startNodesAsync0(file, restart, timeout, maxConn).get();
        }
        catch (IgniteCheckedException e) {
            throw U.convertException(e);
        }
    }
    /** {@inheritDoc} */
    @Override public IgniteFuture<Collection<ClusterStartNodeResult>> startNodesAsync(File file, boolean restart,
        int timeout, int maxConn) throws IgniteException {
        // Wrap the internal future into the public API future type.
        return new IgniteFutureImpl<>(startNodesAsync0(file, restart, timeout, maxConn));
    }
    /** {@inheritDoc} */
    @Override public Collection<ClusterStartNodeResult> startNodes(Collection<Map<String, Object>> hosts,
        @Nullable Map<String, Object> dflts,
        boolean restart,
        int timeout,
        int maxConn)
        throws IgniteException
    {
        // Synchronous variant: blocks until the async start operation completes.
        try {
            return startNodesAsync0(hosts, dflts, restart, timeout, maxConn).get();
        }
        catch (IgniteCheckedException e) {
            throw U.convertException(e);
        }
    }
    /** {@inheritDoc} */
    @Override public IgniteFuture<Collection<ClusterStartNodeResult>> startNodesAsync(
        Collection<Map<String, Object>> hosts, @Nullable Map<String, Object> dflts,
        boolean restart, int timeout, int maxConn) throws IgniteException {
        // Wrap the internal future into the public API future type.
        return new IgniteFutureImpl<>(startNodesAsync0(hosts, dflts, restart, timeout, maxConn));
    }
/** {@inheritDoc} */
@Override public void stopNodes() throws IgniteException {
guard();
try {
compute().execute(IgniteKillTask.class, false);
}
finally {
unguard();
}
}
/** {@inheritDoc} */
@Override public void stopNodes(Collection<UUID> ids) throws IgniteException {
guard();
try {
ctx.grid().compute(forNodeIds(ids)).execute(IgniteKillTask.class, false);
}
finally {
unguard();
}
}
    /** {@inheritDoc} */
    @Override public void restartNodes() throws IgniteException {
        guard();
        try {
            // Broadcast the kill task cluster-wide; 'true' requests restart after stop.
            compute().execute(IgniteKillTask.class, true);
        }
        finally {
            unguard();
        }
    }
/** {@inheritDoc} */
@Override public void restartNodes(Collection<UUID> ids) throws IgniteException {
guard();
try {
ctx.grid().compute(forNodeIds(ids)).execute(IgniteKillTask.class, true);
}
finally {
unguard();
}
}
    /** {@inheritDoc} */
    @Override public void resetMetrics() {
        guard();
        try {
            // Reset job, communication I/O and task metrics on the local node.
            ctx.jobMetric().reset();
            ctx.io().resetMetrics();
            ctx.task().resetMetrics();
        }
        finally {
            unguard();
        }
    }
    /** {@inheritDoc} */
    @Override public IgniteCluster withAsync() {
        // Asynchronous facade over this (synchronous) cluster instance.
        return new IgniteClusterAsyncImpl(this);
    }
    /** {@inheritDoc} */
    @Override public boolean isAsync() {
        // This implementation is always the synchronous variant.
        return false;
    }
    /** {@inheritDoc} */
    @Override public <R> IgniteFuture<R> future() {
        // Only valid on the async facade returned by withAsync().
        throw new IllegalStateException("Asynchronous mode is not enabled.");
    }
/**
* @param file Configuration file.
* @param restart Whether to stop existing nodes.
* @param timeout Connection timeout.
* @param maxConn Number of parallel SSH connections to one host.
* @return Future with results.
* @see IgniteCluster#startNodes(java.io.File, boolean, int, int)
*/
IgniteInternalFuture<Collection<ClusterStartNodeResult>> startNodesAsync0(File file,
boolean restart,
int timeout,
int maxConn)
{
A.notNull(file, "file");
A.ensure(file.exists(), "file doesn't exist.");
A.ensure(file.isFile(), "file is a directory.");
try {
IgniteBiTuple<Collection<Map<String, Object>>, Map<String, Object>> t = parseFile(file);
return startNodesAsync0(t.get1(), t.get2(), restart, timeout, maxConn);
}
catch (IgniteCheckedException e) {
return new GridFinishedFuture<>(e);
}
}
/**
 * Starts (and optionally restarts) remote nodes over SSH, limiting concurrent
 * connections per host to {@code maxConn}.
 *
 * @param hosts Startup parameters.
 * @param dflts Default values.
 * @param restart Whether to stop existing nodes.
 * @param timeout Connection timeout in milliseconds.
 * @param maxConn Number of parallel SSH connections to one host.
 * @return Future with results.
 * @see IgniteCluster#startNodes(java.util.Collection, java.util.Map, boolean, int, int)
 */
IgniteInternalFuture<Collection<ClusterStartNodeResult>> startNodesAsync0(
    Collection<Map<String, Object>> hosts,
    @Nullable Map<String, Object> dflts,
    boolean restart,
    int timeout,
    int maxConn)
{
    A.notNull(hosts, "hosts");
    guard();
    try {
        IgniteSshHelper sshHelper = IgniteComponentType.SSH.create(false);
        Map<String, Collection<IgniteRemoteStartSpecification>> specsMap = specifications(hosts, dflts);
        // Per-host queues of node-start callables, drained at most maxConn at a time.
        Map<String, ConcurrentLinkedQueue<StartNodeCallable>> runMap = new HashMap<>();
        int nodeCallCnt = 0;
        for (String host : specsMap.keySet()) {
            InetAddress addr;
            try {
                addr = InetAddress.getByName(host);
            }
            catch (UnknownHostException e) {
                throw new IgniteCheckedException("Invalid host name: " + host, e);
            }
            // Find grid nodes already running on the target host (loopback -> this machine,
            // otherwise match by IP against known node neighborhoods).
            Collection<? extends ClusterNode> neighbors = null;
            if (addr.isLoopbackAddress())
                neighbors = neighbors();
            else {
                for (Collection<ClusterNode> p : U.neighborhood(nodes()).values()) {
                    ClusterNode node = F.first(p);
                    if (node.<String>attribute(ATTR_IPS).contains(addr.getHostAddress())) {
                        neighbors = p;
                        break;
                    }
                }
            }
            int startIdx = 1;
            if (neighbors != null) {
                if (restart && !neighbors.isEmpty()) {
                    // Kill existing nodes on the host before starting fresh ones.
                    try {
                        ctx.grid().compute(forNodes(neighbors)).execute(IgniteKillTask.class, false);
                    }
                    catch (ClusterGroupEmptyException ignored) {
                        // No-op, nothing to restart.
                    }
                }
                else
                    // Keep existing nodes and number the new ones after them.
                    startIdx = neighbors.size() + 1;
            }
            ConcurrentLinkedQueue<StartNodeCallable> nodeRuns = new ConcurrentLinkedQueue<>();
            runMap.put(host, nodeRuns);
            for (IgniteRemoteStartSpecification spec : specsMap.get(host)) {
                assert spec.host().equals(host);
                for (int i = startIdx; i <= spec.nodes(); i++) {
                    nodeRuns.add(sshHelper.nodeStartCallable(spec, timeout));
                    nodeCallCnt++;
                }
            }
        }
        // If there is nothing to start, return finished future with empty result.
        if (nodeCallCnt == 0)
            return new GridFinishedFuture<Collection<ClusterStartNodeResult>>(
                Collections.<ClusterStartNodeResult>emptyList());
        // Exceeding max line width for readability.
        GridCompoundFuture<ClusterStartNodeResult, Collection<ClusterStartNodeResult>> fut =
            new GridCompoundFuture<>(CU.<ClusterStartNodeResult>objectsReducer());
        AtomicInteger cnt = new AtomicInteger(nodeCallCnt);
        // Limit maximum simultaneous connection number per host; each completed callable
        // chains the next one via runNextNodeCallable's listener.
        for (ConcurrentLinkedQueue<StartNodeCallable> queue : runMap.values()) {
            for (int i = 0; i < maxConn; i++) {
                if (!runNextNodeCallable(queue, fut, cnt))
                    break;
            }
        }
        return fut;
    }
    catch (IgniteCheckedException e) {
        return new GridFinishedFuture<>(e);
    }
    finally {
        unguard();
    }
}
/**
 * Collects all remote grid nodes that reside on the same physical computer as
 * the local node. The local node itself is excluded.
 * <p>
 * Co-location is detected by comparing sets of network interface MACs:
 * if two nodes carry the same MAC set, Ignite considers them to be running
 * on the same physical computer.
 *
 * @return Grid nodes that reside on the same physical computer as local grid node.
 */
private Collection<ClusterNode> neighbors() {
    String localMacs = localNode().attribute(ATTR_MACS);
    assert localMacs != null;
    Collection<ClusterNode> res = new ArrayList<>(1);
    for (ClusterNode other : forOthers(localNode()).nodes()) {
        if (localMacs.equals(other.attribute(ATTR_MACS)))
            res.add(other);
    }
    return res;
}
/**
 * Runs next callable from host node start queue.
 *
 * @param queue Queue of tasks to poll from.
 * @param comp Compound future that comprises all started node tasks.
 * @param cnt Atomic counter used to detect when all futures have been added to the compound future.
 * @return {@code True} if task was started, {@code false} if queue was empty.
 */
private boolean runNextNodeCallable(final ConcurrentLinkedQueue<StartNodeCallable> queue,
    final GridCompoundFuture<ClusterStartNodeResult, Collection<ClusterStartNodeResult>>
        comp,
    final AtomicInteger cnt)
{
    StartNodeCallable call = queue.poll();
    if (call == null)
        return false;
    IgniteInternalFuture<ClusterStartNodeResult> fut = ctx.closure().callLocalSafe(call, true);
    comp.add(fut);
    // Mark the compound future initialized once the last pending callable has been registered.
    if (cnt.decrementAndGet() == 0)
        comp.markInitialized();
    // Chain the next callable for the same host when this one finishes, keeping the
    // per-host concurrency at the level established by the caller.
    fut.listen(new CI1<IgniteInternalFuture<ClusterStartNodeResult>>() {
        @Override public void apply(IgniteInternalFuture<ClusterStartNodeResult> f) {
            runNextNodeCallable(queue, comp, cnt);
        }
    });
    return true;
}
/**
 * Clears node local map (removes all entries from the node-local storage).
 */
public void clearNodeMap() {
    nodeLoc.clear();
}
/**
 * Stores the client reconnect future later returned by {@link #clientReconnectFuture()}.
 *
 * @param reconnecFut Reconnect future.
 */
public void clientReconnectFuture(IgniteFuture<?> reconnecFut) {
    this.reconnecFut = reconnecFut;
}
/** {@inheritDoc} */
@Nullable @Override public IgniteFuture<?> clientReconnectFuture() {
    return reconnecFut;
}
/** {@inheritDoc} */
@Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
    // Only the kernal context is (de)serialized; readResolve() swaps in the live cluster instance.
    ctx = (GridKernalContext)in.readObject();
}
/** {@inheritDoc} */
@Override public void writeExternal(ObjectOutput out) throws IOException {
    // Serialize only the kernal context; see readResolve() for reconstruction.
    out.writeObject(ctx);
}
/** {@inheritDoc} */
@Override protected Object readResolve() throws ObjectStreamException {
    // Replace the deserialized stub with the live cluster instance from the local kernal context.
    return ctx.grid().cluster();
}
/** {@inheritDoc} */
@Override public String toString() {
    return "IgniteCluster [igniteInstanceName=" + ctx.igniteInstanceName() + ']';
}
}
| |
package seedu.stask.model;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import edu.emory.mathcs.backport.java.util.Collections;
import javafx.collections.ObservableList;
import seedu.stask.model.tag.Tag;
import seedu.stask.model.tag.UniqueTagList;
import seedu.stask.model.task.ReadOnlyTask;
import seedu.stask.model.task.Task;
import seedu.stask.model.task.UniqueTaskList;
/**
 * Wraps all data at the task-book level.
 * Duplicates are not allowed (by .equals comparison).
 */
public class TaskBook implements ReadOnlyTaskBook {

    // Tasks that carry a date/time; exposed sorted by date.
    private final UniqueTaskList datedTasks = new UniqueTaskList();
    // Floating tasks without a date/time; exposed sorted by name.
    private final UniqueTaskList undatedTasks = new UniqueTaskList();
    // Master list of all tags used by tasks in this book.
    private final UniqueTagList tags = new UniqueTagList();

    /** The two categories of tasks stored in this book. */
    public enum TaskType { DATED, UNDATED }

    public TaskBook() {}

    /**
     * Tasks and tags are copied into this task book.
     */
    public TaskBook(ReadOnlyTaskBook toBeCopied) {
        this(toBeCopied.getUniqueDatedTaskList(), toBeCopied.getUniqueUndatedTaskList(), toBeCopied.getUniqueTagList());
    }

    /**
     * Tasks and tags are copied into this task book.
     */
    public TaskBook(UniqueTaskList datedTasks, UniqueTaskList undatedTasks, UniqueTagList tags) {
        resetData(datedTasks.getInternalList(), undatedTasks.getInternalList(), tags.getInternalList());
    }

    public static ReadOnlyTaskBook getEmptyAddressBook() {
        // Legacy name kept for backward compatibility with existing callers.
        return new TaskBook();
    }

    //// list overwrite operations

    public ObservableList<Task> getDatedTasks() {
        sortDatedTaskLists();
        return datedTasks.getInternalList();
    }

    public ObservableList<Task> getUndatedTasks() {
        sortUndatedTaskLists();
        return undatedTasks.getInternalList();
    }

    // Undated (floating) tasks are presented sorted by name.
    private void sortUndatedTaskLists() {
        undatedTasks.sort(Task.Comparators.NAME);
    }

    // Dated tasks are presented sorted by date.
    private void sortDatedTaskLists() {
        datedTasks.sort(Task.Comparators.DATE);
    }

    public void setDatedTasks(List<Task> datedTasks) {
        this.datedTasks.getInternalList().setAll(datedTasks);
    }

    public void setUndatedTasks(List<Task> undatedTasks) {
        this.undatedTasks.getInternalList().setAll(undatedTasks);
    }

    public void setTags(Collection<Tag> tags) {
        this.tags.getInternalList().setAll(tags);
    }

    /**
     * Replaces all data in this book; each incoming task is defensively copied.
     */
    public void resetData(Collection<? extends ReadOnlyTask> newTasks,
            Collection<? extends ReadOnlyTask> newUndatedTasks,
            Collection<Tag> newTags) {
        setDatedTasks(newTasks.stream().map(Task::new).collect(Collectors.toList()));
        setUndatedTasks(newUndatedTasks.stream().map(Task::new).collect(Collectors.toList()));
        setTags(newTags);
    }

    public void resetData(ReadOnlyTaskBook newData) {
        resetData(newData.getDatedTaskList(), newData.getUndatedTaskList(), newData.getTagList());
    }

    //// task-level operations

    /**
     * Adds a task to the task book.
     * Also checks the new task's tags and updates {@link #tags} with any new tags found,
     * and updates the Tag objects in the task to point to those in {@link #tags}.
     *
     * @return the value reported by the underlying list's add operation.
     */
    public int addTask(Task p) {
        syncTagsWithMasterList(p);
        if (checkIfDated(p)) {
            return datedTasks.add(p);
        } else {
            return undatedTasks.add(p);
        }
    }

    // A task is "dated" iff its datetime string is non-empty.
    private boolean checkIfDated(ReadOnlyTask d) {
        return !d.getDatetime().toString().isEmpty();
    }

    /**
     * Ensures that every tag in this task:
     *  - exists in the master list {@link #tags}
     *  - points to a Tag object in the master list
     */
    private void syncTagsWithMasterList(Task task) {
        final UniqueTagList taskTags = task.getTags();
        tags.mergeFrom(taskTags);
        // Create map with values = tag object references in the master list,
        // used for checking task tag references.
        final Map<Tag, Tag> masterTagObjects = new HashMap<>();
        tags.forEach(tag -> masterTagObjects.put(tag, tag));
        // Rebuild the list of task tags to point to the relevant tags in the master tag list.
        final Set<Tag> correctTagReferences = new HashSet<>();
        taskTags.forEach(tag -> correctTagReferences.add(masterTagObjects.get(tag)));
        task.setTags(new UniqueTagList(correctTagReferences));
    }

    /**
     * Removes the task from whichever internal list contains it.
     *
     * @throws UniqueTaskList.TaskNotFoundException if the task is in neither list.
     */
    public boolean removeTask(ReadOnlyTask key) throws UniqueTaskList.TaskNotFoundException {
        if (datedTasks.contains(key)) {
            datedTasks.remove(key);
            return true;
        } else if (undatedTasks.contains(key)) {
            undatedTasks.remove(key);
            return true;
        } else {
            throw new UniqueTaskList.TaskNotFoundException();
        }
    }

    /**
     * Marks the task as completed in whichever internal list contains it.
     *
     * @throws UniqueTaskList.TaskNotFoundException if the task is in neither list.
     */
    public boolean completeTask(ReadOnlyTask key) throws UniqueTaskList.TaskNotFoundException {
        if (datedTasks.contains(key)) {
            datedTasks.complete(key);
            return true;
        } else if (undatedTasks.contains(key)) {
            undatedTasks.complete(key);
            return true;
        } else {
            throw new UniqueTaskList.TaskNotFoundException();
        }
    }

    /**
     * Reverts a completed task in whichever internal list contains it.
     *
     * @throws UniqueTaskList.TaskNotFoundException if the task is in neither list.
     */
    public boolean uncompleteTask(ReadOnlyTask key) throws UniqueTaskList.TaskNotFoundException {
        if (datedTasks.contains(key)) {
            datedTasks.uncomplete(key);
            return true;
        } else if (undatedTasks.contains(key)) {
            undatedTasks.uncomplete(key);
            return true;
        } else {
            throw new UniqueTaskList.TaskNotFoundException();
        }
    }

    //@@author A0139024M
    /**
     * Set deadline task as overdue.
     *
     * @throws UniqueTaskList.TaskNotFoundException if the task is not a dated task in this book.
     */
    public boolean setTaskOverdue(Task target) throws UniqueTaskList.TaskNotFoundException {
        if (datedTasks.contains(target)) {
            return datedTasks.overdue(target);
        } else {
            throw new UniqueTaskList.TaskNotFoundException();
        }
    }

    /**
     * Reset the status of a dated task to none.
     *
     * @throws UniqueTaskList.TaskNotFoundException if the task is not a dated task in this book.
     */
    public boolean postponeTask(Task target) throws UniqueTaskList.TaskNotFoundException {
        if (datedTasks.contains(target)) {
            return datedTasks.postponed(target);
        } else {
            throw new UniqueTaskList.TaskNotFoundException();
        }
    }

    /**
     * Set event task as expired.
     *
     * @throws UniqueTaskList.TaskNotFoundException if the task is not a dated task in this book.
     */
    public boolean setExpire(Task target) throws UniqueTaskList.TaskNotFoundException {
        if (datedTasks.contains(target)) {
            return datedTasks.expire(target);
        } else {
            throw new UniqueTaskList.TaskNotFoundException();
        }
    }

    /**
     * Clear all unwanted status (Expire/Overdue) on a floating/undated task.
     *
     * @throws UniqueTaskList.TaskNotFoundException if the task is not an undated task in this book.
     */
    public boolean resetFloatingTaskStatus(Task undatedTarget) throws UniqueTaskList.TaskNotFoundException {
        if (undatedTasks.contains(undatedTarget)) {
            return undatedTasks.postponed(undatedTarget);
        } else {
            throw new UniqueTaskList.TaskNotFoundException();
        }
    }
    //@@author

    //// tag-level operations

    public void addTag(Tag t) throws UniqueTagList.DuplicateTagException {
        tags.add(t);
    }

    //// util methods

    @Override
    public String toString() {
        return datedTasks.getInternalList().size() + " datedTasks, "
                + undatedTasks.getInternalList().size() + " undatedTasks, "
                + tags.getInternalList().size() + " tags";
    }

    // NOTE(review): "Collections" below resolves to the edu.emory backport import at the top of
    // this file, not java.util.Collections — consider migrating the import.
    // This gets called when ModelManager.indicateTaskBookChanged().
    @Override
    public List<ReadOnlyTask> getDatedTaskList() {
        sortDatedTaskLists();
        return Collections.unmodifiableList(datedTasks.getInternalList());
    }

    // This also gets called when ModelManager.indicateTaskBookChanged().
    @Override
    public List<ReadOnlyTask> getUndatedTaskList() {
        sortUndatedTaskLists();
        return Collections.unmodifiableList(undatedTasks.getInternalList());
    }

    @Override
    public List<Tag> getTagList() {
        return Collections.unmodifiableList(tags.getInternalList());
    }

    @Override
    public UniqueTaskList getUniqueDatedTaskList() {
        sortDatedTaskLists();
        return this.datedTasks;
    }

    @Override
    public UniqueTaskList getUniqueUndatedTaskList() {
        sortUndatedTaskLists();
        return this.undatedTasks;
    }

    @Override
    public UniqueTagList getUniqueTagList() {
        return this.tags;
    }

    @Override
    public boolean equals(Object other) {
        return other == this // short circuit if same object
                || (other instanceof TaskBook // instanceof handles nulls
                && this.datedTasks.equals(((TaskBook) other).datedTasks)
                && this.undatedTasks.equals(((TaskBook) other).undatedTasks)
                && this.tags.equals(((TaskBook) other).tags));
    }

    @Override
    public int hashCode() {
        // use this method for custom fields hashing instead of implementing your own
        return Objects.hash(datedTasks, undatedTasks, tags);
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.maven;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.camel.catalog.CamelCatalog;
import org.apache.camel.catalog.DefaultCamelCatalog;
import org.apache.camel.catalog.EndpointValidationResult;
import org.apache.camel.catalog.LanguageValidationResult;
import org.apache.camel.catalog.lucene.LuceneSuggestionStrategy;
import org.apache.camel.catalog.maven.MavenVersionManager;
import org.apache.camel.parser.RouteBuilderParser;
import org.apache.camel.parser.XmlRouteParser;
import org.apache.camel.parser.model.CamelEndpointDetails;
import org.apache.camel.parser.model.CamelRouteDetails;
import org.apache.camel.parser.model.CamelSimpleExpressionDetails;
import org.apache.camel.support.PatternHelper;
import org.apache.camel.util.StringHelper;
import org.apache.maven.model.Dependency;
import org.apache.maven.model.Resource;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.project.MavenProject;
import org.codehaus.mojo.exec.AbstractExecMojo;
import org.jboss.forge.roaster.Roaster;
import org.jboss.forge.roaster.model.JavaType;
import org.jboss.forge.roaster.model.source.JavaClassSource;
/**
* Parses the source code and validates the Camel routes has valid endpoint uris and simple expressions.
*/
@Mojo(name = "validate", threadSafe = true)
public class ValidateMojo extends AbstractExecMojo {
/** The maven project. */
@Parameter(property = "project", required = true, readonly = true)
protected MavenProject project;

/** Whether to fail the build if invalid Camel endpoints were found. By default the plugin logs the errors at WARN level. */
@Parameter(property = "camel.failOnError", defaultValue = "false")
private boolean failOnError;

/** Whether to log endpoint URIs which were un-parsable and therefore not possible to validate. */
@Parameter(property = "camel.logUnparseable", defaultValue = "false")
private boolean logUnparseable;

/** Whether to include Java files to be validated for invalid Camel endpoints. */
@Parameter(property = "camel.includeJava", defaultValue = "true")
private boolean includeJava;

/** Whether to include XML files to be validated for invalid Camel endpoints. */
@Parameter(property = "camel.includeXml", defaultValue = "true")
private boolean includeXml;

/** Whether to include test source code. */
@Parameter(property = "camel.includeTest", defaultValue = "false")
private boolean includeTest;

/**
 * To filter the names of java and xml files to only include files matching any of the given list
 * of patterns (wildcard and regular expression). Multiple values can be separated by comma.
 */
@Parameter(property = "camel.includes")
private String includes;

/**
 * To filter the names of java and xml files to exclude files matching any of the given list
 * of patterns (wildcard and regular expression). Multiple values can be separated by comma.
 */
@Parameter(property = "camel.excludes")
private String excludes;

/** Whether to ignore unknown components. */
@Parameter(property = "camel.ignoreUnknownComponent", defaultValue = "true")
private boolean ignoreUnknownComponent;

/** Whether to ignore endpoint uris that could not be parsed (incapable). */
@Parameter(property = "camel.ignoreIncapable", defaultValue = "true")
private boolean ignoreIncapable;

/** Whether to ignore deprecated options being used in the endpoint uri. */
@Parameter(property = "camel.ignoreDeprecated", defaultValue = "true")
private boolean ignoreDeprecated;

/**
 * Whether to ignore components that use lenient properties. When this is true, then the uri
 * validation is stricter but would fail on properties that are not part of the component but
 * in the uri because of using lenient properties. For example using the HTTP components to
 * provide query parameters in the endpoint uri.
 */
@Parameter(property = "camel.ignoreLenientProperties", defaultValue = "true")
private boolean ignoreLenientProperties;

/** Whether to show all endpoints and simple expressions (both invalid and valid). */
@Parameter(property = "camel.showAll", defaultValue = "false")
private boolean showAll;

/**
 * Whether to allow downloading the Camel catalog version from the internet. This is needed if
 * the project uses a different Camel version than this plugin is using by default.
 */
@Parameter(property = "camel.downloadVersion", defaultValue = "true")
private boolean downloadVersion;

/**
 * Whether to validate for duplicate route ids. Route ids should be unique and if there are
 * duplicates then Camel will fail to startup.
 */
@Parameter(property = "camel.duplicateRouteId", defaultValue = "true")
private boolean duplicateRouteId;

/** Whether to validate direct/seda endpoints sending to non existing consumers. */
@Parameter(property = "camel.directOrSedaPairCheck", defaultValue = "true")
private boolean directOrSedaPairCheck;
// CHECKSTYLE:OFF
/**
 * Scans the project's java and/or xml sources, extracts Camel endpoint uris, simple
 * expressions and route ids, validates them against the Camel catalog, logs per-item
 * results and summary lines, and fails the build when {@code failOnError} is enabled
 * and at least one validation error was found.
 */
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
    CamelCatalog catalog = new DefaultCamelCatalog();
    // add activemq as known component
    catalog.addComponent("activemq", "org.apache.activemq.camel.component.ActiveMQComponent");
    // enable did you mean
    catalog.setSuggestionStrategy(new LuceneSuggestionStrategy());
    // enable loading other catalog versions dynamically
    catalog.setVersionManager(new MavenVersionManager());
    // enable caching
    catalog.enableCache();
    String detectedVersion = findCamelVersion(project);
    if (detectedVersion != null) {
        getLog().info("Detected Camel version used in project: " + detectedVersion);
    }
    if (downloadVersion) {
        // reuse the already detected version instead of resolving it a second time
        String catalogVersion = catalog.getCatalogVersion();
        if (detectedVersion != null && !detectedVersion.equals(catalogVersion)) {
            // the project uses a different Camel version so attempt to load it
            getLog().info("Downloading Camel version: " + detectedVersion);
            boolean loaded = catalog.loadVersion(detectedVersion);
            if (!loaded) {
                getLog().warn("Error downloading Camel version: " + detectedVersion);
            }
        }
    }
    if (catalog.getLoadedVersion() != null) {
        getLog().info("Validating using downloaded Camel version: " + catalog.getLoadedVersion());
    } else {
        getLog().info("Validating using Camel version: " + catalog.getCatalogVersion());
    }
    List<CamelEndpointDetails> endpoints = new ArrayList<>();
    List<CamelSimpleExpressionDetails> simpleExpressions = new ArrayList<>();
    List<CamelRouteDetails> routeIds = new ArrayList<>();
    Set<File> javaFiles = new LinkedHashSet<>();
    Set<File> xmlFiles = new LinkedHashSet<>();
    // find all java route builder classes
    if (includeJava) {
        List list = project.getCompileSourceRoots();
        for (Object obj : list) {
            String dir = (String) obj;
            findJavaFiles(new File(dir), javaFiles);
        }
        if (includeTest) {
            list = project.getTestCompileSourceRoots();
            for (Object obj : list) {
                String dir = (String) obj;
                findJavaFiles(new File(dir), javaFiles);
            }
        }
    }
    // find all xml routes
    if (includeXml) {
        List list = project.getResources();
        for (Object obj : list) {
            Resource dir = (Resource) obj;
            findXmlFiles(new File(dir.getDirectory()), xmlFiles);
        }
        if (includeTest) {
            list = project.getTestResources();
            for (Object obj : list) {
                Resource dir = (Resource) obj;
                findXmlFiles(new File(dir.getDirectory()), xmlFiles);
            }
        }
    }
    for (File file : javaFiles) {
        if (matchFile(file)) {
            try {
                List<CamelEndpointDetails> fileEndpoints = new ArrayList<>();
                List<CamelRouteDetails> fileRouteIds = new ArrayList<>();
                List<CamelSimpleExpressionDetails> fileSimpleExpressions = new ArrayList<>();
                List<String> unparsable = new ArrayList<>();
                // parse the java source code and find Camel RouteBuilder classes
                String fqn = file.getPath();
                String baseDir = ".";
                JavaType out = Roaster.parse(file);
                // we should only parse java classes (not interfaces and enums etc)
                if (out instanceof JavaClassSource) {
                    JavaClassSource clazz = (JavaClassSource) out;
                    RouteBuilderParser.parseRouteBuilderEndpoints(clazz, baseDir, fqn, fileEndpoints, unparsable, includeTest);
                    RouteBuilderParser.parseRouteBuilderSimpleExpressions(clazz, baseDir, fqn, fileSimpleExpressions);
                    if (duplicateRouteId) {
                        RouteBuilderParser.parseRouteBuilderRouteIds(clazz, baseDir, fqn, fileRouteIds);
                    }
                    // add what we found in this file to the total list
                    endpoints.addAll(fileEndpoints);
                    simpleExpressions.addAll(fileSimpleExpressions);
                    routeIds.addAll(fileRouteIds);
                    // was there any unparsable?
                    if (logUnparseable && !unparsable.isEmpty()) {
                        for (String uri : unparsable) {
                            getLog().warn("Cannot parse endpoint uri " + uri + " in java file " + file);
                        }
                    }
                }
            } catch (Exception e) {
                getLog().warn("Error parsing java file " + file + " code due " + e.getMessage(), e);
            }
        }
    }
    for (File file : xmlFiles) {
        if (matchFile(file)) {
            try {
                List<CamelEndpointDetails> fileEndpoints = new ArrayList<>();
                List<CamelSimpleExpressionDetails> fileSimpleExpressions = new ArrayList<>();
                List<CamelRouteDetails> fileRouteIds = new ArrayList<>();
                // parse the xml source code and find Camel routes
                String fqn = file.getPath();
                String baseDir = ".";
                // try-with-resources ensures the streams are closed even when a parser throws
                // (previously a parse failure leaked the open stream)
                try (InputStream is = new FileInputStream(file)) {
                    XmlRouteParser.parseXmlRouteEndpoints(is, baseDir, fqn, fileEndpoints);
                }
                try (InputStream is = new FileInputStream(file)) {
                    XmlRouteParser.parseXmlRouteSimpleExpressions(is, baseDir, fqn, fileSimpleExpressions);
                }
                if (duplicateRouteId) {
                    try (InputStream is = new FileInputStream(file)) {
                        XmlRouteParser.parseXmlRouteRouteIds(is, baseDir, fqn, fileRouteIds);
                    }
                }
                // add what we found in this file to the total list
                endpoints.addAll(fileEndpoints);
                simpleExpressions.addAll(fileSimpleExpressions);
                routeIds.addAll(fileRouteIds);
            } catch (Exception e) {
                getLog().warn("Error parsing xml file " + file + " code due " + e.getMessage(), e);
            }
        }
    }
    // endpoint uris
    int endpointErrors = 0;
    int unknownComponents = 0;
    int incapableErrors = 0;
    int deprecatedOptions = 0;
    for (CamelEndpointDetails detail : endpoints) {
        getLog().debug("Validating endpoint: " + detail.getEndpointUri());
        EndpointValidationResult result = catalog.validateEndpointProperties(detail.getEndpointUri(), ignoreLenientProperties);
        int deprecated = result.getDeprecated() != null ? result.getDeprecated().size() : 0;
        deprecatedOptions += deprecated;
        boolean ok = result.isSuccess();
        if (!ok && ignoreUnknownComponent && result.getUnknownComponent() != null) {
            // if we failed due unknown component then be okay if we should ignore that
            unknownComponents++;
            ok = true;
        }
        if (!ok && ignoreIncapable && result.getIncapable() != null) {
            // if we failed due incapable then be okay if we should ignore that
            incapableErrors++;
            ok = true;
        }
        if (ok && !ignoreDeprecated && deprecated > 0) {
            ok = false;
        }
        if (!ok) {
            if (result.getUnknownComponent() != null) {
                unknownComponents++;
            } else if (result.getIncapable() != null) {
                incapableErrors++;
            } else {
                endpointErrors++;
            }
            StringBuilder sb = new StringBuilder();
            sb.append("Endpoint validation error at: ");
            if (detail.getClassName() != null && detail.getLineNumber() != null) {
                // this is from java code
                sb.append(detail.getClassName());
                if (detail.getMethodName() != null) {
                    sb.append(".").append(detail.getMethodName());
                }
                sb.append("(").append(asSimpleClassName(detail.getClassName())).append(".java:");
                sb.append(detail.getLineNumber()).append(")");
            } else if (detail.getLineNumber() != null) {
                // this is from xml
                String fqn = stripRootPath(asRelativeFile(detail.getFileName()));
                if (fqn.endsWith(".xml")) {
                    fqn = fqn.substring(0, fqn.length() - 4);
                    fqn = asPackageName(fqn);
                }
                sb.append(fqn);
                sb.append("(").append(asSimpleClassName(fqn)).append(".xml:");
                sb.append(detail.getLineNumber()).append(")");
            } else {
                sb.append(detail.getFileName());
            }
            sb.append("\n\n");
            String out = result.summaryErrorMessage(false, ignoreDeprecated);
            sb.append(out);
            sb.append("\n\n");
            getLog().warn(sb.toString());
        } else if (showAll) {
            StringBuilder sb = new StringBuilder();
            // fixed typo: was "passsed"
            sb.append("Endpoint validation passed at: ");
            if (detail.getClassName() != null && detail.getLineNumber() != null) {
                // this is from java code
                sb.append(detail.getClassName());
                if (detail.getMethodName() != null) {
                    sb.append(".").append(detail.getMethodName());
                }
                sb.append("(").append(asSimpleClassName(detail.getClassName())).append(".java:");
                sb.append(detail.getLineNumber()).append(")");
            } else if (detail.getLineNumber() != null) {
                // this is from xml
                String fqn = stripRootPath(asRelativeFile(detail.getFileName()));
                if (fqn.endsWith(".xml")) {
                    fqn = fqn.substring(0, fqn.length() - 4);
                    fqn = asPackageName(fqn);
                }
                sb.append(fqn);
                sb.append("(").append(asSimpleClassName(fqn)).append(".xml:");
                sb.append(detail.getLineNumber()).append(")");
            } else {
                sb.append(detail.getFileName());
            }
            sb.append("\n");
            sb.append("\n\t").append(result.getUri());
            sb.append("\n\n");
            getLog().info(sb.toString());
        }
    }
    String endpointSummary;
    if (endpointErrors == 0) {
        int ok = endpoints.size() - endpointErrors - incapableErrors - unknownComponents;
        endpointSummary = String.format("Endpoint validation success: (%s = passed, %s = invalid, %s = incapable, %s = unknown components, %s = deprecated options)",
            ok, endpointErrors, incapableErrors, unknownComponents, deprecatedOptions);
    } else {
        int ok = endpoints.size() - endpointErrors - incapableErrors - unknownComponents;
        endpointSummary = String.format("Endpoint validation error: (%s = passed, %s = invalid, %s = incapable, %s = unknown components, %s = deprecated options)",
            ok, endpointErrors, incapableErrors, unknownComponents, deprecatedOptions);
    }
    if (endpointErrors > 0) {
        getLog().warn(endpointSummary);
    } else {
        getLog().info(endpointSummary);
    }
    // simple
    int simpleErrors = validateSimple(catalog, simpleExpressions);
    String simpleSummary;
    if (simpleErrors == 0) {
        int ok = simpleExpressions.size() - simpleErrors;
        simpleSummary = String.format("Simple validation success: (%s = passed, %s = invalid)", ok, simpleErrors);
    } else {
        int ok = simpleExpressions.size() - simpleErrors;
        simpleSummary = String.format("Simple validation error: (%s = passed, %s = invalid)", ok, simpleErrors);
    }
    if (simpleErrors > 0) {
        getLog().warn(simpleSummary);
    } else {
        getLog().info(simpleSummary);
    }
    // endpoint pairs
    int sedaDirectErrors = 0;
    String sedaDirectSummary = "";
    if (directOrSedaPairCheck) {
        long sedaDirectEndpoints = countEndpointPairs(endpoints, "direct") + countEndpointPairs(endpoints, "seda");
        sedaDirectErrors += validateEndpointPairs(endpoints, "direct") + validateEndpointPairs(endpoints, "seda");
        if (sedaDirectErrors == 0) {
            sedaDirectSummary = String.format("Endpoint pair (seda/direct) validation success (%s = pairs)", sedaDirectEndpoints);
        } else {
            sedaDirectSummary = String.format("Endpoint pair (seda/direct) validation error: (%s = pairs, %s = non-pairs)", sedaDirectEndpoints, sedaDirectErrors);
        }
        if (sedaDirectErrors > 0) {
            getLog().warn(sedaDirectSummary);
        } else {
            getLog().info(sedaDirectSummary);
        }
    }
    // route id
    int duplicateRouteIdErrors = validateDuplicateRouteId(routeIds);
    String routeIdSummary = "";
    if (duplicateRouteId) {
        if (duplicateRouteIdErrors == 0) {
            routeIdSummary = String.format("Duplicate route id validation success (%s = ids)", routeIds.size());
        } else {
            routeIdSummary = String.format("Duplicate route id validation error: (%s = ids, %s = duplicates)", routeIds.size(), duplicateRouteIdErrors);
        }
        if (duplicateRouteIdErrors > 0) {
            getLog().warn(routeIdSummary);
        } else {
            getLog().info(routeIdSummary);
        }
    }
    // precedence fix: sedaDirectErrors must also be gated by failOnError; previously
    // "failOnError && (...) || sedaDirectErrors > 0" failed the build even when failOnError=false
    if (failOnError && (endpointErrors > 0 || simpleErrors > 0 || duplicateRouteIdErrors > 0 || sedaDirectErrors > 0)) {
        throw new MojoExecutionException(endpointSummary + "\n" + simpleSummary + "\n" + routeIdSummary + "\n" + sedaDirectSummary);
    }
}
/**
 * Counts producer endpoints of the given scheme that have a matching consumer
 * endpoint (pairs). Only producer->consumer matches are counted; the opposite
 * direction need not be checked.
 *
 * @param endpoints all discovered endpoints
 * @param scheme endpoint scheme, e.g. "direct" or "seda"
 * @return number of producer endpoints with a consumer pair
 */
private int countEndpointPairs(List<CamelEndpointDetails> endpoints, String scheme) {
    String prefix = scheme + ":";
    Set<CamelEndpointDetails> consumers = endpoints.stream()
        .filter(e -> e.isConsumerOnly() && e.getEndpointUri().startsWith(prefix))
        .collect(Collectors.toSet());
    Set<CamelEndpointDetails> producers = endpoints.stream()
        .filter(e -> e.isProducerOnly() && e.getEndpointUri().startsWith(prefix))
        .collect(Collectors.toSet());
    // a pair is a producer whose uri path matches at least one consumer's uri path
    return (int) producers.stream()
        .filter(p -> consumers.stream().anyMatch(c -> matchEndpointPath(p.getEndpointUri(), c.getEndpointUri())))
        .count();
}
/**
 * Validates that every producer endpoint of the given scheme has a matching consumer
 * endpoint, logging each unmatched producer as an error (and each matched one when
 * showAll is enabled).
 * <p>
 * Consumers without producers are deliberately not flagged: a consumer may be
 * triggered from outside a Camel route, such as via ProducerTemplate.
 *
 * @param endpoints all discovered endpoints
 * @param scheme endpoint scheme, e.g. "direct" or "seda"
 * @return number of producer endpoints without a consumer pair
 */
private int validateEndpointPairs(List<CamelEndpointDetails> endpoints, String scheme) {
    int errors = 0;
    Set<CamelEndpointDetails> consumers = endpoints.stream().filter(e -> e.isConsumerOnly() && e.getEndpointUri().startsWith(scheme + ":")).collect(Collectors.toSet());
    Set<CamelEndpointDetails> producers = endpoints.stream().filter(e -> e.isProducerOnly() && e.getEndpointUri().startsWith(scheme + ":")).collect(Collectors.toSet());
    // are there any producers that do not have a consumer pair
    for (CamelEndpointDetails detail : producers) {
        boolean none = consumers.stream().noneMatch(c -> matchEndpointPath(detail.getEndpointUri(), c.getEndpointUri()));
        if (none) {
            errors++;
            StringBuilder sb = new StringBuilder();
            sb.append("Endpoint pair (seda/direct) validation error at: ");
            appendEndpointLocation(sb, detail);
            sb.append("\n");
            sb.append("\n\t").append(detail.getEndpointUri());
            sb.append("\n\n\t\t\t\t").append(endpointPathSummaryError(detail));
            sb.append("\n\n");
            getLog().warn(sb.toString());
        } else if (showAll) {
            StringBuilder sb = new StringBuilder();
            sb.append("Endpoint pair (seda/direct) validation passed at: ");
            appendEndpointLocation(sb, detail);
            sb.append("\n");
            sb.append("\n\t").append(detail.getEndpointUri());
            sb.append("\n\n");
            getLog().info(sb.toString());
        }
    }
    return errors;
}

/**
 * Appends a human-readable source location for the endpoint: either
 * "Class.method(Class.java:line)", "package(File.xml:line)", or just the file name.
 * Extracted to remove the verbatim duplication between the error and showAll branches.
 */
private void appendEndpointLocation(StringBuilder sb, CamelEndpointDetails detail) {
    if (detail.getClassName() != null && detail.getLineNumber() != null) {
        // this is from java code
        sb.append(detail.getClassName());
        if (detail.getMethodName() != null) {
            sb.append(".").append(detail.getMethodName());
        }
        sb.append("(").append(asSimpleClassName(detail.getClassName())).append(".java:");
        sb.append(detail.getLineNumber()).append(")");
    } else if (detail.getLineNumber() != null) {
        // this is from xml
        String fqn = stripRootPath(asRelativeFile(detail.getFileName()));
        if (fqn.endsWith(".xml")) {
            fqn = fqn.substring(0, fqn.length() - 4);
            fqn = asPackageName(fqn);
        }
        sb.append(fqn);
        sb.append("(").append(asSimpleClassName(fqn)).append(".xml:");
        sb.append(detail.getLineNumber()).append(")");
    } else {
        sb.append(detail.getFileName());
    }
}
private static String endpointPathSummaryError(CamelEndpointDetails detail) {
String uri = detail.getEndpointUri();
String p = uri.contains("?") ? StringHelper.before(uri, "?") : uri;
String path = StringHelper.after(p, ":");
return path + "\t" + "Sending to non existing " + detail.getEndpointComponentName() + " queue name";
}
private static boolean matchEndpointPath(String uri, String uri2) {
String p = uri.contains("?") ? StringHelper.before(uri, "?") : uri;
String p2 = uri2.contains("?") ? StringHelper.before(uri2, "?") : uri2;
p = p.trim();
p2 = p2.trim();
return p.equals(p2);
}
private int validateSimple(CamelCatalog catalog, List<CamelSimpleExpressionDetails> simpleExpressions) {
int simpleErrors = 0;
for (CamelSimpleExpressionDetails detail : simpleExpressions) {
LanguageValidationResult result;
boolean predicate = detail.isPredicate();
if (predicate) {
getLog().debug("Validating simple predicate: " + detail.getSimple());
result = catalog.validateLanguagePredicate(null, "simple", detail.getSimple());
} else {
getLog().debug("Validating simple expression: " + detail.getSimple());
result = catalog.validateLanguagePredicate(null, "simple", detail.getSimple());
}
if (!result.isSuccess()) {
simpleErrors++;
StringBuilder sb = new StringBuilder();
sb.append("Simple validation error at: ");
if (detail.getClassName() != null && detail.getLineNumber() != null) {
// this is from java code
sb.append(detail.getClassName());
if (detail.getMethodName() != null) {
sb.append(".").append(detail.getMethodName());
}
sb.append("(").append(asSimpleClassName(detail.getClassName())).append(".java:");
sb.append(detail.getLineNumber()).append(")");
} else if (detail.getLineNumber() != null) {
// this is from xml
String fqn = stripRootPath(asRelativeFile(detail.getFileName()));
if (fqn.endsWith(".xml")) {
fqn = fqn.substring(0, fqn.length() - 4);
fqn = asPackageName(fqn);
}
sb.append(fqn);
sb.append("(").append(asSimpleClassName(fqn)).append(".xml:");
sb.append(detail.getLineNumber()).append(")");
} else {
sb.append(detail.getFileName());
}
sb.append("\n");
String[] lines = result.getError().split("\n");
for (String line : lines) {
sb.append("\n\t").append(line);
}
sb.append("\n");
getLog().warn(sb.toString());
} else if (showAll) {
StringBuilder sb = new StringBuilder();
sb.append("Simple validation passed at: ");
if (detail.getClassName() != null && detail.getLineNumber() != null) {
// this is from java code
sb.append(detail.getClassName());
if (detail.getMethodName() != null) {
sb.append(".").append(detail.getMethodName());
}
sb.append("(").append(asSimpleClassName(detail.getClassName())).append(".java:");
sb.append(detail.getLineNumber()).append(")");
} else if (detail.getLineNumber() != null) {
// this is from xml
String fqn = stripRootPath(asRelativeFile(detail.getFileName()));
if (fqn.endsWith(".xml")) {
fqn = fqn.substring(0, fqn.length() - 4);
fqn = asPackageName(fqn);
}
sb.append(fqn);
sb.append("(").append(asSimpleClassName(fqn)).append(".xml:");
sb.append(detail.getLineNumber()).append(")");
} else {
sb.append(detail.getFileName());
}
sb.append("\n");
sb.append("\n\t").append(result.getText());
sb.append("\n\n");
getLog().info(sb.toString());
}
}
return simpleErrors;
}
    /**
     * Validates that route ids are unique across the project, logging a warning for
     * every route whose id occurs more than once. Only active when the
     * {@code duplicateRouteId} plugin option is enabled.
     *
     * @param routeIds all discovered route details
     * @return the number of routes that carry a duplicated id
     */
    private int validateDuplicateRouteId(List<CamelRouteDetails> routeIds) {
        int duplicateRouteIdErrors = 0;
        if (duplicateRouteId) {
            // filter out all non uniques
            for (CamelRouteDetails detail : routeIds) {
                // skip empty route ids
                if (detail.getRouteId() == null || "".equals(detail.getRouteId())) {
                    continue;
                }
                // count how many routes share this id; more than one means a duplicate
                int count = countRouteId(routeIds, detail.getRouteId());
                if (count > 1) {
                    duplicateRouteIdErrors++;
                    StringBuilder sb = new StringBuilder();
                    sb.append("Duplicate route id validation error at: ");
                    if (detail.getClassName() != null && detail.getLineNumber() != null) {
                        // this is from java code
                        sb.append(detail.getClassName());
                        if (detail.getMethodName() != null) {
                            sb.append(".").append(detail.getMethodName());
                        }
                        sb.append("(").append(asSimpleClassName(detail.getClassName())).append(".java:");
                        sb.append(detail.getLineNumber()).append(")");
                    } else if (detail.getLineNumber() != null) {
                        // this is from xml
                        String fqn = stripRootPath(asRelativeFile(detail.getFileName()));
                        if (fqn.endsWith(".xml")) {
                            fqn = fqn.substring(0, fqn.length() - 4);
                            fqn = asPackageName(fqn);
                        }
                        sb.append(fqn);
                        sb.append("(").append(asSimpleClassName(fqn)).append(".xml:");
                        sb.append(detail.getLineNumber()).append(")");
                    } else {
                        // no line number information; fall back to the file name only
                        sb.append(detail.getFileName());
                    }
                    sb.append("\n");
                    sb.append("\n\t").append(detail.getRouteId());
                    sb.append("\n\n");
                    getLog().warn(sb.toString());
                } else if (showAll) {
                    // showAll is a plugin option: also log the route ids that validated ok
                    StringBuilder sb = new StringBuilder();
                    sb.append("Duplicate route id validation passed at: ");
                    if (detail.getClassName() != null && detail.getLineNumber() != null) {
                        // this is from java code
                        sb.append(detail.getClassName());
                        if (detail.getMethodName() != null) {
                            sb.append(".").append(detail.getMethodName());
                        }
                        sb.append("(").append(asSimpleClassName(detail.getClassName())).append(".java:");
                        sb.append(detail.getLineNumber()).append(")");
                    } else if (detail.getLineNumber() != null) {
                        // this is from xml
                        String fqn = stripRootPath(asRelativeFile(detail.getFileName()));
                        if (fqn.endsWith(".xml")) {
                            fqn = fqn.substring(0, fqn.length() - 4);
                            fqn = asPackageName(fqn);
                        }
                        sb.append(fqn);
                        sb.append("(").append(asSimpleClassName(fqn)).append(".xml:");
                        sb.append(detail.getLineNumber()).append(")");
                    } else {
                        sb.append(detail.getFileName());
                    }
                    sb.append("\n");
                    sb.append("\n\t").append(detail.getRouteId());
                    sb.append("\n\n");
                    getLog().info(sb.toString());
                }
            }
        }
        return duplicateRouteIdErrors;
    }
// CHECKSTYLE:ON
private static int countRouteId(List<CamelRouteDetails> details, String routeId) {
int answer = 0;
for (CamelRouteDetails detail : details) {
if (routeId.equals(detail.getRouteId())) {
answer++;
}
}
return answer;
}
private static String findCamelVersion(MavenProject project) {
Dependency candidate = null;
List list = project.getDependencies();
for (Object obj : list) {
Dependency dep = (Dependency) obj;
if ("org.apache.camel".equals(dep.getGroupId())) {
if ("camel-core".equals(dep.getArtifactId())) {
// favor camel-core
candidate = dep;
break;
} else {
candidate = dep;
}
}
}
if (candidate != null) {
return candidate.getVersion();
}
return null;
}
private void findJavaFiles(File dir, Set<File> javaFiles) {
File[] files = dir.isDirectory() ? dir.listFiles() : null;
if (files != null) {
for (File file : files) {
if (file.getName().endsWith(".java")) {
javaFiles.add(file);
} else if (file.isDirectory()) {
findJavaFiles(file, javaFiles);
}
}
}
}
private void findXmlFiles(File dir, Set<File> xmlFiles) {
File[] files = dir.isDirectory() ? dir.listFiles() : null;
if (files != null) {
for (File file : files) {
if (file.getName().endsWith(".xml")) {
xmlFiles.add(file);
} else if (file.isDirectory()) {
findXmlFiles(file, xmlFiles);
}
}
}
}
private boolean matchFile(File file) {
if (excludes == null && includes == null) {
return true;
}
// exclude take precedence
if (excludes != null) {
for (String exclude : excludes.split(",")) {
exclude = exclude.trim();
// try both with and without directory in the name
String fqn = stripRootPath(asRelativeFile(file.getAbsolutePath()));
boolean match = PatternHelper.matchPattern(fqn, exclude) || PatternHelper.matchPattern(file.getName(), exclude);
if (match) {
return false;
}
}
}
// include
if (includes != null) {
for (String include : includes.split(",")) {
include = include.trim();
// try both with and without directory in the name
String fqn = stripRootPath(asRelativeFile(file.getAbsolutePath()));
boolean match = PatternHelper.matchPattern(fqn, include) || PatternHelper.matchPattern(file.getName(), include);
if (match) {
return true;
}
}
// did not match any includes
return false;
}
// was not excluded nor failed include so its accepted
return true;
}
private String asRelativeFile(String name) {
String answer = name;
String base = project.getBasedir().getAbsolutePath();
if (name.startsWith(base)) {
answer = name.substring(base.length());
// skip leading slash for relative path
if (answer.startsWith(File.separator)) {
answer = answer.substring(1);
}
}
return answer;
}
private String stripRootPath(String name) {
// strip out any leading source / resource directory
List list = project.getCompileSourceRoots();
for (Object obj : list) {
String dir = (String) obj;
dir = asRelativeFile(dir);
if (name.startsWith(dir)) {
return name.substring(dir.length() + 1);
}
}
list = project.getTestCompileSourceRoots();
for (Object obj : list) {
String dir = (String) obj;
dir = asRelativeFile(dir);
if (name.startsWith(dir)) {
return name.substring(dir.length() + 1);
}
}
List resources = project.getResources();
for (Object obj : resources) {
Resource resource = (Resource) obj;
String dir = asRelativeFile(resource.getDirectory());
if (name.startsWith(dir)) {
return name.substring(dir.length() + 1);
}
}
resources = project.getTestResources();
for (Object obj : resources) {
Resource resource = (Resource) obj;
String dir = asRelativeFile(resource.getDirectory());
if (name.startsWith(dir)) {
return name.substring(dir.length() + 1);
}
}
return name;
}
    /**
     * Converts a relative file path (e.g. com/foo/MyRoutes) into a package style
     * name (com.foo.MyRoutes) by replacing file separators with dots.
     */
    private static String asPackageName(String name) {
        return name.replace(File.separator, ".");
    }
private static String asSimpleClassName(String className) {
int dot = className.lastIndexOf('.');
if (dot > 0) {
return className.substring(dot + 1);
} else {
return className;
}
}
}
| |
// Copyright (C) 2017 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.acceptance;
import static com.google.common.truth.Truth.assertThat;
import com.google.common.collect.ImmutableSet;
import com.google.gerrit.common.Nullable;
import com.google.gerrit.reviewdb.client.Account;
import com.google.gerrit.reviewdb.client.AccountGroup;
import com.google.gerrit.reviewdb.client.Project;
import com.google.gerrit.reviewdb.client.RefNames;
import com.google.gerrit.server.account.AccountCache;
import com.google.gerrit.server.account.GroupCache;
import com.google.gerrit.server.account.GroupIncludeCache;
import com.google.gerrit.server.config.AllUsersName;
import com.google.gerrit.server.config.AllUsersNameProvider;
import com.google.gerrit.server.index.account.AccountIndexer;
import com.google.gerrit.server.index.group.GroupIndexer;
import com.google.gerrit.server.project.ProjectCache;
import com.google.gerrit.server.util.time.TimeUtil;
import com.google.gerrit.testing.GerritBaseTests;
import com.google.gerrit.testing.InMemoryRepositoryManager;
import com.google.gerrit.testing.TestTimeUtil;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import org.easymock.EasyMock;
import org.eclipse.jgit.lib.CommitBuilder;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectInserter;
import org.eclipse.jgit.lib.PersonIdent;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.RefUpdate;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.revwalk.RevWalk;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
public class ProjectResetterTest extends GerritBaseTests {
private InMemoryRepositoryManager repoManager;
private Project.NameKey project;
private Repository repo;
@Before
public void setUp() throws Exception {
repoManager = new InMemoryRepositoryManager();
project = new Project.NameKey("foo");
repo = repoManager.createRepository(project);
}
@Before
public void setTimeForTesting() {
TestTimeUtil.resetWithClockStep(1, TimeUnit.SECONDS);
}
@After
public void resetTime() {
TestTimeUtil.useSystemTime();
}
@Test
public void resetAllRefs() throws Exception {
Ref matchingRef = createRef("refs/any/test");
try (ProjectResetter resetProject =
builder().build(new ProjectResetter.Config().reset(project))) {
updateRef(matchingRef);
}
// The matching refs are reset to the old state.
assertRef(matchingRef);
}
@Test
public void onlyResetMatchingRefs() throws Exception {
Ref matchingRef = createRef("refs/match/test");
Ref anotherMatchingRef = createRef("refs/another-match/test");
Ref nonMatchingRef = createRef("refs/no-match/test");
Ref updatedNonMatchingRef;
try (ProjectResetter resetProject =
builder()
.build(
new ProjectResetter.Config()
.reset(project, "refs/match/*", "refs/another-match/*"))) {
updateRef(matchingRef);
updateRef(anotherMatchingRef);
updatedNonMatchingRef = updateRef(nonMatchingRef);
}
// The matching refs are reset to the old state.
assertRef(matchingRef);
assertRef(anotherMatchingRef);
// The non-matching ref is not reset, hence it still has the updated state.
assertRef(updatedNonMatchingRef);
}
@Test
public void onlyDeleteNewlyCreatedMatchingRefs() throws Exception {
Ref matchingRef;
Ref anotherMatchingRef;
Ref nonMatchingRef;
try (ProjectResetter resetProject =
builder()
.build(
new ProjectResetter.Config()
.reset(project, "refs/match/*", "refs/another-match/*"))) {
matchingRef = createRef("refs/match/test");
anotherMatchingRef = createRef("refs/another-match/test");
nonMatchingRef = createRef("refs/no-match/test");
}
// The matching refs are deleted since they didn't exist before.
assertDeletedRef(matchingRef);
assertDeletedRef(anotherMatchingRef);
// The non-matching ref is not deleted.
assertRef(nonMatchingRef);
}
@Test
public void onlyResetMatchingRefsMultipleProjects() throws Exception {
Project.NameKey project2 = new Project.NameKey("bar");
Repository repo2 = repoManager.createRepository(project2);
Ref matchingRefProject1 = createRef("refs/foo/test");
Ref nonMatchingRefProject1 = createRef("refs/bar/test");
Ref matchingRefProject2 = createRef(repo2, "refs/bar/test");
Ref nonMatchingRefProject2 = createRef(repo2, "refs/foo/test");
Ref updatedNonMatchingRefProject1;
Ref updatedNonMatchingRefProject2;
try (ProjectResetter resetProject =
builder()
.build(
new ProjectResetter.Config()
.reset(project, "refs/foo/*")
.reset(project2, "refs/bar/*"))) {
updateRef(matchingRefProject1);
updatedNonMatchingRefProject1 = updateRef(nonMatchingRefProject1);
updateRef(repo2, matchingRefProject2);
updatedNonMatchingRefProject2 = updateRef(repo2, nonMatchingRefProject2);
}
// The matching refs are reset to the old state.
assertRef(matchingRefProject1);
assertRef(repo2, matchingRefProject2);
// The non-matching refs are not reset, hence they still has the updated states.
assertRef(updatedNonMatchingRefProject1);
assertRef(repo2, updatedNonMatchingRefProject2);
}
@Test
public void onlyDeleteNewlyCreatedMatchingRefsMultipleProjects() throws Exception {
Project.NameKey project2 = new Project.NameKey("bar");
Repository repo2 = repoManager.createRepository(project2);
Ref matchingRefProject1;
Ref nonMatchingRefProject1;
Ref matchingRefProject2;
Ref nonMatchingRefProject2;
try (ProjectResetter resetProject =
builder()
.build(
new ProjectResetter.Config()
.reset(project, "refs/foo/*")
.reset(project2, "refs/bar/*"))) {
matchingRefProject1 = createRef("refs/foo/test");
nonMatchingRefProject1 = createRef("refs/bar/test");
matchingRefProject2 = createRef(repo2, "refs/bar/test");
nonMatchingRefProject2 = createRef(repo2, "refs/foo/test");
}
// The matching refs are deleted since they didn't exist before.
assertDeletedRef(matchingRefProject1);
assertDeletedRef(repo2, matchingRefProject2);
// The non-matching ref is not deleted.
assertRef(nonMatchingRefProject1);
assertRef(repo2, nonMatchingRefProject2);
}
@Test
public void onlyDeleteNewlyCreatedWithOverlappingRefPatterns() throws Exception {
Ref matchingRef;
try (ProjectResetter resetProject =
builder()
.build(
new ProjectResetter.Config().reset(project, "refs/match/*", "refs/match/test"))) {
// This ref matches 2 ref pattern, ProjectResetter should try to delete it only once.
matchingRef = createRef("refs/match/test");
}
// The matching ref is deleted since it didn't exist before.
assertDeletedRef(matchingRef);
}
@Test
public void projectEvictionIfRefsMetaConfigIsReset() throws Exception {
Project.NameKey project2 = new Project.NameKey("bar");
Repository repo2 = repoManager.createRepository(project2);
Ref metaConfig = createRef(repo2, RefNames.REFS_CONFIG);
ProjectCache projectCache = EasyMock.createNiceMock(ProjectCache.class);
projectCache.evict(project2);
EasyMock.expectLastCall();
EasyMock.replay(projectCache);
Ref nonMetaConfig = createRef("refs/heads/master");
try (ProjectResetter resetProject =
builder(null, null, null, null, null, null, projectCache)
.build(new ProjectResetter.Config().reset(project).reset(project2))) {
updateRef(nonMetaConfig);
updateRef(repo2, metaConfig);
}
EasyMock.verify(projectCache);
}
@Test
public void projectEvictionIfRefsMetaConfigIsDeleted() throws Exception {
Project.NameKey project2 = new Project.NameKey("bar");
Repository repo2 = repoManager.createRepository(project2);
ProjectCache projectCache = EasyMock.createNiceMock(ProjectCache.class);
projectCache.evict(project2);
EasyMock.expectLastCall();
EasyMock.replay(projectCache);
try (ProjectResetter resetProject =
builder(null, null, null, null, null, null, projectCache)
.build(new ProjectResetter.Config().reset(project).reset(project2))) {
createRef("refs/heads/master");
createRef(repo2, RefNames.REFS_CONFIG);
}
EasyMock.verify(projectCache);
}
@Test
public void accountEvictionIfUserBranchIsReset() throws Exception {
Account.Id accountId = new Account.Id(1);
Project.NameKey allUsers = new Project.NameKey(AllUsersNameProvider.DEFAULT);
Repository allUsersRepo = repoManager.createRepository(allUsers);
Ref userBranch = createRef(allUsersRepo, RefNames.refsUsers(accountId));
AccountCache accountCache = EasyMock.createNiceMock(AccountCache.class);
accountCache.evict(accountId);
EasyMock.expectLastCall();
EasyMock.replay(accountCache);
AccountIndexer accountIndexer = EasyMock.createNiceMock(AccountIndexer.class);
accountIndexer.index(accountId);
EasyMock.expectLastCall();
EasyMock.replay(accountIndexer);
// Non-user branch because it's not in All-Users.
Ref nonUserBranch = createRef(RefNames.refsUsers(new Account.Id(2)));
try (ProjectResetter resetProject =
builder(null, accountCache, accountIndexer, null, null, null, null)
.build(new ProjectResetter.Config().reset(project).reset(allUsers))) {
updateRef(nonUserBranch);
updateRef(allUsersRepo, userBranch);
}
EasyMock.verify(accountCache, accountIndexer);
}
@Test
public void accountEvictionIfUserBranchIsDeleted() throws Exception {
Account.Id accountId = new Account.Id(1);
Project.NameKey allUsers = new Project.NameKey(AllUsersNameProvider.DEFAULT);
Repository allUsersRepo = repoManager.createRepository(allUsers);
AccountCache accountCache = EasyMock.createNiceMock(AccountCache.class);
accountCache.evict(accountId);
EasyMock.expectLastCall();
EasyMock.replay(accountCache);
AccountIndexer accountIndexer = EasyMock.createNiceMock(AccountIndexer.class);
accountIndexer.index(accountId);
EasyMock.expectLastCall();
EasyMock.replay(accountIndexer);
try (ProjectResetter resetProject =
builder(null, accountCache, accountIndexer, null, null, null, null)
.build(new ProjectResetter.Config().reset(project).reset(allUsers))) {
// Non-user branch because it's not in All-Users.
createRef(RefNames.refsUsers(new Account.Id(2)));
createRef(allUsersRepo, RefNames.refsUsers(accountId));
}
EasyMock.verify(accountCache, accountIndexer);
}
@Test
public void accountEvictionIfExternalIdsBranchIsReset() throws Exception {
Account.Id accountId = new Account.Id(1);
Project.NameKey allUsers = new Project.NameKey(AllUsersNameProvider.DEFAULT);
Repository allUsersRepo = repoManager.createRepository(allUsers);
Ref externalIds = createRef(allUsersRepo, RefNames.REFS_EXTERNAL_IDS);
createRef(allUsersRepo, RefNames.refsUsers(accountId));
Account.Id accountId2 = new Account.Id(2);
AccountCache accountCache = EasyMock.createNiceMock(AccountCache.class);
accountCache.evict(accountId);
EasyMock.expectLastCall();
accountCache.evict(accountId2);
EasyMock.expectLastCall();
EasyMock.replay(accountCache);
AccountIndexer accountIndexer = EasyMock.createNiceMock(AccountIndexer.class);
accountIndexer.index(accountId);
EasyMock.expectLastCall();
accountIndexer.index(accountId2);
EasyMock.expectLastCall();
EasyMock.replay(accountIndexer);
// Non-user branch because it's not in All-Users.
Ref nonUserBranch = createRef(RefNames.refsUsers(new Account.Id(3)));
try (ProjectResetter resetProject =
builder(null, accountCache, accountIndexer, null, null, null, null)
.build(new ProjectResetter.Config().reset(project).reset(allUsers))) {
updateRef(nonUserBranch);
updateRef(allUsersRepo, externalIds);
createRef(allUsersRepo, RefNames.refsUsers(accountId2));
}
EasyMock.verify(accountCache, accountIndexer);
}
@Test
public void accountEvictionIfExternalIdsBranchIsDeleted() throws Exception {
Account.Id accountId = new Account.Id(1);
Project.NameKey allUsers = new Project.NameKey(AllUsersNameProvider.DEFAULT);
Repository allUsersRepo = repoManager.createRepository(allUsers);
createRef(allUsersRepo, RefNames.refsUsers(accountId));
Account.Id accountId2 = new Account.Id(2);
AccountCache accountCache = EasyMock.createNiceMock(AccountCache.class);
accountCache.evict(accountId);
EasyMock.expectLastCall();
accountCache.evict(accountId2);
EasyMock.expectLastCall();
EasyMock.replay(accountCache);
AccountIndexer accountIndexer = EasyMock.createNiceMock(AccountIndexer.class);
accountIndexer.index(accountId);
EasyMock.expectLastCall();
accountIndexer.index(accountId2);
EasyMock.expectLastCall();
EasyMock.replay(accountIndexer);
// Non-user branch because it's not in All-Users.
Ref nonUserBranch = createRef(RefNames.refsUsers(new Account.Id(3)));
try (ProjectResetter resetProject =
builder(null, accountCache, accountIndexer, null, null, null, null)
.build(new ProjectResetter.Config().reset(project).reset(allUsers))) {
updateRef(nonUserBranch);
createRef(allUsersRepo, RefNames.REFS_EXTERNAL_IDS);
createRef(allUsersRepo, RefNames.refsUsers(accountId2));
}
EasyMock.verify(accountCache, accountIndexer);
}
@Test
public void accountEvictionFromAccountCreatorIfUserBranchIsDeleted() throws Exception {
Account.Id accountId = new Account.Id(1);
Project.NameKey allUsers = new Project.NameKey(AllUsersNameProvider.DEFAULT);
Repository allUsersRepo = repoManager.createRepository(allUsers);
AccountCreator accountCreator = EasyMock.createNiceMock(AccountCreator.class);
accountCreator.evict(ImmutableSet.of(accountId));
EasyMock.expectLastCall();
EasyMock.replay(accountCreator);
try (ProjectResetter resetProject =
builder(accountCreator, null, null, null, null, null, null)
.build(new ProjectResetter.Config().reset(project).reset(allUsers))) {
createRef(allUsersRepo, RefNames.refsUsers(accountId));
}
EasyMock.verify(accountCreator);
}
@Test
public void groupEviction() throws Exception {
AccountGroup.UUID uuid1 = new AccountGroup.UUID("abcd1");
AccountGroup.UUID uuid2 = new AccountGroup.UUID("abcd2");
AccountGroup.UUID uuid3 = new AccountGroup.UUID("abcd3");
Project.NameKey allUsers = new Project.NameKey(AllUsersNameProvider.DEFAULT);
Repository allUsersRepo = repoManager.createRepository(allUsers);
GroupCache cache = EasyMock.createNiceMock(GroupCache.class);
GroupIndexer indexer = EasyMock.createNiceMock(GroupIndexer.class);
GroupIncludeCache includeCache = EasyMock.createNiceMock(GroupIncludeCache.class);
cache.evict(uuid2);
indexer.index(uuid2);
includeCache.evictParentGroupsOf(uuid2);
cache.evict(uuid3);
indexer.index(uuid3);
includeCache.evictParentGroupsOf(uuid3);
EasyMock.expectLastCall();
EasyMock.replay(cache, indexer);
createRef(allUsersRepo, RefNames.refsGroups(uuid1));
Ref ref2 = createRef(allUsersRepo, RefNames.refsGroups(uuid2));
try (ProjectResetter resetProject =
builder(null, null, null, cache, includeCache, indexer, null)
.build(new ProjectResetter.Config().reset(project).reset(allUsers))) {
updateRef(allUsersRepo, ref2);
createRef(allUsersRepo, RefNames.refsGroups(uuid3));
}
EasyMock.verify(cache, indexer);
}
private Ref createRef(String ref) throws IOException {
return createRef(repo, ref);
}
private Ref createRef(Repository repo, String ref) throws IOException {
try (ObjectInserter oi = repo.newObjectInserter();
RevWalk rw = new RevWalk(repo)) {
ObjectId emptyCommit = createCommit(repo);
RefUpdate updateRef = repo.updateRef(ref);
updateRef.setExpectedOldObjectId(ObjectId.zeroId());
updateRef.setNewObjectId(emptyCommit);
assertThat(updateRef.update(rw)).isEqualTo(RefUpdate.Result.NEW);
return repo.exactRef(ref);
}
}
private Ref updateRef(Ref ref) throws IOException {
return updateRef(repo, ref);
}
private Ref updateRef(Repository repo, Ref ref) throws IOException {
try (ObjectInserter oi = repo.newObjectInserter();
RevWalk rw = new RevWalk(repo)) {
ObjectId emptyCommit = createCommit(repo);
RefUpdate updateRef = repo.updateRef(ref.getName());
updateRef.setExpectedOldObjectId(ref.getObjectId());
updateRef.setNewObjectId(emptyCommit);
updateRef.setForceUpdate(true);
assertThat(updateRef.update(rw)).isEqualTo(RefUpdate.Result.FORCED);
Ref updatedRef = repo.exactRef(ref.getName());
assertThat(updatedRef.getObjectId()).isNotEqualTo(ref.getObjectId());
return updatedRef;
}
}
private void assertRef(Ref ref) throws IOException {
assertRef(repo, ref);
}
private void assertRef(Repository repo, Ref ref) throws IOException {
assertThat(repo.exactRef(ref.getName()).getObjectId()).isEqualTo(ref.getObjectId());
}
private void assertDeletedRef(Ref ref) throws IOException {
assertDeletedRef(repo, ref);
}
private void assertDeletedRef(Repository repo, Ref ref) throws IOException {
assertThat(repo.exactRef(ref.getName())).isNull();
}
private ObjectId createCommit(Repository repo) throws IOException {
try (ObjectInserter oi = repo.newObjectInserter()) {
PersonIdent ident =
new PersonIdent(new PersonIdent("Foo Bar", "foo.bar@baz.com"), TimeUtil.nowTs());
CommitBuilder cb = new CommitBuilder();
cb.setTreeId(oi.insert(Constants.OBJ_TREE, new byte[] {}));
cb.setCommitter(ident);
cb.setAuthor(ident);
cb.setMessage("Test commit");
ObjectId commit = oi.insert(cb);
oi.flush();
return commit;
}
}
private ProjectResetter.Builder builder() {
return builder(null, null, null, null, null, null, null);
}
private ProjectResetter.Builder builder(
@Nullable AccountCreator accountCreator,
@Nullable AccountCache accountCache,
@Nullable AccountIndexer accountIndexer,
@Nullable GroupCache groupCache,
@Nullable GroupIncludeCache groupIncludeCache,
@Nullable GroupIndexer groupIndexer,
@Nullable ProjectCache projectCache) {
return new ProjectResetter.Builder(
repoManager,
new AllUsersName(AllUsersNameProvider.DEFAULT),
accountCreator,
accountCache,
accountIndexer,
groupCache,
groupIncludeCache,
groupIndexer,
projectCache);
}
}
| |
/*
* Copyright (C) 2010 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.util.concurrent;
import static org.truth0.Truth.ASSERT;
import com.google.common.testing.NullPointerTester;
import junit.framework.TestCase;
import java.lang.Thread.UncaughtExceptionHandler;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
/**
* Tests for ThreadFactoryBuilder.
*
* @author Kurt Alfred Kluever
* @author Martin Buchholz
*/
public class ThreadFactoryBuilderTest extends TestCase {
private final Runnable monitoredRunnable = new Runnable() {
@Override public void run() {
completed = true;
}
};
private static final UncaughtExceptionHandler UNCAUGHT_EXCEPTION_HANDLER =
new UncaughtExceptionHandler() {
@Override public void uncaughtException(Thread t, Throwable e) {
// No-op
}
};
private ThreadFactoryBuilder builder;
private volatile boolean completed = false;
@Override public void setUp() {
builder = new ThreadFactoryBuilder();
}
public void testThreadFactoryBuilder_defaults() throws InterruptedException {
ThreadFactory threadFactory = builder.build();
Thread thread = threadFactory.newThread(monitoredRunnable);
checkThreadPoolName(thread, 1);
Thread defaultThread =
Executors.defaultThreadFactory().newThread(monitoredRunnable);
assertEquals(defaultThread.isDaemon(), thread.isDaemon());
assertEquals(defaultThread.getPriority(), thread.getPriority());
assertSame(defaultThread.getThreadGroup(), thread.getThreadGroup());
assertSame(defaultThread.getUncaughtExceptionHandler(),
thread.getUncaughtExceptionHandler());
assertFalse(completed);
thread.start();
thread.join();
assertTrue(completed);
// Creating a new thread from the same ThreadFactory will have the same
// pool ID but a thread ID of 2.
Thread thread2 = threadFactory.newThread(monitoredRunnable);
checkThreadPoolName(thread2, 2);
assertEquals(
thread.getName().substring(0, thread.getName().lastIndexOf('-')),
thread2.getName().substring(0, thread.getName().lastIndexOf('-')));
// Building again should give us a different pool ID.
ThreadFactory threadFactory2 = builder.build();
Thread thread3 = threadFactory2.newThread(monitoredRunnable);
checkThreadPoolName(thread3, 1);
ASSERT.that(
thread2.getName().substring(0, thread.getName().lastIndexOf('-')))
.isNotEqualTo(
thread3.getName().substring(0, thread.getName().lastIndexOf('-')));
}
private static void checkThreadPoolName(Thread thread, int threadId) {
assertTrue(thread.getName().matches("^pool-\\d+-thread-" + threadId + "$"));
}
public void testNameFormatWithPercentS_custom() {
String format = "super-duper-thread-%s";
ThreadFactory factory = builder.setNameFormat(format).build();
for (int i = 0; i < 11; i++) {
assertEquals(String.format(format, i),
factory.newThread(monitoredRunnable).getName());
}
}
public void testNameFormatWithPercentD_custom() {
String format = "super-duper-thread-%d";
ThreadFactory factory = builder.setNameFormat(format).build();
for (int i = 0; i < 11; i++) {
assertEquals(String.format(format, i),
factory.newThread(monitoredRunnable).getName());
}
}
public void testDaemon_false() {
ThreadFactory factory = builder.setDaemon(false).build();
Thread thread = factory.newThread(monitoredRunnable);
assertFalse(thread.isDaemon());
}
public void testDaemon_true() {
ThreadFactory factory = builder.setDaemon(true).build();
Thread thread = factory.newThread(monitoredRunnable);
assertTrue(thread.isDaemon());
}
public void testPriority_custom() {
for (int i = Thread.MIN_PRIORITY; i <= Thread.MAX_PRIORITY; i++) {
ThreadFactory factory = builder.setPriority(i).build();
Thread thread = factory.newThread(monitoredRunnable);
assertEquals(i, thread.getPriority());
}
}
public void testPriority_tooLow() {
try {
builder.setPriority(Thread.MIN_PRIORITY - 1);
fail();
} catch (IllegalArgumentException expected) {
}
}
public void testPriority_tooHigh() {
try {
builder.setPriority(Thread.MAX_PRIORITY + 1);
fail();
} catch (IllegalArgumentException expected) {
}
}
public void testUncaughtExceptionHandler_custom() {
assertEquals(UNCAUGHT_EXCEPTION_HANDLER,
builder.setUncaughtExceptionHandler(UNCAUGHT_EXCEPTION_HANDLER).build()
.newThread(monitoredRunnable).getUncaughtExceptionHandler());
}
public void testBuildMutateBuild() {
ThreadFactory factory1 = builder.setPriority(1).build();
assertEquals(1, factory1.newThread(monitoredRunnable).getPriority());
ThreadFactory factory2 = builder.setPriority(2).build();
assertEquals(1, factory1.newThread(monitoredRunnable).getPriority());
assertEquals(2, factory2.newThread(monitoredRunnable).getPriority());
}
public void testBuildTwice() {
builder.build(); // this is allowed
builder.build(); // this is *also* allowed
}
public void testBuildMutate() {
ThreadFactory factory1 = builder.setPriority(1).build();
assertEquals(1, factory1.newThread(monitoredRunnable).getPriority());
builder.setPriority(2); // change the state of the builder
assertEquals(1, factory1.newThread(monitoredRunnable).getPriority());
}
public void testThreadFactory() throws InterruptedException {
final String THREAD_NAME = "ludicrous speed";
final int THREAD_PRIORITY = 1;
final boolean THREAD_DAEMON = false;
ThreadFactory backingThreadFactory = new ThreadFactory() {
@Override public Thread newThread(Runnable r) {
Thread thread = new Thread(r);
thread.setName(THREAD_NAME);
thread.setPriority(THREAD_PRIORITY);
thread.setDaemon(THREAD_DAEMON);
thread.setUncaughtExceptionHandler(UNCAUGHT_EXCEPTION_HANDLER);
return thread;
}
};
Thread thread = builder.setThreadFactory(backingThreadFactory).build()
.newThread(monitoredRunnable);
assertEquals(THREAD_NAME, thread.getName());
assertEquals(THREAD_PRIORITY, thread.getPriority());
assertEquals(THREAD_DAEMON, thread.isDaemon());
assertSame(UNCAUGHT_EXCEPTION_HANDLER,
thread.getUncaughtExceptionHandler());
assertSame(Thread.State.NEW, thread.getState());
assertFalse(completed);
thread.start();
thread.join();
assertTrue(completed);
}
public void testNulls() {
NullPointerTester npTester = new NullPointerTester();
npTester.testAllPublicConstructors(ThreadFactoryBuilder.class);
npTester.testAllPublicStaticMethods(ThreadFactoryBuilder.class);
npTester.testAllPublicInstanceMethods(builder);
}
}
| |
/*
* Copyright (c) 2010-2016. Axon Framework
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.axonframework.test.saga;
import org.axonframework.commandhandling.gateway.CommandGatewayFactory;
import org.axonframework.commandhandling.gateway.DefaultCommandGateway;
import org.axonframework.common.ReflectionUtils;
import org.axonframework.eventhandling.EventBus;
import org.axonframework.eventhandling.EventMessage;
import org.axonframework.eventhandling.GenericEventMessage;
import org.axonframework.eventhandling.SimpleEventBus;
import org.axonframework.eventhandling.saga.AnnotatedSagaManager;
import org.axonframework.eventhandling.saga.SagaRepository;
import org.axonframework.eventhandling.saga.repository.AnnotatedSagaRepository;
import org.axonframework.eventhandling.saga.repository.inmemory.InMemorySagaStore;
import org.axonframework.eventsourcing.GenericDomainEventMessage;
import org.axonframework.messaging.unitofwork.DefaultUnitOfWork;
import org.axonframework.test.FixtureExecutionException;
import org.axonframework.test.eventscheduler.StubEventScheduler;
import org.axonframework.test.matchers.FieldFilter;
import org.axonframework.test.matchers.IgnoreField;
import org.axonframework.test.utils.AutowiredResourceInjector;
import org.axonframework.test.utils.CallbackBehavior;
import org.axonframework.test.utils.RecordingCommandBus;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.time.Duration;
import java.time.Instant;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.stream.StreamSupport;
import static java.lang.String.format;
import static org.axonframework.common.ReflectionUtils.fieldsOf;
/**
* Fixture for testing Annotated Sagas based on events and time passing. This fixture allows resources to be configured
* for the sagas to use.
*
* @author Allard Buijze
* @since 1.1
*/
public class SagaTestFixture<T> implements FixtureConfiguration, ContinuedGivenState {

    private final StubEventScheduler eventScheduler;
    private final AnnotatedSagaManager<T> sagaManager;
    private final List<Object> registeredResources = new LinkedList<>();
    // One publisher per aggregate identifier, so the sequence number keeps increasing
    // across multiple given/when calls for the same aggregate.
    private final Map<Object, AggregateEventPublisherImpl> aggregatePublishers = new HashMap<>();
    private final FixtureExecutionResultImpl<T> fixtureExecutionResult;
    private final RecordingCommandBus commandBus;
    private final MutableFieldFilter fieldFilters = new MutableFieldFilter();
    // When enabled (the default), every injected resource must sit in a 'transient' saga field.
    private boolean transienceCheckEnabled = true;

    /**
     * Creates an instance of the AnnotatedSagaTestFixture to test sagas of the given {@code sagaType}.
     *
     * @param sagaType The type of saga under test
     */
    @SuppressWarnings({"unchecked"})
    public SagaTestFixture(Class<T> sagaType) {
        eventScheduler = new StubEventScheduler();
        EventBus eventBus = new SimpleEventBus();
        InMemorySagaStore sagaStore = new InMemorySagaStore();
        SagaRepository<T> sagaRepository = new AnnotatedSagaRepository<>(
                sagaType, sagaStore, new TransienceValidatingResourceInjector());
        sagaManager = new AnnotatedSagaManager<>(sagaType, sagaRepository);
        // Let handler exceptions surface so handleInSaga() can wrap them in FixtureExecutionException.
        sagaManager.setSuppressExceptions(false);
        registeredResources.add(eventBus);
        commandBus = new RecordingCommandBus();
        registeredResources.add(commandBus);
        registeredResources.add(eventScheduler);
        registeredResources.add(new DefaultCommandGateway(commandBus));
        fixtureExecutionResult = new FixtureExecutionResultImpl<>(sagaStore, eventScheduler, eventBus, commandBus,
                                                                  sagaType, fieldFilters);
    }

    /**
     * Handles the given {@code event} in the scope of a Unit of Work. If handling the event results in an exception
     * the exception will be wrapped in a {@link FixtureExecutionException}.
     *
     * @param event The event message to handle
     */
    protected void handleInSaga(EventMessage<?> event) {
        try {
            DefaultUnitOfWork.startAndGet(event).executeWithResult(() -> sagaManager.handle(event));
        } catch (Exception e) {
            throw new FixtureExecutionException("Exception occurred while handling an event", e);
        }
    }

    /** Disables the check that injected resources must be held in {@code transient} saga fields. */
    @Override
    public FixtureConfiguration withTransienceCheckDisabled() {
        this.transienceCheckEnabled = false;
        return this;
    }

    /** Starts recording, then advances the stub scheduler by {@code elapsedTime}, handling triggered events. */
    @Override
    public FixtureExecutionResult whenTimeElapses(Duration elapsedTime) {
        try {
            fixtureExecutionResult.startRecording();
            eventScheduler.advanceTimeBy(elapsedTime, this::handleInSaga);
        } catch (Exception e) {
            throw new FixtureExecutionException("Exception occurred while trying to advance time " +
                                                        "and handle scheduled events", e);
        }
        return fixtureExecutionResult;
    }

    /** Starts recording, then advances the stub scheduler to {@code newDateTime}, handling triggered events. */
    @Override
    public FixtureExecutionResult whenTimeAdvancesTo(Instant newDateTime) {
        try {
            fixtureExecutionResult.startRecording();
            eventScheduler.advanceTimeTo(newDateTime, this::handleInSaga);
        } catch (Exception e) {
            throw new FixtureExecutionException("Exception occurred while trying to advance time " +
                                                        "and handle scheduled events", e);
        }
        return fixtureExecutionResult;
    }

    /** Registers a resource that will be injected into sagas under test. */
    @Override
    public void registerResource(Object resource) {
        registeredResources.add(resource);
    }

    /** Defines how the recording command bus responds to dispatched commands. */
    @Override
    public void setCallbackBehavior(CallbackBehavior callbackBehavior) {
        commandBus.setCallbackBehavior(callbackBehavior);
    }

    @Override
    public GivenAggregateEventPublisher givenAggregate(String aggregateIdentifier) {
        return getPublisherFor(aggregateIdentifier);
    }

    @Override
    public ContinuedGivenState givenAPublished(Object event) {
        handleInSaga(timeCorrectedEventMessage(event));
        return this;
    }

    @Override
    public WhenState givenNoPriorActivity() {
        return this;
    }

    @Override
    public GivenAggregateEventPublisher andThenAggregate(String aggregateIdentifier) {
        return givenAggregate(aggregateIdentifier);
    }

    @Override
    public ContinuedGivenState andThenTimeElapses(final Duration elapsedTime) throws Exception {
        eventScheduler.advanceTimeBy(elapsedTime, this::handleInSaga);
        return this;
    }

    @Override
    public ContinuedGivenState andThenTimeAdvancesTo(final Instant newDateTime) throws Exception {
        eventScheduler.advanceTimeTo(newDateTime, this::handleInSaga);
        return this;
    }

    @Override
    public ContinuedGivenState andThenAPublished(Object event) throws Exception {
        handleInSaga(timeCorrectedEventMessage(event));
        return this;
    }

    @Override
    public WhenAggregateEventPublisher whenAggregate(String aggregateIdentifier) {
        fixtureExecutionResult.startRecording();
        return getPublisherFor(aggregateIdentifier);
    }

    @Override
    public FixtureExecutionResult whenPublishingA(Object event) {
        fixtureExecutionResult.startRecording();
        handleInSaga(timeCorrectedEventMessage(event));
        return fixtureExecutionResult;
    }

    /** Re-stamps the given event (payload/metadata preserved) with the scheduler's current simulated time. */
    private EventMessage<Object> timeCorrectedEventMessage(Object event) {
        EventMessage<?> msg = GenericEventMessage.asEventMessage(event);
        return new GenericEventMessage<>(msg.getIdentifier(), msg.getPayload(), msg.getMetaData(), currentTime());
    }

    @Override
    public Instant currentTime() {
        return eventScheduler.getCurrentDateTime();
    }

    @Override
    public <I> I registerCommandGateway(Class<I> gatewayInterface) {
        return registerCommandGateway(gatewayInterface, null);
    }

    @Override
    public <I> I registerCommandGateway(Class<I> gatewayInterface, final I stubImplementation) {
        CommandGatewayFactory factory = new StubAwareCommandGatewayFactory(stubImplementation,
                                                                          SagaTestFixture.this.commandBus);
        final I gateway = factory.createGateway(gatewayInterface);
        registerResource(gateway);
        return gateway;
    }

    @Override
    public FixtureConfiguration registerFieldFilter(FieldFilter fieldFilter) {
        this.fieldFilters.add(fieldFilter);
        return this;
    }

    @Override
    public FixtureConfiguration registerIgnoredField(Class<?> declaringClass, String fieldName) {
        return registerFieldFilter(new IgnoreField(declaringClass, fieldName));
    }

    /**
     * Returns the publisher for the given aggregate identifier, lazily creating and caching it so that
     * repeated calls for the same aggregate share one monotonically increasing sequence number.
     */
    private AggregateEventPublisherImpl getPublisherFor(String aggregateIdentifier) {
        // computeIfAbsent replaces the original containsKey/put/get triple with a single lookup.
        return aggregatePublishers.computeIfAbsent(aggregateIdentifier,
                                                   id -> new AggregateEventPublisherImpl(id));
    }

    /**
     * CommandGatewayFactory that is aware of a stub implementation that defines the behavior for the callback.
     */
    private static class StubAwareCommandGatewayFactory extends CommandGatewayFactory {

        private final Object stubImplementation;

        public StubAwareCommandGatewayFactory(Object stubImplementation, RecordingCommandBus commandBus) {
            super(commandBus);
            this.stubImplementation = stubImplementation;
        }

        @Override
        protected <R> InvocationHandler<R> wrapToWaitForResult(final InvocationHandler<CompletableFuture<R>> delegate) {
            return new ReturnResultFromStub<>(delegate, stubImplementation);
        }

        @Override
        protected <R> InvocationHandler<R> wrapToReturnWithFixedTimeout(
                InvocationHandler<CompletableFuture<R>> delegate,
                long timeout, TimeUnit timeUnit) {
            // The timeout is irrelevant in a test fixture; the stub (if any) answers immediately.
            return new ReturnResultFromStub<>(delegate, stubImplementation);
        }

        @Override
        protected <R> InvocationHandler<R> wrapToReturnWithTimeoutInArguments(
                InvocationHandler<CompletableFuture<R>> delegate,
                int timeoutIndex, int timeUnitIndex) {
            return new ReturnResultFromStub<>(delegate, stubImplementation);
        }
    }

    /**
     * Invocation handler that uses a stub implementation (of not {@code null}) to define the value to return from
     * a handler invocation. If none is provided, the returned future is checked for a value. If that future is not
     * "done" (for example because no callback behavior was provided), it returns {@code null}.
     *
     * @param <R> The return type of the method invocation
     */
    private static class ReturnResultFromStub<R> implements CommandGatewayFactory.InvocationHandler<R> {

        private final CommandGatewayFactory.InvocationHandler<CompletableFuture<R>> dispatcher;
        private final Object stubGateway;

        public ReturnResultFromStub(CommandGatewayFactory.InvocationHandler<CompletableFuture<R>> dispatcher,
                                    Object stubGateway) {
            this.dispatcher = dispatcher;
            this.stubGateway = stubGateway;
        }

        @SuppressWarnings("unchecked")
        @Override
        public R invoke(Object proxy, Method invokedMethod, Object[] args) throws Exception {
            // Always dispatch first (side effects on the recording bus), then let the stub
            // override the answer when one was provided.
            Future<R> future = dispatcher.invoke(proxy, invokedMethod, args);
            if (stubGateway != null) {
                return (R) invokedMethod.invoke(stubGateway, args);
            }
            if (future.isDone()) {
                return future.get();
            }
            return null;
        }
    }

    /** Publishes domain events for one stub aggregate, assigning consecutive sequence numbers. */
    private class AggregateEventPublisherImpl implements GivenAggregateEventPublisher, WhenAggregateEventPublisher {

        private final String aggregateIdentifier, type;
        private int sequenceNumber = 0;

        public AggregateEventPublisherImpl(String aggregateIdentifier) {
            this.aggregateIdentifier = aggregateIdentifier;
            this.type = "Stub_" + aggregateIdentifier;
        }

        @Override
        public ContinuedGivenState published(Object... events) {
            publish(events);
            return SagaTestFixture.this;
        }

        @Override
        public FixtureExecutionResult publishes(Object event) {
            publish(event);
            return fixtureExecutionResult;
        }

        /** Wraps each event as a domain event of this stub aggregate and hands it to the saga manager. */
        private void publish(Object... events) {
            for (Object event : events) {
                EventMessage<?> eventMessage = GenericEventMessage.asEventMessage(event);
                handleInSaga(new GenericDomainEventMessage<>(type, aggregateIdentifier,
                                                             sequenceNumber++,
                                                             eventMessage.getPayload(),
                                                             eventMessage.getMetaData(),
                                                             eventMessage.getIdentifier(),
                                                             currentTime()));
            }
        }
    }

    /** Field filter that accepts a field only if every registered filter accepts it. */
    private class MutableFieldFilter implements FieldFilter {

        private final List<FieldFilter> filters = new ArrayList<>();

        @Override
        public boolean accept(Field field) {
            for (FieldFilter filter : filters) {
                if (!filter.accept(field)) {
                    return false;
                }
            }
            return true;
        }

        public void add(FieldFilter fieldFilter) {
            filters.add(fieldFilter);
        }
    }

    /**
     * Resource injector that, after injection, asserts (unless disabled) that every field holding a
     * registered resource is marked {@code transient}, since sagas may be serialized.
     */
    private class TransienceValidatingResourceInjector extends AutowiredResourceInjector {

        public TransienceValidatingResourceInjector() {
            super(registeredResources);
        }

        @Override
        public void injectResources(Object saga) {
            super.injectResources(saga);
            if (transienceCheckEnabled) {
                StreamSupport.stream(fieldsOf(saga.getClass()).spliterator(), false)
                             .filter(f -> !Modifier.isTransient(f.getModifiers()))
                             .filter(f -> registeredResources.contains(ReflectionUtils.getFieldValue(f, saga)))
                             .findFirst()
                             .ifPresent(field -> {
                                 throw new AssertionError(format("Field %s.%s is injected with a resource, " +
                                                                         "but it doesn't have the 'transient' modifier.\n" +
                                                                         "Mark field as 'transient' or disable this check using:\n" +
                                                                         "fixture.withTransienceCheckDisabled()",
                                                                 field.getDeclaringClass(), field.getName()));
                             });
            }
        }
    }
}
| |
/*
* Copyright 2017 StreamSets Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.streamsets.lib.security.http;
import com.streamsets.datacollector.util.Configuration;
import org.junit.Assert;
import org.junit.Test;
import org.mockito.Mockito;
import java.net.URLEncoder;
import java.util.Collections;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CountDownLatch;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.when;
/**
 * Unit tests for the principal-caching behavior of {@code AbstractSSOService}.
 * {@code ForTestSSOService} rejects every token by default; individual tests use Mockito
 * spies with {@code doReturn(...)} stubs to simulate tokens the security service accepts.
 * NOTE: the Mockito verify/reset sequences below are order-sensitive by design.
 */
public class TestAbstractSSOService {
  // Minimal concrete subclass: both remote validations reject unless a test stubs them.
  class ForTestSSOService extends AbstractSSOService {
    @Override
    public void register(Map<String, String> attributes) {
      // no-op: registration is irrelevant to these tests
    }
    @Override
    protected SSOPrincipal validateUserTokenWithSecurityService(String authToken) throws ForbiddenException {
      // default behavior: every user token is rejected unless Mockito overrides this
      throw new ForbiddenException(Collections.emptyMap());
    }
    @Override
    protected SSOPrincipal validateAppTokenWithSecurityService(String authToken, String componentId)
        throws ForbiddenException {
      // default behavior: every app token is rejected unless Mockito overrides this
      throw new ForbiddenException(Collections.emptyMap());
    }
  }
  // setConfiguration must (re)initialize the caches with the configured validation frequency
  // (default, then an explicit 30s override) and expose login/logout URLs as plain properties.
  @Test
  public void testConfiguration() {
    ForTestSSOService service = Mockito.spy(new ForTestSSOService());
    Configuration conf = new Configuration();
    service.setConfiguration(conf);
    Mockito
        .verify(service)
        .initializePrincipalCaches(eq(AbstractSSOService.SECURITY_SERVICE_VALIDATE_AUTH_TOKEN_FREQ_DEFAULT *
            1000));
    conf.set(AbstractSSOService.SECURITY_SERVICE_VALIDATE_AUTH_TOKEN_FREQ_CONFIG, 30);
    service.setConfiguration(conf);
    Mockito.verify(service).initializePrincipalCaches(eq(30 * 1000L));
    Assert.assertNotNull(service.getUserPrincipalCache());
    Assert.assertNotNull(service.getAppPrincipalCache());
    service.setLoginPageUrl("http://foo");
    Assert.assertEquals("http://foo", service.getLoginPageUrl());
    service.setLogoutUrl("http://bar");
    Assert.assertEquals("http://bar", service.getLogoutUrl());
  }
  // The redirect URL carries the URL-encoded requested URL; on a repeated redirect an extra
  // marker parameter is appended.
  @Test
  public void testCreateRedirectToLoginUrl() throws Exception {
    ForTestSSOService service = new ForTestSSOService();
    service.setConfiguration(new Configuration());
    service.setLoginPageUrl("http://foo");
    String initialRedirUrl =
        "http://foo" + "?" + SSOConstants.REQUESTED_URL_PARAM + "=" + URLEncoder.encode("http://bar", "UTF-8");
    String repeatedRedirUrl = "http://foo" +
        "?" +
        SSOConstants.REQUESTED_URL_PARAM +
        "=" +
        URLEncoder.encode("http://bar", "UTF-8") +
        "&" +
        SSOConstants.REPEATED_REDIRECT_PARAM +
        "=";
    Assert.assertEquals(initialRedirUrl, service.createRedirectToLoginUrl("http://bar", false));
    Assert.assertEquals(repeatedRedirUrl, service.createRedirectToLoginUrl("http://bar", true));
  }
  // Both invalid and valid user-token results are cached: the second lookup must not hit the
  // security service. A 1 ms cache demonstrates expiry forces revalidation.
  @Test
  public void testValidateUserToken() throws Exception {
    ForTestSSOService service = Mockito.spy(new ForTestSSOService());
    service.setConfiguration(new Configuration()); //60 sec cache
    //invalid, unknown
    Assert.assertNull(service.validateUserToken("x"));
    Mockito.verify(service).validateUserTokenWithSecurityService(eq("x"));
    //invalid, cached
    Mockito.reset(service);
    Assert.assertNull(service.validateUserToken("x"));
    Mockito.verify(service, Mockito.never()).validateUserTokenWithSecurityService(eq("x"));
    //valid, unknown
    SSOPrincipal principal = Mockito.mock(SSOPrincipal.class);
    Mockito.doReturn(principal).when(service).validateUserTokenWithSecurityService(eq("a"));
    Assert.assertEquals(principal, service.validateUserToken("a"));
    Mockito.verify(service).validateUserTokenWithSecurityService(eq("a"));
    //valid, cached
    Mockito.reset(service);
    Assert.assertEquals(principal, service.validateUserToken("a"));
    Mockito.verify(service, Mockito.never()).validateUserTokenWithSecurityService(eq("a"));
    service.initializePrincipalCaches(1); //1 millisec cache
    //valid, unknown
    Mockito.doReturn(principal).when(service).validateUserTokenWithSecurityService(eq("b"));
    Assert.assertEquals(principal, service.validateUserToken("b"));
    Mockito.verify(service).validateUserTokenWithSecurityService(eq("b"));
    // cache expired
    Thread.sleep(2);
    //valid, unknown
    Mockito.reset(service);
    Mockito.doReturn(principal).when(service).validateUserTokenWithSecurityService(eq("b"));
    Assert.assertEquals(principal, service.validateUserToken("b"));
    Mockito.verify(service).validateUserTokenWithSecurityService(eq("b"));
  }
  // Invalidation must make a token fail validation whether it was unknown, known-valid but
  // uncached, or already cached as valid.
  @Test
  public void testInvalidateUserToken() throws Exception {
    ForTestSSOService service = Mockito.spy(new ForTestSSOService());
    service.setConfiguration(new Configuration()); // 60 sec cache
    // unknown
    service.invalidateUserToken("x");
    Assert.assertNull(service.validateUserToken("x"));
    // valid, unknown
    Mockito
        .doReturn(Mockito.mock(SSOPrincipal.class))
        .when(service)
        .validateUserTokenWithSecurityService(Mockito.eq("y"));
    service.invalidateUserToken("y");
    Assert.assertNull(service.validateUserToken("y"));
    // valid, cached
    Mockito
        .doReturn(Mockito.mock(SSOPrincipal.class))
        .when(service).validateUserTokenWithSecurityService(eq("z"));
    Assert.assertNotNull(service.validateUserToken("z"));
    service.invalidateUserToken("z");
    Assert.assertNull(service.validateUserToken("z"));
  }
  // App-token caching mirrors user tokens, with the extra rule that the cached principal's
  // component ID must match the requested one.
  @Test
  public void testValidateAppToken() throws Exception {
    ForTestSSOService service = Mockito.spy(new ForTestSSOService());
    service.setConfiguration(new Configuration()); //60 sec cache
    //invalid, unknown
    Assert.assertNull(service.validateAppToken("x", "c"));
    Mockito.verify(service).validateAppTokenWithSecurityService(eq("x"), eq("c"));
    //invalid, cached
    Mockito.reset(service);
    Assert.assertNull(service.validateAppToken("x", "c"));
    Mockito.verify(service, Mockito.never()).validateAppTokenWithSecurityService(eq("x"), eq("c"));
    //valid, unknown
    SSOPrincipal principal = Mockito.mock(SSOPrincipal.class);
    when(principal.getPrincipalId()).thenReturn("c");
    Mockito.doReturn(principal).when(service).validateAppTokenWithSecurityService(Mockito.eq("a"), Mockito.eq("c"));
    Assert.assertEquals(principal, service.validateAppToken("a", "c"));
    Mockito.verify(service).validateAppTokenWithSecurityService(eq("a"), eq("c"));
    //valid, cached
    Mockito.reset(service);
    Assert.assertEquals(principal, service.validateAppToken("a", "c"));
    Mockito.verify(service, Mockito.never()).validateAppTokenWithSecurityService(eq("a"), eq("c"));
    //valid, incorrect component ID
    Assert.assertNull(service.validateAppToken("x", "cc"));
    service.initializePrincipalCaches(1); //1 millisec cache
    //valid, unknown
    Mockito.doReturn(principal).when(service).validateAppTokenWithSecurityService(Mockito.eq("b"), Mockito.eq("c"));
    Assert.assertEquals(principal, service.validateAppToken("b", "c"));
    Mockito.verify(service).validateAppTokenWithSecurityService(eq("b"), eq("c"));
    // cache expired
    Thread.sleep(2);
    //valid, unknown
    Mockito.reset(service);
    Mockito.doReturn(principal).when(service).validateAppTokenWithSecurityService(Mockito.eq("b"), Mockito.eq("c"));
    Assert.assertEquals(principal, service.validateAppToken("b", "c"));
    Mockito.verify(service).validateAppTokenWithSecurityService(eq("b"), eq("c"));
  }
  // App-token invalidation, analogous to testInvalidateUserToken.
  @Test
  public void testInvalidateAppToken() throws Exception {
    ForTestSSOService service = Mockito.spy(new ForTestSSOService());
    service.setConfiguration(new Configuration()); // 60 sec cache
    // unknown
    service.invalidateAppToken("x");
    Assert.assertNull(service.validateAppToken("x", "c"));
    // valid, unknown
    Mockito
        .doReturn(Mockito.mock(SSOPrincipal.class))
        .when(service)
        .validateAppTokenWithSecurityService(Mockito.eq("y"), Mockito.eq("c"));
    service.invalidateAppToken("y");
    Assert.assertNull(service.validateAppToken("y", "c"));
    // valid, cached
    SSOPrincipal principal = Mockito.mock(SSOPrincipal.class);
    Mockito.when(principal.getPrincipalId()).thenReturn("c");
    Mockito.doReturn(principal).when(service).validateAppTokenWithSecurityService(Mockito.eq("z"), Mockito.eq("c"));
    Assert.assertNotNull(service.validateAppToken("z", "c"));
    service.invalidateAppToken("z");
    Assert.assertNull(service.validateAppToken("z", "c"));
  }
  // A cache hit returns the cached principal without consulting the validation callable.
  @Test
  public void testValidateTokenValidInCache() throws Exception {
    AbstractSSOService service = Mockito.spy(new ForTestSSOService());
    final SSOPrincipal principal = Mockito.mock(SSOPrincipal.class);
    PrincipalCache cache = Mockito.mock(PrincipalCache.class);
    Mockito.when(cache.get(eq("t"))).thenReturn(principal);
    Assert.assertEquals(principal, service.validate(cache, null, "t", "c", "x"));
    Mockito.verify(cache, Mockito.times(1)).get(eq("t"));
  }
  // A token already marked invalid in the cache short-circuits to null.
  @Test
  public void testValidateTokenInvalid() throws Exception {
    AbstractSSOService service = Mockito.spy(new ForTestSSOService());
    PrincipalCache cache = Mockito.mock(PrincipalCache.class);
    Mockito.when(cache.get(eq("t"))).thenReturn(null);
    Mockito.when(cache.isInvalid(eq("t"))).thenReturn(true);
    Assert.assertNull(service.validate(cache, null, "t", "c", "x"));
    Mockito.verify(cache, Mockito.times(1)).get(eq("t"));
  }
  // Cache miss + successful callable: the principal is cached, and the per-token lock entry
  // is created and removed exactly once.
  @Test
  public void testValidateTokenValidNotInCache() throws Exception {
    AbstractSSOService service = Mockito.spy(new ForTestSSOService());
    final SSOPrincipal principal = Mockito.mock(SSOPrincipal.class);
    // token not in cache, no lock, valid token
    PrincipalCache cache = Mockito.mock(PrincipalCache.class);
    Mockito.when(cache.get(eq("t"))).thenReturn(null);
    Mockito.when(cache.isInvalid(eq("t"))).thenReturn(false);
    ConcurrentMap<String, Object> lockMap = service.getLockMap();
    lockMap = Mockito.spy(lockMap);
    Mockito.doReturn(lockMap).when(service).getLockMap();
    Callable<SSOPrincipal> callable = new Callable<SSOPrincipal>() {
      @Override
      public SSOPrincipal call() throws Exception {
        return principal;
      }
    };
    Assert.assertEquals(principal, service.validate(cache, callable, "t", "c", "x"));
    Mockito.verify(cache, Mockito.times(2)).get(eq("t"));
    Mockito.verify(cache, Mockito.times(1)).isInvalid(eq("t"));
    Mockito.verify(cache, Mockito.times(1)).put(eq("t"), eq(principal));
    Mockito.verify(cache, Mockito.times(0)).invalidate(eq("t"));
    Mockito.verify(lockMap, Mockito.times(1)).putIfAbsent(eq("t"), Mockito.any());
    Mockito.verify(lockMap, Mockito.times(1)).remove(eq("t"));
  }
  // Cache miss + ForbiddenException from the callable: the token is recorded as invalid and
  // nothing is cached as a principal.
  @Test
  public void testValidateTokenInvalidNotInCache() throws Exception {
    AbstractSSOService service = Mockito.spy(new ForTestSSOService());
    PrincipalCache cache = Mockito.mock(PrincipalCache.class);
    Mockito.when(cache.get(eq("t"))).thenReturn(null);
    Mockito.when(cache.isInvalid(eq("t"))).thenReturn(false);
    ConcurrentMap<String, Object> lockMap = service.getLockMap();
    lockMap = Mockito.spy(lockMap);
    Mockito.doReturn(lockMap).when(service).getLockMap();
    Callable<SSOPrincipal> callable = new Callable<SSOPrincipal>() {
      @Override
      public SSOPrincipal call() throws Exception {
        throw new ForbiddenException(Collections.emptyMap());
      }
    };
    Assert.assertNull(service.validate(cache, callable, "t", "c", "x"));
    Mockito.verify(cache, Mockito.times(2)).get(eq("t"));
    Mockito.verify(cache, Mockito.times(1)).isInvalid(eq("t"));
    Mockito.verify(cache, Mockito.times(0)).put(eq("t"), Mockito.any(SSOPrincipal.class));
    Mockito.verify(cache, Mockito.times(1)).invalidate(eq("t"));
    Mockito.verify(lockMap, Mockito.times(1)).putIfAbsent(eq("t"), Mockito.any());
    Mockito.verify(lockMap, Mockito.times(1)).remove(eq("t"));
  }
  // Concurrency: while one thread's validation callable is in flight (goThruCallable), other
  // threads validating the same token must wait and then reuse its result — their callable
  // (neverCalledCallable) would fail the test if ever invoked.
  @Test
  public void testValidateSerialization() throws Exception {
    final AbstractSSOService service = Mockito.spy(new ForTestSSOService());
    final CountDownLatch ready = new CountDownLatch(2);
    final CountDownLatch done = new CountDownLatch(1);
    final PrincipalCache cache = new PrincipalCache(1000, 1000);
    final SSOPrincipal principal = Mockito.mock(SSOPrincipal.class);
    final Callable<SSOPrincipal> goThruCallable = new Callable<SSOPrincipal>() {
      @Override
      public SSOPrincipal call() throws Exception {
        // signal the waiting threads, then linger so they pile up on the same token
        done.countDown();
        Thread.sleep(100);
        return principal;
      }
    };
    final Callable<SSOPrincipal> neverCalledCallable = new Callable<SSOPrincipal>() {
      @Override
      public SSOPrincipal call() throws Exception {
        Assert.fail();
        return null;
      }
    };
    Thread t2 = new Thread() {
      @Override
      public void run() {
        ready.countDown();
        try {
          done.await();
        } catch (InterruptedException ex) {
        }
        Assert.assertEquals(principal, service.validate(cache, neverCalledCallable, "t", "c", "x"));
      }
    };
    t2.start();
    Thread t3 = new Thread() {
      @Override
      public void run() {
        ready.countDown();
        try {
          done.await();
        } catch (InterruptedException ex) {
        }
        Assert.assertEquals(principal, service.validate(cache, neverCalledCallable, "t", "c", "x"));
      }
    };
    t3.start();
    ready.await();
    Assert.assertEquals(principal, service.validate(cache, goThruCallable, "t", "c", "x"));
    done.await();
    t2.join();
    t3.join();
  }
  // clearCaches must clear both the user and the app principal caches.
  @Test
  public void testClearCaches() throws Exception {
    AbstractSSOService service = Mockito.spy(new ForTestSSOService());
    PrincipalCache userCache = Mockito.mock(PrincipalCache.class);
    PrincipalCache appCache = Mockito.mock(PrincipalCache.class);
    Mockito.doReturn(userCache).when(service).getUserPrincipalCache();
    Mockito.doReturn(appCache).when(service).getAppPrincipalCache();
    Mockito.verify(userCache, Mockito.never()).clear();
    Mockito.verify(appCache, Mockito.never()).clear();
    service.clearCaches();
    Mockito.verify(userCache, Mockito.times(1)).clear();
    Mockito.verify(appCache, Mockito.times(1)).clear();
  }
}
| |
/*
* The MIT License
*
* Copyright 2017 sebastian.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package gens.ca.gameoflife1;
import general.GenState;
import general.GenModel;
import javafx.application.Platform;
import javafx.scene.canvas.Canvas;
import javafx.scene.canvas.GraphicsContext;
import javafx.scene.paint.Color;
/**
*
* @author sebastian
*/
public class GameOfLifeGenModel extends GenModel {
    private int width; // Image width in px (numCells * cellSize)
    private int height; // Image height in px (numCells * cellSize)
    private int numCells; // Number of cells per row
    private int numGens; // Number of generations, i.e. number of images created
    private int cellSize; // Cell size in px
    private long step; // period in which a single image is displayed (in ms)
    private boolean randomInit; // true: random initial generation; false: one supplied via setInitGen
    private boolean edgesWrapped; // presumably toroidal edge wrapping — consumer not visible in this chunk, confirm
    private double populationDensity; // probability in [0,1] that a cell starts alive under random init
    private boolean waitForCanvasDisplayedInRootView; // NOTE(review): never referenced in the visible code — confirm use
    private boolean[][] currentGen; // current cell states; indexing convention not established in this chunk
public GameOfLifeGenModel() {
numCells = 400;
numGens = 20;
cellSize = 1;
step = 500;
width = numCells * cellSize;
height = numCells * cellSize;
randomInit = true;
edgesWrapped = false;
populationDensity = 0.2;
}
    // Human-readable name of this generator.
    @Override
    public String getGenName() {
        return "Game of Life Generator";
    }
// Cell size is restricted to certain range
// Furthermore, cell size is correlated to the number of cells
// (thus the image does not exceed 4000 px in each dimension)
public void setCellSize(int cellSize) {
if (cellSize < 1 || cellSize > 50) {
throw new IllegalArgumentException("Cell Size requires an integer value between 1 and 50.");
} else if (cellSize * numCells > 4000) {
throw new IllegalArgumentException("The product of cell size and number of cells cannot exceed 4000.");
} else {
this.cellSize = cellSize;
width = numCells * cellSize;
height = numCells * cellSize;
}
}
    // Cell size in px.
    public int getCellSize() {
        return cellSize;
    }
    // Population density used for random initialization.
    public double getPopulationDensity() {
        return populationDensity;
    }
    // Whether grid edges are wrapped.
    public boolean getEdgesWrapped() {
        return edgesWrapped;
    }
    // Whether the initial generation is randomized.
    public boolean getRandomInit() {
        return randomInit;
    }
    // Image width in px.
    public int getWidth() {
        return width;
    }
    // Image height in px.
    public int getHeight() {
        return height;
    }
    // Number of cells per row.
    public int getCells() {
        return numCells;
    }
    // Number of generations to compute.
    public int getGens() {
        return numGens;
    }
    // Display period per image in ms.
    public long getStep() {
        return step;
    }
// Step length is restricted to certain range
public void setStep(long step) {
if (step < 50 || step > 5000) {
throw new IllegalArgumentException("Step requires an integer value between 50 and 5000.");
} else {
this.step = step;
}
}
// Population density is a real number in [0,1]
public void setPopulationDensity(double density) {
if (density >= 0 && density <= 1) {
populationDensity = density;
} else {
throw new IllegalArgumentException("Population Density requires a real value between 0.0 and 1.0.");
}
}
public void setEdgesWrapped(boolean edgesWrapped) {
this.edgesWrapped = edgesWrapped;
}
public void setRandomInit(boolean rndInit) {
randomInit = rndInit;
}
// Number of cells per row is restricted to certain range
// Furthermore, number of cells is correlated to the cell size
// (thus the image does not exceed 4000 px in each dimension)
public void setCells(int value) {
if (value < 1 || value > 4000) {
throw new IllegalArgumentException("Cells requires an integer value between 1 and 4000.");
} else if (cellSize * value > 4000) {
throw new IllegalArgumentException("The product of cell size and number of cells cannot exceed 4000.");
} else {
numCells = value;
width = numCells * cellSize;
height = numCells * cellSize;
}
}
// Number of generations is restricted to certain range
public void setGens(int value) {
if (value < 1 || value > 10000) {
throw new IllegalArgumentException("Generations requires an integer value between 1 and 10000.");
} else {
numGens = value;
}
}
// Initial generation can be delivered this way
public void setInitGen(boolean[][] initGen) {
this.currentGen = initGen;
}
// Calculates the initial generation in case random initial generation is chosen
// Calculation of random initial generation correlates to population density
private boolean[][] calcInitGen() {
boolean[][] nextGen;
nextGen = new boolean[numCells][numCells];
if (randomInit) {
for (int i = 0; i < numCells; i++) {
for (int j = 0; j < numCells; j++) {
double rnd = Math.random();
nextGen[i][j] = rnd <= populationDensity;
}
}
} else {
nextGen = currentGen;
}
return nextGen;
}
// Calculation of next generation of cells
// Range of index variables depends on choice regarding wrapping of edges
private boolean[][] calcNextGen() {
if (currentGen == null) {
return calcInitGen();
}
boolean[][] nextGen;
nextGen = new boolean[numCells][numCells];
int start;
int stop;
if (edgesWrapped) {
start = 0;
stop = numCells;
} else {
start = 1;
stop = numCells - 1;
}
for (int i = start; i < stop; i++) {
for (int j = start; j < stop; j++) {
// Scan neighbourhood
int aliveNeighbours = 0;
for (int k = i - 1; k <= i + 1; k++) {
for (int l = j - 1; l <= j + 1; l++) {
int rowIndex;
int colIndex;
if (edgesWrapped) {
// Calculation of correct index (in case edge wrapping has been chosen)
if (k == -1 || k == numCells) {
rowIndex = (k + numCells) % numCells;
} else {
rowIndex = k;
}
if (l == -1 || l == numCells) {
colIndex = (l + numCells) % numCells;
} else {
colIndex = l;
}
} else {
rowIndex = k;
colIndex = l;
}
if (currentGen[rowIndex][colIndex]) {
aliveNeighbours++;
}
}
}
// Set next state of cell
if (currentGen[i][j]) {
aliveNeighbours--; // The cell itself does not count towards living neighbours
nextGen[i][j] = aliveNeighbours == 2 || aliveNeighbours == 3;
} else {
nextGen[i][j] = aliveNeighbours == 3;
}
}
}
return nextGen;
}
@Override
public void generate() {
if (randomInit) {
currentGen = null;
}
try {
for (int k = 0; k < numGens; k++) {
setGenState("Creating new canvas...");
canvas = new Canvas(width, height);
setGenState("Filling image background...");
GraphicsContext gc = canvas.getGraphicsContext2D();
gc.setFill(Color.WHITE);
gc.fillRect(0, 0, width, height);
// Check for interrupt
if (Thread.currentThread().isInterrupted()) {
return;
}
// Measure calculation time
long calcStartTime = System.nanoTime();
setGenState("Calculating generation " + (k+1) + "...");
currentGen = calcNextGen();
// Image is created from previously calculated current generation
gc.setFill(Color.BLACK);
for (int i = 0; i < currentGen.length; i++) {
for (int j = 0; j < currentGen.length; j++) {
if (currentGen[i][j] == true) {
gc.fillRect(j * cellSize, i * cellSize, cellSize, cellSize);
}
}
}
long calculationTime = System.nanoTime() - calcStartTime;
// Convert to ms
calculationTime = calculationTime / 1000000;
// Generation is delayed according to step length
long sleepTime = step - calculationTime;
if(sleepTime > 0)
Thread.sleep(sleepTime);
// In case the canvas has not been displayed yet,
// generation has to be delayed further
waitForCanvasIterationDisplayedInApp();
}
// Signal controller to enable input
setGenState(GenState.FINISHED_READY);
} catch (InterruptedException ex) {
//setGenState(GenState.FINISHED_READY);
return; // Generation is stopped if e.g. the generator window is closed
}
}
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.compiler.impl;
import com.intellij.CommonBundle;
import com.intellij.compiler.CompilerWorkspaceConfiguration;
import com.intellij.compiler.ModuleCompilerUtil;
import com.intellij.compiler.ModuleSourceSet;
import com.intellij.compiler.ProblemsView;
import com.intellij.compiler.progress.CompilerTask;
import com.intellij.compiler.server.BuildManager;
import com.intellij.compiler.server.DefaultMessageHandler;
import com.intellij.notification.Notification;
import com.intellij.notification.NotificationListener;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.compiler.*;
import com.intellij.openapi.compiler.ex.CompilerPathsEx;
import com.intellij.openapi.deployment.DeploymentUtil;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.module.EffectiveLanguageLevelUtil;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectBundle;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.roots.ModuleRootManager;
import com.intellij.openapi.roots.ui.configuration.CommonContentEntriesEditor;
import com.intellij.openapi.roots.ui.configuration.ProjectSettingsService;
import com.intellij.openapi.ui.MessageType;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VirtualFileManager;
import com.intellij.openapi.wm.*;
import com.intellij.packaging.artifacts.Artifact;
import com.intellij.packaging.impl.compiler.ArtifactCompilerUtil;
import com.intellij.packaging.impl.compiler.ArtifactsCompiler;
import com.intellij.pom.java.LanguageLevel;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.util.Chunk;
import com.intellij.util.StringBuilderSpinAllocator;
import com.intellij.util.ThrowableRunnable;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.HashMap;
import com.intellij.util.containers.MultiMap;
import com.intellij.util.messages.MessageBus;
import com.intellij.util.text.DateFormatUtil;
import gnu.trove.THashSet;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import org.jetbrains.jps.api.CmdlineProtoUtil;
import org.jetbrains.jps.api.CmdlineRemoteProto;
import org.jetbrains.jps.api.GlobalOptions;
import org.jetbrains.jps.api.TaskFuture;
import org.jetbrains.jps.model.java.JavaSourceRootType;
import javax.swing.*;
import javax.swing.event.HyperlinkEvent;
import java.lang.ref.WeakReference;
import java.util.*;
import java.util.concurrent.TimeUnit;
import static org.jetbrains.jps.api.CmdlineRemoteProto.Message.ControllerMessage.ParametersMessage.TargetTypeBuildScope;
public class CompileDriver {
private static final Logger LOG = Logger.getInstance("#com.intellij.compiler.impl.CompileDriver");
// Marks a scope whose compilation was triggered automatically (e.g. auto-make) rather than by the user.
private static final Key<Boolean> COMPILATION_STARTED_AUTOMATICALLY = Key.create("compilation_started_automatically");
// Holds the final build result the external build process reported for a compile context.
private static final Key<ExitStatus> COMPILE_SERVER_BUILD_STATUS = Key.create("COMPILE_SERVER_BUILD_STATUS");
// Build-duration threshold above which a completion balloon is shown (see notifyCompilationCompleted).
private static final long ONE_MINUTE_MS = 60L * 1000L;
private final Project myProject;
// Per-module output path caches; see getModuleOutputPath() for the performance rationale.
private final Map<Module, String> myModuleOutputPaths = new HashMap<>();
private final Map<Module, String> myModuleTestOutputPaths = new HashMap<>();
@SuppressWarnings("deprecation") private CompilerFilter myCompilerFilter = CompilerFilter.ALL;
/** Creates a compile driver operating on the given project. */
public CompileDriver(Project project) {
  myProject = project;
}
/** Sets the filter selecting which compilers participate; {@code null} resets to {@link CompilerFilter#ALL}. */
@SuppressWarnings("deprecation")
public void setCompilerFilter(CompilerFilter compilerFilter) {
  if (compilerFilter == null) {
    myCompilerFilter = CompilerFilter.ALL;
  }
  else {
    myCompilerFilter = compilerFilter;
  }
}
/** Performs a full rebuild of the whole project, reporting completion to {@code callback}. */
public void rebuild(CompileStatusNotification callback) {
  doRebuild(callback, null, new ProjectCompileScope(myProject));
}
/**
 * Performs an incremental build ("make") of {@code scope}.
 * If the compiler configuration is invalid, nothing is built and the callback
 * (if any) is notified immediately with an aborted result.
 */
public void make(CompileScope scope, CompileStatusNotification callback) {
  if (validateCompilerConfiguration(scope)) {
    startup(scope, false, false, callback, null);
  }
  else if (callback != null) { // FIX: guard against null callback, consistent with startup()
    callback.finished(true, 0, 0, DummyCompileContext.getInstance());
  }
}
/**
 * Checks whether everything in {@code scope} is up to date by asking the external
 * build process to run an up-to-date check (no files are actually compiled).
 *
 * @return {@code true} only if the build reported {@link ExitStatus#UP_TO_DATE}
 */
public boolean isUpToDate(CompileScope scope) {
  if (LOG.isDebugEnabled()) {
    LOG.debug("isUpToDate operation started");
  }
  final CompilerTask task = new CompilerTask(myProject, "Classes up-to-date check", true, false, false, isCompilationStartedAutomatically(scope));
  final CompileContextImpl compileContext = new CompileContextImpl(myProject, task, scope, true, false);
  final Ref<ExitStatus> result = new Ref<>();
  task.start(() -> {
    final ProgressIndicator indicator = compileContext.getProgressIndicator();
    if (indicator.isCanceled() || myProject.isDisposed()) {
      return;
    }
    try {
      // Delegate the actual check to the external build process (onlyCheckUpToDate = true)
      final TaskFuture future = compileInExternalProcess(compileContext, true);
      if (future != null) {
        // Poll instead of blocking so user cancellation can be forwarded promptly
        while (!future.waitFor(200L, TimeUnit.MILLISECONDS)) {
          if (indicator.isCanceled()) {
            future.cancel(false);
          }
        }
      }
    }
    catch (Throwable e) {
      LOG.error(e);
    }
    finally {
      // The message handler stores the build's exit status on the context
      result.set(COMPILE_SERVER_BUILD_STATUS.get(compileContext));
      if (!myProject.isDisposed()) {
        CompilerCacheManager.getInstance(myProject).flushCaches();
      }
    }
  }, null);
  if (LOG.isDebugEnabled()) {
    LOG.debug("isUpToDate operation finished");
  }
  // NOTE(review): assumes task.start() executed synchronously here; otherwise result is null -> false
  return ExitStatus.UP_TO_DATE.equals(result.get());
}
/**
 * Force-compiles {@code scope} (recompiles even up-to-date files).
 * If the compiler configuration is invalid, nothing is built and the callback
 * (if any) is notified immediately with an aborted result.
 */
public void compile(CompileScope scope, CompileStatusNotification callback) {
  if (validateCompilerConfiguration(scope)) {
    startup(scope, false, true, callback, null);
  }
  else if (callback != null) { // FIX: guard against null callback, consistent with startup()
    callback.finished(true, 0, 0, DummyCompileContext.getInstance());
  }
}
/**
 * Runs a full rebuild of {@code compileScope}, optionally showing {@code message}
 * in the compile context. If the compiler configuration is invalid, nothing is
 * built and the callback (if any) is notified immediately with an aborted result.
 */
private void doRebuild(CompileStatusNotification callback, CompilerMessage message, final CompileScope compileScope) {
  if (validateCompilerConfiguration(compileScope)) {
    startup(compileScope, true, false, callback, message);
  }
  else if (callback != null) { // FIX: guard against null callback, consistent with startup()
    callback.finished(true, 0, 0, DummyCompileContext.getInstance());
  }
}
/** Marks {@code scope} as started automatically (e.g. by auto-make) rather than by an explicit user action. */
public static void setCompilationStartedAutomatically(CompileScope scope) {
  //todo[nik] pass this option as a parameter to compile/make methods instead
  scope.putUserData(COMPILATION_STARTED_AUTOMATICALLY, Boolean.TRUE);
}
/** @return true if {@link #setCompilationStartedAutomatically(CompileScope)} was called on this scope */
private static boolean isCompilationStartedAutomatically(CompileScope scope) {
  return Boolean.TRUE.equals(scope.getUserData(COMPILATION_STARTED_AUTOMATICALLY));
}
/**
 * Determines the build-target scopes to send to the external build process:
 * explicit scopes from the compile scope if present, otherwise per-module or
 * all-modules scopes; provider-contributed scopes are merged in when no
 * explicit file paths are given.
 */
private List<TargetTypeBuildScope> getBuildScopes(@NotNull CompileContextImpl compileContext,
                                                  CompileScope scope,
                                                  Collection<String> paths) {
  final boolean forceBuild = !compileContext.isMake();
  final List<TargetTypeBuildScope> explicitScopes = CompileScopeUtil.getBaseScopeForExternalBuild(scope);
  List<TargetTypeBuildScope> scopes = new ArrayList<>();
  if (explicitScopes != null) {
    scopes.addAll(explicitScopes);
  }
  else if (compileContext.isRebuild() || CompileScopeUtil.allProjectModulesAffected(compileContext)) {
    scopes.addAll(CmdlineProtoUtil.createAllModulesScopes(forceBuild));
  }
  else {
    CompileScopeUtil.addScopesForModules(Arrays.asList(scope.getAffectedModules()), scopes, forceBuild);
  }
  return paths.isEmpty() ? mergeScopesFromProviders(scope, scopes, forceBuild) : scopes;
}
/**
 * Merges scopes contributed by every registered {@link BuildTargetScopeProvider}
 * into {@code scopes}; each provider is queried inside a read action and
 * contributes nothing once the project is disposed.
 */
private List<TargetTypeBuildScope> mergeScopesFromProviders(CompileScope scope,
                                                            List<TargetTypeBuildScope> scopes,
                                                            boolean forceBuild) {
  List<TargetTypeBuildScope> merged = scopes;
  for (BuildTargetScopeProvider provider : BuildTargetScopeProvider.EP_NAME.getExtensions()) {
    final List<TargetTypeBuildScope> fromProvider = ReadAction.compute(() -> {
      if (myProject.isDisposed()) {
        return Collections.emptyList();
      }
      return provider.getBuildTargetScopes(scope, myCompilerFilter, myProject, forceBuild);
    });
    merged = CompileScopeUtil.mergeScopes(merged, fromProvider);
  }
  return merged;
}
/**
 * Schedules a build session in the external build process (JPS) for the given
 * context and returns a future for it, or {@code null} if no build was scheduled.
 * The attached message handler translates build-process messages into compile
 * context messages, progress updates, artifact bookkeeping and the final
 * {@link ExitStatus} stored under {@link #COMPILE_SERVER_BUILD_STATUS}.
 *
 * @param onlyCheckUpToDate true to run an up-to-date check only (no compilation)
 */
@Nullable
private TaskFuture compileInExternalProcess(final @NotNull CompileContextImpl compileContext, final boolean onlyCheckUpToDate)
  throws Exception {
  final CompileScope scope = compileContext.getCompileScope();
  final Collection<String> paths = CompileScopeUtil.fetchFiles(compileContext);
  List<TargetTypeBuildScope> scopes = getBuildScopes(compileContext, scope, paths);
  // need to pass scope's user data to server
  final Map<String, String> builderParams;
  if (onlyCheckUpToDate) {
    builderParams = Collections.emptyMap();
  }
  else {
    // The protocol only carries strings, so every exported key/value is stringified
    final Map<Key, Object> exported = scope.exportUserData();
    if (!exported.isEmpty()) {
      builderParams = new HashMap<>();
      for (Map.Entry<Key, Object> entry : exported.entrySet()) {
        final String _key = entry.getKey().toString();
        final String _value = entry.getValue().toString();
        builderParams.put(_key, _value);
      }
    }
    else {
      builderParams = Collections.emptyMap();
    }
  }
  final MessageBus messageBus = myProject.getMessageBus();
  // Output-root -> artifact map is only needed when artifacts are part of the build
  final MultiMap<String, Artifact> outputToArtifact = ArtifactCompilerUtil.containsArtifacts(scopes) ? ArtifactCompilerUtil.createOutputToArtifactMap(myProject) : null;
  final BuildManager buildManager = BuildManager.getInstance();
  // A user-initiated build supersedes any pending auto-make for this project
  buildManager.cancelAutoMakeTasks(myProject);
  return buildManager.scheduleBuild(myProject, compileContext.isRebuild(), compileContext.isMake(), onlyCheckUpToDate, scopes, paths, builderParams, new DefaultMessageHandler(myProject) {
    @Override
    public void buildStarted(UUID sessionId) {
    }
    @Override
    public void sessionTerminated(final UUID sessionId) {
      if (compileContext.shouldUpdateProblemsView()) {
        final ProblemsView view = ProblemsView.SERVICE.getInstance(myProject);
        view.clearProgress();
        view.clearOldMessages(compileContext.getCompileScope(), compileContext.getSessionId());
      }
    }
    @Override
    public void handleFailure(UUID sessionId, CmdlineRemoteProto.Message.Failure failure) {
      compileContext.addMessage(CompilerMessageCategory.ERROR, failure.hasDescription()? failure.getDescription() : "", null, -1, -1);
      final String trace = failure.hasStacktrace()? failure.getStacktrace() : null;
      if (trace != null) {
        LOG.info(trace);
      }
      // A build-process failure always counts as an error result
      compileContext.putUserData(COMPILE_SERVER_BUILD_STATUS, ExitStatus.ERRORS);
    }
    @Override
    protected void handleCompileMessage(UUID sessionId, CmdlineRemoteProto.Message.BuilderMessage.CompileMessage message) {
      final CmdlineRemoteProto.Message.BuilderMessage.CompileMessage.Kind kind = message.getKind();
      //System.out.println(compilerMessage.getText());
      final String messageText = message.getText();
      if (kind == CmdlineRemoteProto.Message.BuilderMessage.CompileMessage.Kind.PROGRESS) {
        // Progress messages drive the indicator instead of being recorded
        final ProgressIndicator indicator = compileContext.getProgressIndicator();
        indicator.setText(messageText);
        if (message.hasDone()) {
          indicator.setFraction(message.getDone());
        }
      }
      else {
        // Map protocol message kind to a compiler message category (default: INFORMATION)
        final CompilerMessageCategory category = kind == CmdlineRemoteProto.Message.BuilderMessage.CompileMessage.Kind.ERROR ? CompilerMessageCategory.ERROR
                      : kind == CmdlineRemoteProto.Message.BuilderMessage.CompileMessage.Kind.WARNING ? CompilerMessageCategory.WARNING : CompilerMessageCategory.INFORMATION;
        String sourceFilePath = message.hasSourceFilePath() ? message.getSourceFilePath() : null;
        if (sourceFilePath != null) {
          sourceFilePath = FileUtil.toSystemIndependentName(sourceFilePath);
        }
        final long line = message.hasLine() ? message.getLine() : -1;
        final long column = message.hasColumn() ? message.getColumn() : -1;
        final String srcUrl = sourceFilePath != null ? VirtualFileManager.constructUrl(LocalFileSystem.PROTOCOL, sourceFilePath) : null;
        compileContext.addMessage(category, messageText, srcUrl, (int)line, (int)column);
      }
    }
    @Override
    protected void handleBuildEvent(UUID sessionId, CmdlineRemoteProto.Message.BuilderMessage.BuildEvent event) {
      final CmdlineRemoteProto.Message.BuilderMessage.BuildEvent.Type eventType = event.getEventType();
      switch (eventType) {
        case FILES_GENERATED:
          // Notify listeners about generated files and track which artifact outputs were written
          final List<CmdlineRemoteProto.Message.BuilderMessage.BuildEvent.GeneratedFile> generated = event.getGeneratedFilesList();
          final CompilationStatusListener publisher = !myProject.isDisposed()? messageBus.syncPublisher(CompilerTopics.COMPILATION_STATUS) : null;
          Set<String> writtenArtifactOutputPaths = outputToArtifact != null ? new THashSet<>(FileUtil.PATH_HASHING_STRATEGY) : null;
          for (CmdlineRemoteProto.Message.BuilderMessage.BuildEvent.GeneratedFile generatedFile : generated) {
            final String root = FileUtil.toSystemIndependentName(generatedFile.getOutputRoot());
            final String relativePath = FileUtil.toSystemIndependentName(generatedFile.getRelativePath());
            if (publisher != null) {
              publisher.fileGenerated(root, relativePath);
            }
            if (outputToArtifact != null) {
              Collection<Artifact> artifacts = outputToArtifact.get(root);
              if (!artifacts.isEmpty()) {
                for (Artifact artifact : artifacts) {
                  ArtifactsCompiler.addChangedArtifact(compileContext, artifact);
                }
                writtenArtifactOutputPaths.add(FileUtil.toSystemDependentName(DeploymentUtil.appendToPath(root, relativePath)));
              }
            }
          }
          if (writtenArtifactOutputPaths != null && !writtenArtifactOutputPaths.isEmpty()) {
            ArtifactsCompiler.addWrittenPaths(compileContext, writtenArtifactOutputPaths);
          }
          break;
        case BUILD_COMPLETED:
          // Record the final exit status; putUserDataIfAbsent keeps an earlier ERRORS status from handleFailure
          ExitStatus status = ExitStatus.SUCCESS;
          if (event.hasCompletionStatus()) {
            final CmdlineRemoteProto.Message.BuilderMessage.BuildEvent.Status completionStatus = event.getCompletionStatus();
            switch (completionStatus) {
              case CANCELED:
                status = ExitStatus.CANCELLED;
                break;
              case ERRORS:
                status = ExitStatus.ERRORS;
                break;
              case SUCCESS:
                status = ExitStatus.SUCCESS;
                break;
              case UP_TO_DATE:
                status = ExitStatus.UP_TO_DATE;
                break;
            }
          }
          compileContext.putUserDataIfAbsent(COMPILE_SERVER_BUILD_STATUS, status);
          break;
        case CUSTOM_BUILDER_MESSAGE:
          // Surface JPS "unprocessed FS changes" notices as informational messages
          if (event.hasCustomBuilderMessage()) {
            final CmdlineRemoteProto.Message.BuilderMessage.BuildEvent.CustomBuilderMessage message = event.getCustomBuilderMessage();
            if (GlobalOptions.JPS_SYSTEM_BUILDER_ID.equals(message.getBuilderId()) && GlobalOptions.JPS_UNPROCESSED_FS_CHANGES_MESSAGE_ID.equals(message.getMessageType())) {
              final String text = message.getMessageText();
              if (!StringUtil.isEmpty(text)) {
                compileContext.addMessage(CompilerMessageCategory.INFORMATION, text, null, -1, -1);
              }
            }
          }
          break;
      }
    }
  });
}
/**
 * Common entry point for make/compile/rebuild: saves project state, creates the
 * compiler progress task and runs the external build, executing before/after
 * compile tasks around it. Must be called on the EDT.
 *
 * @param scope        what to build
 * @param isRebuild    true for a full rebuild (build-system caches are cleared first)
 * @param forceCompile true to recompile even up-to-date files ("compile" vs. "make")
 * @param callback     notified when the session finishes; may be null
 * @param message      optional message added to the compile context before building
 */
private void startup(final CompileScope scope,
                     final boolean isRebuild,
                     final boolean forceCompile,
                     final CompileStatusNotification callback,
                     final CompilerMessage message) {
  ApplicationManager.getApplication().assertIsDispatchThread();
  final String contentName = CompilerBundle.message(forceCompile ? "compiler.content.name.compile" : "compiler.content.name.make");
  final boolean isUnitTestMode = ApplicationManager.getApplication().isUnitTestMode();
  final CompilerTask compileTask = new CompilerTask(myProject, contentName, isUnitTestMode, true, true, isCompilationStartedAutomatically(scope));
  StatusBar.Info.set("", myProject, "Compiler");
  // ensure the project model seen by build process is up-to-date
  myProject.save();
  if (!isUnitTestMode) {
    ApplicationManager.getApplication().saveSettings();
  }
  PsiDocumentManager.getInstance(myProject).commitAllDocuments();
  FileDocumentManager.getInstance().saveAllDocuments();
  final CompileContextImpl compileContext = new CompileContextImpl(myProject, compileTask, scope, !isRebuild && !forceCompile, isRebuild);
  final Runnable compileWork = () -> {
    final ProgressIndicator indicator = compileContext.getProgressIndicator();
    if (indicator.isCanceled() || myProject.isDisposed()) {
      if (callback != null) {
        callback.finished(true, 0, 0, compileContext);
      }
      return;
    }
    try {
      LOG.info("COMPILATION STARTED (BUILD PROCESS)");
      if (message != null) {
        compileContext.addMessage(message);
      }
      if (isRebuild) {
        CompilerUtil.runInContext(compileContext, "Clearing build system data...",
                                  (ThrowableRunnable<Throwable>)() -> CompilerCacheManager.getInstance(myProject).clearCaches(compileContext));
      }
      // Run "before" tasks; abort if any vetoed or already produced errors
      final boolean beforeTasksOk = executeCompileTasks(compileContext, true);
      final int errorCount = compileContext.getMessageCount(CompilerMessageCategory.ERROR);
      if (!beforeTasksOk || errorCount > 0) {
        COMPILE_SERVER_BUILD_STATUS.set(compileContext, errorCount > 0 ? ExitStatus.ERRORS : ExitStatus.CANCELLED);
        return;
      }
      final TaskFuture future = compileInExternalProcess(compileContext, false);
      if (future != null) {
        // Poll so user cancellation can be forwarded to the build process promptly
        while (!future.waitFor(200L, TimeUnit.MILLISECONDS)) {
          if (indicator.isCanceled()) {
            future.cancel(false);
          }
        }
        if (!executeCompileTasks(compileContext, false)) {
          COMPILE_SERVER_BUILD_STATUS.set(compileContext, ExitStatus.CANCELLED);
        }
        if (compileContext.getMessageCount(CompilerMessageCategory.ERROR) > 0) {
          COMPILE_SERVER_BUILD_STATUS.set(compileContext, ExitStatus.ERRORS);
        }
      }
    }
    catch (ProcessCanceledException ignored) {
      compileContext.putUserDataIfAbsent(COMPILE_SERVER_BUILD_STATUS, ExitStatus.CANCELLED);
    }
    catch (Throwable e) {
      LOG.error(e); // todo
    }
    finally {
      CompilerCacheManager.getInstance(myProject).flushCaches();
      // Always report the outcome to listeners/callback, whatever happened above
      final long duration = notifyCompilationCompleted(compileContext, callback, COMPILE_SERVER_BUILD_STATUS.get(compileContext));
      CompilerUtil.logDuration(
        "\tCOMPILATION FINISHED (BUILD PROCESS); Errors: " +
        compileContext.getMessageCount(CompilerMessageCategory.ERROR) +
        "; warnings: " +
        compileContext.getMessageCount(CompilerMessageCategory.WARNING),
        duration
      );
    }
  };
  // NOTE(review): the second argument appears to be a restart action invoked by the task
  // (re-enters startup, optionally downgrading a rebuild to a make) — confirm against CompilerTask.start
  compileTask.start(compileWork, () -> {
    if (isRebuild) {
      final int rv = Messages.showOkCancelDialog(
        myProject, "You are about to rebuild the whole project.\nRun 'Build Project' instead?", "Confirm Project Rebuild",
        "Build", "Rebuild", Messages.getQuestionIcon()
      );
      if (rv == Messages.OK /*yes, please, do run make*/) {
        startup(scope, false, false, callback, null);
        return;
      }
    }
    startup(scope, isRebuild, forceCompile, callback, message);
  });
}
/** Test-only accessor for the exit status the external build stored on the context; null if none. */
@Nullable @TestOnly
public static ExitStatus getExternalBuildExitStatus(CompileContext context) {
  return context.getUserData(COMPILE_SERVER_BUILD_STATUS);
}
/** @noinspection SSBasedInspection*/
/**
 * Refreshes the affected output roots in the VFS, then (on the EDT) reports the
 * result to {@code callback}, shows the completion notification/balloon and
 * appends a status line to the message view.
 *
 * @return the total compilation duration in ms
 */
private long notifyCompilationCompleted(final CompileContextImpl compileContext, final CompileStatusNotification callback, final ExitStatus _status) {
  final long duration = System.currentTimeMillis() - compileContext.getStartCompilationStamp();
  if (!myProject.isDisposed()) {
    // refresh on output roots is required in order for the order enumerator to see all roots via VFS
    final Module[] affectedModules = compileContext.getCompileScope().getAffectedModules();
    if (_status != ExitStatus.UP_TO_DATE && _status != ExitStatus.CANCELLED) {
      // have to refresh in case of errors too, because run configuration may be set to ignore errors
      Collection<String> affectedRoots = ContainerUtil.newHashSet(CompilerPathsEx.getOutputPaths(affectedModules));
      if (!affectedRoots.isEmpty()) {
        ProgressIndicator indicator = compileContext.getProgressIndicator();
        indicator.setText("Synchronizing output directories...");
        CompilerUtil.refreshOutputRoots(affectedRoots);
        indicator.setText("");
      }
    }
  }
  SwingUtilities.invokeLater(() -> {
    int errorCount = 0;
    int warningCount = 0;
    try {
      errorCount = compileContext.getMessageCount(CompilerMessageCategory.ERROR);
      warningCount = compileContext.getMessageCount(CompilerMessageCategory.WARNING);
    }
    finally {
      // The callback is notified even if message counting throws
      if (callback != null) {
        callback.finished(_status == ExitStatus.CANCELLED, errorCount, warningCount, compileContext);
      }
    }
    if (!myProject.isDisposed()) {
      final String statusMessage = createStatusMessage(_status, warningCount, errorCount, duration);
      final MessageType messageType = errorCount > 0 ? MessageType.ERROR : warningCount > 0 ? MessageType.WARNING : MessageType.INFO;
      // Long builds additionally get a balloon so completion is noticed
      if (duration > ONE_MINUTE_MS && CompilerWorkspaceConfiguration.getInstance(myProject).DISPLAY_NOTIFICATION_POPUP) {
        ToolWindowManager.getInstance(myProject).notifyByBalloon(ToolWindowId.MESSAGES_WINDOW, messageType, statusMessage);
      }
      // The link markup lets MessagesActivationListener react to a click on the notification
      final String wrappedMessage = _status != ExitStatus.UP_TO_DATE? "<a href='#'>" + statusMessage + "</a>" : statusMessage;
      final Notification notification = CompilerManager.NOTIFICATION_GROUP.createNotification(
        "", wrappedMessage,
        messageType.toNotificationType(),
        new MessagesActivationListener(compileContext)
      ).setImportant(false);
      compileContext.getBuildSession().registerCloseAction(notification::expire);
      notification.notify(myProject);
      if (_status != ExitStatus.UP_TO_DATE && compileContext.getMessageCount(null) > 0) {
        final String msg = DateFormatUtil.formatDateTime(new Date()) + " - " + statusMessage;
        compileContext.addMessage(CompilerMessageCategory.INFORMATION, msg, null, -1, -1);
      }
    }
  });
  return duration;
}
/**
 * Builds the human-readable completion message for the given exit status.
 * Cancelled and up-to-date results use fixed messages; all other results get a
 * success/warnings/errors message with the formatted duration appended.
 */
private static String createStatusMessage(final ExitStatus status, final int warningCount, final int errorCount, long duration) {
  if (status == ExitStatus.CANCELLED) {
    return CompilerBundle.message("status.compilation.aborted");
  }
  if (status == ExitStatus.UP_TO_DATE) {
    return CompilerBundle.message("status.all.up.to.date");
  }
  final String base;
  if (status != ExitStatus.SUCCESS) {
    base = CompilerBundle.message("status.compilation.completed.successfully.with.warnings.and.errors", errorCount, warningCount);
  }
  else if (warningCount > 0) {
    base = CompilerBundle.message("status.compilation.completed.successfully.with.warnings", warningCount);
  }
  else {
    base = CompilerBundle.message("status.compilation.completed.successfully");
  }
  return base + " in " + StringUtil.formatDuration(duration);
}
// [mike] performance optimization - this method is accessed > 15,000 times in Aurora
/**
 * Returns the (cached) production or test output path for {@code module}.
 * Note: {@code computeIfAbsent} does not cache a {@code null} result, so modules
 * without an output path are re-queried on every call — presumably acceptable here.
 */
private String getModuleOutputPath(Module module, boolean inTestSourceContent) {
  Map<Module, String> map = inTestSourceContent ? myModuleTestOutputPaths : myModuleOutputPaths;
  return map.computeIfAbsent(module, k -> CompilerPaths.getModuleOutputPath(module, inTestSourceContent));
}
/**
 * Runs a single compile task under a compiler progress window titled
 * {@code contentName}. {@code onTaskFinished} (if non-null) runs after the task
 * completes, including when the task is cancelled.
 */
public void executeCompileTask(final CompileTask task, final CompileScope scope, final String contentName, final Runnable onTaskFinished) {
  final CompilerTask progressTask = new CompilerTask(myProject, contentName, false, false, true, isCompilationStartedAutomatically(scope));
  final CompileContextImpl context = new CompileContextImpl(myProject, progressTask, scope, false, false);
  FileDocumentManager.getInstance().saveAllDocuments();
  final Runnable work = () -> {
    try {
      task.execute(context);
    }
    catch (ProcessCanceledException ignored) {
      // cancellation is a normal way to stop the task
    }
    finally {
      if (onTaskFinished != null) {
        onTaskFinished.run();
      }
    }
  };
  progressTask.start(work, null);
}
/**
 * Executes the registered before- or after-compilation tasks for the project.
 * The progress indicator state is restored and the status bar cleared regardless
 * of the outcome.
 *
 * @return false if the project is disposed or any task vetoed further processing
 */
private boolean executeCompileTasks(final CompileContext context, final boolean beforeTasks) {
  if (myProject.isDisposed()) {
    return false;
  }
  final CompilerManager compilerManager = CompilerManager.getInstance(myProject);
  final ProgressIndicator indicator = context.getProgressIndicator();
  indicator.pushState();
  try {
    final CompileTask[] tasks = beforeTasks ? compilerManager.getBeforeTasks() : compilerManager.getAfterTasks();
    if (tasks.length > 0) {
      final String progressKey = beforeTasks ? "progress.executing.precompile.tasks" : "progress.executing.postcompile.tasks";
      indicator.setText(CompilerBundle.message(progressKey));
      for (CompileTask task : tasks) {
        if (!task.execute(context)) {
          return false;
        }
      }
    }
    return true;
  }
  finally {
    indicator.popState();
    final StatusBar statusBar = WindowManager.getInstance().getStatusBar(myProject);
    if (statusBar != null) {
      statusBar.setInfo("");
    }
    if (indicator instanceof CompilerTask) {
      ApplicationManager.getApplication().invokeLater(((CompilerTask)indicator)::showCompilerContent);
    }
  }
}
/**
 * Validates the compiler configuration for all modules in {@code scope}:
 * every module with sources must have an SDK and output paths, and modules in a
 * cyclic dependency chunk must share the same SDK and language level. Shows an
 * error dialog for the first violation found.
 *
 * @return true if the configuration is valid and compilation may proceed
 */
private boolean validateCompilerConfiguration(final CompileScope scope) {
  try {
    final Module[] scopeModules = scope.getAffectedModules();
    final List<String> modulesWithoutOutputPathSpecified = new ArrayList<>();
    final List<String> modulesWithoutJdkAssigned = new ArrayList<>();
    final CompilerManager compilerManager = CompilerManager.getInstance(myProject);
    for (final Module module : scopeModules) {
      if (!compilerManager.isValidationEnabled(module)) {
        continue;
      }
      final boolean hasSources = hasSources(module, JavaSourceRootType.SOURCE);
      final boolean hasTestSources = hasSources(module, JavaSourceRootType.TEST_SOURCE);
      if (!hasSources && !hasTestSources) {
        // If module contains no sources, shouldn't have to select JDK or output directory (SCR #19333)
        // todo still there may be problems with this approach if some generated files are attributed by this module
        continue;
      }
      final Sdk jdk = ModuleRootManager.getInstance(module).getSdk();
      if (jdk == null) {
        modulesWithoutJdkAssigned.add(module.getName());
      }
      final String outputPath = getModuleOutputPath(module, false);
      final String testsOutputPath = getModuleOutputPath(module, true);
      if (outputPath == null && testsOutputPath == null) {
        modulesWithoutOutputPathSpecified.add(module.getName());
      }
      else {
        // A missing path only matters if the module actually has sources of that kind
        if (outputPath == null) {
          if (hasSources) {
            modulesWithoutOutputPathSpecified.add(module.getName());
          }
        }
        if (testsOutputPath == null) {
          if (hasTestSources) {
            modulesWithoutOutputPathSpecified.add(module.getName());
          }
        }
      }
    }
    if (!modulesWithoutJdkAssigned.isEmpty()) {
      showNotSpecifiedError("error.jdk.not.specified", modulesWithoutJdkAssigned, ProjectBundle.message("modules.classpath.title"));
      return false;
    }
    if (!modulesWithoutOutputPathSpecified.isEmpty()) {
      showNotSpecifiedError("error.output.not.specified", modulesWithoutOutputPathSpecified, CommonContentEntriesEditor.NAME);
      return false;
    }
    // Modules in one cyclic chunk are compiled together, so their SDK and language level must agree
    final List<Chunk<ModuleSourceSet>> chunks = ModuleCompilerUtil.getCyclicDependencies(myProject, Arrays.asList(scopeModules));
    for (final Chunk<ModuleSourceSet> chunk : chunks) {
      final Set<ModuleSourceSet> sourceSets = chunk.getNodes();
      if (sourceSets.size() <= 1) {
        continue; // no need to check one-module chunks
      }
      Sdk jdk = null;
      LanguageLevel languageLevel = null;
      for (final ModuleSourceSet sourceSet : sourceSets) {
        Module module = sourceSet.getModule();
        final Sdk moduleJdk = ModuleRootManager.getInstance(module).getSdk();
        if (jdk == null) {
          jdk = moduleJdk;
        }
        else {
          if (!jdk.equals(moduleJdk)) {
            showCyclicModulesHaveDifferentJdksError(ModuleSourceSet.getModules(sourceSets));
            return false;
          }
        }
        LanguageLevel moduleLanguageLevel = EffectiveLanguageLevelUtil.getEffectiveLanguageLevel(module);
        if (languageLevel == null) {
          languageLevel = moduleLanguageLevel;
        }
        else {
          if (!languageLevel.equals(moduleLanguageLevel)) {
            showCyclicModulesHaveDifferentLanguageLevel(ModuleSourceSet.getModules(sourceSets));
            return false;
          }
        }
      }
    }
    return true;
  }
  catch (Throwable e) {
    LOG.info(e);
    return false;
  }
}
/**
 * Shows an error about a cyclic module chunk with mismatched language levels,
 * then opens the settings dialog focused on the first module of the chunk.
 */
private void showCyclicModulesHaveDifferentLanguageLevel(Set<Module> modulesInChunk) {
  final Module firstModule = ContainerUtil.getFirstItem(modulesInChunk);
  LOG.assertTrue(firstModule != null);
  final String chunkDescription = getModulesString(modulesInChunk);
  Messages.showMessageDialog(myProject, CompilerBundle.message("error.chunk.modules.must.have.same.language.level", chunkDescription),
                             CommonBundle.getErrorTitle(), Messages.getErrorIcon());
  showConfigurationDialog(firstModule.getName(), null);
}
/**
 * Reports that the modules forming a cyclic chunk are configured with different
 * JDKs, then opens the project structure dialog positioned on the first module
 * of the chunk.
 */
private void showCyclicModulesHaveDifferentJdksError(Set<Module> modulesInChunk) {
    final Module first = ContainerUtil.getFirstItem(modulesInChunk);
    LOG.assertTrue(first != null);
    Messages.showMessageDialog(
        myProject,
        CompilerBundle.message("error.chunk.modules.must.have.same.jdk", getModulesString(modulesInChunk)),
        CommonBundle.getErrorTitle(),
        Messages.getErrorIcon());
    showConfigurationDialog(first.getName(), null);
}
/**
 * Renders the module names of a chunk as a newline-separated list of quoted
 * names, e.g. {@code "moduleA"\n"moduleB"}.
 *
 * <p>Uses a plain {@link StringBuilder}: the previously used
 * {@code StringBuilderSpinAllocator} pooling is deprecated and provides no
 * benefit on this rarely executed error-reporting path, while forcing a
 * try/finally dispose dance.
 *
 * @param modulesInChunk modules whose names are rendered; iteration order of
 *                       the supplied collection determines output order
 * @return the quoted, newline-separated module names (empty string for an
 *         empty collection)
 */
private static String getModulesString(Collection<Module> modulesInChunk) {
    final StringBuilder moduleNames = new StringBuilder();
    for (Module module : modulesInChunk) {
        if (moduleNames.length() > 0) {
            moduleNames.append("\n");
        }
        moduleNames.append("\"").append(module.getName()).append("\"");
    }
    return moduleNames.toString();
}
/**
 * Returns {@code true} if the module has at least one source root of the
 * given type.
 */
private static boolean hasSources(Module module, final JavaSourceRootType rootType) {
    final List<?> sourceRoots = ModuleRootManager.getInstance(module).getSourceRoots(rootType);
    return !sourceRoots.isEmpty();
}
/**
 * Shows an error dialog listing (at most 10 of) the modules for which some
 * required setting is missing, then opens the module configuration dialog on
 * the first listed module.
 *
 * <p>Uses a plain {@link StringBuilder} instead of the deprecated
 * {@code StringBuilderSpinAllocator} pooling; the rendered message is
 * byte-identical to the previous implementation.
 *
 * @param resourceId         {@code CompilerBundle} key of the error message;
 *                           it receives the total module count and the
 *                           rendered name list as format arguments
 * @param modules            names of the offending modules
 * @param editorNameToSelect the settings editor/tab to preselect in the
 *                           configuration dialog
 */
private void showNotSpecifiedError(@NonNls final String resourceId, List<String> modules, String editorNameToSelect) {
    String nameToSelect = null;
    final StringBuilder names = new StringBuilder();
    final int maxModulesToShow = 10;
    for (String name : modules.size() > maxModulesToShow ? modules.subList(0, maxModulesToShow) : modules) {
        if (nameToSelect == null) {
            nameToSelect = name; // the dialog is opened on the first offending module
        }
        if (names.length() > 0) {
            names.append(",\n");
        }
        names.append("\"");
        names.append(name);
        names.append("\"");
    }
    if (modules.size() > maxModulesToShow) {
        names.append(",\n...");
    }
    final String message = CompilerBundle.message(resourceId, modules.size(), names.toString());
    if (ApplicationManager.getApplication().isUnitTestMode()) {
        LOG.error(message); // make the failure visible in headless test runs where no dialog appears
    }
    Messages.showMessageDialog(myProject, message, CommonBundle.getErrorTitle(), Messages.getErrorIcon());
    showConfigurationDialog(nameToSelect, editorNameToSelect);
}
/**
 * Opens the project structure dialog positioned on the given module and,
 * optionally, on the named settings tab ({@code null} selects the default tab).
 */
private void showConfigurationDialog(String moduleNameToSelect, String tabNameToSelect) {
ProjectSettingsService.getInstance(myProject).showModuleConfigurationDialog(moduleNameToSelect, tabNameToSelect);
}
/**
 * Notification hyperlink handler that re-activates the Messages tool window
 * for this compile session's content, or expires the notification when the
 * project or its content is no longer available. The project is referenced
 * only weakly so a pending notification cannot keep a closed project alive.
 */
private static class MessagesActivationListener extends NotificationListener.Adapter {
    private final WeakReference<Project> myProjectRef;
    private final Object myContentId;

    public MessagesActivationListener(CompileContextImpl compileContext) {
        myProjectRef = new WeakReference<>(compileContext.getProject());
        myContentId = compileContext.getBuildSession().getContentId();
    }

    @Override
    protected void hyperlinkActivated(@NotNull Notification notification, @NotNull HyperlinkEvent e) {
        final Project project = myProjectRef.get();
        // Short-circuits exactly as the chained condition: the content is only
        // shown for a live, non-disposed project.
        final boolean contentShown = project != null
            && !project.isDisposed()
            && CompilerTask.showCompilerContent(project, myContentId);
        if (!contentShown) {
            notification.expire();
            return;
        }
        final ToolWindow messagesWindow = ToolWindowManager.getInstance(project).getToolWindow(ToolWindowId.MESSAGES_WINDOW);
        if (messagesWindow != null) {
            messagesWindow.activate(null, false);
        }
    }
}
}
| |
package org.bigtesting.jbehave.buddy.core.ui.widgets;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Font;
import java.awt.FontMetrics;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Insets;
import java.awt.Point;
import java.awt.Rectangle;
import java.awt.RenderingHints;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.HashMap;
import javax.swing.JPanel;
import javax.swing.SwingUtilities;
import javax.swing.border.Border;
import javax.swing.border.CompoundBorder;
import javax.swing.border.EmptyBorder;
import javax.swing.border.MatteBorder;
import javax.swing.event.CaretEvent;
import javax.swing.event.CaretListener;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
import javax.swing.text.AttributeSet;
import javax.swing.text.BadLocationException;
import javax.swing.text.Element;
import javax.swing.text.JTextComponent;
import javax.swing.text.StyleConstants;
import javax.swing.text.Utilities;
import org.bigtesting.jbehave.buddy.core.ui.ScreenContext;
/**
* Derived from
* http://tips4java.wordpress.com/2009/05/23/text-component-line-number/
* <p>
* This class will display line numbers for a related text component. The text
* component must use the same line height for each line. TextLineNumber
* supports wrapped lines and will highlight the line number of the current line
* in the text component.
* <p>
* This class was designed to be used as a component added to the row header of
* a JScrollPane.
*/
@SuppressWarnings("serial")
public class StepsTextPane extends JPanel implements CaretListener, DocumentListener, PropertyChangeListener {

    /** Horizontal alignment constants for the painted digits. */
    public final static float LEFT = 0.0f;
    public final static float CENTER = 0.5f;
    public final static float RIGHT = 1.0f;

    private final static Border OUTER = new MatteBorder(0, 0, 0, 0, Color.GRAY);

    // Effectively "unbounded" preferred height; kept well below
    // Integer.MAX_VALUE so downstream layout arithmetic cannot overflow.
    private final static int HEIGHT = Integer.MAX_VALUE - 1000000;

    /** Background of the indicator box painted behind each line number. */
    private static final Color STEPS_COLOR = new Color(248, 248, 248);

    // Text component this line-number component is kept in sync with
    private JTextComponent component;

    private final ScreenContext screenContext;

    /*
     * Indicates whether this Font should be updated automatically when the Font
     * of the related text component is changed.
     *
     * When true update the Font and repaint the line numbers, otherwise just
     * repaint the line numbers.
     *
     * NOTE(review): this flag is never set to true anywhere in this class, so
     * the font-update branch in propertyChange() is currently unreachable;
     * kept for compatibility with the upstream TextLineNumber design.
     */
    private boolean updateFont;

    private Color currentLineForeground;
    private Color lineForeground;

    /*
     * the alignment of the painted digits (0.0 = left, 0.5 = center,
     * 1.0 = right)
     */
    private float digitAlignment;

    private int minimumDisplayDigits;

    // Keep history information to reduce the number of times the component
    // needs to be repainted
    private int lastDigits;
    private int lastHeight;
    private int lastLine;

    // FontMetrics cache keyed by "family+size", used when a row mixes fonts
    private HashMap<String, FontMetrics> fonts;

    /**
     * Create a line number component for a text component. This minimum display
     * width will be based on 3 digits.
     *
     * @param component the related text component
     */
    public StepsTextPane(JTextComponent component, ScreenContext screenContext) {
        this(component, 3, screenContext);
    }

    /**
     * Create a line number component for a text component.
     *
     * @param component            the related text component
     * @param minimumDisplayDigits the number of digits used to calculate the
     *                             minimum width of the component
     */
    public StepsTextPane(JTextComponent component, int minimumDisplayDigits, ScreenContext screenContext) {
        this.component = component;
        this.screenContext = screenContext;
        setFont(component.getFont());
        setBorderGap(5);
        setCurrentLineForeground(Color.GRAY);
        setLineForeground(Color.LIGHT_GRAY);
        setDigitAlignment(RIGHT);
        setMinimumDisplayDigits(minimumDisplayDigits);
        // Track document edits, caret movement and font changes so the line
        // numbers stay in sync with the text component.
        component.getDocument().addDocumentListener(this);
        component.addCaretListener(this);
        component.addPropertyChangeListener("font", this);
    }

    /**
     * The border gap is used in calculating the left and right insets of the
     * border. Default value is 5.
     *
     * @param borderGap the gap in pixels
     */
    private void setBorderGap(int borderGap) {
        Border inner = new EmptyBorder(0, borderGap, 0, borderGap);
        setBorder(new CompoundBorder(OUTER, inner));
        lastDigits = 0; // force setPreferredWidth() to recompute
        setPreferredWidth();
    }

    /**
     * Gets the current line rendering Color
     *
     * @return the Color used to render the current line number
     */
    private Color getCurrentLineForeground() {
        return currentLineForeground == null ? getForeground() : currentLineForeground;
    }

    /**
     * The Color used to render the current line digits. The constructor sets
     * this to {@link Color#GRAY}.
     *
     * @param currentLineForeground the Color used to render the current line
     */
    private void setCurrentLineForeground(Color currentLineForeground) {
        this.currentLineForeground = currentLineForeground;
    }

    /** Sets the Color used to render non-current line digits. */
    private void setLineForeground(Color color) {
        this.lineForeground = color;
    }

    private Color getLineForeground() {
        return lineForeground == null ? getForeground() : lineForeground;
    }

    /**
     * Specify the horizontal alignment of the digits within the component.
     * Common values would be:
     * <ul>
     * <li>TextLineNumber.LEFT
     * <li>TextLineNumber.CENTER
     * <li>TextLineNumber.RIGHT (default)
     * </ul>
     */
    private void setDigitAlignment(float digitAlignment) {
        // Clamp into the valid [0.0, 1.0] range. The upstream tips4java code
        // clamped negative input to -1.0f, which would push the digits off the
        // left edge of the component in getOffsetX(); clamp to 0.0f instead.
        this.digitAlignment = Math.min(1.0f, Math.max(0.0f, digitAlignment));
    }

    /**
     * Specify the minimum number of digits used to calculate the preferred
     * width of the component. Default is 3.
     *
     * @param minimumDisplayDigits the number digits used in the preferred
     *                             width calculation
     */
    private void setMinimumDisplayDigits(int minimumDisplayDigits) {
        this.minimumDisplayDigits = minimumDisplayDigits;
        setPreferredWidth();
    }

    /**
     * Calculate the width needed to display the maximum line number
     */
    private void setPreferredWidth() {
        Element root = component.getDocument().getDefaultRootElement();
        int lines = root.getElementCount();
        int digits = Math.max(String.valueOf(lines).length(), minimumDisplayDigits);

        // Update sizes only when the number of digits in the line number changes
        if (lastDigits != digits) {
            lastDigits = digits;
            FontMetrics fontMetrics = getFontMetrics(getFont());
            int width = fontMetrics.charWidth('0') * digits;
            Insets insets = getInsets();
            int preferredWidth = insets.left + insets.right + width;

            Dimension d = getPreferredSize();
            d.setSize(preferredWidth, HEIGHT);
            setPreferredSize(d);
            setSize(d);
        }
    }

    /**
     * Draw the line numbers
     */
    @Override
    public void paintComponent(Graphics g) {
        super.paintComponent(g);

        // Determine the width of the space available to draw the line number
        FontMetrics fontMetrics = component.getFontMetrics(component.getFont());
        Insets insets = getInsets();
        int availableWidth = getSize().width - insets.left - insets.right;

        // Determine the rows to draw within the clipped bounds.
        Rectangle clip = g.getClipBounds();
        int rowStartOffset = component.viewToModel(new Point(0, clip.y));
        int endOffset = component.viewToModel(new Point(0, clip.y + clip.height));

        ((Graphics2D) g).setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
        g.setFont(component.getFont());

        while (rowStartOffset <= endOffset) {
            try {
                // Get the line number as a string and then determine the
                // "X" and "Y" offsets for drawing the string.
                String lineNumber = getTextLineNumber(rowStartOffset);
                int stringWidth = fontMetrics.stringWidth(lineNumber);
                int x = getOffsetX(availableWidth, stringWidth) + insets.left;
                int y = getOffsetY(rowStartOffset, fontMetrics);

                // draw the steps indicator box behind the number
                int descent = getDescent(rowStartOffset, fontMetrics);
                int fontHeight = fontMetrics.getHeight() - descent;
                g.setColor(STEPS_COLOR);
                g.fillRect(clip.x + insets.left, y - fontHeight + descent / 2, availableWidth, fontHeight);

                // draw the line number, highlighting the caret's line
                if (isCurrentLine(rowStartOffset)) {
                    g.setColor(getCurrentLineForeground());
                } else {
                    g.setColor(getLineForeground());
                }
                g.drawString(lineNumber, x, y);

                // Move to the next row
                rowStartOffset = Utilities.getRowEnd(component, rowStartOffset) + 1;
            } catch (Exception e) {
                screenContext.logException(e);
            }
        }
    }

    /*
     * We need to know if the caret is currently positioned on the line we are
     * about to paint so the line number can be highlighted.
     */
    private boolean isCurrentLine(int rowStartOffset) {
        int caretPosition = component.getCaretPosition();
        Element root = component.getDocument().getDefaultRootElement();
        return root.getElementIndex(rowStartOffset) == root.getElementIndex(caretPosition);
    }

    /*
     * Get the line number to be drawn. The empty string will be returned when a
     * line of text has wrapped.
     */
    private String getTextLineNumber(int rowStartOffset) {
        Element root = component.getDocument().getDefaultRootElement();
        int index = root.getElementIndex(rowStartOffset);
        Element line = root.getElement(index);
        return (line.getStartOffset() == rowStartOffset) ? String.valueOf(index + 1) : "";
    }

    /*
     * Determine the X offset to properly align the line number when drawn
     */
    private int getOffsetX(int availableWidth, int stringWidth) {
        return (int) ((availableWidth - stringWidth) * digitAlignment);
    }

    /*
     * Determine the Y offset for the current row
     */
    private int getOffsetY(int rowStartOffset, FontMetrics fontMetrics) throws BadLocationException {
        // Get the bounding rectangle of the row
        Rectangle r = component.modelToView(rowStartOffset);
        int y = r.y + r.height;
        int descent = getDescent(rowStartOffset, fontMetrics);
        return y - descent;
    }

    private int getDescent(int rowStartOffset, FontMetrics fontMetrics) throws BadLocationException {
        Rectangle r = component.modelToView(rowStartOffset);
        int lineHeight = fontMetrics.getHeight();
        int descent = 0;

        // The text needs to be positioned above the bottom of the bounding
        // rectangle based on the descent of the font(s) contained on the row.
        if (r.height == lineHeight) {
            // default font is being used
            descent = fontMetrics.getDescent();
        } else {
            // We need to check all the attributes for font changes
            if (fonts == null) {
                fonts = new HashMap<String, FontMetrics>();
            }
            Element root = component.getDocument().getDefaultRootElement();
            int index = root.getElementIndex(rowStartOffset);
            Element line = root.getElement(index);
            for (int i = 0; i < line.getElementCount(); i++) {
                Element child = line.getElement(i);
                AttributeSet as = child.getAttributes();
                String fontFamily = (String) as.getAttribute(StyleConstants.FontFamily);
                Integer fontSize = (Integer) as.getAttribute(StyleConstants.FontSize);
                String key = fontFamily + fontSize;
                FontMetrics fm = fonts.get(key);
                if (fm == null) {
                    Font font = new Font(fontFamily, Font.PLAIN, fontSize);
                    fm = component.getFontMetrics(font);
                    fonts.put(key, fm);
                }
                descent = Math.max(descent, fm.getDescent());
            }
        }
        return descent;
    }

    //
    // Implement CaretListener interface
    //
    public void caretUpdate(CaretEvent e) {
        // Get the line the caret is positioned on
        int caretPosition = component.getCaretPosition();
        Element root = component.getDocument().getDefaultRootElement();
        int currentLine = root.getElementIndex(caretPosition);

        // Need to repaint so the correct line number can be highlighted
        if (lastLine != currentLine) {
            repaint();
            lastLine = currentLine;
        }
    }

    //
    // Implement DocumentListener interface
    //
    public void changedUpdate(DocumentEvent e) {
        documentChanged();
    }

    public void insertUpdate(DocumentEvent e) {
        documentChanged();
    }

    public void removeUpdate(DocumentEvent e) {
        documentChanged();
    }

    /*
     * A document change may affect the number of displayed lines of text.
     * Therefore the lines numbers will also change.
     */
    private void documentChanged() {
        // Preferred size of the component has not been updated at the time
        // the DocumentEvent is fired, so defer to the next EDT cycle.
        SwingUtilities.invokeLater(new Runnable() {
            public void run() {
                int preferredHeight = component.getPreferredSize().height;

                // Document change has caused a change in the number of lines.
                // Repaint to reflect the new line numbers
                if (lastHeight != preferredHeight) {
                    setPreferredWidth();
                    repaint();
                    lastHeight = preferredHeight;
                }
            }
        });
    }

    //
    // Implement PropertyChangeListener interface
    //
    public void propertyChange(PropertyChangeEvent evt) {
        if (evt.getNewValue() instanceof Font) {
            if (updateFont) {
                Font newFont = (Font) evt.getNewValue();
                setFont(newFont);
                lastDigits = 0;
                setPreferredWidth();
            } else {
                repaint();
            }
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.bookkeeper.client;
import io.netty.util.concurrent.DefaultThreadFactory;
import java.util.Arrays;
import java.util.Collections;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.bookkeeper.client.AsyncCallback.CreateCallback;
import org.apache.bookkeeper.client.AsyncCallback.DeleteCallback;
import org.apache.bookkeeper.client.AsyncCallback.OpenCallback;
import org.apache.bookkeeper.client.api.OpenBuilder;
import org.apache.bookkeeper.client.api.ReadHandle;
import org.apache.bookkeeper.client.impl.OpenBuilderBase;
import org.apache.bookkeeper.conf.ClientConfiguration;
import org.apache.zookeeper.ZooKeeper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Mocked version of BookKeeper client that keeps all ledgers data in memory.
*
* <p>This mocked client is meant to be used in unit tests for applications using the BookKeeper API.
*/
public class MockBookKeeper extends BookKeeper {
final ExecutorService executor = Executors.newFixedThreadPool(1, new DefaultThreadFactory("mock-bookkeeper"));
final ZooKeeper zkc;
@Override
public ZooKeeper getZkHandle() {
return zkc;
}
@Override
public ClientConfiguration getConf() {
return super.getConf();
}
Map<Long, MockLedgerHandle> ledgers = new ConcurrentHashMap<Long, MockLedgerHandle>();
AtomicLong sequence = new AtomicLong(3);
AtomicBoolean stopped = new AtomicBoolean(false);
AtomicInteger stepsToFail = new AtomicInteger(-1);
int failReturnCode = BKException.Code.OK;
int nextFailReturnCode = BKException.Code.OK;
public MockBookKeeper(ZooKeeper zkc) throws Exception {
this.zkc = zkc;
}
@Override
public LedgerHandle createLedger(DigestType digestType, byte passwd[]) throws BKException {
return createLedger(3, 2, digestType, passwd);
}
@Override
public LedgerHandle createLedger(int ensSize, int qSize, DigestType digestType, byte passwd[]) throws BKException {
return createLedger(ensSize, qSize, qSize, digestType, passwd);
}
@Override
public void asyncCreateLedger(int ensSize, int writeQuorumSize, int ackQuorumSize, final DigestType digestType,
final byte[] passwd, final CreateCallback cb, final Object ctx, Map<String, byte[]> properties) {
if (stopped.get()) {
cb.createComplete(BKException.Code.WriteException, null, ctx);
return;
}
executor.execute(new Runnable() {
public void run() {
if (getProgrammedFailStatus()) {
if (failReturnCode != BkTimeoutOperation) {
cb.createComplete(failReturnCode, null, ctx);
}
return;
}
if (stopped.get()) {
cb.createComplete(BKException.Code.WriteException, null, ctx);
return;
}
try {
long id = sequence.getAndIncrement();
log.info("Creating ledger {}", id);
MockLedgerHandle lh = new MockLedgerHandle(MockBookKeeper.this, id, digestType, passwd);
ledgers.put(id, lh);
cb.createComplete(0, lh, ctx);
} catch (Throwable t) {
t.printStackTrace();
}
}
});
}
@Override
public LedgerHandle createLedger(int ensSize, int writeQuorumSize, int ackQuorumSize, DigestType digestType,
byte[] passwd) throws BKException {
checkProgrammedFail();
if (stopped.get()) {
throw BKException.create(BKException.Code.WriteException);
}
try {
long id = sequence.getAndIncrement();
log.info("Creating ledger {}", id);
MockLedgerHandle lh = new MockLedgerHandle(this, id, digestType, passwd);
ledgers.put(id, lh);
return lh;
} catch (Throwable t) {
log.error("Exception:", t);
return null;
}
}
@Override
public void asyncCreateLedger(int ensSize, int qSize, DigestType digestType, byte[] passwd, CreateCallback cb,
Object ctx) {
asyncCreateLedger(ensSize, qSize, qSize, digestType, passwd, cb, ctx, Collections.emptyMap());
}
@Override
public void asyncOpenLedger(long lId, DigestType digestType, byte[] passwd, OpenCallback cb, Object ctx) {
if (getProgrammedFailStatus()) {
if (failReturnCode != BkTimeoutOperation) {
cb.openComplete(failReturnCode, null, ctx);
}
return;
}
if (stopped.get()) {
cb.openComplete(BKException.Code.WriteException, null, ctx);
return;
}
MockLedgerHandle lh = ledgers.get(lId);
if (lh == null) {
cb.openComplete(BKException.Code.NoSuchLedgerExistsException, null, ctx);
} else if (lh.digest != digestType) {
cb.openComplete(BKException.Code.DigestMatchException, null, ctx);
} else if (!Arrays.equals(lh.passwd, passwd)) {
cb.openComplete(BKException.Code.UnauthorizedAccessException, null, ctx);
} else {
cb.openComplete(0, lh, ctx);
}
}
@Override
public void asyncOpenLedgerNoRecovery(long lId, DigestType digestType, byte[] passwd, OpenCallback cb, Object ctx) {
asyncOpenLedger(lId, digestType, passwd, cb, ctx);
}
@Override
public void asyncDeleteLedger(long lId, DeleteCallback cb, Object ctx) {
if (getProgrammedFailStatus()) {
if (failReturnCode != BkTimeoutOperation) {
cb.deleteComplete(failReturnCode, ctx);
}
} else if (stopped.get()) {
cb.deleteComplete(BKException.Code.WriteException, ctx);
} else if (ledgers.containsKey(lId)) {
ledgers.remove(lId);
cb.deleteComplete(0, ctx);
} else {
cb.deleteComplete(BKException.Code.NoSuchLedgerExistsException, ctx);
}
}
@Override
public void deleteLedger(long lId) throws InterruptedException, BKException {
checkProgrammedFail();
if (stopped.get()) {
throw BKException.create(BKException.Code.WriteException);
}
if (!ledgers.containsKey(lId)) {
throw BKException.create(BKException.Code.NoSuchLedgerExistsException);
}
ledgers.remove(lId);
}
@Override
public void close() throws InterruptedException, BKException {
checkProgrammedFail();
shutdown();
}
@Override
public OpenBuilder newOpenLedgerOp() {
return new OpenBuilderBase() {
@Override
public CompletableFuture<ReadHandle> execute() {
CompletableFuture<ReadHandle> promise = new CompletableFuture<ReadHandle>();
if (!validate()) {
promise.completeExceptionally(new BKException.BKNoSuchLedgerExistsException());
return promise;
} else if (getProgrammedFailStatus()) {
if (failReturnCode != BkTimeoutOperation) {
promise.completeExceptionally(BKException.create(failReturnCode));
}
return promise;
} else if (stopped.get()) {
promise.completeExceptionally(new BKException.BKClientClosedException());
return promise;
}
MockLedgerHandle lh = ledgers.get(ledgerId);
if (lh == null) {
promise.completeExceptionally(new BKException.BKNoSuchLedgerExistsException());
} else if (lh.digest != DigestType.fromApiDigestType(digestType)) {
promise.completeExceptionally(new BKException.BKDigestMatchException());
} else if (!Arrays.equals(lh.passwd, password)) {
promise.completeExceptionally(new BKException.BKUnauthorizedAccessException());
} else {
promise.complete(new MockReadHandle(MockBookKeeper.this, ledgerId,
lh.getLedgerMetadata(), lh.entries));
}
return promise;
}
};
}
public void shutdown() {
try {
super.close();
} catch (Exception e) {
}
stopped.set(true);
for (MockLedgerHandle ledger : ledgers.values()) {
ledger.entries.clear();
}
ledgers.clear();
executor.shutdownNow();
}
public boolean isStopped() {
return stopped.get();
}
public Set<Long> getLedgers() {
return ledgers.keySet();
}
void checkProgrammedFail() throws BKException {
int steps = stepsToFail.getAndDecrement();
log.debug("Steps to fail: {}", steps);
if (steps <= 0) {
if (failReturnCode != BKException.Code.OK) {
int rc = failReturnCode;
failReturnCode = nextFailReturnCode;
nextFailReturnCode = BKException.Code.OK;
throw BKException.create(rc);
}
}
}
boolean getProgrammedFailStatus() {
int steps = stepsToFail.getAndDecrement();
log.debug("Steps to fail: {}", steps);
return steps == 0;
}
public void failNow(int rc) {
failNow(rc, BKException.Code.OK);
}
public void failNow(int rc, int nextErrorCode) {
failAfter(0, rc);
}
public void failAfter(int steps, int rc) {
failAfter(steps, rc, BKException.Code.OK);
}
public void failAfter(int steps, int rc, int nextErrorCode) {
stepsToFail.set(steps);
failReturnCode = rc;
this.nextFailReturnCode = nextErrorCode;
}
public void timeoutAfter(int steps) {
stepsToFail.set(steps);
failReturnCode = BkTimeoutOperation;
}
private static final int BkTimeoutOperation = 1000;
private static final Logger log = LoggerFactory.getLogger(MockBookKeeper.class);
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.