gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.logging.v2;
import static com.google.cloud.logging.v2.ConfigClient.ListBucketsPagedResponse;
import static com.google.cloud.logging.v2.ConfigClient.ListExclusionsPagedResponse;
import static com.google.cloud.logging.v2.ConfigClient.ListSinksPagedResponse;
import static com.google.cloud.logging.v2.ConfigClient.ListViewsPagedResponse;
import com.google.api.core.ApiFunction;
import com.google.api.core.BetaApi;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.ClientSettings;
import com.google.api.gax.rpc.OperationCallSettings;
import com.google.api.gax.rpc.PagedCallSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.cloud.logging.v2.stub.ConfigServiceV2StubSettings;
import com.google.logging.v2.CmekSettings;
import com.google.logging.v2.CopyLogEntriesMetadata;
import com.google.logging.v2.CopyLogEntriesRequest;
import com.google.logging.v2.CopyLogEntriesResponse;
import com.google.logging.v2.CreateBucketRequest;
import com.google.logging.v2.CreateExclusionRequest;
import com.google.logging.v2.CreateSinkRequest;
import com.google.logging.v2.CreateViewRequest;
import com.google.logging.v2.DeleteBucketRequest;
import com.google.logging.v2.DeleteExclusionRequest;
import com.google.logging.v2.DeleteSinkRequest;
import com.google.logging.v2.DeleteViewRequest;
import com.google.logging.v2.GetBucketRequest;
import com.google.logging.v2.GetCmekSettingsRequest;
import com.google.logging.v2.GetExclusionRequest;
import com.google.logging.v2.GetSettingsRequest;
import com.google.logging.v2.GetSinkRequest;
import com.google.logging.v2.GetViewRequest;
import com.google.logging.v2.ListBucketsRequest;
import com.google.logging.v2.ListBucketsResponse;
import com.google.logging.v2.ListExclusionsRequest;
import com.google.logging.v2.ListExclusionsResponse;
import com.google.logging.v2.ListSinksRequest;
import com.google.logging.v2.ListSinksResponse;
import com.google.logging.v2.ListViewsRequest;
import com.google.logging.v2.ListViewsResponse;
import com.google.logging.v2.LogBucket;
import com.google.logging.v2.LogExclusion;
import com.google.logging.v2.LogSink;
import com.google.logging.v2.LogView;
import com.google.logging.v2.Settings;
import com.google.logging.v2.UndeleteBucketRequest;
import com.google.logging.v2.UpdateBucketRequest;
import com.google.logging.v2.UpdateCmekSettingsRequest;
import com.google.logging.v2.UpdateExclusionRequest;
import com.google.logging.v2.UpdateSettingsRequest;
import com.google.logging.v2.UpdateSinkRequest;
import com.google.logging.v2.UpdateViewRequest;
import com.google.longrunning.Operation;
import com.google.protobuf.Empty;
import java.io.IOException;
import java.util.List;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Settings class to configure an instance of {@link ConfigClient}.
*
* <p>The default instance has everything set to sensible defaults:
*
* <ul>
* <li>The default service address (logging.googleapis.com) and default port (443) are used.
* <li>Credentials are acquired automatically through Application Default Credentials.
* <li>Retries are configured for idempotent methods but not for non-idempotent methods.
* </ul>
*
* <p>The builder of this class is recursive, so contained classes are themselves builders. When
* build() is called, the tree of builders is called to create the complete settings object.
*
* <p>For example, to set the total timeout of getBucket to 30 seconds:
*
* <pre>{@code
* ConfigSettings.Builder configSettingsBuilder = ConfigSettings.newBuilder();
* configSettingsBuilder
* .getBucketSettings()
* .setRetrySettings(
* configSettingsBuilder
* .getBucketSettings()
* .getRetrySettings()
* .toBuilder()
* .setTotalTimeout(Duration.ofSeconds(30))
* .build());
* ConfigSettings configSettings = configSettingsBuilder.build();
* }</pre>
*/
// NOTE(review): Auto-generated facade (see @Generated). Every accessor below casts the
// generic ClientSettings stub to the concrete ConfigServiceV2StubSettings and delegates;
// no logic lives in this class. Edits here are normally overwritten on regeneration.
@Generated("by gapic-generator-java")
public class ConfigSettings extends ClientSettings<ConfigSettings> {
  /** Returns the object with the settings used for calls to listBuckets. */
  public PagedCallSettings<ListBucketsRequest, ListBucketsResponse, ListBucketsPagedResponse>
      listBucketsSettings() {
    // The stub settings object is always a ConfigServiceV2StubSettings (see Builder),
    // so this downcast is safe by construction. Same pattern in every accessor below.
    return ((ConfigServiceV2StubSettings) getStubSettings()).listBucketsSettings();
  }
  /** Returns the object with the settings used for calls to getBucket. */
  public UnaryCallSettings<GetBucketRequest, LogBucket> getBucketSettings() {
    return ((ConfigServiceV2StubSettings) getStubSettings()).getBucketSettings();
  }
  /** Returns the object with the settings used for calls to createBucket. */
  public UnaryCallSettings<CreateBucketRequest, LogBucket> createBucketSettings() {
    return ((ConfigServiceV2StubSettings) getStubSettings()).createBucketSettings();
  }
  /** Returns the object with the settings used for calls to updateBucket. */
  public UnaryCallSettings<UpdateBucketRequest, LogBucket> updateBucketSettings() {
    return ((ConfigServiceV2StubSettings) getStubSettings()).updateBucketSettings();
  }
  /** Returns the object with the settings used for calls to deleteBucket. */
  public UnaryCallSettings<DeleteBucketRequest, Empty> deleteBucketSettings() {
    return ((ConfigServiceV2StubSettings) getStubSettings()).deleteBucketSettings();
  }
  /** Returns the object with the settings used for calls to undeleteBucket. */
  public UnaryCallSettings<UndeleteBucketRequest, Empty> undeleteBucketSettings() {
    return ((ConfigServiceV2StubSettings) getStubSettings()).undeleteBucketSettings();
  }
  /** Returns the object with the settings used for calls to listViews. */
  public PagedCallSettings<ListViewsRequest, ListViewsResponse, ListViewsPagedResponse>
      listViewsSettings() {
    return ((ConfigServiceV2StubSettings) getStubSettings()).listViewsSettings();
  }
  /** Returns the object with the settings used for calls to getView. */
  public UnaryCallSettings<GetViewRequest, LogView> getViewSettings() {
    return ((ConfigServiceV2StubSettings) getStubSettings()).getViewSettings();
  }
  /** Returns the object with the settings used for calls to createView. */
  public UnaryCallSettings<CreateViewRequest, LogView> createViewSettings() {
    return ((ConfigServiceV2StubSettings) getStubSettings()).createViewSettings();
  }
  /** Returns the object with the settings used for calls to updateView. */
  public UnaryCallSettings<UpdateViewRequest, LogView> updateViewSettings() {
    return ((ConfigServiceV2StubSettings) getStubSettings()).updateViewSettings();
  }
  /** Returns the object with the settings used for calls to deleteView. */
  public UnaryCallSettings<DeleteViewRequest, Empty> deleteViewSettings() {
    return ((ConfigServiceV2StubSettings) getStubSettings()).deleteViewSettings();
  }
  /** Returns the object with the settings used for calls to listSinks. */
  public PagedCallSettings<ListSinksRequest, ListSinksResponse, ListSinksPagedResponse>
      listSinksSettings() {
    return ((ConfigServiceV2StubSettings) getStubSettings()).listSinksSettings();
  }
  /** Returns the object with the settings used for calls to getSink. */
  public UnaryCallSettings<GetSinkRequest, LogSink> getSinkSettings() {
    return ((ConfigServiceV2StubSettings) getStubSettings()).getSinkSettings();
  }
  /** Returns the object with the settings used for calls to createSink. */
  public UnaryCallSettings<CreateSinkRequest, LogSink> createSinkSettings() {
    return ((ConfigServiceV2StubSettings) getStubSettings()).createSinkSettings();
  }
  /** Returns the object with the settings used for calls to updateSink. */
  public UnaryCallSettings<UpdateSinkRequest, LogSink> updateSinkSettings() {
    return ((ConfigServiceV2StubSettings) getStubSettings()).updateSinkSettings();
  }
  /** Returns the object with the settings used for calls to deleteSink. */
  public UnaryCallSettings<DeleteSinkRequest, Empty> deleteSinkSettings() {
    return ((ConfigServiceV2StubSettings) getStubSettings()).deleteSinkSettings();
  }
  /** Returns the object with the settings used for calls to listExclusions. */
  public PagedCallSettings<
          ListExclusionsRequest, ListExclusionsResponse, ListExclusionsPagedResponse>
      listExclusionsSettings() {
    return ((ConfigServiceV2StubSettings) getStubSettings()).listExclusionsSettings();
  }
  /** Returns the object with the settings used for calls to getExclusion. */
  public UnaryCallSettings<GetExclusionRequest, LogExclusion> getExclusionSettings() {
    return ((ConfigServiceV2StubSettings) getStubSettings()).getExclusionSettings();
  }
  /** Returns the object with the settings used for calls to createExclusion. */
  public UnaryCallSettings<CreateExclusionRequest, LogExclusion> createExclusionSettings() {
    return ((ConfigServiceV2StubSettings) getStubSettings()).createExclusionSettings();
  }
  /** Returns the object with the settings used for calls to updateExclusion. */
  public UnaryCallSettings<UpdateExclusionRequest, LogExclusion> updateExclusionSettings() {
    return ((ConfigServiceV2StubSettings) getStubSettings()).updateExclusionSettings();
  }
  /** Returns the object with the settings used for calls to deleteExclusion. */
  public UnaryCallSettings<DeleteExclusionRequest, Empty> deleteExclusionSettings() {
    return ((ConfigServiceV2StubSettings) getStubSettings()).deleteExclusionSettings();
  }
  /** Returns the object with the settings used for calls to getCmekSettings. */
  public UnaryCallSettings<GetCmekSettingsRequest, CmekSettings> getCmekSettingsSettings() {
    return ((ConfigServiceV2StubSettings) getStubSettings()).getCmekSettingsSettings();
  }
  /** Returns the object with the settings used for calls to updateCmekSettings. */
  public UnaryCallSettings<UpdateCmekSettingsRequest, CmekSettings> updateCmekSettingsSettings() {
    return ((ConfigServiceV2StubSettings) getStubSettings()).updateCmekSettingsSettings();
  }
  /** Returns the object with the settings used for calls to getSettings. */
  public UnaryCallSettings<GetSettingsRequest, Settings> getSettingsSettings() {
    return ((ConfigServiceV2StubSettings) getStubSettings()).getSettingsSettings();
  }
  /** Returns the object with the settings used for calls to updateSettings. */
  public UnaryCallSettings<UpdateSettingsRequest, Settings> updateSettingsSettings() {
    return ((ConfigServiceV2StubSettings) getStubSettings()).updateSettingsSettings();
  }
  /** Returns the object with the settings used for calls to copyLogEntries. */
  public UnaryCallSettings<CopyLogEntriesRequest, Operation> copyLogEntriesSettings() {
    return ((ConfigServiceV2StubSettings) getStubSettings()).copyLogEntriesSettings();
  }
  /** Returns the object with the settings used for calls to copyLogEntries. */
  public OperationCallSettings<
          CopyLogEntriesRequest, CopyLogEntriesResponse, CopyLogEntriesMetadata>
      copyLogEntriesOperationSettings() {
    return ((ConfigServiceV2StubSettings) getStubSettings()).copyLogEntriesOperationSettings();
  }
  /** Returns a new ConfigSettings instance wrapping the given stub settings. */
  public static final ConfigSettings create(ConfigServiceV2StubSettings stub) throws IOException {
    return new ConfigSettings.Builder(stub.toBuilder()).build();
  }
  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return ConfigServiceV2StubSettings.defaultExecutorProviderBuilder();
  }
  /** Returns the default service endpoint. */
  public static String getDefaultEndpoint() {
    return ConfigServiceV2StubSettings.getDefaultEndpoint();
  }
  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return ConfigServiceV2StubSettings.getDefaultServiceScopes();
  }
  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return ConfigServiceV2StubSettings.defaultCredentialsProviderBuilder();
  }
  /** Returns a builder for the default ChannelProvider for this service. */
  public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
    return ConfigServiceV2StubSettings.defaultGrpcTransportProviderBuilder();
  }
  /** Returns the default transport channel provider for this service. */
  public static TransportChannelProvider defaultTransportChannelProvider() {
    return ConfigServiceV2StubSettings.defaultTransportChannelProvider();
  }
  @BetaApi("The surface for customizing headers is not stable yet and may change in the future.")
  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    return ConfigServiceV2StubSettings.defaultApiClientHeaderProviderBuilder();
  }
  /** Returns a new builder for this class. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }
  /** Returns a new builder for this class. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }
  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }
  protected ConfigSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);
  }
  /** Builder for ConfigSettings. */
  public static class Builder extends ClientSettings.Builder<ConfigSettings, Builder> {
    protected Builder() throws IOException {
      // Delegates to the ClientContext overload; the cast disambiguates the null argument.
      this(((ClientContext) null));
    }
    protected Builder(ClientContext clientContext) {
      super(ConfigServiceV2StubSettings.newBuilder(clientContext));
    }
    protected Builder(ConfigSettings settings) {
      super(settings.getStubSettings().toBuilder());
    }
    protected Builder(ConfigServiceV2StubSettings.Builder stubSettings) {
      super(stubSettings);
    }
    private static Builder createDefault() {
      return new Builder(ConfigServiceV2StubSettings.newBuilder());
    }
    /** Returns the underlying stub settings builder; all accessors below delegate to it. */
    public ConfigServiceV2StubSettings.Builder getStubSettingsBuilder() {
      return ((ConfigServiceV2StubSettings.Builder) getStubSettings());
    }
    /**
     * Applies the given settings updater function to all of the unary API methods in this service.
     *
     * <p>Note: This method does not support applying settings to streaming methods.
     */
    public Builder applyToAllUnaryMethods(
        ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
      super.applyToAllUnaryMethods(
          getStubSettingsBuilder().unaryMethodSettingsBuilders(), settingsUpdater);
      return this;
    }
    /** Returns the builder for the settings used for calls to listBuckets. */
    public PagedCallSettings.Builder<
            ListBucketsRequest, ListBucketsResponse, ListBucketsPagedResponse>
        listBucketsSettings() {
      return getStubSettingsBuilder().listBucketsSettings();
    }
    /** Returns the builder for the settings used for calls to getBucket. */
    public UnaryCallSettings.Builder<GetBucketRequest, LogBucket> getBucketSettings() {
      return getStubSettingsBuilder().getBucketSettings();
    }
    /** Returns the builder for the settings used for calls to createBucket. */
    public UnaryCallSettings.Builder<CreateBucketRequest, LogBucket> createBucketSettings() {
      return getStubSettingsBuilder().createBucketSettings();
    }
    /** Returns the builder for the settings used for calls to updateBucket. */
    public UnaryCallSettings.Builder<UpdateBucketRequest, LogBucket> updateBucketSettings() {
      return getStubSettingsBuilder().updateBucketSettings();
    }
    /** Returns the builder for the settings used for calls to deleteBucket. */
    public UnaryCallSettings.Builder<DeleteBucketRequest, Empty> deleteBucketSettings() {
      return getStubSettingsBuilder().deleteBucketSettings();
    }
    /** Returns the builder for the settings used for calls to undeleteBucket. */
    public UnaryCallSettings.Builder<UndeleteBucketRequest, Empty> undeleteBucketSettings() {
      return getStubSettingsBuilder().undeleteBucketSettings();
    }
    /** Returns the builder for the settings used for calls to listViews. */
    public PagedCallSettings.Builder<ListViewsRequest, ListViewsResponse, ListViewsPagedResponse>
        listViewsSettings() {
      return getStubSettingsBuilder().listViewsSettings();
    }
    /** Returns the builder for the settings used for calls to getView. */
    public UnaryCallSettings.Builder<GetViewRequest, LogView> getViewSettings() {
      return getStubSettingsBuilder().getViewSettings();
    }
    /** Returns the builder for the settings used for calls to createView. */
    public UnaryCallSettings.Builder<CreateViewRequest, LogView> createViewSettings() {
      return getStubSettingsBuilder().createViewSettings();
    }
    /** Returns the builder for the settings used for calls to updateView. */
    public UnaryCallSettings.Builder<UpdateViewRequest, LogView> updateViewSettings() {
      return getStubSettingsBuilder().updateViewSettings();
    }
    /** Returns the builder for the settings used for calls to deleteView. */
    public UnaryCallSettings.Builder<DeleteViewRequest, Empty> deleteViewSettings() {
      return getStubSettingsBuilder().deleteViewSettings();
    }
    /** Returns the builder for the settings used for calls to listSinks. */
    public PagedCallSettings.Builder<ListSinksRequest, ListSinksResponse, ListSinksPagedResponse>
        listSinksSettings() {
      return getStubSettingsBuilder().listSinksSettings();
    }
    /** Returns the builder for the settings used for calls to getSink. */
    public UnaryCallSettings.Builder<GetSinkRequest, LogSink> getSinkSettings() {
      return getStubSettingsBuilder().getSinkSettings();
    }
    /** Returns the builder for the settings used for calls to createSink. */
    public UnaryCallSettings.Builder<CreateSinkRequest, LogSink> createSinkSettings() {
      return getStubSettingsBuilder().createSinkSettings();
    }
    /** Returns the builder for the settings used for calls to updateSink. */
    public UnaryCallSettings.Builder<UpdateSinkRequest, LogSink> updateSinkSettings() {
      return getStubSettingsBuilder().updateSinkSettings();
    }
    /** Returns the builder for the settings used for calls to deleteSink. */
    public UnaryCallSettings.Builder<DeleteSinkRequest, Empty> deleteSinkSettings() {
      return getStubSettingsBuilder().deleteSinkSettings();
    }
    /** Returns the builder for the settings used for calls to listExclusions. */
    public PagedCallSettings.Builder<
            ListExclusionsRequest, ListExclusionsResponse, ListExclusionsPagedResponse>
        listExclusionsSettings() {
      return getStubSettingsBuilder().listExclusionsSettings();
    }
    /** Returns the builder for the settings used for calls to getExclusion. */
    public UnaryCallSettings.Builder<GetExclusionRequest, LogExclusion> getExclusionSettings() {
      return getStubSettingsBuilder().getExclusionSettings();
    }
    /** Returns the builder for the settings used for calls to createExclusion. */
    public UnaryCallSettings.Builder<CreateExclusionRequest, LogExclusion>
        createExclusionSettings() {
      return getStubSettingsBuilder().createExclusionSettings();
    }
    /** Returns the builder for the settings used for calls to updateExclusion. */
    public UnaryCallSettings.Builder<UpdateExclusionRequest, LogExclusion>
        updateExclusionSettings() {
      return getStubSettingsBuilder().updateExclusionSettings();
    }
    /** Returns the builder for the settings used for calls to deleteExclusion. */
    public UnaryCallSettings.Builder<DeleteExclusionRequest, Empty> deleteExclusionSettings() {
      return getStubSettingsBuilder().deleteExclusionSettings();
    }
    /** Returns the builder for the settings used for calls to getCmekSettings. */
    public UnaryCallSettings.Builder<GetCmekSettingsRequest, CmekSettings>
        getCmekSettingsSettings() {
      return getStubSettingsBuilder().getCmekSettingsSettings();
    }
    /** Returns the builder for the settings used for calls to updateCmekSettings. */
    public UnaryCallSettings.Builder<UpdateCmekSettingsRequest, CmekSettings>
        updateCmekSettingsSettings() {
      return getStubSettingsBuilder().updateCmekSettingsSettings();
    }
    /** Returns the builder for the settings used for calls to getSettings. */
    public UnaryCallSettings.Builder<GetSettingsRequest, Settings> getSettingsSettings() {
      return getStubSettingsBuilder().getSettingsSettings();
    }
    /** Returns the builder for the settings used for calls to updateSettings. */
    public UnaryCallSettings.Builder<UpdateSettingsRequest, Settings> updateSettingsSettings() {
      return getStubSettingsBuilder().updateSettingsSettings();
    }
    /** Returns the builder for the settings used for calls to copyLogEntries. */
    public UnaryCallSettings.Builder<CopyLogEntriesRequest, Operation> copyLogEntriesSettings() {
      return getStubSettingsBuilder().copyLogEntriesSettings();
    }
    /** Returns the builder for the settings used for calls to copyLogEntries. */
    public OperationCallSettings.Builder<
            CopyLogEntriesRequest, CopyLogEntriesResponse, CopyLogEntriesMetadata>
        copyLogEntriesOperationSettings() {
      return getStubSettingsBuilder().copyLogEntriesOperationSettings();
    }
    @Override
    public ConfigSettings build() throws IOException {
      return new ConfigSettings(this);
    }
  }
}
| |
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.query2.output;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Ordering;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.collect.CollectionUtils;
import com.google.devtools.build.lib.collect.EquivalenceRelation;
import com.google.devtools.build.lib.graph.Digraph;
import com.google.devtools.build.lib.graph.DotOutputVisitor;
import com.google.devtools.build.lib.graph.LabelSerializer;
import com.google.devtools.build.lib.graph.Node;
import com.google.devtools.build.lib.packages.Target;
import com.google.devtools.build.lib.query2.output.QueryOptions.OrderOutput;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
/**
* An output formatter that prints the result as factored graph in AT&T
* GraphViz format.
*/
class GraphOutputFormatter extends OutputFormatter {
  // Character budget for a factored node's label before truncating with
  // "...and N more items"; -1 means no limit. Refreshed from QueryOptions per output() call.
  private int graphNodeStringLimit;
  // Max number of condition labels rendered on one edge; 0 disables edge labels,
  // -1 means no limit. Refreshed from QueryOptions per output() call.
  private int graphConditionalEdgesLimit;
  @Override
  public String getName() {
    return "graph";
  }
  @Override
  public void output(
      QueryOptions options,
      Digraph<Target> result,
      OutputStream out,
      AspectResolver aspectProvider,
      ConditionalEdges conditionalEdges) {
    this.graphNodeStringLimit = options.graphNodeStringLimit;
    this.graphConditionalEdgesLimit = options.graphConditionalEdgesLimit;
    // Only a fully ordered output (OrderOutput.FULL) requests deterministic label sorting.
    boolean sortLabels = options.orderOutput == OrderOutput.FULL;
    if (options.graphFactored) {
      outputFactored(
          result,
          new PrintWriter(new OutputStreamWriter(out, StandardCharsets.UTF_8)),
          sortLabels,
          conditionalEdges);
    } else {
      outputUnfactored(
          result,
          new PrintWriter(new OutputStreamWriter(out, StandardCharsets.UTF_8)),
          sortLabels,
          options,
          conditionalEdges);
    }
  }
  // Emits the graph one node per target, without merging topologically-equivalent nodes.
  private void outputUnfactored(
      Digraph<Target> result,
      PrintWriter out,
      boolean sortLabels,
      final QueryOptions options,
      ConditionalEdges conditionalEdges) {
    result.visitNodesBeforeEdges(
        new DotOutputVisitor<Target>(out, LABEL_STRINGIFIER) {
          @Override
          public void beginVisit() {
            super.beginVisit();
            // TODO(bazel-team): (2009) make this the default in Digraph.
            out.printf("  node [shape=box];%s", options.getLineTerminator());
          }
          @Override
          public void visitEdge(Node<Target> lhs, Node<Target> rhs) {
            super.visitEdge(lhs, rhs);
            // Annotate the edge with its select() conditions, if any and if enabled.
            String outputLabel =
                getConditionsGraphLabel(
                    ImmutableSet.of(lhs), ImmutableSet.of(rhs), conditionalEdges);
            if (!outputLabel.isEmpty()) {
              out.printf("  [label=\"%s\"];\n", outputLabel);
            }
          }
        },
        sortLabels ? new TargetOrdering() : null);
  }
  // Orders nodes by their target label (EXTRACT_NODE_LABEL is inherited from the formatter
  // infrastructure); ITERABLE_COMPARATOR extends that order lexicographically to node sets.
  private static final Ordering<Node<Target>> NODE_COMPARATOR =
      Ordering.from(new TargetOrdering()).onResultOf(EXTRACT_NODE_LABEL);
  private static final Comparator<Iterable<Node<Target>>> ITERABLE_COMPARATOR =
      NODE_COMPARATOR.lexicographical();
  /**
   * Given {@code collectionOfUnorderedSets}, a collection of sets of nodes, returns a collection
   * of sets with the same elements as {@code collectionOfUnorderedSets} but with a stable
   * iteration order within each set given by the target ordering, and the collection ordered by the
   * same induced order.
   */
  private static Collection<Set<Node<Target>>> orderPartition(
      Collection<Set<Node<Target>>> collectionOfUnorderedSets) {
    List<Set<Node<Target>>> result = new ArrayList<>();
    for (Set<Node<Target>> part : collectionOfUnorderedSets) {
      List<Node<Target>> toSort = new ArrayList<>(part);
      Collections.sort(toSort, NODE_COMPARATOR);
      // ImmutableSet preserves the sorted insertion order on iteration.
      result.add(ImmutableSet.copyOf(toSort));
    }
    Collections.sort(result, ITERABLE_COMPARATOR);
    return result;
  }
  // Emits the graph with topologically-equivalent nodes merged into a single "factored" node
  // whose label concatenates the member labels (subject to graphNodeStringLimit).
  private void outputFactored(
      Digraph<Target> result,
      PrintWriter out,
      final boolean sortLabels,
      ConditionalEdges conditionalEdges) {
    EquivalenceRelation<Node<Target>> equivalenceRelation = createEquivalenceRelation();
    Collection<Set<Node<Target>>> partition =
        CollectionUtils.partition(result.getNodes(), equivalenceRelation);
    if (sortLabels) {
      partition = orderPartition(partition);
    }
    Digraph<Set<Node<Target>>> factoredGraph = result.createImageUnderPartition(partition);
    // Concatenate the labels of all topologically-equivalent nodes.
    LabelSerializer<Set<Node<Target>>> labelSerializer = new LabelSerializer<Set<Node<Target>>>() {
      @Override
      public String serialize(Node<Set<Node<Target>>> node) {
        // Reserve room for the worst-case truncation suffix so the final label
        // still fits within graphNodeStringLimit.
        int actualLimit = graphNodeStringLimit - RESERVED_LABEL_CHARS;
        boolean firstItem = true;
        StringBuilder buf = new StringBuilder();
        int count = 0;
        for (Node<Target> eqNode : node.getLabel()) {
          String labelString = eqNode.getLabel().getLabel().toString();
          if (!firstItem) {
            buf.append("\\n");
            // Use -1 to denote no limit, as it is easier than trying to pass MAX_INT on the cmdline
            if (graphNodeStringLimit != -1 && (buf.length() + labelString.length() > actualLimit)) {
              buf.append("...and ");
              buf.append(node.getLabel().size() - count);
              buf.append(" more items");
              break;
            }
          }
          buf.append(labelString);
          count++;
          firstItem = false;
        }
        return buf.toString();
      }
    };
    factoredGraph.visitNodesBeforeEdges(
        new DotOutputVisitor<Set<Node<Target>>>(out, labelSerializer) {
          @Override
          public void beginVisit() {
            super.beginVisit();
            // TODO(bazel-team): (2009) make this the default in Digraph.
            out.println("  node [shape=box];");
          }
          @Override
          public void visitEdge(Node<Set<Node<Target>>> lhs, Node<Set<Node<Target>>> rhs) {
            super.visitEdge(lhs, rhs);
            String outputLabel =
                getConditionsGraphLabel(lhs.getLabel(), rhs.getLabel(), conditionalEdges);
            if (!outputLabel.isEmpty()) {
              out.printf("  [label=\"%s\"];\n", outputLabel);
            }
          }
        },
        sortLabels ? ITERABLE_COMPARATOR : null);
  }
  /**
   * Returns an equivalence relation for nodes in the specified graph.
   *
   * <p>Two nodes are considered equal iff they have equal topology (predecessors and successors).
   *
   * TODO(bazel-team): Make this a method of Digraph.
   */
  @SuppressWarnings("ReferenceEquality")
  private static <LABEL> EquivalenceRelation<Node<LABEL>> createEquivalenceRelation() {
    return new EquivalenceRelation<Node<LABEL>>() {
      @Override
      public int compare(Node<LABEL> x, Node<LABEL> y) {
        if (x == y) {
          return 0;
        }
        // Cheap cardinality check before the set comparisons below.
        if (x.numPredecessors() != y.numPredecessors()
            || x.numSuccessors() != y.numSuccessors()) {
          return -1;
        }
        Set<Node<LABEL>> xpred = new HashSet<>(x.getPredecessors());
        Set<Node<LABEL>> ypred = new HashSet<>(y.getPredecessors());
        if (!xpred.equals(ypred)) {
          return -1;
        }
        Set<Node<LABEL>> xsucc = new HashSet<>(x.getSuccessors());
        Set<Node<LABEL>> ysucc = new HashSet<>(y.getSuccessors());
        if (!xsucc.equals(ysucc)) {
          return -1;
        }
        return 0;
      }
    };
  }
  // Builds the "\n"-separated list of condition labels to print on the edge between the
  // lhs and rhs node groups. Returns "" when edge labeling is disabled or no conditions apply.
  private String getConditionsGraphLabel(
      Iterable<Node<Target>> lhs, Iterable<Node<Target>> rhs, ConditionalEdges conditionalEdges) {
    StringBuilder buf = new StringBuilder();
    if (this.graphConditionalEdgesLimit == 0) {
      return buf.toString();
    }
    // Each condition label is printed at most once per edge, even if several
    // (src, dest) target pairs within the groups share it.
    Set<Label> annotatedLabels = new HashSet<>();
    for (Node<Target> src : lhs) {
      Label srcLabel = src.getLabel().getLabel();
      for (Node<Target> dest : rhs) {
        Label destLabel = dest.getLabel().getLabel();
        Optional<Set<Label>> conditions = conditionalEdges.get(srcLabel, destLabel);
        if (conditions.isPresent()) {
          boolean firstItem = true;
          // A limit of k prints k-1 conditions plus a trailing "..."; -1 prints everything.
          int limit =
              (this.graphConditionalEdgesLimit == -1)
                  ? conditions.get().size()
                  : (this.graphConditionalEdgesLimit - 1);
          for (Label conditionLabel : Iterables.limit(conditions.get(), limit)) {
            if (!annotatedLabels.add(conditionLabel)) {
              // duplicate label; skip.
              continue;
            }
            if (!firstItem) {
              buf.append("\\n");
            }
            buf.append(conditionLabel.getCanonicalForm());
            firstItem = false;
          }
          if (conditions.get().size() > limit) {
            buf.append("...");
          }
        }
      }
    }
    return buf.toString();
  }
  // Worst-case length of the truncation suffix appended by the factored label serializer.
  private static final int RESERVED_LABEL_CHARS = "\\n...and 9999999 more items".length();
  private static final LabelSerializer<Target> LABEL_STRINGIFIER = new LabelSerializer<Target>() {
    @Override
    public String serialize(Node<Target> node) {
      return node.getLabel().getLabel().toString();
    }
  };
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.lucene;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import org.apache.lucene.analysis.core.KeywordAnalyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.codecs.DocValuesFormat;
import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.index.*;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.ComplexExplanation;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.SimpleCollector;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TimeLimitingCollector;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopFieldDocs;
import org.apache.lucene.search.TotalHitCountCollector;
import org.apache.lucene.store.*;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.Counter;
import org.apache.lucene.util.Version;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.ElasticsearchIllegalStateException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.index.analysis.AnalyzerScope;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.fielddata.IndexFieldData;
import java.io.IOException;
import java.text.ParseException;
import java.util.*;
import static org.elasticsearch.common.lucene.search.NoopCollector.NOOP_COLLECTOR;
/**
 * Static Lucene helpers used across Elasticsearch: latest codec/format
 * constants, version parsing, reading and pruning {@link SegmentInfos},
 * count/exists search utilities with early termination, and stream
 * (de)serialization of Lucene result objects such as {@link TopDocs},
 * {@link FieldDoc} and {@link Explanation}.
 */
public class Lucene {
// TODO: remove VERSION, and have users use Version.LATEST.
public static final Version VERSION = Version.LATEST;
public static final Version ANALYZER_VERSION = VERSION;
public static final Version QUERYPARSER_VERSION = VERSION;
// Names of the newest format/codec implementations this codebase expects.
public static final String LATEST_DOC_VALUES_FORMAT = "Lucene50";
public static final String LATEST_POSTINGS_FORMAT = "Lucene50";
public static final String LATEST_CODEC = "Lucene50";
static {
// Fail fast (when assertions are enabled) if a Lucene upgrade has
// deprecated the formats still declared as "latest" above.
Deprecated annotation = PostingsFormat.forName(LATEST_POSTINGS_FORMAT).getClass().getAnnotation(Deprecated.class);
assert annotation == null : "PostingsFromat " + LATEST_POSTINGS_FORMAT + " is deprecated" ;
annotation = DocValuesFormat.forName(LATEST_DOC_VALUES_FORMAT).getClass().getAnnotation(Deprecated.class);
assert annotation == null : "DocValuesFormat " + LATEST_DOC_VALUES_FORMAT + " is deprecated" ;
}
// Globally shared analyzers for the two most common analysis modes.
public static final NamedAnalyzer STANDARD_ANALYZER = new NamedAnalyzer("_standard", AnalyzerScope.GLOBAL, new StandardAnalyzer());
public static final NamedAnalyzer KEYWORD_ANALYZER = new NamedAnalyzer("_keyword", AnalyzerScope.GLOBAL, new KeywordAnalyzer());
// Reusable empty-result constants.
public static final ScoreDoc[] EMPTY_SCORE_DOCS = new ScoreDoc[0];
public static final TopDocs EMPTY_TOP_DOCS = new TopDocs(0, EMPTY_SCORE_DOCS, 0.0f);
/**
 * Parses {@code version}, returning {@code defaultVersion} (and logging a
 * warning) when the string is null or cannot be parsed.
 */
@SuppressWarnings("deprecation")
public static Version parseVersion(@Nullable String version, Version defaultVersion, ESLogger logger) {
if (version == null) {
return defaultVersion;
}
try {
return Version.parse(version);
} catch (ParseException e) {
logger.warn("no version match {}, default to {}", version, defaultVersion, e);
return defaultVersion;
}
}
/**
 * Reads the segments infos, failing if it fails to load
 */
public static SegmentInfos readSegmentInfos(Directory directory) throws IOException {
return SegmentInfos.readLatestCommit(directory);
}
/**
 * Returns an iterable that allows to iterate over all files in this segments info
 */
public static Iterable<String> files(SegmentInfos infos) throws IOException {
final List<Collection<String>> list = new ArrayList<>();
// The segments_N file itself belongs to the commit but is not part of any
// per-segment file list, so add it explicitly.
list.add(Collections.singleton(infos.getSegmentsFileName()));
for (SegmentCommitInfo info : infos) {
list.add(info.files());
}
return Iterables.concat(list.toArray(new Collection[0]));
}
/**
 * Reads the segments infos from the given commit, failing if it fails to load
 */
public static SegmentInfos readSegmentInfos(IndexCommit commit, Directory directory) throws IOException {
return SegmentInfos.readCommit(directory, commit.getSegmentsFileName());
}
/**
 * Reads the segments infos from the given segments file name, failing if it fails to load
 */
private static SegmentInfos readSegmentInfos(String segmentsFileName, Directory directory) throws IOException {
return SegmentInfos.readCommit(directory, segmentsFileName);
}
/**
 * This method removes all files from the given directory that are not referenced by the given segments file.
 * This method will open an IndexWriter and relies on index file deleter to remove all unreferenced files. Segment files
 * that are newer than the given segments file are removed forcefully to prevent problems with IndexWriter opening a potentially
 * broken commit point / leftover.
 * <b>Note:</b> this method will fail if there is another IndexWriter open on the given directory. This method will also acquire
 * a write lock from the directory while pruning unused files. This method expects an existing index in the given directory that has
 * the given segments file.
 */
public static SegmentInfos pruneUnreferencedFiles(String segmentsFileName, Directory directory) throws IOException {
final SegmentInfos si = readSegmentInfos(segmentsFileName, directory);
try (Lock writeLock = directory.makeLock(IndexWriter.WRITE_LOCK_NAME)) {
if (!writeLock.obtain(IndexWriterConfig.getDefaultWriteLockTimeout())) { // obtain write lock
throw new LockObtainFailedException("Index locked for write: " + writeLock);
}
int foundSegmentFiles = 0;
for (final String file : directory.listAll()) {
/**
 * we could also use a deletion policy here but in the case of snapshot and restore
 * sometimes we restore an index and override files that were referenced by a "future"
 * commit. If such a commit is opened by the IW it would likely throw a corrupted index exception
 * since checksums don't match anymore. that's why we prune the name here directly.
 * We also want the caller to know if we were not able to remove a segments_N file.
 */
if (file.startsWith(IndexFileNames.SEGMENTS) || file.equals(IndexFileNames.OLD_SEGMENTS_GEN)) {
foundSegmentFiles++;
if (file.equals(si.getSegmentsFileName()) == false) {
directory.deleteFile(file); // remove all segment_N files except of the one we wanna keep
}
}
}
assert SegmentInfos.getLastCommitSegmentsFileName(directory).equals(segmentsFileName);
if (foundSegmentFiles == 0) {
throw new IllegalStateException("no commit found in the directory");
}
}
// Open an IndexWriter on the surviving commit point; closing it triggers
// IndexFileDeleter, which removes every file not referenced by that commit.
final CommitPoint cp = new CommitPoint(si, directory);
try (IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(Lucene.STANDARD_ANALYZER)
.setIndexCommit(cp)
.setCommitOnClose(false)
.setMergePolicy(NoMergePolicy.INSTANCE)
.setOpenMode(IndexWriterConfig.OpenMode.APPEND))) {
// do nothing and close; this will kick off IndexFileDeleter which will remove all pending files
}
return si;
}
/**
 * This method removes all lucene files from the given directory. It will first try to delete all commit points / segments
 * files to ensure broken commits or corrupted indices will not be opened in the future. If any of the segment files can't be deleted
 * this operation fails.
 */
public static void cleanLuceneIndex(Directory directory) throws IOException {
try (Lock writeLock = directory.makeLock(IndexWriter.WRITE_LOCK_NAME)) {
if (!writeLock.obtain(IndexWriterConfig.getDefaultWriteLockTimeout())) { // obtain write lock
throw new LockObtainFailedException("Index locked for write: " + writeLock);
}
for (final String file : directory.listAll()) {
if (file.startsWith(IndexFileNames.SEGMENTS) || file.equals(IndexFileNames.OLD_SEGMENTS_GEN)) {
directory.deleteFile(file); // remove all segment_N files
}
}
}
try (IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(Lucene.STANDARD_ANALYZER)
.setMergePolicy(NoMergePolicy.INSTANCE) // no merges
.setCommitOnClose(false) // no commits
.setOpenMode(IndexWriterConfig.OpenMode.CREATE))) // force creation - don't append...
{
// do nothing and close; this will kick off IndexFileDeleter which will remove all pending files
}
}
/**
 * Verifies the latest segments_N file in {@code directory}: checks the codec
 * header, and for format &gt;= VERSION_48 also validates the full-file
 * checksum. Pre-4.0 (legacy) segments files receive no further validation
 * here.
 */
public static void checkSegmentInfoIntegrity(final Directory directory) throws IOException {
new SegmentInfos.FindSegmentsFile(directory) {
@Override
protected Object doBody(String segmentFileName) throws IOException {
try (IndexInput input = directory.openInput(segmentFileName, IOContext.READ)) {
final int format = input.readInt();
final int actualFormat;
if (format == CodecUtil.CODEC_MAGIC) {
// 4.0+
actualFormat = CodecUtil.checkHeaderNoMagic(input, "segments", SegmentInfos.VERSION_40, Integer.MAX_VALUE);
if (actualFormat >= SegmentInfos.VERSION_48) {
CodecUtil.checksumEntireFile(input);
}
}
// legacy.... (no further validation for pre-4.0 formats)
}
return null;
}
}.run();
}
/**
 * Returns the number of documents matching {@code query}. Scores are not
 * needed, so the query is wrapped in a {@link ConstantScoreQuery}.
 */
public static long count(IndexSearcher searcher, Query query) throws IOException {
TotalHitCountCollector countCollector = new TotalHitCountCollector();
query = wrapCountQuery(query);
searcher.search(query, countCollector);
return countCollector.getTotalHits();
}
/**
 * Performs a count on the <code>searcher</code> for <code>query</code>. Terminates
 * early when the count has reached <code>terminateAfter</code>
 */
public static long count(IndexSearcher searcher, Query query, int terminateAfterCount) throws IOException {
EarlyTerminatingCollector countCollector = createCountBasedEarlyTerminatingCollector(terminateAfterCount);
countWithEarlyTermination(searcher, query, countCollector);
return countCollector.count();
}
/**
 * Creates count based early termination collector with a threshold of <code>maxCountHits</code>
 */
public final static EarlyTerminatingCollector createCountBasedEarlyTerminatingCollector(int maxCountHits) {
return new EarlyTerminatingCollector(maxCountHits);
}
/**
 * Wraps <code>delegate</code> with count based early termination collector with a threshold of <code>maxCountHits</code>
 */
public final static EarlyTerminatingCollector wrapCountBasedEarlyTerminatingCollector(final Collector delegate, int maxCountHits) {
return new EarlyTerminatingCollector(delegate, maxCountHits);
}
/**
 * Wraps <code>delegate</code> with a time limited collector with a timeout of <code>timeoutInMillis</code>
 */
public final static TimeLimitingCollector wrapTimeLimitingCollector(final Collector delegate, final Counter counter, long timeoutInMillis) {
return new TimeLimitingCollector(delegate, counter, timeoutInMillis);
}
/**
 * Performs an exists (count > 0) query on the <code>searcher</code> for <code>query</code>
 * with <code>filter</code> using the given <code>collector</code>
 *
 * The <code>collector</code> can be instantiated using <code>Lucene.createExistsCollector()</code>
 */
public static boolean exists(IndexSearcher searcher, Query query, Filter filter,
EarlyTerminatingCollector collector) throws IOException {
collector.reset();
countWithEarlyTermination(searcher, filter, query, collector);
return collector.exists();
}
/**
 * Performs an exists (count > 0) query on the <code>searcher</code> for <code>query</code>
 * using the given <code>collector</code>
 *
 * The <code>collector</code> can be instantiated using <code>Lucene.createExistsCollector()</code>
 */
public static boolean exists(IndexSearcher searcher, Query query, EarlyTerminatingCollector collector) throws IOException {
collector.reset();
countWithEarlyTermination(searcher, query, collector);
return collector.exists();
}
/**
 * Calls <code>countWithEarlyTermination(searcher, null, query, collector)</code>
 */
public static boolean countWithEarlyTermination(IndexSearcher searcher, Query query,
EarlyTerminatingCollector collector) throws IOException {
return countWithEarlyTermination(searcher, null, query, collector);
}
/**
 * Performs a count on <code>query</code> and <code>filter</code> with early termination using <code>searcher</code>.
 * The early termination threshold is specified by the provided <code>collector</code>
 *
 * @return <code>true</code> iff the search was cut short by the collector's
 *         early-termination threshold, <code>false</code> if it ran to completion
 */
public static boolean countWithEarlyTermination(IndexSearcher searcher, Filter filter, Query query,
EarlyTerminatingCollector collector) throws IOException {
query = wrapCountQuery(query);
try {
if (filter == null) {
searcher.search(query, collector);
} else {
searcher.search(query, filter, collector);
}
} catch (EarlyTerminationException e) {
// early termination
return true;
}
return false;
}
/**
 * Creates an {@link org.elasticsearch.common.lucene.Lucene.EarlyTerminatingCollector}
 * with a threshold of <code>1</code>
 */
public final static EarlyTerminatingCollector createExistsCollector() {
return createCountBasedEarlyTerminatingCollector(1);
}
private final static Query wrapCountQuery(Query query) {
// we don't need scores, so wrap it in a constant score query
if (!(query instanceof ConstantScoreQuery)) {
query = new ConstantScoreQuery(query);
}
return query;
}
/**
 * Closes the index writer, returning <tt>false</tt> if it failed to close.
 */
public static boolean safeClose(IndexWriter writer) {
if (writer == null) {
return true;
}
try {
writer.close();
return true;
} catch (Throwable e) {
// intentionally swallowed: this is a best-effort close
return false;
}
}
/**
 * Deserializes a {@link TopDocs} written by {@link #writeTopDocs}; the
 * leading boolean selects between the {@link TopFieldDocs} form (with sort
 * fields and field docs) and the plain score-doc form.
 */
public static TopDocs readTopDocs(StreamInput in) throws IOException {
if (in.readBoolean()) {
int totalHits = in.readVInt();
float maxScore = in.readFloat();
SortField[] fields = new SortField[in.readVInt()];
for (int i = 0; i < fields.length; i++) {
String field = null;
if (in.readBoolean()) {
field = in.readString();
}
fields[i] = new SortField(field, readSortType(in), in.readBoolean());
}
FieldDoc[] fieldDocs = new FieldDoc[in.readVInt()];
for (int i = 0; i < fieldDocs.length; i++) {
fieldDocs[i] = readFieldDoc(in);
}
return new TopFieldDocs(totalHits, fieldDocs, fields, maxScore);
} else {
int totalHits = in.readVInt();
float maxScore = in.readFloat();
ScoreDoc[] scoreDocs = new ScoreDoc[in.readVInt()];
for (int i = 0; i < scoreDocs.length; i++) {
scoreDocs[i] = new ScoreDoc(in.readVInt(), in.readFloat());
}
return new TopDocs(totalHits, scoreDocs, maxScore);
}
}
/**
 * Deserializes a {@link FieldDoc}. The per-value byte type tags (0..9) must
 * stay in sync with {@link #writeFieldDoc}.
 */
public static FieldDoc readFieldDoc(StreamInput in) throws IOException {
Comparable[] cFields = new Comparable[in.readVInt()];
for (int j = 0; j < cFields.length; j++) {
byte type = in.readByte();
if (type == 0) {
cFields[j] = null;
} else if (type == 1) {
cFields[j] = in.readString();
} else if (type == 2) {
cFields[j] = in.readInt();
} else if (type == 3) {
cFields[j] = in.readLong();
} else if (type == 4) {
cFields[j] = in.readFloat();
} else if (type == 5) {
cFields[j] = in.readDouble();
} else if (type == 6) {
cFields[j] = in.readByte();
} else if (type == 7) {
cFields[j] = in.readShort();
} else if (type == 8) {
cFields[j] = in.readBoolean();
} else if (type == 9) {
cFields[j] = in.readBytesRef();
} else {
throw new IOException("Can't match type [" + type + "]");
}
}
return new FieldDoc(in.readVInt(), in.readFloat(), cFields);
}
/**
 * Deserializes a plain {@link ScoreDoc} (doc id followed by score).
 */
public static ScoreDoc readScoreDoc(StreamInput in) throws IOException {
return new ScoreDoc(in.readVInt(), in.readFloat());
}
/**
 * Serializes {@code topDocs}, skipping the first {@code from} score docs.
 * The format must stay symmetric with {@link #readTopDocs}.
 */
public static void writeTopDocs(StreamOutput out, TopDocs topDocs, int from) throws IOException {
from = Math.min(from, topDocs.scoreDocs.length);
if (topDocs instanceof TopFieldDocs) {
out.writeBoolean(true);
TopFieldDocs topFieldDocs = (TopFieldDocs) topDocs;
out.writeVInt(topDocs.totalHits);
out.writeFloat(topDocs.getMaxScore());
out.writeVInt(topFieldDocs.fields.length);
for (SortField sortField : topFieldDocs.fields) {
if (sortField.getField() == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
out.writeString(sortField.getField());
}
if (sortField.getComparatorSource() != null) {
writeSortType(out, ((IndexFieldData.XFieldComparatorSource) sortField.getComparatorSource()).reducedType());
} else {
writeSortType(out, sortField.getType());
}
out.writeBoolean(sortField.getReverse());
}
out.writeVInt(topDocs.scoreDocs.length - from);
for (int i = from; i < topFieldDocs.scoreDocs.length; ++i) {
ScoreDoc doc = topFieldDocs.scoreDocs[i];
writeFieldDoc(out, (FieldDoc) doc);
}
} else {
out.writeBoolean(false);
out.writeVInt(topDocs.totalHits);
out.writeFloat(topDocs.getMaxScore());
out.writeVInt(topDocs.scoreDocs.length - from);
for (int i = from; i < topDocs.scoreDocs.length; ++i) {
ScoreDoc doc = topDocs.scoreDocs[i];
writeScoreDoc(out, doc);
}
}
}
/**
 * Serializes a {@link FieldDoc}: each sort value is tagged with a byte type
 * id (0..9) that {@link #readFieldDoc} uses to pick the matching read call.
 */
public static void writeFieldDoc(StreamOutput out, FieldDoc fieldDoc) throws IOException {
out.writeVInt(fieldDoc.fields.length);
for (Object field : fieldDoc.fields) {
if (field == null) {
out.writeByte((byte) 0);
} else {
Class type = field.getClass();
if (type == String.class) {
out.writeByte((byte) 1);
out.writeString((String) field);
} else if (type == Integer.class) {
out.writeByte((byte) 2);
out.writeInt((Integer) field);
} else if (type == Long.class) {
out.writeByte((byte) 3);
out.writeLong((Long) field);
} else if (type == Float.class) {
out.writeByte((byte) 4);
out.writeFloat((Float) field);
} else if (type == Double.class) {
out.writeByte((byte) 5);
out.writeDouble((Double) field);
} else if (type == Byte.class) {
out.writeByte((byte) 6);
out.writeByte((Byte) field);
} else if (type == Short.class) {
out.writeByte((byte) 7);
out.writeShort((Short) field);
} else if (type == Boolean.class) {
out.writeByte((byte) 8);
out.writeBoolean((Boolean) field);
} else if (type == BytesRef.class) {
out.writeByte((byte) 9);
out.writeBytesRef((BytesRef) field);
} else {
throw new IOException("Can't handle sort field value of type [" + type + "]");
}
}
}
out.writeVInt(fieldDoc.doc);
out.writeFloat(fieldDoc.score);
}
/**
 * Serializes a plain {@link ScoreDoc}; rejects subclasses because only the
 * doc id and score are written (subclass state would be silently lost).
 */
public static void writeScoreDoc(StreamOutput out, ScoreDoc scoreDoc) throws IOException {
if (!scoreDoc.getClass().equals(ScoreDoc.class)) {
throw new ElasticsearchIllegalArgumentException("This method can only be used to serialize a ScoreDoc, not a " + scoreDoc.getClass());
}
out.writeVInt(scoreDoc.doc);
out.writeFloat(scoreDoc.score);
}
// LUCENE 4 UPGRADE: We might want to maintain our own ordinal, instead of Lucene's ordinal
// NOTE: ordinal-based encoding requires both sides of the wire to share the
// same SortField.Type enum ordering.
public static SortField.Type readSortType(StreamInput in) throws IOException {
return SortField.Type.values()[in.readVInt()];
}
public static void writeSortType(StreamOutput out, SortField.Type sortType) throws IOException {
out.writeVInt(sortType.ordinal());
}
/**
 * Deserializes an {@link Explanation} written by {@link #writeExplanation};
 * details are read recursively.
 */
public static Explanation readExplanation(StreamInput in) throws IOException {
Explanation explanation;
if (in.readBoolean()) {
Boolean match = in.readOptionalBoolean();
explanation = new ComplexExplanation();
((ComplexExplanation) explanation).setMatch(match);
} else {
explanation = new Explanation();
}
explanation.setValue(in.readFloat());
explanation.setDescription(in.readString());
if (in.readBoolean()) {
int size = in.readVInt();
for (int i = 0; i < size; i++) {
explanation.addDetail(readExplanation(in));
}
}
return explanation;
}
/**
 * Serializes an {@link Explanation}; a leading boolean flags the
 * {@link ComplexExplanation} form, and details are written recursively.
 */
public static void writeExplanation(StreamOutput out, Explanation explanation) throws IOException {
if (explanation instanceof ComplexExplanation) {
out.writeBoolean(true);
out.writeOptionalBoolean(((ComplexExplanation) explanation).getMatch());
} else {
out.writeBoolean(false);
}
out.writeFloat(explanation.getValue());
out.writeString(explanation.getDescription());
Explanation[] subExplanations = explanation.getDetails();
if (subExplanations == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
out.writeVInt(subExplanations.length);
for (Explanation subExp : subExplanations) {
writeExplanation(out, subExp);
}
}
}
/**
 * This exception is thrown when {@link org.elasticsearch.common.lucene.Lucene.EarlyTerminatingCollector}
 * reaches early termination
 * */
public final static class EarlyTerminationException extends ElasticsearchException {
public EarlyTerminationException(String msg) {
super(msg);
}
}
/**
 * A collector that terminates early by throwing {@link org.elasticsearch.common.lucene.Lucene.EarlyTerminationException}
 * when count of matched documents has reached <code>maxCountHits</code>
 */
public final static class EarlyTerminatingCollector extends SimpleCollector {
private final int maxCountHits; // threshold at which collection aborts
private final Collector delegate; // receives every doc before the threshold check
private int count = 0; // docs collected so far (across segments)
private LeafCollector leafCollector;
EarlyTerminatingCollector(int maxCountHits) {
this.maxCountHits = maxCountHits;
this.delegate = NOOP_COLLECTOR;
}
EarlyTerminatingCollector(final Collector delegate, int maxCountHits) {
this.maxCountHits = maxCountHits;
this.delegate = (delegate == null) ? NOOP_COLLECTOR : delegate;
}
// Resets the running count so the collector can be reused across searches.
public void reset() {
count = 0;
}
public int count() {
return count;
}
public boolean exists() {
return count > 0;
}
@Override
public void setScorer(Scorer scorer) throws IOException {
leafCollector.setScorer(scorer);
}
@Override
public void collect(int doc) throws IOException {
// Delegate first so the wrapped collector sees the doc, then abort the
// whole search once the threshold is reached.
leafCollector.collect(doc);
if (++count >= maxCountHits) {
throw new EarlyTerminationException("early termination [CountBased]");
}
}
@Override
public void doSetNextReader(LeafReaderContext atomicReaderContext) throws IOException {
leafCollector = delegate.getLeafCollector(atomicReaderContext);
}
@Override
public boolean needsScores() {
return delegate.needsScores();
}
}
// Utility class: no instances.
private Lucene() {
}
/**
 * Returns <tt>true</tt> iff the given directory contains a Lucene index.
 */
public static final boolean indexExists(final Directory directory) throws IOException {
return DirectoryReader.indexExists(directory);
}
/**
 * Returns <tt>true</tt> iff the given exception or
 * one of it's causes is an instance of {@link CorruptIndexException},
 * {@link IndexFormatTooOldException}, or {@link IndexFormatTooNewException} otherwise <tt>false</tt>.
 */
public static boolean isCorruptionException(Throwable t) {
return ExceptionsHelper.unwrapCorruption(t) != null;
}
/**
 * Parses the version string leniently and returns the default value if the given string is null or empty
 */
public static Version parseVersionLenient(String toParse, Version defaultValue) {
return LenientParser.parse(toParse, defaultValue);
}
private static final class LenientParser {
public static Version parse(String toParse, Version defaultValue) {
if (Strings.hasLength(toParse)) {
try {
return Version.parseLeniently(toParse);
} catch (ParseException e) {
// pass to default
}
}
return defaultValue;
}
}
/**
 * Return a Scorer that throws an ElasticsearchIllegalStateException
 * on all operations with the given message.
 */
public static Scorer illegalScorer(final String message) {
return new Scorer(null) {
@Override
public float score() throws IOException {
throw new ElasticsearchIllegalStateException(message);
}
@Override
public int freq() throws IOException {
throw new ElasticsearchIllegalStateException(message);
}
@Override
public int advance(int arg0) throws IOException {
throw new ElasticsearchIllegalStateException(message);
}
@Override
public long cost() {
throw new ElasticsearchIllegalStateException(message);
}
@Override
public int docID() {
throw new ElasticsearchIllegalStateException(message);
}
@Override
public int nextDoc() throws IOException {
throw new ElasticsearchIllegalStateException(message);
}
};
}
/**
 * A read-only {@link IndexCommit} snapshot over a fixed {@link SegmentInfos};
 * used by {@link #pruneUnreferencedFiles} to pin the commit that must
 * survive. {@link #delete()} is unsupported.
 */
private static final class CommitPoint extends IndexCommit {
private String segmentsFileName;
private final Collection<String> files;
private final Directory dir;
private final long generation;
private final Map<String,String> userData;
private final int segmentCount;
private CommitPoint(SegmentInfos infos, Directory dir) throws IOException {
segmentsFileName = infos.getSegmentsFileName();
this.dir = dir;
userData = infos.getUserData();
// Snapshot the file set eagerly; the commit may be pruned later.
files = Collections.unmodifiableCollection(infos.files(dir, true));
generation = infos.getGeneration();
segmentCount = infos.size();
}
@Override
public String toString() {
return "DirectoryReader.ReaderCommit(" + segmentsFileName + ")";
}
@Override
public int getSegmentCount() {
return segmentCount;
}
@Override
public String getSegmentsFileName() {
return segmentsFileName;
}
@Override
public Collection<String> getFileNames() {
return files;
}
@Override
public Directory getDirectory() {
return dir;
}
@Override
public long getGeneration() {
return generation;
}
@Override
public boolean isDeleted() {
return false;
}
@Override
public Map<String,String> getUserData() {
return userData;
}
@Override
public void delete() {
throw new UnsupportedOperationException("This IndexCommit does not support deletions");
}
}
}
| |
/*
* Copyright (c) 2016 LibreTasks - https://github.com/biotinker/LibreTasks
*
* This file is free software: you may copy, redistribute and/or modify it
* under the terms of the GNU General Public License as published by the
* Free Software Foundation, either version 3 of the License, or (at your
* option) any later version.
*
* This file is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
* This file incorporates work covered by the following copyright and
* permission notice:
/*******************************************************************************
* Copyright 2010 Omnidroid - http://code.google.com/p/omnidroid
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package libretasks.app.view.simple;
import java.util.ArrayList;
import java.util.HashMap;
import android.content.Context;
import android.graphics.Typeface;
import android.view.Gravity;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewGroup.LayoutParams;
import android.widget.AbsListView;
import android.widget.BaseAdapter;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ListView;
import android.widget.TextView;
import libretasks.app.R;
import libretasks.app.view.simple.model.ModelEvent;
import libretasks.app.view.simple.model.ModelItem;
import libretasks.app.view.simple.model.ModelRuleAction;
import libretasks.app.view.simple.model.ModelRuleFilter;
import libretasks.app.view.simple.model.Rule;
import libretasks.app.view.simple.model.RuleNode;
/**
 * Our rule has a tree hierarchy to it, we want to display it as a tree in our UI too. Rendering as
* a pure tree would be slow, so we have an internal representation of the rule tree as a flat
* list.
*
* Android has a hierarchical list-tree widget, but it only handles a depth of one (1), while we
* could have unlimited depth in our application. We could nest several list-trees inside one
* another, but this might be overkill for our purposes. We also will probably want to custom
* render the tree links depending on AND/OR relationship, which we couldn't do using the
* built-in list-tree widget.
*
* On each add/delete/replace, the whole dataset is iterated to build the flat-list representation.
* This would be awful for large datasets, but we'll probably never have more than twenty items.
*
* We can definitely improve the way we store these items if we want, for now it's ok with
* development and easy to understand.
*/
public class AdapterRule extends BaseAdapter {
// Host context used to construct the per-row views.
private Context context;
// The list view this adapter feeds; used to read and set the checked row.
private ListView listView;
/** The rule we're supposed to be rendering. */
private Rule rule;
/**
 * This is a 'flat' representation of our rule tree. The key is the integer index of a leaf, as
 * if you were counting from the top of the tree.
 */
private HashMap<Integer, NodeWrapper> flat;
/**
 * Creates an adapter bound to the given context and list view. The flat
 * map starts empty and is rebuilt whenever a rule is (re)set.
 */
public AdapterRule(Context context, ListView listView) {
  this.flat = new HashMap<Integer, NodeWrapper>();
  this.context = context;
  this.listView = listView;
}
/**
 * Returns the number of rows: one per node in the flattened rule tree.
 */
@Override
public int getCount() {
  return flat.size();
}
/**
 * Returns the model item backing the row at {@code position}.
 * Assumes {@code position} is a valid key in the flat map (as supplied by
 * the ListView); an invalid position would NPE on the map lookup.
 */
@Override
public ModelItem getItem(int position) {
  return flat.get(position).getNode().getItem();
}
/**
 * Returns the wrapper (node plus its branch-image list) for the row at the
 * given flat-list index.
 */
public NodeWrapper getNodeWrapper(int position) {
  final NodeWrapper wrapper = flat.get(position);
  return wrapper;
}
/**
 * Row ids are simply the flat-list positions; they are not stable across
 * rebuilds of the flat map.
 */
@Override
public long getItemId(int position) {
  return position;
}
/**
 * Generates a single item in the listview tree widget: a horizontal row of
 * tree-branch images (one per ancestor level), the item's icon, and its
 * short description.
 *
 * NOTE(review): convertView is ignored, so every row is rebuilt on each
 * call; acceptable for the small rule trees this adapter targets.
 */
@Override
public View getView(int position, View convertView, ViewGroup parent) {
  NodeWrapper it = getNodeWrapper(position);
  // Row container.
  LinearLayout ll = new LinearLayout(context);
  ll.setLayoutParams(new AbsListView.LayoutParams(LayoutParams.FILL_PARENT,
    LayoutParams.FILL_PARENT));
  ll.setMinimumHeight(50);
  ll.setOrientation(LinearLayout.HORIZONTAL);
  ll.setGravity(Gravity.CENTER_VERTICAL);
  // The node's icon; highlighted when this row is the checked one.
  ImageView iv = new ImageView(context);
  iv.setImageResource(it.getNode().getItem().getIconResId());
  iv.setAdjustViewBounds(true);
  iv.setLayoutParams(new AbsListView.LayoutParams(LayoutParams.WRAP_CONTENT,
    LayoutParams.WRAP_CONTENT));
  if (listView.getCheckedItemPosition() == position) {
    iv.setBackgroundResource(R.drawable.icon_hilight);
  }
  // The node's short description.
  TextView tv = new TextView(context);
  tv.setText(it.getNode().getItem().getDescriptionShort());
  tv.setLayoutParams(new AbsListView.LayoutParams(LayoutParams.FILL_PARENT,
    LayoutParams.FILL_PARENT));
  tv.setGravity(Gravity.CENTER_VERTICAL);
  tv.setPadding(10, 0, 0, 0);
  tv.setTextSize(14.0f);
  tv.setTypeface(Typeface.defaultFromStyle(Typeface.BOLD));
  tv.setTextColor(context.getResources().getColor(R.color.list_element_text));
  tv.setMinHeight(46);
  // This is where we figure out which tree branch graphics to stack
  // to the left of a node to give the appearance of a real tree widget.
  ArrayList<Integer> branches = it.getBranches();
  for (int i = 0; i < branches.size(); i++) {
    int imageResourceId;
    if (i == branches.size() - 1) {
      // Innermost level: use the branch image recorded for this node.
      imageResourceId = branches.get(i).intValue();
    } else {
      // Ancestor levels: a "last child" elbow becomes empty space, any
      // other branch becomes a straight vertical pipe.
      if (branches.get(i).intValue() == R.drawable.treebranch_child_end) {
        // empty png
        imageResourceId = R.drawable.treebranch_parent_empty;
      } else {
        // straight pipe png
        imageResourceId = R.drawable.treebranch_parent;
      }
    }
    ImageView ivBranch = new ImageView(context);
    ivBranch.setImageResource(imageResourceId);
    ivBranch.setAdjustViewBounds(true);
    ivBranch.setLayoutParams(new AbsListView.LayoutParams(LayoutParams.WRAP_CONTENT,
      LayoutParams.WRAP_CONTENT));
    ll.addView(ivBranch);
  }
  ll.addView(iv);
  ll.addView(tv);
  return ll;
}
/**
 * Replaces the rule being rendered and refreshes the list view.
 */
public void setRule(Rule rule) {
  this.rule = rule;
  // Rebuild the flat position->node map from the new tree, then redraw.
  this.flat = new TreeToFlatArray().convert(rule.getRootNode());
  notifyDataSetChanged();
}
/**
 * Here we add the item to the rule, then refresh our UI view of it. We want to force actions to
 * go to the end of the tree, filters in the middle, so things are more organized for the
 * end-user.
 *
 * @param position flat-list index of the intended parent, or a negative value to set the
 *                 root event
 * @param item the event/filter/action to insert; must be a ModelEvent when position < 0
 */
public void addItemToParentPosition(int position, ModelItem item) {
// Add the item.
int positionOld = position;
int positionNew;
if (position < 0) {
if (item instanceof ModelEvent) {
rule.setRootEvent((ModelEvent) item);
positionNew = 0;
} else {
throw new IllegalArgumentException(
"Somehow you added a non-event item as the root element!");
}
} else {
if (item instanceof ModelRuleFilter) {
// If the user is adding a filter to the root event, we want to make
// sure it gets added as a child before any actions.
RuleNode nodeParent = getNodeWrapper(position).getNode();
if (position == 0 && rule.getFirstActionPosition() > -1) {
int insertionIndex = rule.getFirstActionPosition();
nodeParent.addChild(item, insertionIndex);
positionNew = insertionIndex;
} else {
// Either our parent is another filter, or there are just no
// actions added to the root event yet.
// NOTE(review): this assumes the new child's flat position equals the
// parent's position plus its child count, which only holds when the
// existing children have no descendants — confirm against the
// TreeToFlatArray traversal order.
nodeParent.addChild(item);
positionNew = position + nodeParent.getChildren().size();
}
} else if (item instanceof ModelRuleAction) {
// Force adding of actions as siblings directly under the root event.
RuleNode nodeParent = getNodeWrapper(0).getNode();
nodeParent.addChild(item);
// The new action lands at the very end of the flattened list; flat still
// holds the pre-insert contents, so its size is the new last index.
positionNew = flat.size();
} else {
throw new IllegalArgumentException("Couldn't add unknown item type to node!");
}
}
// Flatten the rule again, we can improve this later if we need to.
flat = (new TreeToFlatArray()).convert(rule.getRootNode());
// Notify that we need a redraw.
notifyDataSetChanged();
// Auto-select the item we just added.
if (positionOld > -1) {
listView.setItemChecked(positionOld, false);
}
listView.setItemChecked(positionNew, true);
}
/**
 * Removes the node at the given flat-list position from the rule tree and
 * refreshes the view. Position 0 (the root event) may never be removed.
 */
public void removeItem(int position) {
  // Not allowed to remove the root event!
  if (position < 1) {
    throw new IllegalArgumentException(
      "The user shouldn't be able to remove the 0th root event item from the list view!");
  }
  // Detach the node from its parent in the rule tree.
  RuleNode child = flat.get(position).getNode();
  child.getParent().removeChild(child);
  // Rebuild the flattened representation and redraw.
  flat = new TreeToFlatArray().convert(rule.getRootNode());
  notifyDataSetChanged();
}
/**
 * Swaps the model item at the given position in place; the tree shape is
 * unchanged, so no re-flatten is needed. The root event cannot be replaced.
 */
public void replaceItem(int position, ModelItem item) {
  // Not allowed to modify the root event!
  if (position < 1) {
    throw new IllegalArgumentException(
      "The user should not be allowed to replace the 0th/root item in the list!");
  }
  flat.get(position).getNode().setItem(item);
  notifyDataSetChanged();
}
public void removeAllFiltersAndActions() {
rule.getRootNode().removeAllChildren();
}
/**
* Converts our <code>Rule</code> node elements (which are in a tree form) into a flat
* <code>HashMap</code> for fast drawing. Since adds and deletes aren't likely to happen often,
* it's faster to maintain this form rather than searching throughout the tree struct every time
* we need to draw an element.
*
* As the tree is converted into a flat HashMap, we also generate which thumbnail images should be
* shown next to each node in the tree UI widget.
*/
private static class TreeToFlatArray {
private int idAssigner;
public HashMap<Integer, NodeWrapper> convert(RuleNode nodeRoot) {
idAssigner = 0;
HashMap<Integer, NodeWrapper> result = new HashMap<Integer, NodeWrapper>();
flatten(result, nodeRoot, new ArrayList<Integer>());
return result;
}
private void flatten(HashMap<Integer, NodeWrapper> result, RuleNode node,
ArrayList<Integer> branches) {
result.put(idAssigner++, new NodeWrapper(node, branches));
for (int i = 0; i < node.getChildren().size(); i++) {
ArrayList<Integer> branchesChild = new ArrayList<Integer>();
for (int j = 0; j < branches.size(); j++) {
branchesChild.add(new Integer(branches.get(j).intValue()));
}
if (i == node.getChildren().size() - 1) {
// last child.
branchesChild.add(new Integer(R.drawable.treebranch_child_end));
} else {
// sibling child.
branchesChild.add(new Integer(R.drawable.treebranch_child));
}
flatten(result, node.getChildren().get(i), branchesChild);
}
}
}
/**
* Simply bundles a <code>RuleNode</code> with its tree-image int array for display in the UI.
*/
public static class NodeWrapper {
private RuleNode node;
private ArrayList<Integer> branches;
public NodeWrapper(RuleNode node, ArrayList<Integer> branches) {
this.node = node;
this.branches = branches;
}
public RuleNode getNode() {
return node;
}
public ArrayList<Integer> getBranches() {
return branches;
}
}
}
| |
/**
* Licensed to the Austrian Association for Software Tool Integration (AASTI)
* under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright
* ownership. The AASTI licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.openengsb.parser.csharp.reader;
import java.io.IOException;
import java.lang.reflect.Method;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Vector;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPathExpressionException;
import org.openengsb.parser.csharp.structure.CClass;
import org.openengsb.parser.csharp.structure.CEnum;
import org.openengsb.parser.csharp.structure.CInterface;
import org.openengsb.parser.csharp.structure.CMethod;
import org.openengsb.parser.csharp.structure.CParameter;
import org.openengsb.parser.csharp.structure.CParameterizedType;
import org.openengsb.parser.csharp.structure.CType;
import org.openengsb.parser.csharp.structure.CTypeEntry;
import org.openengsb.parser.csharp.utils.XmlHelper;
import org.w3c.dom.DOMException;
import org.w3c.dom.Document;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
/**
 * Reads a list of Java class names from an XML configuration file and converts each class,
 * via reflection, into the parser's C# structure model ({@link CType} and friends).
 *
 * Simple types (configured under {@code //simpleTypes/add} or primitives) are registered but
 * not analyzed; all other types have their methods, interfaces and superclass converted too.
 *
 * @author peter
 */
public class ClassReader implements Reader {
    /** Fully qualified names of the classes to parse, read from {@code //typesToParse/add}. */
    protected List<String> classNames = new ArrayList<String>();
    /** Human-readable problems collected during {@link #process()}. */
    protected List<String> errors = new ArrayList<String>();
    /** Cache of already-converted types, keyed by fully qualified class name. */
    protected Map<String, CType<?>> types = new HashMap<String, CType<?>>();
    /** Class loader used to resolve the configured class names in {@link #process()}. */
    protected URLClassLoader loader;

    /* (non-Javadoc)
     * @see org.openengsb.dotnet.parser.reader.Reader#initialize(java.lang.String)
     */
    @Override
    public void initialize(String configFile, URLClassLoader loader) throws IOException {
        Document dom;
        this.loader = loader;
        try {
            dom = XmlHelper.getDocument(configFile);
        } catch (SAXException e) {
            throw new IOException("Couldn't process file " + configFile, e);
        } catch (ParserConfigurationException e) {
            throw new IOException("Couldn't process file " + configFile, e);
        }
        NodeList lst;
        String typeName;
        try {
            // Register the configured simple types up front so they are cached before
            // any complex type analysis references them.
            lst = XmlHelper.executeXPath(dom, "//simpleTypes/add");
            for (int i = 0; i < lst.getLength(); i++) {
                try {
                    typeName = lst.item(i).getTextContent();
                    System.out.println("New simple-type added: " + typeName);
                    createAndSaveType(Class.forName(typeName), true);
                } catch (DOMException e) {
                    // Best effort: report the broken entry and keep processing the rest.
                    e.printStackTrace();
                } catch (ClassNotFoundException e) {
                    // Best effort: report the unresolvable class and keep processing the rest.
                    e.printStackTrace();
                }
            }
        } catch (XPathExpressionException e1) {
            // The XPath is a constant; this indicates a programming error, but we still
            // continue so the typesToParse section below can be read.
            e1.printStackTrace();
        }
        try {
            // Collect the names of the types that process() will convert later.
            lst = XmlHelper.executeXPath(dom, "//typesToParse/add");
            for (int i = 0; i < lst.getLength(); i++) {
                typeName = lst.item(i).getTextContent();
                classNames.add(typeName);
            }
        } catch (XPathExpressionException e) {
            // Same as above: constant XPath, keep going with whatever was collected.
            e.printStackTrace();
        }
    }

    /* (non-Javadoc)
     * @see org.openengsb.dotnet.parser.reader.Reader#process()
     */
    @Override
    public Collection<CType<?>> process() {
        // Debug toggle left in place: when true, only interfaces are converted.
        boolean onlyInterfaces = false;
        errors = new ArrayList<String>();
        for (String name : classNames) {
            Class<?> cls = null;
            try {
                cls = loader.loadClass(name);
            } catch (ClassNotFoundException e) {
                errors.add(name + " couldn't be found in the classpath. (" + e.getMessage() + ")");
            }
            if (cls != null) {
                if (!onlyInterfaces || cls.isInterface()) {
                    createAndSaveType(cls, false);
                }
            }
        }
        return types.values();
    }

    /** @return the problems collected during the last {@link #process()} run. */
    public List<String> getErrors() {
        return errors;
    }

    /**
     * Returns the cached {@link CType} for the given class, creating (and, for complex
     * types, analyzing) it on first use.
     *
     * @param cls the class to convert
     * @param isSimple whether the type should be treated as a simple type; primitives are
     *        always forced to simple
     * @return the cached or newly created type, never null
     */
    public CType<?> createAndSaveType(Class<?> cls, boolean isSimple) {
        String name = cls.getName();
        if (cls.isPrimitive()) {
            isSimple = true;
        }
        CType<?> ret = types.get(name);
        if (ret != null) {
            return ret;
        }
        if (cls.isInterface()) {
            ret = new CInterface(name);
        } else if (cls.isEnum()) {
            ret = new CEnum(name);
        } else {
            ret = new CClass(name);
        }
        ret.setSimpleType(isSimple);
        // Register before analyzing so recursive references back to this type hit the cache
        // instead of looping forever.
        types.put(name, ret);
        if (!isSimple) {
            System.out.println("New complex-type added: " + name);
            analyzeType(ret, cls);
        }
        return ret;
    }

    /**
     * Wraps a reflective {@link Type} into a {@link CParameterizedType}, recursively
     * converting generic type arguments.
     */
    public CParameterizedType createParametrizedType(Type type) {
        CParameterizedType ret = new CParameterizedType();
        if (type instanceof ParameterizedType) {
            ParameterizedType paramType = (ParameterizedType) type;
            List<CParameterizedType> genericTypes = new ArrayList<CParameterizedType>();
            ret.setType(createAndSaveType((Class<?>) paramType.getRawType(), false));
            for (Type t : paramType.getActualTypeArguments()) {
                genericTypes.add(createParametrizedType(t));
            }
            ret.setGenericTypes(genericTypes);
        } else if (type instanceof Class<?>) {
            ret.setType(createAndSaveType((Class<?>) type, false));
        } else {
            // Other Type kinds (type variables, wildcards, generic arrays) are not handled;
            // the returned wrapper will have no type set in that case.
            System.out.println("und was jetzt??? - " + type);
        }
        return ret;
    }

    /**
     * Fills in the entries of a complex type: enum constants for enums, otherwise the
     * declared methods, implemented interfaces and (for classes) the superclass.
     */
    @SuppressWarnings("unchecked")
    public void analyzeType(CType<?> type, Class<?> cls) {
        if (cls.isEnum()) {
            List<String> entries = new ArrayList<String>();
            for (Object item : cls.getEnumConstants()) {
                entries.add(item.toString());
            }
            ((CType<String>) type).setEntries(entries);
        } else { // Type is Class or Interface
            List<CTypeEntry> entries = new ArrayList<CTypeEntry>();
            for (Method m : cls.getMethods()) {
                // Only keep methods declared here that do not override an inherited one.
                if (m.getDeclaringClass().equals(cls) && !isMethodOverriden(m)) {
                    entries.add(createMethod(m));
                }
            }
            ((CType<CTypeEntry>) type).setEntries(entries);
            Type interfaces[] = cls.getGenericInterfaces();
            if (interfaces != null) {
                List<CParameterizedType> lstInterfaces = new ArrayList<CParameterizedType>();
                for (Type t : interfaces) {
                    lstInterfaces.add(createParametrizedType(t));
                }
                if (cls.isInterface()) {
                    ((CInterface) type).setInterfaces(lstInterfaces);
                } else {
                    ((CClass) type).setInterfaces(lstInterfaces);
                }
            }
            if (!cls.isInterface()) {
                Type superClass = cls.getGenericSuperclass();
                if (superClass != null) {
                    ((CClass) type).setSuperClass(createParametrizedType(superClass));
                }
            }
        }
    }

    /**
     * Converts a reflective {@link Method} into a {@link CMethod}, generating parameter
     * names {@code arg0}, {@code arg1}, ... since Java reflection does not expose them here.
     */
    public CMethod createMethod(Method method) {
        CMethod ret = new CMethod(method.getName());
        List<CParameter> parameters = new ArrayList<CParameter>();
        ret.setReturnType(createParametrizedType(method.getGenericReturnType()));
        int argCount = 0;
        for (Type t : method.getGenericParameterTypes()) {
            parameters.add(new CParameter("arg" + argCount, createParametrizedType(t)));
            argCount++;
        }
        ret.setParameters(parameters);
        return ret;
    }

    /**
     * Returns true if the given method overrides a public method inherited from a
     * superclass. Note that {@link Class#getMethod} searches the whole ancestor
     * hierarchy of the superclass, so a single lookup covers all ancestors.
     * (Name kept with its historical spelling for source compatibility.)
     */
    public static boolean isMethodOverriden(Method myMethod) {
        Class<?> declaringClass = myMethod.getDeclaringClass();
        if (declaringClass.equals(Object.class)) {
            return false;
        }
        Class<?> superClass = declaringClass.getSuperclass();
        if (superClass == null) {
            // Interfaces (and Object) have no superclass to inherit from.
            return false;
        }
        try {
            superClass.getMethod(myMethod.getName(), myMethod.getParameterTypes());
            return true;
        } catch (NoSuchMethodException e) {
            return false;
        }
    }
}
| |
/*
* ConnectBot: simple, powerful, open-source SSH client for Android
* Copyright 2007 Kenny Root, Jeffrey Sharkey
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.connectbot;
import android.content.ComponentName;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.Intent.ShortcutIconResource;
import android.content.ServiceConnection;
import android.content.SharedPreferences;
import android.content.SharedPreferences.Editor;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.os.IBinder;
import android.preference.PreferenceManager;
import android.support.annotation.StyleRes;
import android.support.annotation.VisibleForTesting;
import android.support.design.widget.FloatingActionButton;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.text.format.DateUtils;
import android.util.Log;
import android.view.ContextMenu;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.MenuItem.OnMenuItemClickListener;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ImageView;
import android.widget.TextView;
import org.connectbot.bean.HostBean;
import org.connectbot.data.HostStorage;
import org.connectbot.service.OnHostStatusChangedListener;
import org.connectbot.service.TerminalBridge;
import org.connectbot.service.TerminalManager;
import org.connectbot.transport.TransportFactory;
import org.connectbot.util.HostDatabase;
import org.connectbot.util.PreferenceConstants;
import java.util.List;
/**
 * Lists the saved hosts and lets the user connect to, edit, sort, or delete them.
 * Also handles the {@link #DISCONNECT_ACTION} intent sent from the ongoing-connection
 * notification, and the "create shortcut" / "pick host" flows.
 */
public class HostListActivity extends AppCompatListActivity implements OnHostStatusChangedListener {
	public final static String TAG = "CB.HostListActivity";

	public static final String DISCONNECT_ACTION = "org.connectbot.action.DISCONNECT";

	public final static int REQUEST_EDIT = 1;

	/** Bound terminal manager service; null until {@link #connection} connects. */
	protected TerminalManager bound = null;

	private HostStorage hostdb;
	private List<HostBean> hosts;
	protected LayoutInflater inflater = null;

	protected boolean sortedByColor = false;

	private MenuItem sortcolor;
	private MenuItem sortlast;
	private MenuItem disconnectall;

	private SharedPreferences prefs = null;

	/** True when this activity was launched only to pick a host for a shortcut. */
	protected boolean makingShortcut = false;

	/** Set when a disconnect-all was requested before the service was bound. */
	private boolean waitingForDisconnectAll = false;

	/**
	 * Whether to close the activity when disconnectAll is called. True if this activity was
	 * only brought to the foreground via the notification button to disconnect all hosts.
	 */
	private boolean closeOnDisconnectAll = true;

	private ServiceConnection connection = new ServiceConnection() {
		public void onServiceConnected(ComponentName className, IBinder service) {
			bound = ((TerminalManager.TerminalBinder) service).getService();

			// update our listview binder to find the service
			HostListActivity.this.updateList();

			bound.registerOnHostStatusChangedListener(HostListActivity.this);

			// A disconnect-all may have been requested while we were still unbound.
			if (waitingForDisconnectAll) {
				disconnectAll();
			}
		}

		public void onServiceDisconnected(ComponentName className) {
			bound.unregisterOnHostStatusChangedListener(HostListActivity.this);

			bound = null;
			HostListActivity.this.updateList();
		}
	};

	@Override
	public void onStart() {
		super.onStart();

		// start the terminal manager service
		this.bindService(new Intent(this, TerminalManager.class), connection, Context.BIND_AUTO_CREATE);

		hostdb = HostDatabase.get(this);
	}

	@Override
	public void onStop() {
		super.onStop();
		this.unbindService(connection);

		hostdb = null;

		closeOnDisconnectAll = true;
	}

	@Override
	public void onResume() {
		super.onResume();

		// Must disconnectAll before setting closeOnDisconnectAll to know whether to keep the
		// activity open after disconnecting.
		if ((getIntent().getFlags() & Intent.FLAG_ACTIVITY_LAUNCHED_FROM_HISTORY) == 0 &&
				DISCONNECT_ACTION.equals(getIntent().getAction())) {
			Log.d(TAG, "Got disconnect all request");
			disconnectAll();
		}

		// Still close on disconnect if waiting for a disconnect.
		closeOnDisconnectAll = waitingForDisconnectAll && closeOnDisconnectAll;
	}

	@Override
	protected void onNewIntent(Intent intent) {
		super.onNewIntent(intent);
		setIntent(intent);
	}

	@Override
	protected void onActivityResult(int requestCode, int resultCode, Intent data) {
		// Refresh the list after a host was added or edited.
		if (requestCode == REQUEST_EDIT) {
			this.updateList();
		}
	}

	@Override
	public void onCreate(Bundle icicle) {
		super.onCreate(icicle);
		setContentView(R.layout.act_hostlist);
		setTitle(R.string.title_hosts_list);

		mListView = (RecyclerView) findViewById(R.id.list);
		mListView.setHasFixedSize(true);
		mListView.setLayoutManager(new LinearLayoutManager(this));
		mListView.addItemDecoration(new ListItemDecoration(this));

		mEmptyView = findViewById(R.id.empty);

		this.prefs = PreferenceManager.getDefaultSharedPreferences(this);

		// detect HTC Dream and apply special preferences
		if (Build.MANUFACTURER.equals("HTC") && Build.DEVICE.equals("dream")) {
			SharedPreferences.Editor editor = prefs.edit();
			boolean doCommit = false;
			if (!prefs.contains(PreferenceConstants.SHIFT_FKEYS) &&
					!prefs.contains(PreferenceConstants.CTRL_FKEYS)) {
				editor.putBoolean(PreferenceConstants.SHIFT_FKEYS, true);
				editor.putBoolean(PreferenceConstants.CTRL_FKEYS, true);
				doCommit = true;
			}
			if (!prefs.contains(PreferenceConstants.STICKY_MODIFIERS)) {
				editor.putString(PreferenceConstants.STICKY_MODIFIERS, PreferenceConstants.YES);
				doCommit = true;
			}
			if (!prefs.contains(PreferenceConstants.KEYMODE)) {
				editor.putString(PreferenceConstants.KEYMODE, PreferenceConstants.KEYMODE_RIGHT);
				doCommit = true;
			}
			if (doCommit) {
				editor.commit();
			}
		}

		this.makingShortcut = Intent.ACTION_CREATE_SHORTCUT.equals(getIntent().getAction())
				|| Intent.ACTION_PICK.equals(getIntent().getAction());

		// connect with hosts database and populate list
		this.hostdb = HostDatabase.get(this);

		this.sortedByColor = prefs.getBoolean(PreferenceConstants.SORT_BY_COLOR, false);

		this.registerForContextMenu(mListView);

		FloatingActionButton addHostButton =
				(FloatingActionButton) findViewById(R.id.add_host_button);
		addHostButton.setVisibility(makingShortcut ? View.GONE : View.VISIBLE);
		addHostButton.setOnClickListener(new View.OnClickListener() {
			@Override
			public void onClick(View v) {
				Intent intent = EditHostActivity.createIntentForNewHost(HostListActivity.this);
				startActivityForResult(intent, REQUEST_EDIT);
			}
		});

		this.inflater = LayoutInflater.from(this);
	}

	@Override
	public boolean onPrepareOptionsMenu(Menu menu) {
		super.onPrepareOptionsMenu(menu);

		// don't offer menus when creating shortcut
		if (makingShortcut) return true;

		sortcolor.setVisible(!sortedByColor);
		sortlast.setVisible(sortedByColor);
		// The service may not be bound yet; treat that as "nothing to disconnect"
		// instead of crashing with an NPE.
		disconnectall.setEnabled(bound != null && bound.getBridges().size() > 0);

		return true;
	}

	@Override
	public boolean onCreateOptionsMenu(Menu menu) {
		super.onCreateOptionsMenu(menu);

		// don't offer menus when creating shortcut
		if (makingShortcut) return true;

		// add host, ssh keys, about
		sortcolor = menu.add(R.string.list_menu_sortcolor);
		sortcolor.setIcon(android.R.drawable.ic_menu_share);
		sortcolor.setOnMenuItemClickListener(new OnMenuItemClickListener() {
			public boolean onMenuItemClick(MenuItem item) {
				sortedByColor = true;
				updateList();
				return true;
			}
		});

		sortlast = menu.add(R.string.list_menu_sortname);
		sortlast.setIcon(android.R.drawable.ic_menu_share);
		sortlast.setOnMenuItemClickListener(new OnMenuItemClickListener() {
			public boolean onMenuItemClick(MenuItem item) {
				sortedByColor = false;
				updateList();
				return true;
			}
		});

		MenuItem keys = menu.add(R.string.list_menu_pubkeys);
		keys.setIcon(android.R.drawable.ic_lock_lock);
		keys.setIntent(new Intent(HostListActivity.this, PubkeyListActivity.class));

		MenuItem colors = menu.add(R.string.title_colors);
		colors.setIcon(android.R.drawable.ic_menu_slideshow);
		colors.setIntent(new Intent(HostListActivity.this, ColorsActivity.class));

		disconnectall = menu.add(R.string.list_menu_disconnect);
		disconnectall.setIcon(android.R.drawable.ic_menu_delete);
		disconnectall.setOnMenuItemClickListener(new OnMenuItemClickListener() {
			@Override
			public boolean onMenuItemClick(MenuItem menuItem) {
				disconnectAll();
				return false;
			}
		});

		MenuItem settings = menu.add(R.string.list_menu_settings);
		settings.setIcon(android.R.drawable.ic_menu_preferences);
		settings.setIntent(new Intent(HostListActivity.this, SettingsActivity.class));

		MenuItem help = menu.add(R.string.title_help);
		help.setIcon(android.R.drawable.ic_menu_help);
		help.setIntent(new Intent(HostListActivity.this, HelpActivity.class));

		return true;
	}

	/**
	 * Disconnects all active connections and closes the activity if appropriate.
	 */
	private void disconnectAll() {
		// If the service isn't bound yet, remember the request and let the
		// ServiceConnection retry once it connects.
		if (bound == null) {
			waitingForDisconnectAll = true;
			return;
		}

		new android.support.v7.app.AlertDialog.Builder(
				HostListActivity.this, R.style.AlertDialogTheme)
				.setMessage(getString(R.string.disconnect_all_message))
				.setPositiveButton(R.string.disconnect_all_pos, new DialogInterface.OnClickListener() {
					public void onClick(DialogInterface dialog, int which) {
						bound.disconnectAll(true, false);
						waitingForDisconnectAll = false;

						// Clear the intent so that the activity can be relaunched without closing.
						// TODO(jlklein): Find a better way to do this.
						setIntent(new Intent());

						if (closeOnDisconnectAll) {
							finish();
						}
					}
				})
				.setNegativeButton(R.string.disconnect_all_neg, new DialogInterface.OnClickListener() {
					public void onClick(DialogInterface dialog, int which) {
						waitingForDisconnectAll = false;
						// Clear the intent so that the activity can be relaunched without closing.
						// TODO(jlklein): Find a better way to do this.
						setIntent(new Intent());
					}
				}).create().show();
	}

	/**
	 * Opens a console for the host identified by the given URI, creating and saving
	 * a new host entry first when none exists yet.
	 *
	 * @param uri transport URI describing the host to connect to
	 * @return always true (the console activity was started)
	 */
	private boolean startConsoleActivity(Uri uri) {
		HostBean host = TransportFactory.findHost(hostdb, uri);
		if (host == null) {
			// Unknown host: create it with default color and any-pubkey settings.
			host = TransportFactory.getTransport(uri.getScheme()).createHost(uri);
			host.setColor(HostDatabase.COLOR_GRAY);
			host.setPubkeyId(HostDatabase.PUBKEYID_ANY);
			hostdb.saveHost(host);
		}

		Intent intent = new Intent(HostListActivity.this, ConsoleActivity.class);
		intent.setData(uri);
		startActivity(intent);

		return true;
	}

	/**
	 * Reloads the host list from the database (persisting the current sort mode)
	 * and rebuilds the adapter.
	 */
	protected void updateList() {
		if (prefs.getBoolean(PreferenceConstants.SORT_BY_COLOR, false) != sortedByColor) {
			Editor edit = prefs.edit();
			edit.putBoolean(PreferenceConstants.SORT_BY_COLOR, sortedByColor);
			edit.commit();
		}

		if (hostdb == null)
			hostdb = HostDatabase.get(this);

		hosts = hostdb.getHosts(sortedByColor);

		// Don't lose hosts that are connected via shortcuts but not in the database.
		if (bound != null) {
			for (TerminalBridge bridge : bound.getBridges()) {
				if (!hosts.contains(bridge.host))
					hosts.add(0, bridge.host);
			}
		}

		mAdapter = new HostAdapter(this, hosts, bound);

		mListView.setAdapter(mAdapter);

		adjustViewVisibility();
	}

	@Override
	public void onHostStatusChanged() {
		updateList();
	}

	@VisibleForTesting
	public class HostViewHolder extends ItemViewHolder {
		public final ImageView icon;
		public final TextView nickname;
		public final TextView caption;

		public HostBean host;

		public HostViewHolder(View v) {
			super(v);

			icon = (ImageView) v.findViewById(android.R.id.icon);
			nickname = (TextView) v.findViewById(android.R.id.text1);
			caption = (TextView) v.findViewById(android.R.id.text2);
		}

		@Override
		public void onClick(View v) {
			// launch off to console details
			Uri uri = host.getUri();

			Intent contents = new Intent(Intent.ACTION_VIEW, uri);
			contents.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);

			if (makingShortcut) {
				// create shortcut if requested
				ShortcutIconResource icon = Intent.ShortcutIconResource.fromContext(
						HostListActivity.this, R.drawable.icon);

				Intent intent = new Intent();
				intent.putExtra(Intent.EXTRA_SHORTCUT_INTENT, contents);
				intent.putExtra(Intent.EXTRA_SHORTCUT_NAME, host.getNickname());
				intent.putExtra(Intent.EXTRA_SHORTCUT_ICON_RESOURCE, icon);

				setResult(RESULT_OK, intent);
				finish();
			} else {
				// otherwise just launch activity to show this host
				contents.setClass(HostListActivity.this, ConsoleActivity.class);
				HostListActivity.this.startActivity(contents);
			}
		}

		@Override
		public void onCreateContextMenu(ContextMenu menu, View v, ContextMenu.ContextMenuInfo menuInfo) {
			menu.setHeaderTitle(host.getNickname());

			// edit, disconnect, delete
			MenuItem connect = menu.add(R.string.list_host_disconnect);
			final TerminalBridge bridge = (bound == null) ? null : bound.getConnectedBridge(host);
			connect.setEnabled(bridge != null);
			connect.setOnMenuItemClickListener(new OnMenuItemClickListener() {
				public boolean onMenuItemClick(MenuItem item) {
					bridge.dispatchDisconnect(true);
					return true;
				}
			});

			MenuItem edit = menu.add(R.string.list_host_edit);
			edit.setOnMenuItemClickListener(new OnMenuItemClickListener() {
				public boolean onMenuItemClick(MenuItem item) {
					Intent intent = EditHostActivity.createIntentForExistingHost(
							HostListActivity.this, host.getId());
					HostListActivity.this.startActivityForResult(intent, REQUEST_EDIT);
					return true;
				}
			});

			MenuItem portForwards = menu.add(R.string.list_host_portforwards);
			portForwards.setOnMenuItemClickListener(new OnMenuItemClickListener() {
				public boolean onMenuItemClick(MenuItem item) {
					Intent intent = new Intent(HostListActivity.this, PortForwardListActivity.class);
					intent.putExtra(Intent.EXTRA_TITLE, host.getId());
					HostListActivity.this.startActivityForResult(intent, REQUEST_EDIT);
					return true;
				}
			});
			if (!TransportFactory.canForwardPorts(host.getProtocol()))
				portForwards.setEnabled(false);

			MenuItem delete = menu.add(R.string.list_host_delete);
			delete.setOnMenuItemClickListener(new OnMenuItemClickListener() {
				public boolean onMenuItemClick(MenuItem item) {
					// prompt user to make sure they really want this
					new android.support.v7.app.AlertDialog.Builder(
							HostListActivity.this, R.style.AlertDialogTheme)
							.setMessage(getString(R.string.delete_message, host.getNickname()))
							.setPositiveButton(R.string.delete_pos, new DialogInterface.OnClickListener() {
								public void onClick(DialogInterface dialog, int which) {
									// make sure we disconnect
									if (bridge != null)
										bridge.dispatchDisconnect(true);

									hostdb.deleteHost(host);
									updateList();
								}
							})
							.setNegativeButton(R.string.delete_neg, null).create().show();

					return true;
				}
			});
		}
	}

	@VisibleForTesting
	private class HostAdapter extends ItemAdapter {
		private final List<HostBean> hosts;
		private final TerminalManager manager;

		public final static int STATE_UNKNOWN = 1, STATE_CONNECTED = 2, STATE_DISCONNECTED = 3;

		public HostAdapter(Context context, List<HostBean> hosts, TerminalManager manager) {
			super(context);
			this.hosts = hosts;
			this.manager = manager;
		}

		/**
		 * Check if we're connected to a terminal with the given host.
		 */
		private int getConnectedState(HostBean host) {
			// always disconnected if we don't have backend service
			if (this.manager == null || host == null) {
				return STATE_UNKNOWN;
			}

			if (manager.getConnectedBridge(host) != null) {
				return STATE_CONNECTED;
			}

			if (manager.disconnected.contains(host)) {
				return STATE_DISCONNECTED;
			}

			return STATE_UNKNOWN;
		}

		@Override
		public HostViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
			View v = LayoutInflater.from(parent.getContext())
					.inflate(R.layout.item_host, parent, false);
			HostViewHolder vh = new HostViewHolder(v);
			return vh;
		}

		@Override
		public void onBindViewHolder(ItemViewHolder holder, int position) {
			HostViewHolder hostHolder = (HostViewHolder) holder;

			HostBean host = hosts.get(position);
			hostHolder.host = host;
			if (host == null) {
				// Well, something bad happened. We can't continue.
				Log.e("HostAdapter", "Host bean is null!");
				hostHolder.nickname.setText("Error during lookup");
				// Bail out now: the styling and caption code below dereferences the
				// (null) host and would crash.
				return;
			}
			hostHolder.nickname.setText(host.getNickname());

			switch (this.getConnectedState(host)) {
			case STATE_UNKNOWN:
				hostHolder.icon.setImageState(new int[] { }, true);
				break;
			case STATE_CONNECTED:
				hostHolder.icon.setImageState(new int[] { android.R.attr.state_checked }, true);
				break;
			case STATE_DISCONNECTED:
				hostHolder.icon.setImageState(new int[] { android.R.attr.state_expanded }, true);
				break;
			}

			@StyleRes final int chosenStyleFirstLine;
			@StyleRes final int chosenStyleSecondLine;
			if (HostDatabase.COLOR_RED.equals(host.getColor())) {
				chosenStyleFirstLine = R.style.ListItemFirstLineText_Red;
				chosenStyleSecondLine = R.style.ListItemSecondLineText_Red;
			} else if (HostDatabase.COLOR_GREEN.equals(host.getColor())) {
				chosenStyleFirstLine = R.style.ListItemFirstLineText_Green;
				chosenStyleSecondLine = R.style.ListItemSecondLineText_Green;
			} else if (HostDatabase.COLOR_BLUE.equals(host.getColor())) {
				chosenStyleFirstLine = R.style.ListItemFirstLineText_Blue;
				chosenStyleSecondLine = R.style.ListItemSecondLineText_Blue;
			} else {
				chosenStyleFirstLine = R.style.ListItemFirstLineText;
				chosenStyleSecondLine = R.style.ListItemSecondLineText;
			}
			hostHolder.nickname.setTextAppearance(context, chosenStyleFirstLine);
			hostHolder.caption.setTextAppearance(context, chosenStyleSecondLine);

			// Last-connect timestamp is stored in seconds; DateUtils wants millis.
			CharSequence nice = context.getString(R.string.bind_never);
			if (host.getLastConnect() > 0) {
				nice = DateUtils.getRelativeTimeSpanString(host.getLastConnect() * 1000);
			}
			hostHolder.caption.setText(nice);
		}

		@Override
		public long getItemId(int position) {
			return hosts.get(position).getId();
		}

		@Override
		public int getItemCount() {
			return hosts.size();
		}
	}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.processing.loading.sort.impl;
import java.io.File;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.carbondata.common.CarbonIterator;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.exception.CarbonDataWriterException;
import org.apache.carbondata.core.datastore.row.CarbonRow;
import org.apache.carbondata.core.metadata.schema.ColumnRangeInfo;
import org.apache.carbondata.core.util.CarbonProperties;
import org.apache.carbondata.core.util.CarbonTimeStatisticsFactory;
import org.apache.carbondata.processing.loading.exception.CarbonDataLoadingException;
import org.apache.carbondata.processing.loading.row.CarbonRowBatch;
import org.apache.carbondata.processing.loading.sort.AbstractMergeSorter;
import org.apache.carbondata.processing.sort.exception.CarbonSortKeyAndGroupByException;
import org.apache.carbondata.processing.sort.sortdata.SingleThreadFinalSortFilesMerger;
import org.apache.carbondata.processing.sort.sortdata.SortDataRows;
import org.apache.carbondata.processing.sort.sortdata.SortIntermediateFileMerger;
import org.apache.carbondata.processing.sort.sortdata.SortParameters;
import org.apache.carbondata.processing.util.CarbonDataProcessorUtil;
import org.apache.log4j.Logger;
/**
* It parallely reads data from array of iterates and do merge sort.
* First it sorts the data and write to temp files. These temp files will be merge sorted to get
* final merge sort result.
* This step is specifically for the data loading with specifying column value range, such as
* bucketing,sort_column_bounds, it sorts each range of data separately and write to temp files.
*/
public class ParallelReadMergeSorterWithColumnRangeImpl extends AbstractMergeSorter {
private static final Logger LOGGER = LogServiceFactory.getLogService(
ParallelReadMergeSorterWithColumnRangeImpl.class.getName());
private SortParameters originSortParameters;
private SortIntermediateFileMerger[] intermediateFileMergers;
private ColumnRangeInfo columnRangeInfo;
private int sortBufferSize;
private AtomicLong rowCounter;
/**
* counters to collect information about rows processed by each range
*/
private List<AtomicLong> insideRowCounterList;
public ParallelReadMergeSorterWithColumnRangeImpl(AtomicLong rowCounter,
ColumnRangeInfo columnRangeInfo) {
this.rowCounter = rowCounter;
this.columnRangeInfo = columnRangeInfo;
}
@Override
public void initialize(SortParameters sortParameters) {
this.originSortParameters = sortParameters;
int buffer = Integer.parseInt(CarbonProperties.getInstance()
.getProperty(CarbonCommonConstants.SORT_SIZE, CarbonCommonConstants.SORT_SIZE_DEFAULT_VAL));
sortBufferSize = buffer / columnRangeInfo.getNumOfRanges();
if (sortBufferSize < 100) {
sortBufferSize = 100;
}
this.insideRowCounterList = new ArrayList<>(columnRangeInfo.getNumOfRanges());
for (int i = 0; i < columnRangeInfo.getNumOfRanges(); i++) {
insideRowCounterList.add(new AtomicLong(0));
}
}
  /**
   * Sorts the input iterators in parallel, partitioned by column range: each range gets its
   * own sorter and intermediate file merger, reader threads dispatch rows into them, and one
   * merged-batch iterator per range is returned.
   *
   * @param iterators input row-batch iterators, read concurrently (one thread each)
   * @return one iterator of merged, sorted batches per configured column range
   * @throws CarbonDataLoadingException on any sort, merge, or thread failure
   */
  @Override public Iterator<CarbonRowBatch>[] sort(Iterator<CarbonRowBatch>[] iterators)
      throws CarbonDataLoadingException {
    // One sorter + one intermediate file merger per configured column range.
    SortDataRows[] sortDataRows = new SortDataRows[columnRangeInfo.getNumOfRanges()];
    intermediateFileMergers = new SortIntermediateFileMerger[columnRangeInfo.getNumOfRanges()];
    SortParameters[] sortParameterArray = new SortParameters[columnRangeInfo.getNumOfRanges()];
    try {
      for (int i = 0; i < columnRangeInfo.getNumOfRanges(); i++) {
        // Each range works on its own copy of the sort parameters with a distinct
        // partition/range id and temp location, so ranges don't collide on disk.
        SortParameters parameters = originSortParameters.getCopy();
        parameters.setPartitionID(i + "");
        parameters.setRangeId(i);
        sortParameterArray[i] = parameters;
        setTempLocation(parameters);
        parameters.setBufferSize(sortBufferSize);
        intermediateFileMergers[i] = new SortIntermediateFileMerger(parameters);
        sortDataRows[i] = new SortDataRows(parameters, intermediateFileMergers[i]);
        sortDataRows[i].initialize();
      }
    } catch (CarbonSortKeyAndGroupByException e) {
      throw new CarbonDataLoadingException(e);
    }
    ExecutorService executorService = Executors.newFixedThreadPool(iterators.length);
    this.threadStatusObserver = new ThreadStatusObserver(executorService);
    final int batchSize = CarbonProperties.getInstance().getBatchSize();
    try {
      // dispatch rows to sortDataRows by range id
      for (int i = 0; i < iterators.length; i++) {
        executorService.execute(new SortIteratorThread(iterators[i], sortDataRows, rowCounter,
            this.insideRowCounterList, this.threadStatusObserver));
      }
      executorService.shutdown();
      // Effectively unbounded wait for every reader/sorter thread to drain its iterator.
      executorService.awaitTermination(2, TimeUnit.DAYS);
      processRowToNextStep(sortDataRows, originSortParameters);
    } catch (Exception e) {
      // A worker may have failed; surface its error (via the observer) before wrapping ours.
      checkError();
      throw new CarbonDataLoadingException("Problem while shutdown the server ", e);
    }
    checkError();
    try {
      // Merge each range's intermediate temp files before exposing the final iterators.
      for (int i = 0; i < intermediateFileMergers.length; i++) {
        intermediateFileMergers[i].finish();
      }
    } catch (CarbonDataWriterException e) {
      throw new CarbonDataLoadingException(e);
    } catch (CarbonSortKeyAndGroupByException e) {
      throw new CarbonDataLoadingException(e);
    }
    // Hand back one lazy merged-batch iterator per range.
    Iterator<CarbonRowBatch>[] batchIterator = new Iterator[columnRangeInfo.getNumOfRanges()];
    for (int i = 0; i < columnRangeInfo.getNumOfRanges(); i++) {
      batchIterator[i] = new MergedDataIterator(sortParameterArray[i], batchSize);
    }
    return batchIterator;
  }
/**
 * Builds the final merger that combines all sort-temp files produced for one range.
 *
 * @param sortParameters the per-range sort configuration
 * @return a merger reading from that range's sort-temp directory
 */
private SingleThreadFinalSortFilesMerger getFinalMerger(SortParameters sortParameters) {
  String taskNo = String.valueOf(sortParameters.getTaskNo());
  String segmentId = sortParameters.getSegmentId() + "";
  String[] storeLocation = CarbonDataProcessorUtil.getLocalDataFolderLocation(
      sortParameters.getCarbonTable(), taskNo, segmentId, false, false);
  // append the sort-temp sub-directory to every candidate store location
  String[] dataFolderLocation = CarbonDataProcessorUtil.arrayAppend(
      storeLocation, File.separator, CarbonCommonConstants.SORT_TEMP_FILE_LOCATION);
  return new SingleThreadFinalSortFilesMerger(
      dataFolderLocation, sortParameters.getTableName(), sortParameters);
}
/** Closes every intermediate file merger created by {@code sort}. */
@Override
public void close() {
  for (SortIntermediateFileMerger merger : intermediateFileMergers) {
    merger.close();
  }
}
/**
 * Starts in-memory sorting on every per-range sorter and records the
 * load-statistics timestamps for this step.
 *
 * @param sortDataRows the per-range sorters to start
 * @param parameters used for the table name and partition id in logs/statistics
 * @return always false (kept for signature compatibility)
 * @throws CarbonDataLoadingException if sorting cannot be started
 */
private boolean processRowToNextStep(SortDataRows[] sortDataRows, SortParameters parameters)
    throws CarbonDataLoadingException {
  try {
    // kick off sorting for every range
    for (SortDataRows sorter : sortDataRows) {
      sorter.startSorting();
    }
    LOGGER.info("Record Processed For table: " + parameters.getTableName());
    CarbonTimeStatisticsFactory.getLoadStatisticsInstance()
        .recordSortRowsStepTotalTime(parameters.getPartitionID(), System.currentTimeMillis());
    CarbonTimeStatisticsFactory.getLoadStatisticsInstance()
        .recordDictionaryValuesTotalTime(parameters.getPartitionID(), System.currentTimeMillis());
    return false;
  } catch (CarbonSortKeyAndGroupByException e) {
    throw new CarbonDataLoadingException(e);
  }
}
/**
 * Points the given parameters at this task's local sort-temp directories.
 *
 * @param parameters the per-range sort configuration to update in place
 */
private void setTempLocation(SortParameters parameters) {
  String[] baseDirs = CarbonDataProcessorUtil.getLocalDataFolderLocation(
      parameters.getCarbonTable(), parameters.getTaskNo(), parameters.getSegmentId(),
      false, false);
  String[] tempDirs = CarbonDataProcessorUtil.arrayAppend(
      baseDirs, File.separator, CarbonCommonConstants.SORT_TEMP_FILE_LOCATION);
  parameters.setTempFileLocation(tempDirs);
}
/**
 * Drains one input iterator and feeds each row into the {@link SortDataRows}
 * instance that owns the row's range id, updating the shared row counters.
 */
private static class SortIteratorThread implements Runnable {
  private Iterator<CarbonRowBatch> iterator;
  private SortDataRows[] sortDataRows;
  private AtomicLong rowCounter;
  private List<AtomicLong> insideCounterList;
  private ThreadStatusObserver threadStatusObserver;

  public SortIteratorThread(Iterator<CarbonRowBatch> iterator, SortDataRows[] sortDataRows,
      AtomicLong rowCounter, List<AtomicLong> insideCounterList,
      ThreadStatusObserver observer) {
    this.iterator = iterator;
    this.sortDataRows = sortDataRows;
    this.rowCounter = rowCounter;
    this.insideCounterList = insideCounterList;
    this.threadStatusObserver = observer;
  }

  @Override
  public void run() {
    try {
      while (iterator.hasNext()) {
        CarbonRowBatch batch = iterator.next();
        while (batch.hasNext()) {
          CarbonRow row = batch.next();
          if (row == null) {
            continue;
          }
          int rangeId = row.getRangeId();
          SortDataRows targetSorter = sortDataRows[rangeId];
          // several iterator threads may target the same range; serialize per sorter
          synchronized (targetSorter) {
            targetSorter.addRow(row.getData());
            insideCounterList.get(rangeId).getAndIncrement();
            rowCounter.getAndAdd(1);
          }
        }
      }
      LOGGER.info("Rows processed by each range: " + insideCounterList);
    } catch (Exception e) {
      LOGGER.error(e);
      this.threadStatusObserver.notifyFailed(e);
    }
  }
}
/**
 * Lazily runs the final merge for one range and serves the merged rows in
 * batches of {@code batchSize}.
 */
private class MergedDataIterator extends CarbonIterator<CarbonRowBatch> {
  private SortParameters sortParameters;
  private int batchSize;
  // flips to false once the final merger has been started
  private boolean firstRow = true;
  private SingleThreadFinalSortFilesMerger finalMerger;

  public MergedDataIterator(SortParameters sortParameters, int batchSize) {
    this.sortParameters = sortParameters;
    this.batchSize = batchSize;
  }

  @Override public boolean hasNext() {
    if (firstRow) {
      // defer the (expensive) final merge until the first access
      firstRow = false;
      finalMerger = getFinalMerger(sortParameters);
      finalMerger.startFinalMerge();
    }
    return finalMerger.hasNext();
  }

  @Override public CarbonRowBatch next() {
    CarbonRowBatch rowBatch = new CarbonRowBatch(batchSize);
    int added = 0;
    while (finalMerger.hasNext() && added < batchSize) {
      rowBatch.addRow(new CarbonRow(finalMerger.next()));
      added++;
    }
    return rowBatch;
  }
}
}
| |
/*
* Copyright 2001-2010 Stephen Colebourne
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.joda.time;
import org.joda.convert.FromString;
import org.joda.convert.ToString;
import org.joda.time.base.BaseSingleFieldPeriod;
import org.joda.time.field.FieldUtils;
import org.joda.time.format.ISOPeriodFormat;
import org.joda.time.format.PeriodFormatter;
/**
 * An immutable time period representing a number of hours.
 * <p>
 * <code>Hours</code> is an immutable period that can only store hours.
 * It does not store years, months or minutes for example. As such it is a
 * type-safe way of representing a number of hours in an application.
 * <p>
 * The number of hours is set in the constructor, and may be queried using
 * <code>getHours()</code>. Basic mathematical operations are provided -
 * <code>plus()</code>, <code>minus()</code>, <code>multipliedBy()</code> and
 * <code>dividedBy()</code>.
 * <p>
 * <code>Hours</code> is thread-safe and immutable.
 *
 * @author Stephen Colebourne
 * @since 1.4
 */
public final class Hours extends BaseSingleFieldPeriod {

    /** Constant representing zero hours. */
    public static final Hours ZERO = Pool.retrieveHours(0);
    /** Constant representing one hour. */
    public static final Hours ONE = Pool.retrieveHours(1);
    /** Constant representing two hours. */
    public static final Hours TWO = Pool.retrieveHours(2);
    /** Constant representing three hours. */
    public static final Hours THREE = Pool.retrieveHours(3);
    /** Constant representing four hours. */
    public static final Hours FOUR = Pool.retrieveHours(4);
    /** Constant representing five hours. */
    public static final Hours FIVE = Pool.retrieveHours(5);
    /** Constant representing six hours. */
    public static final Hours SIX = Pool.retrieveHours(6);
    /** Constant representing seven hours. */
    public static final Hours SEVEN = Pool.retrieveHours(7);
    /** Constant representing eight hours. */
    public static final Hours EIGHT = Pool.retrieveHours(8);
    /** Constant representing the maximum number of hours that can be stored in this object. */
    public static final Hours MAX_VALUE = Pool.retrieveHours(Integer.MAX_VALUE);
    /** Constant representing the minimum number of hours that can be stored in this object. */
    public static final Hours MIN_VALUE = Pool.retrieveHours(Integer.MIN_VALUE);

    /** The parser to use for this class. */
    private static final PeriodFormatter PARSER = ISOPeriodFormat.standard().withParseType(PeriodType.hours());
    /** Serialization version. */
    private static final long serialVersionUID = 87525275727380864L;

    //-----------------------------------------------------------------------
    /**
     * Obtains an instance of <code>Hours</code> that may be cached.
     * <code>Hours</code> is immutable, so instances can be cached and shared.
     * This factory method provides access to shared instances.
     *
     * @param hours  the number of hours to obtain an instance for
     * @return the instance of Hours
     */
    public static Hours hours(int hours) {
        return Pool.retrieveHours(hours);
    }

    //-----------------------------------------------------------------------
    /**
     * Creates a <code>Hours</code> representing the number of whole hours
     * between the two specified datetimes.
     *
     * @param start  the start instant, must not be null
     * @param end  the end instant, must not be null
     * @return the period in hours
     * @throws IllegalArgumentException if the instants are null or invalid
     */
    public static Hours hoursBetween(ReadableInstant start, ReadableInstant end) {
        int amount = BaseSingleFieldPeriod.between(start, end, DurationFieldType.hours());
        return Hours.hours(amount);
    }

    /**
     * Creates a <code>Hours</code> representing the number of whole hours
     * between the two specified partial datetimes.
     * <p>
     * The two partials must contain the same fields, for example you can specify
     * two <code>LocalTime</code> objects.
     *
     * @param start  the start partial date, must not be null
     * @param end  the end partial date, must not be null
     * @return the period in hours
     * @throws IllegalArgumentException if the partials are null or invalid
     */
    public static Hours hoursBetween(ReadablePartial start, ReadablePartial end) {
        // fast path: two LocalTime objects can be compared on their local millis directly
        if (start instanceof LocalTime && end instanceof LocalTime) {
            Chronology chrono = DateTimeUtils.getChronology(start.getChronology());
            int hours = chrono.hours().getDifference(
                    ((LocalTime) end).getLocalMillis(), ((LocalTime) start).getLocalMillis());
            return Hours.hours(hours);
        }
        int amount = BaseSingleFieldPeriod.between(start, end, ZERO);
        return Hours.hours(amount);
    }

    /**
     * Creates a <code>Hours</code> representing the number of whole hours
     * in the specified interval.
     *
     * @param interval  the interval to extract hours from, null returns zero
     * @return the period in hours
     * @throws IllegalArgumentException if the interval is invalid
     */
    public static Hours hoursIn(ReadableInterval interval) {
        if (interval == null) {
            return Hours.ZERO;
        }
        int amount = BaseSingleFieldPeriod.between(interval.getStart(), interval.getEnd(), DurationFieldType.hours());
        return Hours.hours(amount);
    }

    /**
     * Creates a new <code>Hours</code> representing the number of complete
     * standard length hours in the specified period.
     * <p>
     * This factory method converts all fields from the period to hours using standardised
     * durations for each field. Only those fields which have a precise duration in
     * the ISO UTC chronology can be converted.
     * <ul>
     * <li>One week consists of 7 days.
     * <li>One day consists of 24 hours.
     * <li>One hour consists of 60 minutes.
     * <li>One minute consists of 60 seconds.
     * <li>One second consists of 1000 milliseconds.
     * </ul>
     * Months and Years are imprecise and periods containing these values cannot be converted.
     *
     * @param period  the period to get the number of hours from, null returns zero
     * @return the period in hours
     * @throws IllegalArgumentException if the period contains imprecise duration values
     */
    public static Hours standardHoursIn(ReadablePeriod period) {
        int amount = BaseSingleFieldPeriod.standardPeriodIn(period, DateTimeConstants.MILLIS_PER_HOUR);
        return Hours.hours(amount);
    }

    /**
     * Creates a new <code>Hours</code> by parsing a string in the ISO8601 format 'PTnH'.
     * <p>
     * The parse will accept the full ISO syntax of PnYnMnWnDTnHnMnS however only the
     * hours component may be non-zero. If any other component is non-zero, an exception
     * will be thrown.
     *
     * @param periodStr  the period string, null returns zero
     * @return the period in hours
     * @throws IllegalArgumentException if the string format is invalid
     */
    @FromString
    public static Hours parseHours(String periodStr) {
        if (periodStr == null) {
            return Hours.ZERO;
        }
        Period p = PARSER.parsePeriod(periodStr);
        return Hours.hours(p.getHours());
    }

    //-----------------------------------------------------------------------
    /**
     * Creates a new instance representing a number of hours.
     * You should consider using the factory method {@link #hours(int)}
     * instead of the constructor.
     *
     * @param hours  the number of hours to represent
     */
    protected Hours(int hours) {
        super(hours);
    }

    /**
     * Resolves singletons.
     *
     * @return the singleton instance
     */
    private Object readResolve() {
        return Hours.hours(getValue());
    }

    //-----------------------------------------------------------------------
    /**
     * Gets the duration field type, which is <code>hours</code>.
     *
     * @return the period type
     */
    public DurationFieldType getFieldType() {
        return DurationFieldType.hours();
    }

    /**
     * Gets the period type, which is <code>hours</code>.
     *
     * @return the period type
     */
    public PeriodType getPeriodType() {
        return PeriodType.hours();
    }

    //-----------------------------------------------------------------------
    /**
     * Converts this period in hours to a period in weeks assuming a
     * 7 day week and 24 hour day.
     * <p>
     * This method allows you to convert between different types of period.
     * However to achieve this it makes the assumption that all weeks are 7 days
     * long and all days are 24 hours long.
     * This is not true when daylight savings time is considered, and may also
     * not be true for some unusual chronologies. However, it is included as it
     * is a useful operation for many applications and business rules.
     *
     * @return a period representing the number of whole weeks for this number of hours
     */
    public Weeks toStandardWeeks() {
        return Weeks.weeks(getValue() / DateTimeConstants.HOURS_PER_WEEK);
    }

    /**
     * Converts this period in hours to a period in days assuming a
     * 24 hour day.
     * <p>
     * This method allows you to convert between different types of period.
     * However to achieve this it makes the assumption that all days are 24 hours long.
     * This is not true when daylight savings time is considered, and may also
     * not be true for some unusual chronologies. However, it is included as it
     * is a useful operation for many applications and business rules.
     *
     * @return a period representing the number of whole days for this number of hours
     */
    public Days toStandardDays() {
        return Days.days(getValue() / DateTimeConstants.HOURS_PER_DAY);
    }

    /**
     * Converts this period in hours to a period in minutes assuming a
     * 60 minute hour.
     * <p>
     * This method allows you to convert between different types of period.
     * However to achieve this it makes the assumption that all hours are 60 minutes long.
     * This may not be true for some unusual chronologies. However, it is included
     * as it is a useful operation for many applications and business rules.
     *
     * @return a period representing the number of minutes for this number of hours
     * @throws ArithmeticException if the number of minutes is too large to be represented
     */
    public Minutes toStandardMinutes() {
        return Minutes.minutes(FieldUtils.safeMultiply(getValue(), DateTimeConstants.MINUTES_PER_HOUR));
    }

    /**
     * Converts this period in hours to a period in seconds assuming a
     * 60 minute hour and 60 second minute.
     * <p>
     * This method allows you to convert between different types of period.
     * However to achieve this it makes the assumption that all hours are
     * 60 minutes long and all minutes are 60 seconds long.
     * This may not be true for some unusual chronologies. However, it is included
     * as it is a useful operation for many applications and business rules.
     *
     * @return a period representing the number of seconds for this number of hours
     * @throws ArithmeticException if the number of seconds is too large to be represented
     */
    public Seconds toStandardSeconds() {
        return Seconds.seconds(FieldUtils.safeMultiply(getValue(), DateTimeConstants.SECONDS_PER_HOUR));
    }

    //-----------------------------------------------------------------------
    /**
     * Converts this period in hours to a duration in milliseconds assuming a
     * 60 minute hour and 60 second minute.
     * <p>
     * This method allows you to convert from a period to a duration.
     * However to achieve this it makes the assumption that all hours are
     * 60 minutes and all minutes are 60 seconds. This might not be true for an
     * unusual chronology, for example one that takes leap seconds into account.
     * However, the method is included as it is a useful operation for many
     * applications and business rules.
     *
     * @return a duration equivalent to this number of hours
     */
    public Duration toStandardDuration() {
        long hours = getValue();  // assign to a long to avoid int overflow in the multiply below
        return new Duration(hours * DateTimeConstants.MILLIS_PER_HOUR);
    }

    //-----------------------------------------------------------------------
    /**
     * Gets the number of hours that this period represents.
     *
     * @return the number of hours in the period
     */
    public int getHours() {
        return getValue();
    }

    //-----------------------------------------------------------------------
    /**
     * Returns a new instance with the specified number of hours added.
     * <p>
     * This instance is immutable and unaffected by this method call.
     *
     * @param hours  the amount of hours to add, may be negative
     * @return the new period plus the specified number of hours
     * @throws ArithmeticException if the result overflows an int
     */
    public Hours plus(int hours) {
        if (hours == 0) {
            return this;
        }
        return Hours.hours(FieldUtils.safeAdd(getValue(), hours));
    }

    /**
     * Returns a new instance with the specified number of hours added.
     * <p>
     * This instance is immutable and unaffected by this method call.
     *
     * @param hours  the amount of hours to add, may be negative, null means zero
     * @return the new period plus the specified number of hours
     * @throws ArithmeticException if the result overflows an int
     */
    public Hours plus(Hours hours) {
        if (hours == null) {
            return this;
        }
        return plus(hours.getValue());
    }

    //-----------------------------------------------------------------------
    /**
     * Returns a new instance with the specified number of hours taken away.
     * <p>
     * This instance is immutable and unaffected by this method call.
     *
     * @param hours  the amount of hours to take away, may be negative
     * @return the new period minus the specified number of hours
     * @throws ArithmeticException if the result overflows an int
     */
    public Hours minus(int hours) {
        return plus(FieldUtils.safeNegate(hours));
    }

    /**
     * Returns a new instance with the specified number of hours taken away.
     * <p>
     * This instance is immutable and unaffected by this method call.
     *
     * @param hours  the amount of hours to take away, may be negative, null means zero
     * @return the new period minus the specified number of hours
     * @throws ArithmeticException if the result overflows an int
     */
    public Hours minus(Hours hours) {
        if (hours == null) {
            return this;
        }
        return minus(hours.getValue());
    }

    //-----------------------------------------------------------------------
    /**
     * Returns a new instance with the hours multiplied by the specified scalar.
     * <p>
     * This instance is immutable and unaffected by this method call.
     *
     * @param scalar  the amount to multiply by, may be negative
     * @return the new period multiplied by the specified scalar
     * @throws ArithmeticException if the result overflows an int
     */
    public Hours multipliedBy(int scalar) {
        return Hours.hours(FieldUtils.safeMultiply(getValue(), scalar));
    }

    /**
     * Returns a new instance with the hours divided by the specified divisor.
     * The calculation uses integer division, thus 3 divided by 2 is 1.
     * <p>
     * This instance is immutable and unaffected by this method call.
     *
     * @param divisor  the amount to divide by, may be negative
     * @return the new period divided by the specified divisor
     * @throws ArithmeticException if the divisor is zero
     */
    public Hours dividedBy(int divisor) {
        if (divisor == 1) {
            return this;
        }
        return Hours.hours(getValue() / divisor);
    }

    //-----------------------------------------------------------------------
    /**
     * Returns a new instance with the hours value negated.
     *
     * @return the new period with a negated value
     * @throws ArithmeticException if the result overflows an int
     */
    public Hours negated() {
        return Hours.hours(FieldUtils.safeNegate(getValue()));
    }

    //-----------------------------------------------------------------------
    /**
     * Is this hours instance greater than the specified number of hours.
     *
     * @param other  the other period, null means zero
     * @return true if this hours instance is greater than the specified one
     */
    public boolean isGreaterThan(Hours other) {
        if (other == null) {
            return getValue() > 0;
        }
        return getValue() > other.getValue();
    }

    /**
     * Is this hours instance less than the specified number of hours.
     *
     * @param other  the other period, null means zero
     * @return true if this hours instance is less than the specified one
     */
    public boolean isLessThan(Hours other) {
        if (other == null) {
            return getValue() < 0;
        }
        return getValue() < other.getValue();
    }

    //-----------------------------------------------------------------------
    /**
     * Gets this instance as a String in the ISO8601 duration format.
     * <p>
     * For example, "PT4H" represents 4 hours.
     *
     * @return the value as an ISO8601 string
     */
    @ToString
    public String toString() {
        return "PT" + String.valueOf(getValue()) + "H";
    }

}
| |
/* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is part of dcm4che, an implementation of DICOM(TM) in
* Java(TM), available at http://sourceforge.net/projects/dcm4che.
*
* The Initial Developer of the Original Code is
* TIANI Medgraph AG.
* Portions created by the Initial Developer are Copyright (C) 2003-2005
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* Gunter Zeilinger <gunter.zeilinger@tiani.com>
* Franz Willer <franz.willer@gwi-ag.com>
*
* Alternatively, the contents of this file may be used under the terms of
* either the GNU General Public License Version 2 or later (the "GPL"), or
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
package org.dcm4chex.archive.web.maverick.ae;
import java.util.List;
import javax.management.InstanceNotFoundException;
import javax.management.MBeanException;
import javax.management.MBeanServer;
import javax.management.ObjectName;
import javax.management.ReflectionException;
import javax.servlet.ServletConfig;
import org.apache.log4j.Logger;
import org.dcm4chex.archive.ejb.interfaces.AEDTO;
import org.jboss.mx.util.MBeanServerLocator;
/**
 * Web-tier delegate that forwards AE (Application Entity) configuration
 * requests and DICOM echo requests to the corresponding JMX services.
 *
 * @author franz.willer
 */
public class AEDelegate {

    /** ObjectName of the echo service MBean, resolved from servlet config. */
    private static ObjectName echoServiceName = null;

    /** ObjectName of the AE service MBean, resolved from servlet config. */
    private static ObjectName aeServiceName = null;

    /** Shared MBean server; non-null once {@link #init} has completed. */
    private static MBeanServer server;

    /** Result of the most recent {@link #getAEs()} call. */
    private List aes;

    private static Logger log = Logger.getLogger(AEDelegate.class.getName());

    /**
     * Initialize the Echo service delegator.
     * <p>
     * Resolves the MBean server and the names of the Echo and AE service
     * MBeans from the servlet init parameters 'echoServiceName' and
     * 'aeServiceName'. Subsequent calls are no-ops.
     * <p>
     * Synchronized on the class because all the state written here is static
     * and several servlet instances may call init concurrently; the original
     * unguarded check-then-act was a race.
     *
     * @param config
     *            The ServletConfig object.
     */
    public void init(ServletConfig config) {
        synchronized (AEDelegate.class) {
            if (server != null)
                return;
            server = MBeanServerLocator.locate();
            try {
                echoServiceName = new ObjectName(config
                        .getInitParameter("echoServiceName"));
                aeServiceName = new ObjectName(config
                        .getInitParameter("aeServiceName"));
            } catch (Exception e) {
                log.error("Exception in init! ", e);
            }
        }
    }

    public Logger getLogger() {
        return log;
    }

    // AE Service
    /**
     * Return list of all configured AE, or null if the MBean call fails.
     */
    public List getAEs() {
        try {
            aes = (List) server.invoke(aeServiceName, "listAEs", null, null);
            return aes;
        } catch (Exception x) {
            log.error("Exception occured in getAEs: " + x.getMessage(), x);
            return null;
        }
    }

    /**
     * Looks up a single AE configuration by its title.
     *
     * @param title the AE title to look up
     * @return the matching AE configuration
     * @throws ReflectionException
     * @throws MBeanException
     * @throws InstanceNotFoundException
     */
    public AEDTO getAE(String title) throws InstanceNotFoundException,
            MBeanException, ReflectionException {
        return (AEDTO) server
                .invoke(aeServiceName, "getAE", new Object[] { title },
                        new String[] { String.class.getName() });
    }

    /**
     * Creates or updates an AE configuration via the AE service MBean.
     * <p>
     * The two arrays passed to invoke must stay in lockstep: each value in
     * the Object[] corresponds position-by-position to a type name in the
     * String[] signature.
     *
     * @param ae the AE configuration to persist
     * @param checkHost whether the AE host name should be verified
     * @throws ReflectionException
     * @throws MBeanException
     * @throws InstanceNotFoundException
     */
    public void updateAE(AEDTO ae, boolean checkHost)
            throws InstanceNotFoundException, MBeanException,
            ReflectionException {
        server.invoke(aeServiceName, "updateAE",
                new Object[] {
                    new Long(ae.getPk()),
                    ae.getTitle(),
                    ae.getHostName(),
                    new Integer(ae.getPort()),
                    ae.getCipherSuitesAsString(),
                    ae.getIssuerOfPatientID(),
                    ae.getUserID(),
                    ae.getPassword(),
                    ae.getFileSystemGroupID(),
                    ae.getDescription(),
                    ae.getWadoURL(),
                    ae.getStationName(),
                    ae.getInstitution(),
                    ae.getDepartment(),
                    ae.isInstalled(),
                    new Boolean(checkHost) },
                new String[] {
                    long.class.getName(),
                    String.class.getName(),
                    String.class.getName(),
                    int.class.getName(),
                    String.class.getName(),
                    String.class.getName(),
                    String.class.getName(),
                    String.class.getName(),
                    String.class.getName(),
                    String.class.getName(),
                    String.class.getName(),
                    String.class.getName(),
                    String.class.getName(),
                    String.class.getName(),
                    boolean.class.getName(),
                    boolean.class.getName() });
    }

    /**
     * Removes the AE configuration with the given title.
     *
     * @param title the AE title to remove
     * @return the removed AE configuration
     * @throws ReflectionException
     * @throws MBeanException
     * @throws InstanceNotFoundException
     */
    public AEDTO delAE(String title) throws InstanceNotFoundException,
            MBeanException, ReflectionException {
        return (AEDTO) server
                .invoke(aeServiceName, "removeAE", new Object[] { title },
                        new String[] { String.class.getName() });
    }

    // ECHO Service
    /**
     * Makes the MBean call to echo an AE configuration.
     *
     * @param aeData the AE configuration to echo
     * @param nrOfTests number of echo attempts
     * @return An info string for status of echo, or null if the call failed.
     */
    public String echo(AEDTO aeData, int nrOfTests) {
        if (log.isDebugEnabled())
            log.debug("Send echo to " + aeData);
        String resp = null;
        try {
            Object o = server.invoke(echoServiceName, "echo", new Object[] {
                    aeData, new Integer(nrOfTests) }, new String[] {
                    AEDTO.class.getName(), Integer.class.getName() });
            resp = (String) o;
        } catch (Exception x) {
            log.error("Exception occured in echoAE: " + x.getMessage(), x);
        }
        if (log.isDebugEnabled())
            log.debug("echo response for " + aeData + ":" + resp);
        return resp;
    }

    /**
     * Makes the MBean call to echo an AET (AE config for given title).
     *
     * @param aet the AE title to echo
     * @param nrOfTests number of echo attempts
     * @return An info string for status of echo, or null if the call failed.
     */
    public String echo(String aet, int nrOfTests) {
        String resp = null;
        try {
            Object o = server.invoke(echoServiceName, "echo", new Object[] {
                    aet, new Integer(nrOfTests) }, new String[] {
                    String.class.getName(), Integer.class.getName() });
            resp = (String) o;
        } catch (Exception x) {
            log.error("Exception occured in echo (AET=" + aet + "): "
                    + x.getMessage(), x);
        }
        return resp;
    }

    /**
     * Makes the MBean call to echo all AE configurations.
     * <p>
     * Note: {@code aeData} is only used in the error log message; the
     * "echoAll" MBean operation itself takes no arguments.
     *
     * @param aeData the AE configuration, used for error reporting only
     * @return An info string per AE for status of echo, or null if the call failed.
     */
    public String[] echoAll(AEDTO aeData) {
        String[] resp = null;
        try {
            Object o = server.invoke(echoServiceName, "echoAll", null, null);
            resp = (String[]) o;
        } catch (Exception x) {
            log.error("Exception occured in echo (" + aeData + "): "
                    + x.getMessage(), x);
        }
        return resp;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.xml.security.test.dom.keys.keyresolver;
import java.io.FileInputStream;
import java.math.BigInteger;
import java.security.KeyFactory;
import java.security.KeyStore;
import java.security.NoSuchAlgorithmException;
import java.security.PrivateKey;
import java.security.PublicKey;
import java.security.cert.X509Certificate;
import java.security.interfaces.RSAPrivateKey;
import java.security.interfaces.RSAPublicKey;
import java.security.spec.RSAPrivateKeySpec;
import java.security.spec.RSAPublicKeySpec;
import javax.crypto.Cipher;
import javax.crypto.NoSuchPaddingException;
import javax.crypto.SecretKey;
import javax.crypto.spec.SecretKeySpec;
import javax.xml.parsers.DocumentBuilder;
import org.apache.xml.security.algorithms.JCEMapper;
import org.apache.xml.security.encryption.EncryptedData;
import org.apache.xml.security.encryption.EncryptedKey;
import org.apache.xml.security.encryption.XMLCipher;
import org.apache.xml.security.keys.KeyInfo;
import org.apache.xml.security.keys.content.X509Data;
import org.apache.xml.security.keys.content.x509.XMLX509Certificate;
import org.apache.xml.security.keys.content.x509.XMLX509IssuerSerial;
import org.apache.xml.security.keys.content.x509.XMLX509SKI;
import org.apache.xml.security.keys.content.x509.XMLX509SubjectName;
import org.apache.xml.security.keys.keyresolver.KeyResolver;
import org.apache.xml.security.keys.keyresolver.KeyResolverException;
import org.apache.xml.security.keys.keyresolver.KeyResolverSpi;
import org.apache.xml.security.keys.keyresolver.implementations.PrivateKeyResolver;
import org.apache.xml.security.keys.keyresolver.implementations.SecretKeyResolver;
import org.apache.xml.security.keys.keyresolver.implementations.SingleKeyResolver;
import org.apache.xml.security.keys.storage.StorageResolver;
import org.apache.xml.security.keys.storage.implementations.KeyStoreResolver;
import org.apache.xml.security.utils.Constants;
import org.apache.xml.security.utils.XMLUtils;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Text;
/**
* KeyResolver test.
*/
public class KeyResolverTest extends org.junit.Assert {
// Test fixture locations are resolved relative to the 'basedir' system
// property when set (e.g. under a Maven build), else the working directory.
private static final String BASEDIR = System.getProperty("basedir");
private static final String SEP = System.getProperty("file.separator");

/**
 * Initializes the Apache XML Security library before any test runs.
 */
public KeyResolverTest() {
    org.apache.xml.security.Init.init();
}
/**
* Test key resolvers through a KeyInfo.
*/
@org.junit.Test
public void testKeyResolvers() throws Exception {
//
// This test fails with the IBM JDK
//
if ("IBM Corporation".equals(System.getProperty("java.vendor"))) {
return;
}
char[] pwd = "secret".toCharArray();
KeyStore ks = KeyStore.getInstance("JCEKS");
FileInputStream fis = null;
if (BASEDIR != null && !"".equals(BASEDIR)) {
fis = new FileInputStream(BASEDIR + SEP + "src/test/resources/test.jceks");
} else {
fis = new FileInputStream("src/test/resources/test.jceks");
}
ks.load(fis, pwd);
X509Certificate cert = (X509Certificate)ks.getCertificate("rsakey");
PublicKey publicKey = cert.getPublicKey();
PrivateKey privateKey = (PrivateKey) ks.getKey("rsakey", pwd);
SecretKey secretKey = (SecretKey) ks.getKey("des3key", pwd);
StorageResolver storage = new StorageResolver(new KeyStoreResolver(ks));
KeyResolverSpi privateKeyResolver = new PrivateKeyResolver(ks, pwd);
KeyResolverSpi secretKeyResolver = new SecretKeyResolver(ks, pwd);
DocumentBuilder db = XMLUtils.createDocumentBuilder(false);
Document doc = db.newDocument();
KeyInfo ki;
X509Data x509data;
// X509Certificate hint
ki = new KeyInfo(doc);
ki.addStorageResolver(storage);
x509data = new X509Data(doc);
x509data.add(new XMLX509Certificate(doc, cert));
ki.add(x509data);
assertEquals(publicKey, ki.getPublicKey());
assertNull(ki.getPrivateKey());
ki.registerInternalKeyResolver(privateKeyResolver);
assertEquals(privateKey, ki.getPrivateKey());
// Issuer/Serial hint
ki = new KeyInfo(doc);
ki.addStorageResolver(storage);
x509data = new X509Data(doc);
x509data.add(new XMLX509IssuerSerial(doc, cert.getIssuerX500Principal().getName(), cert.getSerialNumber()));
ki.add(x509data);
assertEquals(publicKey, ki.getPublicKey());
ki.registerInternalKeyResolver(privateKeyResolver);
assertEquals(privateKey, ki.getPrivateKey());
// SubjectName hint
ki = new KeyInfo(doc);
ki.addStorageResolver(storage);
x509data = new X509Data(doc);
x509data.add(new XMLX509SubjectName(doc, cert.getSubjectX500Principal().getName()));
ki.add(x509data);
assertEquals(publicKey, ki.getPublicKey());
ki.registerInternalKeyResolver(privateKeyResolver);
assertEquals(privateKey, ki.getPrivateKey());
// SKI hint
ki = new KeyInfo(doc);
ki.addStorageResolver(storage);
x509data = new X509Data(doc);
x509data.add(new XMLX509SKI(doc, cert));
ki.add(x509data);
assertEquals(publicKey, ki.getPublicKey());
ki.registerInternalKeyResolver(privateKeyResolver);
assertEquals(privateKey, ki.getPrivateKey());
// KeyName hint
String rsaKeyName = "rsakey";
ki = new KeyInfo(doc);
ki.addKeyName(rsaKeyName);
ki.registerInternalKeyResolver(new SingleKeyResolver(rsaKeyName, publicKey));
assertEquals(publicKey, ki.getPublicKey());
ki = new KeyInfo(doc);
ki.addKeyName(rsaKeyName);
ki.registerInternalKeyResolver(privateKeyResolver);
assertEquals(privateKey, ki.getPrivateKey());
ki = new KeyInfo(doc);
ki.addKeyName(rsaKeyName);
ki.registerInternalKeyResolver(new SingleKeyResolver(rsaKeyName, privateKey));
assertEquals(privateKey, ki.getPrivateKey());
String des3KeyName = "des3key";
ki = new KeyInfo(doc);
ki.addKeyName(des3KeyName);
ki.registerInternalKeyResolver(secretKeyResolver);
assertEquals(secretKey, ki.getSecretKey());
ki = new KeyInfo(doc);
ki.addKeyName(des3KeyName);
ki.registerInternalKeyResolver(new SingleKeyResolver(des3KeyName, secretKey));
assertEquals(secretKey, ki.getSecretKey());
}
/**
* Encrypt some data, embedded the data encryption key
* in the message using the key transport algorithm rsa-1_5.
* Decrypt the data by resolving the Key Encryption Key.
* This test verifies if a KeyResolver can return a PrivateKey.
*/
@org.junit.Test
public void testResolvePrivateKey() throws Exception {
// See if AES-128 is available...
String algorithmId =
JCEMapper.translateURItoJCEID(
org.apache.xml.security.utils.EncryptionConstants.ALGO_ID_BLOCKCIPHER_AES128
);
boolean haveAES = false;
if (algorithmId != null) {
try {
if (Cipher.getInstance(algorithmId) != null) {
haveAES = true;
}
} catch (NoSuchAlgorithmException nsae) {
//
} catch (NoSuchPaddingException nspe) {
//
}
}
if (!haveAES) {
return;
}
// Create a sample XML document
Document document = XMLUtils.createDocumentBuilder(false).newDocument();
Element rootElement = document.createElement("root");
document.appendChild(rootElement);
Element elem = document.createElement("elem");
Text text = document.createTextNode("text");
elem.appendChild(text);
rootElement.appendChild(elem);
// Create a data encryption key
byte[] keyBytes = { 0, 1, 2, 3, 4, 5, 6, 7, 0, 1, 2, 3, 4, 5, 6, 7 };
SecretKeySpec dataEncryptKey = new SecretKeySpec(keyBytes, "AES");
// Create public and private keys
KeyFactory keyFactory = KeyFactory.getInstance("RSA");
RSAPublicKeySpec pubKeySpec = new RSAPublicKeySpec(
new BigInteger(
"8710a2bcb2f3fdac177f0ae0461c2dd0ebf72e0d88a5400583a7d8bdabd6" +
"ae009d30cfdf6acb5b6a64cdc730bc630a39d946d08babffe62ea20a87e37c93b3b0e8a8e576045b" +
"bddfbde83ca9bfa180fe6a5f5eee60661936d728314e809201ef52cd71d9fa3c8ce83f9d30ab5e08" +
"1539219e7e45dd6a60be65ac95d2049b8f21", 16),
new BigInteger("10001", 16));
RSAPrivateKeySpec privKeySpec = new RSAPrivateKeySpec(
new BigInteger(
"8710a2bcb2f3fdac177f0ae0461c2dd0ebf72e0d88a5400583a7d8bdabd" +
"6ae009d30cfdf6acb5b6a64cdc730bc630a39d946d08babffe62ea20a87e37c93b3b0e8a8e576045" +
"bbddfbde83ca9bfa180fe6a5f5eee60661936d728314e809201ef52cd71d9fa3c8ce83f9d30ab5e0" +
"81539219e7e45dd6a60be65ac95d2049b8f21", 16),
new BigInteger(
"20c39e569c2aa80cc91e5e6b0d56e49e5bbf78827bf56a546c1d996c597" +
"5187cb9a50fa828e5efe51d52f5d112c20bc700b836facadca6e0051afcdfe866841e37d207c0295" +
"36ff8674b301e2198b2c56abb0a0313f8ff84c1fcd6fa541aa6e5d9c018fab4784d2940def5dc709" +
"ddc714d73b6c23b5d178eaa5933577b8e8ae9", 16));
RSAPublicKey pubKey = (RSAPublicKey) keyFactory.generatePublic(pubKeySpec);
RSAPrivateKey privKey = (RSAPrivateKey) keyFactory.generatePrivate(privKeySpec);
// Encrypt the data encryption key with the key encryption key
XMLCipher keyCipher = XMLCipher.getInstance(XMLCipher.RSA_v1dot5);
keyCipher.init(XMLCipher.WRAP_MODE, pubKey);
EncryptedKey encryptedKey = keyCipher.encryptKey(document, dataEncryptKey);
String keyName = "testResolvePrivateKey";
KeyInfo kekInfo = new KeyInfo(document);
kekInfo.addKeyName(keyName);
encryptedKey.setKeyInfo(kekInfo);
// Encrypt the data
XMLCipher xmlCipher = XMLCipher.getInstance(XMLCipher.AES_128);
xmlCipher.init(XMLCipher.ENCRYPT_MODE, dataEncryptKey);
EncryptedData encryptedData = xmlCipher.getEncryptedData();
KeyInfo keyInfo = new KeyInfo(document);
keyInfo.add(encryptedKey);
encryptedData.setKeyInfo(keyInfo);
xmlCipher.doFinal(document, rootElement, true);
Element encryptedDataElement = (Element) rootElement.getFirstChild();
assertEquals("EncryptedData", encryptedDataElement.getLocalName());
// Decrypt the data by resolving the private key used as the KEK
// First test with an internal KeyResolver
MyPrivateKeyResolver.pk = privKey;
MyPrivateKeyResolver.pkName = keyName;
decryptDocument(document, new MyPrivateKeyResolver());
// Now test with a static KeyResolver
KeyResolver.registerAtStart(MyPrivateKeyResolver.class.getName(), false);
KeyResolverSpi resolver = KeyResolver.iterator().next();
assertEquals(MyPrivateKeyResolver.class.getName(), resolver.getClass().getName());
decryptDocument(document, null);
}
private void decryptDocument(Document docSource, KeyResolverSpi internalResolver) throws Exception
{
Document document = (Document)docSource.cloneNode(true);
Element rootElement = document.getDocumentElement();
Element encryptedDataElement = (Element)rootElement.getFirstChild();
XMLCipher decryptCipher = XMLCipher.getInstance();
decryptCipher.init(XMLCipher.DECRYPT_MODE, null);
if (internalResolver != null) {
decryptCipher.registerInternalKeyResolver(internalResolver);
}
decryptCipher.doFinal(document, encryptedDataElement);
Element decryptedElement = (Element) rootElement.getFirstChild();
assertEquals("elem", decryptedElement.getLocalName());
}
// A KeyResolver that returns a PrivateKey for a specific KeyName.
public static class MyPrivateKeyResolver extends KeyResolverSpi {
// We use static variables because KeyResolver.register() demands
// the use of the default constructor.
private static PrivateKey pk;
private static String pkName;
public boolean engineCanResolve(Element element, String BaseURI, StorageResolver storage) {
return false;
}
public PrivateKey engineLookupAndResolvePrivateKey(
Element element, String BaseURI, StorageResolver storage
) throws KeyResolverException {
if (Constants.SignatureSpecNS.equals(element.getNamespaceURI()) &&
Constants._TAG_KEYNAME.equals(element.getLocalName())) {
String keyName = element.getFirstChild().getNodeValue();
if (pkName.equals(keyName)) {
return pk;
}
}
return null;
}
}
}
| |
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.media.ui;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.clearInvocations;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
import android.os.Build;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.annotation.Config;
import org.chromium.base.test.BaseRobolectricTestRunner;
import org.chromium.components.browser_ui.media.MediaNotificationController;
import org.chromium.components.browser_ui.media.MediaNotificationInfo;
/**
 * JUnit tests for checking {@link MediaNotificationController} throttles notification updates
 * correctly.
 */
@RunWith(BaseRobolectricTestRunner.class)
@Config(manifest = Config.NONE,
        // Remove this after updating to a version of Robolectric that supports
        // notification channel creation. crbug.com/774315
        sdk = Build.VERSION_CODES.N_MR1, shadows = {MediaNotificationTestShadowResources.class})
public class MediaNotificationThrottlerTest extends MediaNotificationTestBase {
    private static final int THROTTLE_MILLIS =
            MediaNotificationController.Throttler.THROTTLE_MILLIS;

    @Before
    @Override
    public void setUp() {
        super.setUp();
        // Spy on the throttler so tests can verify its internal interactions.
        getController().mThrottler =
                spy(new MediaNotificationController.Throttler(getController()));
    }

    @Test
    public void testEmptyState() {
        // A fresh throttler is unthrottled: no pending task, no queued info.
        assertNull(getThrottler().mTask);
        assertNull(getThrottler().mLastPendingInfo);
    }

    @Test
    public void testFirstNotificationIsUnthrottled() {
        MediaNotificationInfo info = mMediaNotificationInfoBuilder.build();
        getThrottler().queueNotification(info);

        // Verify the notification is shown immediately
        verify(getThrottler()).showNotificationImmediately(info);
        verify(getController()).showNotification(info);

        // Verify entering the throttled state.
        assertNotNull(getThrottler().mTask);
        assertNull(getThrottler().mLastPendingInfo);
    }

    @Test
    public void testDelayedTaskFiresWithNoQueuedNotification() {
        MediaNotificationInfo info = mMediaNotificationInfoBuilder.build();
        getThrottler().queueNotification(info);
        clearInvocations(getThrottler());

        // When the task fires while no notification info is queued in throttled state, the
        // throttler enters unthrottled state.
        advanceTimeByMillis(THROTTLE_MILLIS);
        verify(getThrottler(), never()).showNotificationImmediately(info);
        assertNull(getThrottler().mTask);
    }

    @Test
    public void testDelayedTaskFiresWithQueuedNotification() {
        mMediaNotificationInfoBuilder.setPaused(false);
        MediaNotificationInfo info1 = mMediaNotificationInfoBuilder.build();
        mMediaNotificationInfoBuilder.setPaused(true);
        MediaNotificationInfo info2 = mMediaNotificationInfoBuilder.build();

        getThrottler().queueNotification(info1);
        getThrottler().queueNotification(info2);

        // In throttled state, queueing a notification info will only update the queued notification
        // info.
        verify(getThrottler()).showNotificationImmediately(info1);
        verify(getThrottler(), never()).showNotificationImmediately(info2);
        assertNotNull(getThrottler().mTask);
        assertSame(info2, getThrottler().mLastPendingInfo);

        clearInvocations(getThrottler());

        // When the delayed task fires, the queued notification info will be used to update
        // notification.
        advanceTimeByMillis(THROTTLE_MILLIS);
        verify(getThrottler()).showNotificationImmediately(info2);
        assertNotNull(getThrottler().mTask);
        assertNull(getThrottler().mLastPendingInfo);
    }

    @Test
    public void testQueueingNotificationWontResetTimer() {
        mMediaNotificationInfoBuilder.setPaused(false);
        MediaNotificationInfo info1 = mMediaNotificationInfoBuilder.build();
        mMediaNotificationInfoBuilder.setPaused(true);
        MediaNotificationInfo info2 = mMediaNotificationInfoBuilder.build();

        getThrottler().queueNotification(info1);

        // Wait till the timer goes half way.
        advanceTimeByMillis(THROTTLE_MILLIS / 2);
        getThrottler().queueNotification(info2);
        clearInvocations(getThrottler());

        // The task should fire |THROTTLE_MILLIS| after it was posted, not after the last
        // queueNotification() call.
        advanceTimeByMillis(THROTTLE_MILLIS / 2);
        verify(getThrottler()).showNotificationImmediately(info2);
        assertNotNull(getThrottler().mTask);
        assertNull(getThrottler().mLastPendingInfo);

        // Can't check whether another task is posted due to the limitation of Robolectric shadows.
    }

    @Test
    public void testQueueNotificationIgnoresNotification_Unthrottled() {
        mMediaNotificationInfoBuilder.setPaused(false);
        MediaNotificationInfo infoPlaying1 = mMediaNotificationInfoBuilder.build();
        MediaNotificationInfo infoPlaying2 = mMediaNotificationInfoBuilder.build();
        mMediaNotificationInfoBuilder.setPaused(true);
        MediaNotificationInfo infoPaused1 = mMediaNotificationInfoBuilder.build();
        MediaNotificationInfo infoPaused2 = mMediaNotificationInfoBuilder.build();

        // This will update the notification immediately and enter throttled state.
        getThrottler().queueNotification(infoPlaying1);

        // This will not update the queued notification as it is the same as which stored in
        // MediaNotificationController.
        getThrottler().queueNotification(infoPlaying2);
        assertNull(getThrottler().mLastPendingInfo);

        // This will update the queued notification info as it is a different one.
        getThrottler().queueNotification(infoPaused1);
        assertSame(infoPaused1, getThrottler().mLastPendingInfo);

        // This will not update the queued notification info as it is the same as the queued info.
        getThrottler().queueNotification(infoPaused2);
        assertSame(infoPaused1, getThrottler().mLastPendingInfo);

        // This will update the queued notification info, since it differs from the queued
        // info.
        getThrottler().queueNotification(infoPlaying1);
        assertSame(infoPlaying1, getThrottler().mLastPendingInfo);
    }

    @Test
    public void testQueueNotificationIgnoresNotification_Throttled() {
        MediaNotificationInfo info1 = mMediaNotificationInfoBuilder.build();
        MediaNotificationInfo info2 = mMediaNotificationInfoBuilder.build();

        getThrottler().queueNotification(info1);
        clearInvocations(getThrottler());

        // Queueing an info identical to the one already shown is a no-op.
        getThrottler().queueNotification(info2);
        verify(getThrottler(), never())
                .showNotificationImmediately(any(MediaNotificationInfo.class));
        assertNotNull(getThrottler().mTask);
        assertNull(getThrottler().mLastPendingInfo);
    }

    @Test
    public void testClearNotificationClearsThrottler() {
        MediaNotificationInfo info = mMediaNotificationInfoBuilder.build();
        getThrottler().queueNotification(info);
        getController().clearNotification();
        verify(getThrottler()).clearPendingNotifications();
    }

    @Test
    public void testClearThrottler() {
        MediaNotificationInfo info = mMediaNotificationInfoBuilder.build();
        getThrottler().queueNotification(info);
        getThrottler().clearPendingNotifications();
        clearInvocations(getThrottler());

        assertNull(getThrottler().mTask);
        assertNull(getThrottler().mLastPendingInfo);

        // Check the task is removed
        advanceTimeByMillis(THROTTLE_MILLIS);
        verify(getThrottler(), never())
                .showNotificationImmediately(any(MediaNotificationInfo.class));
    }

    // Convenience accessor for the spied throttler installed in setUp().
    private MediaNotificationController.Throttler getThrottler() {
        return getController().mThrottler;
    }
}
| |
/**
* Copyright 2010 The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.ipc;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.ObjectWritable;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableUtils;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.ReflectionUtils;
import javax.net.SocketFactory;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.ConnectException;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.SocketTimeoutException;
import java.net.UnknownHostException;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.Map.Entry;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
/** A client for an IPC service. IPC calls take a single {@link Writable} as a
* parameter, and return a {@link Writable} as their value. A service runs on
* a port and is defined by a parameter class and a value class.
*
* <p>This is the org.apache.hadoop.ipc.Client renamed as HBaseClient and
* moved into this package so can access package-private methods.
*
* @see HBaseServer
*/
public class HBaseClient {
private static final Log LOG =
LogFactory.getLog("org.apache.hadoop.ipc.HBaseClient");
protected final Hashtable<ConnectionId, Connection> connections =
new Hashtable<ConnectionId, Connection>();
protected final Class<? extends Writable> valueClass; // class of call values
protected int counter; // counter for call ids
protected final AtomicBoolean running = new AtomicBoolean(true); // if client runs
final protected Configuration conf;
final protected int maxIdleTime; // connections will be culled if it was idle for
// maxIdleTime microsecs
final protected int maxRetries; //the max. no. of retries for socket connections
final protected long failureSleep; // Time to sleep before retry on failure.
protected final boolean tcpNoDelay; // if T then disable Nagle's Algorithm
protected final boolean tcpKeepAlive; // if T then use keepalives
protected int pingInterval; // how often sends ping to the server in msecs
protected int socketTimeout; // socket timeout
protected final SocketFactory socketFactory; // how to create sockets
private int refCount = 1;
final private static String PING_INTERVAL_NAME = "ipc.ping.interval";
final private static String SOCKET_TIMEOUT = "ipc.socket.timeout";
final static int DEFAULT_PING_INTERVAL = 60000; // 1 min
final static int DEFAULT_SOCKET_TIMEOUT = 20000; // 20 seconds
final static int PING_CALL_ID = -1;
/**
 * Set the ping interval value, in milliseconds, in the configuration.
 *
 * @param conf Configuration
 * @param pingInterval the ping interval
 */
public static void setPingInterval(Configuration conf, int pingInterval) {
  conf.setInt(PING_INTERVAL_NAME, pingInterval);
}
/**
 * Get the ping interval from configuration;
 * If not set in the configuration, return the default value
 * ({@link #DEFAULT_PING_INTERVAL}).
 *
 * @param conf Configuration
 * @return the ping interval in milliseconds
 */
static int getPingInterval(Configuration conf) {
  return conf.getInt(PING_INTERVAL_NAME, DEFAULT_PING_INTERVAL);
}
/**
 * Set the socket timeout, in milliseconds, in the configuration.
 * @param conf Configuration
 * @param socketTimeout the socket timeout
 */
public static void setSocketTimeout(Configuration conf, int socketTimeout) {
  conf.setInt(SOCKET_TIMEOUT, socketTimeout);
}
/**
 * Get the socket timeout from the configuration; falls back to
 * {@link #DEFAULT_SOCKET_TIMEOUT} when unset.
 * @param conf Configuration
 * @return the socket timeout in milliseconds
 */
static int getSocketTimeout(Configuration conf) {
  return conf.getInt(SOCKET_TIMEOUT, DEFAULT_SOCKET_TIMEOUT);
}
/**
 * Increment this client's reference count.
 * Synchronized so concurrent callers observe a consistent count.
 */
synchronized void incCount() {
  refCount++;
}
/**
 * Decrement this client's reference count.
 * Synchronized so concurrent callers observe a consistent count.
 */
synchronized void decCount() {
  refCount--;
}
/**
 * Return if this client has no reference.
 *
 * @return true if this client has no reference; false otherwise
 */
synchronized boolean isZeroReference() {
  return refCount==0;
}
/** A call waiting for a value. */
private class Call {
  final int id;          // call id
  final Writable param;  // parameter
  Writable value;        // value, null if error
  IOException error;     // exception, null if value
  boolean done;          // true when call is done

  protected Call(Writable param) {
    this.param = param;
    // Call ids are handed out under the client-wide lock, so they are
    // unique across all connections owned by this client.
    synchronized (HBaseClient.this) {
      this.id = counter++;
    }
  }

  /** Indicate when the call is complete and the
   * value or error are available. Notifies by default. */
  protected synchronized void callComplete() {
    this.done = true;
    notify();                                 // notify caller
  }

  /** Set the exception when there is an error.
   * Notify the caller the call is done.
   *
   * @param error exception thrown by the call; either local or remote
   */
  public synchronized void setException(IOException error) {
    this.error = error;
    callComplete();
  }

  /** Set the return value when there is no error.
   * Notify the caller the call is done.
   *
   * @param value return value of the call.
   */
  public synchronized void setValue(Writable value) {
    this.value = value;
    callComplete();
  }
}
/** Thread that reads responses and notifies callers. Each connection owns a
 * socket connected to a remote address. Calls are multiplexed through this
 * socket: responses may be delivered out of order. */
private class Connection extends Thread {
  // Identity (server address plus user ticket) of the remote endpoint.
  private ConnectionId remoteId;
  private Socket socket = null;                 // connected socket
  private DataInputStream in;
  private DataOutputStream out;

  // currently active calls, keyed by call id
  private final Hashtable<Integer, Call> calls = new Hashtable<Integer, Call>();
  private final AtomicLong lastActivity = new AtomicLong();// last I/O activity time
  protected final AtomicBoolean shouldCloseConnection = new AtomicBoolean(); // indicate if the connection is closed
  private IOException closeException; // close reason

  public Connection(InetSocketAddress address) throws IOException {
    this(new ConnectionId(address, null, 0));
  }

  /**
   * @param remoteId identity of the remote server
   * @throws UnknownHostException if the remote address cannot be resolved
   */
  public Connection(ConnectionId remoteId) throws IOException {
    if (remoteId.getAddress().isUnresolved()) {
      throw new UnknownHostException("unknown host: " +
                                     remoteId.getAddress().getHostName());
    }
    this.remoteId = remoteId;
    UserGroupInformation ticket = remoteId.getTicket();
    this.setName("IPC Client (" + socketFactory.hashCode() +") connection to " +
      remoteId.getAddress().toString() +
      ((ticket==null)?" from an unknown user": (" from " + ticket.getUserName())));
    // Daemon so idle connections never keep the client JVM alive.
    this.setDaemon(true);
  }

  /** Update lastActivity with the current time. */
  private void touch() {
    lastActivity.set(System.currentTimeMillis());
  }

  /**
   * Add a call to this connection's call queue and notify
   * a listener; synchronized.
   * Returns false if called during shutdown.
   * @param call to add
   * @return true if the call was added.
   */
  protected synchronized boolean addCall(Call call) {
    if (shouldCloseConnection.get())
      return false;
    calls.put(call.id, call);
    // Wake waitForWork() so the receiver thread starts reading responses.
    notify();
    return true;
  }

  /** This class sends a ping to the remote side when timeout on
   * reading. If no failure is detected, it retries until at least
   * a byte is read.
   */
  private class PingInputStream extends FilterInputStream {
    /* constructor */
    protected PingInputStream(InputStream in) {
      super(in);
    }

    /* Process timeout exception
     * if the connection is not going to be closed, send a ping.
     * otherwise, throw the timeout exception.
     */
    private void handleTimeout(SocketTimeoutException e) throws IOException {
      // With an explicit rpcTimeout the timeout is real and must be
      // propagated rather than masked by a ping.
      if (shouldCloseConnection.get() || !running.get() ||
          remoteId.rpcTimeout > 0) {
        throw e;
      }
      sendPing();
    }

    /** Read a byte from the stream.
     * Send a ping if timeout on read. Retries if no failure is detected
     * until a byte is read.
     * @throws IOException for any IO problem other than socket timeout
     */
    @Override
    public int read() throws IOException {
      do {
        try {
          return super.read();
        } catch (SocketTimeoutException e) {
          handleTimeout(e);
        }
      } while (true);
    }

    /** Read bytes into a buffer starting from offset <code>off</code>
     * Send a ping if timeout on read. Retries if no failure is detected
     * until a byte is read.
     *
     * @return the total number of bytes read; -1 if the connection is closed.
     */
    @Override
    public int read(byte[] buf, int off, int len) throws IOException {
      do {
        try {
          return super.read(buf, off, len);
        } catch (SocketTimeoutException e) {
          handleTimeout(e);
        }
      } while (true);
    }
  }

  /** Connect to the server and set up the I/O streams. It then sends
   * a header to the server and starts
   * the connection thread that waits for responses.
   * @throws java.io.IOException e
   */
  protected synchronized void setupIOstreams() throws IOException {
    if (socket != null || shouldCloseConnection.get()) {
      return; // already connected, or shutting down
    }

    short ioFailures = 0;
    short timeoutFailures = 0;
    try {
      if (LOG.isDebugEnabled()) {
        LOG.debug("Connecting to "+remoteId.getAddress());
      }
      while (true) {
        try {
          this.socket = socketFactory.createSocket();
          this.socket.setTcpNoDelay(tcpNoDelay);
          this.socket.setKeepAlive(tcpKeepAlive);
          NetUtils.connect(this.socket, remoteId.getAddress(),
              getSocketTimeout(conf));
          if (remoteId.rpcTimeout > 0) {
            pingInterval = remoteId.rpcTimeout; // overwrite pingInterval
          }
          // The read timeout is what drives the ping logic in PingInputStream.
          this.socket.setSoTimeout(pingInterval);
          break;
        } catch (SocketTimeoutException toe) {
          handleConnectionFailure(timeoutFailures++, maxRetries, toe);
        } catch (IOException ie) {
          handleConnectionFailure(ioFailures++, maxRetries, ie);
        }
      }
      this.in = new DataInputStream(new BufferedInputStream
          (new PingInputStream(NetUtils.getInputStream(socket))));
      this.out = new DataOutputStream
          (new BufferedOutputStream(NetUtils.getOutputStream(socket)));
      writeHeader();

      // update last activity time
      touch();

      // start the receiver thread after the socket connection has been set up
      start();
    } catch (IOException e) {
      markClosed(e);
      close();
      throw e;
    }
  }

  /* Handle connection failures
   *
   * If the current number of retries is equal to the max number of retries,
   * stop retrying and throw the exception; Otherwise backoff N seconds and
   * try connecting again.
   *
   * This Method is only called from inside setupIOstreams(), which is
   * synchronized. Hence the sleep is synchronized; the locks will be retained.
   *
   * @param curRetries current number of retries
   * @param maxRetries max number of retries allowed
   * @param ioe failure reason
   * @throws IOException if max number of retries is reached
   */
  private void handleConnectionFailure(
      int curRetries, int maxRetries, IOException ioe) throws IOException {
    // close the current connection
    if (socket != null) { // could be null if the socket creation failed
      try {
        socket.close();
      } catch (IOException e) {
        LOG.warn("Not able to close a socket", e);
      }
    }
    // set socket to null so that the next call to setupIOstreams
    // can start the process of connect all over again.
    socket = null;

    // throw the exception if the maximum number of retries is reached
    if (curRetries >= maxRetries) {
      throw ioe;
    }

    // otherwise back off and retry
    try {
      Thread.sleep(failureSleep);
    } catch (InterruptedException ignored) {}

    LOG.info("Retrying connect to server: " + remoteId.getAddress() +
      " after sleeping " + failureSleep + "ms. Already tried " + curRetries +
      " time(s).");
  }

  /* Write the header for each connection
   * Out is not synchronized because only the first thread does this.
   */
  private void writeHeader() throws IOException {
    // Magic preamble plus protocol version.
    out.write(HBaseServer.HEADER.array());
    out.write(HBaseServer.CURRENT_VERSION);
    //When there are more fields we can have ConnectionHeader Writable.
    DataOutputBuffer buf = new DataOutputBuffer();
    ObjectWritable.writeObject(buf, remoteId.getTicket(),
                               UserGroupInformation.class, conf);
    // Length-prefixed user ticket so the server knows how much to read.
    int bufLen = buf.getLength();
    out.writeInt(bufLen);
    out.write(buf.getData(), 0, bufLen);
  }

  /* wait till someone signals us to start reading RPC response or
   * it is idle too long, it is marked as to be closed,
   * or the client is marked as not running.
   *
   * Return true if it is time to read a response; false otherwise.
   */
  @SuppressWarnings({"ThrowableInstanceNeverThrown"})
  private synchronized boolean waitForWork() {
    if (calls.isEmpty() && !shouldCloseConnection.get() && running.get()) {
      // No outstanding calls: wait for new work, but no longer than the
      // remaining idle budget.
      long timeout = maxIdleTime-
            (System.currentTimeMillis()-lastActivity.get());
      if (timeout>0) {
        try {
          wait(timeout);
        } catch (InterruptedException ignored) {}
      }
    }

    if (!calls.isEmpty() && !shouldCloseConnection.get() && running.get()) {
      return true;
    } else if (shouldCloseConnection.get()) {
      return false;
    } else if (calls.isEmpty()) { // idle connection closed or stopped
      markClosed(null);
      return false;
    } else { // get stopped but there are still pending requests
      markClosed((IOException)new IOException().initCause(
          new InterruptedException()));
      return false;
    }
  }

  public InetSocketAddress getRemoteAddress() {
    return remoteId.getAddress();
  }

  /* Send a ping to the server if the time elapsed
   * since last I/O activity is equal to or greater than the ping interval
   */
  protected synchronized void sendPing() throws IOException {
    long curTime = System.currentTimeMillis();
    if ( curTime - lastActivity.get() >= pingInterval) {
      lastActivity.set(curTime);
      //noinspection SynchronizeOnNonFinalField
      synchronized (this.out) {
        // PING_CALL_ID is a sentinel id; no response is expected for it.
        out.writeInt(PING_CALL_ID);
        out.flush();
      }
    }
  }

  @Override
  public void run() {
    if (LOG.isDebugEnabled())
      LOG.debug(getName() + ": starting, having connections "
          + connections.size());

    try {
      while (waitForWork()) {//wait here for work - read or close connection
        receiveResponse();
      }
    } catch (Throwable t) {
      LOG.warn("Unexpected exception receiving call responses", t);
      markClosed(new IOException("Unexpected exception receiving call responses", t));
    }

    close();

    if (LOG.isDebugEnabled())
      LOG.debug(getName() + ": stopped, remaining connections "
          + connections.size());
  }

  /* Initiates a call by sending the parameter to the remote server.
   * Note: this is not called from the Connection thread, but by other
   * threads.
   */
  protected void sendParam(Call call) {
    if (shouldCloseConnection.get()) {
      return;
    }

    DataOutputBuffer d=null;
    try {
      //noinspection SynchronizeOnNonFinalField
      synchronized (this.out) { // FindBugs IS2_INCONSISTENT_SYNC
        if (LOG.isDebugEnabled())
          LOG.debug(getName() + " sending #" + call.id);

        //for serializing the
        //data to be written
        d = new DataOutputBuffer();
        d.writeInt(0xdeadbeef); // placeholder for data length
        d.writeInt(call.id);
        call.param.write(d);
        byte[] data = d.getData();
        int dataLength = d.getLength();
        // fill in the placeholder
        Bytes.putInt(data, 0, dataLength - 4);
        out.write(data, 0, dataLength);
        out.flush();
      }
    } catch(IOException e) {
      // A write failure poisons the whole multiplexed connection.
      markClosed(e);
    } finally {
      //the buffer is just an in-memory buffer, but it is still polite to
      // close early
      IOUtils.closeStream(d);
    }
  }

  /* Receive a response.
   * Because only one receiver, so no synchronization on in.
   */
  private void receiveResponse() {
    if (shouldCloseConnection.get()) {
      return;
    }
    touch();

    try {
      int id = in.readInt();                    // try to read an id

      if (LOG.isDebugEnabled())
        LOG.debug(getName() + " got value #" + id);

      // NOTE(review): assumes the server only echoes ids of outstanding
      // calls; an unknown id would leave `call` null and NPE below — confirm.
      Call call = calls.get(id);

      boolean isError = in.readBoolean();     // read if error
      if (isError) {
        //noinspection ThrowableInstanceNeverThrown
        call.setException(new RemoteException( WritableUtils.readString(in),
            WritableUtils.readString(in)));
        calls.remove(id);
      } else {
        Writable value = ReflectionUtils.newInstance(valueClass, conf);
        value.readFields(in);                 // read value
        call.setValue(value);
        calls.remove(id);
      }
    } catch (IOException e) {
      markClosed(e);
    }
  }

  private synchronized void markClosed(IOException e) {
    // Only the first closer records the close reason and wakes waiters.
    if (shouldCloseConnection.compareAndSet(false, true)) {
      closeException = e;
      notifyAll();
    }
  }

  /** Close the connection. */
  private synchronized void close() {
    if (!shouldCloseConnection.get()) {
      LOG.error("The connection is not in the closed state");
      return;
    }

    // release the resources
    // first thing to do;take the connection out of the connection list
    synchronized (connections) {
      if (connections.get(remoteId) == this) {
        connections.remove(remoteId);
      }
    }

    // close the streams and therefore the socket
    IOUtils.closeStream(out);
    IOUtils.closeStream(in);

    // clean up all calls
    if (closeException == null) {
      if (!calls.isEmpty()) {
        LOG.warn(
            "A connection is closed for no cause and calls are not empty");

        // clean up calls anyway
        closeException = new IOException("Unexpected closed connection");
        cleanupCalls();
      }
    } else {
      // log the info
      if (LOG.isDebugEnabled()) {
        LOG.debug("closing ipc connection to " + remoteId.address + ": " +
            closeException.getMessage(),closeException);
      }

      // cleanup calls
      cleanupCalls();
    }

    if (LOG.isDebugEnabled())
      LOG.debug(getName() + ": closed");
  }

  /* Cleanup all calls and mark them as done */
  private void cleanupCalls() {
    Iterator<Entry<Integer, Call>> itor = calls.entrySet().iterator() ;
    while (itor.hasNext()) {
      Call c = itor.next().getValue();
      c.setException(closeException); // local exception
      itor.remove();
    }
  }
}
  /** Call implementation used for parallel calls. */
  private class ParallelCall extends Call {
    // collector shared with the sibling calls of the same batch
    private final ParallelResults results;
    // position of this call's value in the collector's result array
    protected final int index;
    public ParallelCall(Writable param, ParallelResults results, int index) {
      super(param);
      this.results = results;
      this.index = index;
    }
    /** Deliver result to result collector. */
    @Override
    protected void callComplete() {
      results.callComplete(this);
    }
  }
  /** Result collector for parallel calls. */
  private static class ParallelResults {
    // values[i] holds the result of the call with index i; stays null for
    // calls that errored or timed out
    protected final Writable[] values;
    // number of results still expected; decremented externally by
    // call(Writable[], InetSocketAddress[]) when a send fails
    protected int size;
    // number of results received so far
    protected int count;
    public ParallelResults(int size) {
      this.values = new Writable[size];
      this.size = size;
    }
    /*
     * Collect a result.
     */
    synchronized void callComplete(ParallelCall call) {
      // FindBugs IS2_INCONSISTENT_SYNC
      values[call.index] = call.value; // store the value
      count++;                         // count it
      if (count == size)               // if all values are in
        notify();                      // then notify waiting caller
    }
  }
/**
* Construct an IPC client whose values are of the given {@link Writable}
* class.
* @param valueClass value class
* @param conf configuration
* @param factory socket factory
*/
public HBaseClient(Class<? extends Writable> valueClass, Configuration conf,
SocketFactory factory) {
this.valueClass = valueClass;
this.maxIdleTime =
conf.getInt("hbase.ipc.client.connection.maxidletime", 10000); //10s
this.maxRetries = conf.getInt("hbase.ipc.client.connect.max.retries", 0);
this.failureSleep = conf.getInt("hbase.client.pause", 1000);
this.tcpNoDelay = conf.getBoolean("hbase.ipc.client.tcpnodelay", false);
this.tcpKeepAlive = conf.getBoolean("hbase.ipc.client.tcpkeepalive", true);
this.pingInterval = getPingInterval(conf);
if (LOG.isDebugEnabled()) {
LOG.debug("The ping interval is" + this.pingInterval + "ms.");
}
this.conf = conf;
this.socketFactory = factory;
}
  /**
   * Construct an IPC client with the default SocketFactory
   * Delegates to the three-argument constructor, using the socket factory
   * configured (or defaulted) by {@code NetUtils}.
   * @param valueClass value class
   * @param conf configuration
   */
  public HBaseClient(Class<? extends Writable> valueClass, Configuration conf) {
    this(valueClass, conf, NetUtils.getDefaultSocketFactory(conf));
  }
  /** Return the socket factory of this client
   *
   * Package-private accessor; returns the factory passed at construction.
   * @return this client's socket factory
   */
  SocketFactory getSocketFactory() {
    return socketFactory;
  }
  /** Stop all threads related to this client. No further calls may be made
   * using this client.
   * Interrupts every pooled connection and then polls until they have all
   * removed themselves from the pool. */
  public void stop() {
    if (LOG.isDebugEnabled()) {
      LOG.debug("Stopping client");
    }
    // only the first caller performs the shutdown; later calls are no-ops
    if (!running.compareAndSet(true, false)) {
      return;
    }
    // wake up all connections
    synchronized (connections) {
      for (Connection conn : connections.values()) {
        conn.interrupt();
      }
    }
    // wait until all connections are closed
    while (!connections.isEmpty()) {
      try {
        Thread.sleep(100);
      } catch (InterruptedException ignored) {
        // NOTE(review): the interrupt status is swallowed here and the loop
        // keeps polling until the pool drains. Consider re-asserting the
        // interrupt flag — confirm callers tolerate that first.
      }
    }
  }
  /** Make a call, passing <code>param</code>, to the IPC server running at
   * <code>address</code>, returning the value. Throws exceptions if there are
   * network problems or if the remote code threw an exception.
   * Convenience overload: no user ticket and no per-call RPC timeout
   * (a timeout of 0 means "none").
   * @param param writable parameter
   * @param address network address
   * @return Writable
   * @throws IOException e
   */
  public Writable call(Writable param, InetSocketAddress address)
      throws IOException {
    return call(param, address, null, 0);
  }
  /**
   * Make a call, passing {@code param} to the IPC server at {@code addr},
   * and block the calling thread until the result (or an error) arrives.
   * If the thread is interrupted while waiting, the interrupt is remembered
   * and re-asserted once the call completes.
   * @param param writable parameter to send
   * @param addr target server address
   * @param ticket user credentials used to key/authenticate the connection
   *               (may be null)
   * @param rpcTimeout per-call timeout in milliseconds; 0 disables it
   * @return the value returned by the remote server
   * @throws IOException on remote errors (RemoteException) or local failures
   */
  public Writable call(Writable param, InetSocketAddress addr,
                       UserGroupInformation ticket, int rpcTimeout)
                       throws IOException {
    Call call = new Call(param);
    Connection connection = getConnection(addr, ticket, rpcTimeout, call);
    connection.sendParam(call);                 // send the parameter
    boolean interrupted = false;
    //noinspection SynchronizationOnLocalVariableOrMethodParameter
    synchronized (call) {
      while (!call.done) {
        try {
          call.wait();                           // wait for the result
        } catch (InterruptedException ignored) {
          // save the fact that we were interrupted
          interrupted = true;
        }
      }
      if (interrupted) {
        // set the interrupt flag now that we are done waiting
        Thread.currentThread().interrupt();
      }
      if (call.error != null) {
        if (call.error instanceof RemoteException) {
          // refresh the stack trace so it points at this caller rather than
          // at the reader thread that recorded the error
          call.error.fillInStackTrace();
          throw call.error;
        }
        // local exception
        throw wrapException(addr, call.error);
      }
      return call.value;
    }
  }
/**
* Take an IOException and the address we were trying to connect to
* and return an IOException with the input exception as the cause.
* The new exception provides the stack trace of the place where
* the exception is thrown and some extra diagnostics information.
* If the exception is ConnectException or SocketTimeoutException,
* return a new one of the same type; Otherwise return an IOException.
*
* @param addr target address
* @param exception the relevant exception
* @return an exception to throw
*/
@SuppressWarnings({"ThrowableInstanceNeverThrown"})
private IOException wrapException(InetSocketAddress addr,
IOException exception) {
if (exception instanceof ConnectException) {
//connection refused; include the host:port in the error
return (ConnectException)new ConnectException(
"Call to " + addr + " failed on connection exception: " + exception)
.initCause(exception);
} else if (exception instanceof SocketTimeoutException) {
return (SocketTimeoutException)new SocketTimeoutException(
"Call to " + addr + " failed on socket timeout exception: "
+ exception).initCause(exception);
} else {
return (IOException)new IOException(
"Call to " + addr + " failed on local exception: " + exception)
.initCause(exception);
}
}
  /** Makes a set of calls in parallel. Each parameter is sent to the
   * corresponding address. When all values are available, or have timed out
   * or errored, the collected results are returned in an array. The array
   * contains nulls for calls that timed out or errored.
   * @param params writable parameters
   * @param addresses socket addresses
   * @return Writable[]
   * @throws IOException e
   */
  public Writable[] call(Writable[] params, InetSocketAddress[] addresses)
    throws IOException {
    if (addresses.length == 0) return new Writable[0];
    ParallelResults results = new ParallelResults(params.length);
    // TODO this synchronization block doesnt make any sense, we should possibly fix it
    //noinspection SynchronizationOnLocalVariableOrMethodParameter
    synchronized (results) {
      // holding the monitor while sending guarantees no callComplete() can
      // run (and notify) before we start waiting below
      for (int i = 0; i < params.length; i++) {
        ParallelCall call = new ParallelCall(params[i], results, i);
        try {
          Connection connection = getConnection(addresses[i], null, 0, call);
          connection.sendParam(call);             // send each parameter
        } catch (IOException e) {
          // log errors
          LOG.info("Calling "+addresses[i]+" caught: " +
                   e.getMessage(),e);
          results.size--;                         //  wait for one fewer result
        }
      }
      // NOTE(review): there is no timeout on this wait — if a result never
      // arrives, this blocks until a connection-level failure completes the
      // call; confirm that is the intended behavior.
      while (results.count != results.size) {
        try {
          results.wait();                    // wait for all results
        } catch (InterruptedException ignored) {}
      }
      return results.values;
    }
  }
/* Get a connection from the pool, or create a new one and add it to the
* pool. Connections to a given host/port are reused. */
private Connection getConnection(InetSocketAddress addr,
UserGroupInformation ticket,
int rpcTimeout,
Call call)
throws IOException {
if (!running.get()) {
// the client is stopped
throw new IOException("The client is stopped");
}
Connection connection;
/* we could avoid this allocation for each RPC by having a
* connectionsId object and with set() method. We need to manage the
* refs for keys in HashMap properly. For now its ok.
*/
ConnectionId remoteId = new ConnectionId(addr, ticket, rpcTimeout);
do {
synchronized (connections) {
connection = connections.get(remoteId);
if (connection == null) {
connection = new Connection(remoteId);
connections.put(remoteId, connection);
}
}
} while (!connection.addCall(call));
//we don't invoke the method below inside "synchronized (connections)"
//block above. The reason for that is if the server happens to be slow,
//it will take longer to establish a connection and that will slow the
//entire system down.
connection.setupIOstreams();
return connection;
}
/**
* This class holds the address and the user ticket. The client connections
* to servers are uniquely identified by <remoteAddress, ticket>
*/
private static class ConnectionId {
final InetSocketAddress address;
final UserGroupInformation ticket;
final private int rpcTimeout;
ConnectionId(InetSocketAddress address, UserGroupInformation ticket,
int rpcTimeout) {
this.address = address;
this.ticket = ticket;
this.rpcTimeout = rpcTimeout;
}
InetSocketAddress getAddress() {
return address;
}
UserGroupInformation getTicket() {
return ticket;
}
@Override
public boolean equals(Object obj) {
if (obj instanceof ConnectionId) {
ConnectionId id = (ConnectionId) obj;
return address.equals(id.address) && ticket == id.ticket &&
rpcTimeout == id.rpcTimeout;
//Note : ticket is a ref comparision.
}
return false;
}
@Override
public int hashCode() {
return address.hashCode() ^ System.identityHashCode(ticket) ^ rpcTimeout;
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.end2end;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.sql.*;
import org.apache.phoenix.schema.TypeMismatchException;
import org.junit.Test;
/**
 * End-to-end tests for the Phoenix {@code ARRAY_APPEND} built-in function.
 *
 * <p>Changes from the previous revision: connections, statements and result
 * sets are now closed via try-with-resources (they used to leak), the
 * deprecated {@code new Double(...)} boxing and lowercase {@code l} long
 * literals were replaced, and the repeated query/assert boilerplate was
 * factored into the {@link #assertArrayRows} and
 * {@link #assertSingleRegionName} helpers. All SQL strings and test method
 * signatures are unchanged.</p>
 */
public class ArrayAppendFunctionIT extends ParallelStatsDisabledIT {

    /**
     * Creates a table covering every column type used by the generic tests,
     * upserts its single test row, and returns the generated table name.
     */
    private String initTables(Connection conn) throws Exception {
        String tableName = generateUniqueName();
        String ddl = "CREATE TABLE " + tableName + " (region_name VARCHAR PRIMARY KEY,varchars VARCHAR[],integers INTEGER[],doubles DOUBLE[],bigints BIGINT[],chars CHAR(15)[],double1 DOUBLE,char1 CHAR(17),nullcheck INTEGER,chars2 CHAR(15)[])";
        try (Statement stmt = conn.createStatement()) {
            stmt.execute(ddl);
        }
        String dml = "UPSERT INTO " + tableName + "(region_name,varchars,integers,doubles,bigints,chars,double1,char1,nullcheck,chars2) VALUES('SF Bay Area'," +
                "ARRAY['2345','46345','23234']," +
                "ARRAY[2345,46345,23234,456]," +
                "ARRAY[23.45,46.345,23.234,45.6,5.78]," +
                "ARRAY[12,34,56,78,910]," +
                "ARRAY['a','bbbb','c','ddd','e']," +
                "23.45," +
                "'wert'," +
                "NULL," +
                "ARRAY['a','bbbb','c','ddd','e','foo']" +
                ")";
        try (PreparedStatement stmt = conn.prepareStatement(dml)) {
            stmt.execute();
        }
        conn.commit();
        return tableName;
    }

    /**
     * Creates a table whose primary key is declared DESC with the given
     * {@code type}, upserts one row keyed by {@code val}, and returns the
     * generated table name. Used by the *Desc tests.
     */
    private String initTablesDesc(Connection conn, String type, String val) throws Exception {
        String tableName = generateUniqueName();
        String ddl = "CREATE TABLE " + tableName + " (pk " + type + " PRIMARY KEY DESC,varchars VARCHAR[],integers INTEGER[],doubles DOUBLE[],bigints BIGINT[],chars CHAR(15)[],chars2 CHAR(15)[], bools BOOLEAN[])";
        try (Statement stmt = conn.createStatement()) {
            stmt.execute(ddl);
        }
        String dml = "UPSERT INTO " + tableName + "(pk,varchars,integers,doubles,bigints,chars,chars2,bools) VALUES(" + val + "," +
                "ARRAY['2345','46345','23234']," +
                "ARRAY[2345,46345,23234,456]," +
                "ARRAY[23.45,46.345,23.234,45.6,5.78]," +
                "ARRAY[12,34,56,78,910]," +
                "ARRAY['a','bbbb','c','ddd','e']," +
                "ARRAY['a','bbbb','c','ddd','e','foo']," +
                "ARRAY[true,false]" +
                ")";
        try (PreparedStatement stmt = conn.prepareStatement(dml)) {
            stmt.execute();
        }
        conn.commit();
        return tableName;
    }

    /**
     * Runs {@code query}, asserts it returns exactly the given rows in order
     * (each row being a single array column of Phoenix type {@code baseType}),
     * and asserts there are no extra rows.
     */
    private void assertArrayRows(Connection conn, String query, String baseType,
            Object[]... expectedRows) throws SQLException {
        try (Statement stmt = conn.createStatement();
                ResultSet rs = stmt.executeQuery(query)) {
            for (Object[] expected : expectedRows) {
                assertTrue(rs.next());
                assertEquals(conn.createArrayOf(baseType, expected), rs.getArray(1));
            }
            assertFalse(rs.next());
        }
    }

    /**
     * Runs {@code query} and asserts it returns exactly one row whose first
     * column is the test row's region name.
     */
    private void assertSingleRegionName(Connection conn, String query) throws SQLException {
        try (Statement stmt = conn.createStatement();
                ResultSet rs = stmt.executeQuery(query)) {
            assertTrue(rs.next());
            assertEquals("SF Bay Area", rs.getString(1));
            assertFalse(rs.next());
        }
    }

    @Test
    public void testArrayAppendFunctionVarchar() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String tableName = initTables(conn);
            assertArrayRows(conn,
                    "SELECT ARRAY_APPEND(varchars,'34567') FROM " + tableName + " WHERE region_name = 'SF Bay Area'",
                    "VARCHAR", new String[]{"2345", "46345", "23234", "34567"});
        }
    }

    @Test
    public void testArrayAppendFunctionInteger() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String tableName = initTables(conn);
            assertArrayRows(conn,
                    "SELECT ARRAY_APPEND(integers,1234) FROM " + tableName + " WHERE region_name = 'SF Bay Area'",
                    "INTEGER", new Integer[]{2345, 46345, 23234, 456, 1234});
        }
    }

    @Test
    public void testArrayAppendFunctionDouble() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String tableName = initTables(conn);
            assertArrayRows(conn,
                    "SELECT ARRAY_APPEND(doubles,double1) FROM " + tableName + " WHERE region_name = 'SF Bay Area'",
                    "DOUBLE", new Double[]{23.45, 46.345, 23.234, 45.6, 5.78, 23.45});
        }
    }

    @Test
    public void testArrayAppendFunctionDouble2() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String tableName = initTables(conn);
            // appending the integer literal 23 coerces it to DOUBLE 23.0
            assertArrayRows(conn,
                    "SELECT ARRAY_APPEND(doubles,23) FROM " + tableName + " WHERE region_name = 'SF Bay Area'",
                    "DOUBLE", new Double[]{23.45, 46.345, 23.234, 45.6, 5.78, 23.0});
        }
    }

    @Test
    public void testArrayAppendFunctionBigint() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String tableName = initTables(conn);
            assertArrayRows(conn,
                    "SELECT ARRAY_APPEND(bigints,1112) FROM " + tableName + " WHERE region_name = 'SF Bay Area'",
                    "BIGINT", new Long[]{12L, 34L, 56L, 78L, 910L, 1112L});
        }
    }

    @Test
    public void testArrayAppendFunctionChar() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String tableName = initTables(conn);
            assertArrayRows(conn,
                    "SELECT ARRAY_APPEND(chars,'fac') FROM " + tableName + " WHERE region_name = 'SF Bay Area'",
                    "CHAR", new String[]{"a", "bbbb", "c", "ddd", "e", "fac"});
        }
    }

    @Test(expected = TypeMismatchException.class)
    public void testArrayAppendFunctionIntToCharArray() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String tableName = initTables(conn);
            // appending an INTEGER to a VARCHAR[] must be rejected at compile time
            try (Statement stmt = conn.createStatement();
                    ResultSet rs = stmt.executeQuery("SELECT ARRAY_APPEND(varchars,234) FROM " + tableName + " WHERE region_name = 'SF Bay Area'")) {
            }
        }
    }

    @Test(expected = TypeMismatchException.class)
    public void testArrayAppendFunctionVarcharToIntegerArray() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String tableName = initTables(conn);
            // appending a VARCHAR to an INTEGER[] must be rejected at compile time
            try (Statement stmt = conn.createStatement();
                    ResultSet rs = stmt.executeQuery("SELECT ARRAY_APPEND(integers,'234') FROM " + tableName + " WHERE region_name = 'SF Bay Area'")) {
            }
        }
    }

    @Test(expected = SQLException.class)
    public void testArrayAppendFunctionChar2() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String tableName = initTables(conn);
            // the appended value exceeds CHAR(15); the failure may only
            // surface once the row is materialized, hence the explicit reads
            try (Statement stmt = conn.createStatement();
                    ResultSet rs = stmt.executeQuery("SELECT ARRAY_APPEND(chars,'facfacfacfacfacfacfac') FROM " + tableName + " WHERE region_name = 'SF Bay Area'")) {
                rs.next();
                rs.getArray(1);
            }
        }
    }

    @Test
    public void testArrayAppendFunctionIntegerToDoubleArray() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String tableName = initTables(conn);
            assertArrayRows(conn,
                    "SELECT ARRAY_APPEND(doubles,45) FROM " + tableName + " WHERE region_name = 'SF Bay Area'",
                    "DOUBLE", new Double[]{23.45, 46.345, 23.234, 45.6, 5.78, 45.0});
        }
    }

    @Test
    public void testArrayAppendFunctionWithNestedFunctions1() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String tableName = initTables(conn);
            assertArrayRows(conn,
                    "SELECT ARRAY_APPEND(ARRAY[23,45],integers[1]) FROM " + tableName + " WHERE region_name = 'SF Bay Area'",
                    "INTEGER", new Integer[]{23, 45, 2345});
        }
    }

    @Test
    public void testArrayAppendFunctionWithNestedFunctions2() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String tableName = initTables(conn);
            assertArrayRows(conn,
                    "SELECT ARRAY_APPEND(integers,ARRAY_ELEM(ARRAY[2,4],1)) FROM " + tableName + " WHERE region_name = 'SF Bay Area'",
                    "INTEGER", new Integer[]{2345, 46345, 23234, 456, 2});
        }
    }

    @Test
    public void testArrayAppendFunctionWithNestedFunctions3() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String tableName = initTables(conn);
            assertArrayRows(conn,
                    "SELECT ARRAY_APPEND(doubles,ARRAY_ELEM(doubles,2)) FROM " + tableName + " WHERE region_name = 'SF Bay Area'",
                    "DOUBLE", new Double[]{23.45, 46.345, 23.234, 45.6, 5.78, 46.345});
        }
    }

    @Test
    public void testArrayAppendFunctionWithUpsert1() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String tableName = generateUniqueName();
            try (Statement stmt = conn.createStatement()) {
                stmt.execute("CREATE TABLE " + tableName + " (region_name VARCHAR PRIMARY KEY,varchars VARCHAR[])");
                stmt.execute("UPSERT INTO " + tableName + "(region_name,varchars) VALUES('SF Bay Area',ARRAY_APPEND(ARRAY['hello','world'],':-)'))");
            }
            conn.commit();
            assertArrayRows(conn,
                    "SELECT varchars FROM " + tableName + " WHERE region_name = 'SF Bay Area'",
                    "VARCHAR", new String[]{"hello", "world", ":-)"});
        }
    }

    @Test
    public void testArrayAppendFunctionWithUpsert2() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String tableName = generateUniqueName();
            try (Statement stmt = conn.createStatement()) {
                stmt.execute("CREATE TABLE " + tableName + " (region_name VARCHAR PRIMARY KEY,integers INTEGER[])");
                stmt.execute("UPSERT INTO " + tableName + "(region_name,integers) VALUES('SF Bay Area',ARRAY_APPEND(ARRAY[4,5],6))");
            }
            conn.commit();
            assertArrayRows(conn,
                    "SELECT integers FROM " + tableName + " WHERE region_name = 'SF Bay Area'",
                    "INTEGER", new Integer[]{4, 5, 6});
        }
    }

    @Test
    public void testArrayAppendFunctionWithUpsert3() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String tableName = generateUniqueName();
            try (Statement stmt = conn.createStatement()) {
                stmt.execute("CREATE TABLE " + tableName + " (region_name VARCHAR PRIMARY KEY,doubles DOUBLE[])");
                stmt.execute("UPSERT INTO " + tableName + "(region_name,doubles) VALUES('SF Bay Area',ARRAY_APPEND(ARRAY[5.67,7.87],9.0))");
            }
            conn.commit();
            assertArrayRows(conn,
                    "SELECT doubles FROM " + tableName + " WHERE region_name = 'SF Bay Area'",
                    "DOUBLE", new Double[]{5.67, 7.87, 9.0});
        }
    }

    @Test
    public void testArrayAppendFunctionWithUpsertSelect1() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String sourceTableName = generateUniqueName();
            String targetTableName = generateUniqueName();
            try (Statement stmt = conn.createStatement()) {
                stmt.execute("CREATE TABLE " + sourceTableName + " (region_name VARCHAR PRIMARY KEY,doubles DOUBLE[])");
                stmt.execute("CREATE TABLE " + targetTableName + " (region_name VARCHAR PRIMARY KEY,doubles DOUBLE[])");
                stmt.execute("UPSERT INTO " + sourceTableName + "(region_name,doubles) VALUES('SF Bay Area',ARRAY_APPEND(ARRAY[5.67,7.87],9.0))");
                stmt.execute("UPSERT INTO " + sourceTableName + "(region_name,doubles) VALUES('SF Bay Area2',ARRAY_APPEND(ARRAY[56.7,7.87],9.2))");
                conn.commit();
                stmt.execute("UPSERT INTO " + targetTableName + "(region_name, doubles) SELECT region_name, ARRAY_APPEND(doubles,5) FROM " + sourceTableName);
                conn.commit();
            }
            assertArrayRows(conn,
                    "SELECT doubles FROM " + targetTableName,
                    "DOUBLE",
                    new Double[]{5.67, 7.87, 9.0, 5.0},
                    new Double[]{56.7, 7.87, 9.2, 5.0});
        }
    }

    @Test
    public void testArrayAppendFunctionWithUpsertSelect2() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String sourceTableName = generateUniqueName();
            String targetTableName = generateUniqueName();
            try (Statement stmt = conn.createStatement()) {
                stmt.execute("CREATE TABLE " + sourceTableName + " (region_name VARCHAR PRIMARY KEY,varchars VARCHAR[])");
                stmt.execute("CREATE TABLE " + targetTableName + " (region_name VARCHAR PRIMARY KEY,varchars VARCHAR[])");
                stmt.execute("UPSERT INTO " + sourceTableName + "(region_name,varchars) VALUES('SF Bay Area',ARRAY_APPEND(ARRAY['abcd','b'],'c'))");
                stmt.execute("UPSERT INTO " + sourceTableName + "(region_name,varchars) VALUES('SF Bay Area2',ARRAY_APPEND(ARRAY['d','fgh'],'something'))");
                conn.commit();
                stmt.execute("UPSERT INTO " + targetTableName + "(region_name, varchars) SELECT region_name, ARRAY_APPEND(varchars,'stu') FROM " + sourceTableName);
                conn.commit();
            }
            assertArrayRows(conn,
                    "SELECT varchars FROM " + targetTableName,
                    "VARCHAR",
                    new String[]{"abcd", "b", "c", "stu"},
                    new String[]{"d", "fgh", "something", "stu"});
        }
    }

    @Test
    public void testArrayAppendFunctionInWhere1() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String tableName = initTables(conn);
            assertSingleRegionName(conn,
                    "SELECT region_name FROM " + tableName + " WHERE ARRAY[2345,46345,23234,456,123]=ARRAY_APPEND(integers,123)");
        }
    }

    @Test
    public void testArrayAppendFunctionInWhere2() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String tableName = initTables(conn);
            assertSingleRegionName(conn,
                    "SELECT region_name FROM " + tableName + " WHERE varchars[1]=ANY(ARRAY_APPEND(ARRAY['2345','46345','23234'],'1234'))");
        }
    }

    @Test
    public void testArrayAppendFunctionInWhere3() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String tableName = initTables(conn);
            assertSingleRegionName(conn,
                    "SELECT region_name FROM " + tableName + " WHERE ARRAY['2345','46345','23234','1234']=ARRAY_APPEND(ARRAY['2345','46345','23234'],'1234')");
        }
    }

    @Test
    public void testArrayAppendFunctionInWhere4() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String tableName = initTables(conn);
            assertSingleRegionName(conn,
                    "SELECT region_name FROM " + tableName + " WHERE ARRAY[23.45,4634.5,2.3234,123.4]=ARRAY_APPEND(ARRAY[23.45,4634.5,2.3234],123.4)");
        }
    }

    @Test
    public void testArrayAppendFunctionInWhere5() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String tableName = initTables(conn);
            assertSingleRegionName(conn,
                    "SELECT region_name FROM " + tableName + " WHERE ARRAY['2345','46345','23234','foo']=ARRAY_APPEND(varchars,'foo')");
        }
    }

    @Test
    public void testArrayAppendFunctionInWhere6() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String tableName = initTables(conn);
            assertSingleRegionName(conn,
                    "SELECT region_name FROM " + tableName + " WHERE chars2=ARRAY_APPEND(chars,'foo')");
        }
    }

    @Test
    public void testArrayAppendFunctionInWhere7() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String tableName = initTables(conn);
            assertSingleRegionName(conn,
                    "SELECT region_name FROM " + tableName + " WHERE ARRAY[2,3,4]=ARRAY_APPEND(ARRAY[2,3],4)");
        }
    }

    @Test
    public void testArrayAppendFunctionVarcharWithNull() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String tableName = initTables(conn);
            // appending NULL is a no-op: the original array comes back
            assertArrayRows(conn,
                    "SELECT ARRAY_APPEND(varchars,NULL) FROM " + tableName + " WHERE region_name = 'SF Bay Area'",
                    "VARCHAR", new String[]{"2345", "46345", "23234"});
        }
    }

    @Test
    public void testArrayAppendFunctionDoublesWithNull() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String tableName = initTables(conn);
            assertArrayRows(conn,
                    "SELECT ARRAY_APPEND(doubles,NULL) FROM " + tableName + " WHERE region_name = 'SF Bay Area'",
                    "DOUBLE", new Double[]{23.45, 46.345, 23.234, 45.6, 5.78});
        }
    }

    @Test
    public void testArrayAppendFunctionCharsWithNull() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String tableName = initTables(conn);
            assertArrayRows(conn,
                    "SELECT ARRAY_APPEND(chars,NULL) FROM " + tableName + " WHERE region_name = 'SF Bay Area'",
                    "CHAR", new String[]{"a", "bbbb", "c", "ddd", "e"});
        }
    }

    @Test
    public void testArrayAppendFunctionWithNull() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String tableName = initTables(conn);
            // nullcheck is NULL for the test row, so nothing is appended
            assertArrayRows(conn,
                    "SELECT ARRAY_APPEND(integers,nullcheck) FROM " + tableName + " WHERE region_name = 'SF Bay Area'",
                    "INTEGER", new Integer[]{2345, 46345, 23234, 456});
        }
    }

    @Test(expected = SQLException.class)
    public void testArrayAppendFunctionCharLimitCheck() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String tableName = initTables(conn);
            // char1 is CHAR(17) and does not fit a CHAR(15) array element
            assertArrayRows(conn,
                    "SELECT ARRAY_APPEND(chars,char1) FROM " + tableName + " WHERE region_name = 'SF Bay Area'",
                    "CHAR", new String[]{"a", "bbbb", "c", "ddd", "e", "wert"});
        }
    }

    @Test
    public void testArrayAppendFunctionIntegerDesc() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String tableName = initTablesDesc(conn, "INTEGER", "23");
            assertArrayRows(conn,
                    "SELECT ARRAY_APPEND(integers,pk) FROM " + tableName,
                    "INTEGER", new Integer[]{2345, 46345, 23234, 456, 23});
        }
    }

    @Test
    public void testArrayAppendFunctionVarcharDesc() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String tableName = initTablesDesc(conn, "VARCHAR", "'e'");
            assertArrayRows(conn,
                    "SELECT ARRAY_APPEND(varchars,pk) FROM " + tableName,
                    "VARCHAR", new String[]{"2345", "46345", "23234", "e"});
        }
    }

    @Test
    public void testArrayAppendFunctionBigIntDesc() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String tableName = initTablesDesc(conn, "BIGINT", "1112");
            assertArrayRows(conn,
                    "SELECT ARRAY_APPEND(bigints,pk) FROM " + tableName,
                    "BIGINT", new Long[]{12L, 34L, 56L, 78L, 910L, 1112L});
        }
    }

    @Test
    public void testArrayAppendFunctionBooleanDesc() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String tableName = initTablesDesc(conn, "BOOLEAN", "false");
            assertArrayRows(conn,
                    "SELECT ARRAY_APPEND(bools,pk) FROM " + tableName,
                    "BOOLEAN", new Boolean[]{true, false, false});
        }
    }
}
| |
// Decompiled by Jad v1.5.8e. Copyright 2001 Pavel Kouznetsov.
// Jad home page: http://www.geocities.com/kpdus/jad.html
// Decompiler options: braces fieldsfirst space lnc
package com.tencent.b.b;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.IntentFilter;
import android.location.Location;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import com.tencent.b.a.a.b;
import com.tencent.b.a.a.c;
import com.tencent.b.a.a.d;
import java.util.ArrayList;
import java.util.Iterator;
import org.json.JSONArray;
import org.json.JSONObject;
// Referenced classes of package com.tencent.b.b:
// B, C, e, m,
// f, s, b, c,
// z, A, y, t,
// a
// NOTE(review): This entire class is raw output from the Jad decompiler (see the header
// comment above). It is NOT valid Java: it contains bytecode pseudo-ops
// ("JVM INSTR monitorenter/monitorexit"), decompiler labels ("_L1:", "goto _L2",
// "break MISSING_BLOCK_LABEL_nnn") and obfuscated single-letter names.
// It cannot compile as-is and must not be refactored without the original bytecode.
// All comments below are a best-effort reading of the structure — TODO confirm against
// the APK/bytecode before relying on any of them.
// Appears to be a singleton location engine that merges GPS fixes, cell/Wi-Fi lookups
// and a JSON geocoding service, reporting results through callback interface b.
public final class n
implements b.a, e.b, f.a, m.a
{
// t: presumably a global "deflect/debug" flag (set via a(boolean), read via c()).
// u: lazily created singleton instance (see a()).
private static boolean t = false;
private static n u = null;
// Field roles are inferred from usage and unverified:
// z = latest result object, A = previous result, B = error code, k = request type,
// l = request level, m = geo type, j = client callback, w/x/y = latest GPS/cell/Wi-Fi data.
private d A;
private int B;
private int C;
private int D;
private String E;
private String F;
private String G;
private String H;
private String I;
private String J;
private boolean K;
private boolean L;
private long M;
private Handler N;
private Runnable O;
private final BroadcastReceiver P = new C(this);
private long a;
private Context b;
private e c;
private m d;
private f e;
private int f;
private int g;
private com.tencent.b.b.c h;
private com.tencent.b.b.b i;
private b j;
private int k;
private int l;
private int m;
private byte n[];
private byte o[];
private boolean p;
private y q;
private z r;
private A s;
private long v;
private e.a w;
private m.b x;
private f.b y;
private d z;
// Private constructor (singleton): initializes defaults and the three data providers c/d/e.
private n()
{
a = 5000L;
b = null;
c = null;
d = null;
e = null;
f = 1024;
g = 4;
h = null;
i = null;
j = null;
n = new byte[0];
o = new byte[0];
p = false;
q = null;
r = null;
s = null;
v = -1L;
w = null;
x = null;
y = null;
z = null;
A = null;
B = 0;
C = 0;
D = 1;
E = "";
F = "";
G = "";
H = "";
I = "";
J = "";
K = false;
L = false;
M = 0L;
N = null;
O = new B(this);
c = new e();
d = new m();
e = new f();
}
static long a(n n1)
{
return n1.M;
}
static long a(n n1, long l1)
{
n1.M = l1;
return l1;
}
static A a(n n1, A a1)
{
n1.s = null;
return null;
}
// Singleton accessor. The "JVM INSTR monitorenter/monitorexit" pseudo-ops are the
// decompiler's rendering of a class-level synchronized block (double locking on n.class).
public static n a()
{
com/tencent/b/b/n;
JVM INSTR monitorenter ;
n n1;
if (u == null)
{
u = new n();
}
n1 = u;
com/tencent/b/b/n;
JVM INSTR monitorexit ;
return n1;
Exception exception;
exception;
throw exception;
}
// Parses a JSON POI array (name/addr/catalog/dist/latitude/longitude) into a list of c.
private static ArrayList a(JSONArray jsonarray)
{
int i1 = jsonarray.length();
ArrayList arraylist = new ArrayList();
for (int j1 = 0; j1 < i1; j1++)
{
JSONObject jsonobject = jsonarray.getJSONObject(j1);
arraylist.add(new c(jsonobject.getString("name"), jsonobject.getString("addr"), jsonobject.getString("catalog"), jsonobject.getDouble("dist"), Double.parseDouble(jsonobject.getString("latitude")), Double.parseDouble(jsonobject.getString("longitude"))));
}
return arraylist;
}
// Status callback: i1 == 0 apparently means "GPS available" -> status 1, otherwise 2.
static void a(n n1, int i1)
{
if (i1 == 0)
{
n1.w = null;
}
int j1;
if (i1 == 0)
{
j1 = 1;
} else
{
j1 = 2;
}
n1.f = j1;
if (n1.j != null)
{
n1.j.onStatusUpdate(n1.f);
}
}
// Builds a result object (n1.z) from a GPS Location, copying cached reverse-geocode
// details from the previous result (n1.A) when the request level matches.
// NOTE(review): lat/lon > 359 looks like a sentinel for "invalid fix" — TODO confirm.
static void a(n n1, Location location)
{
if (location == null || location.getLatitude() > 359D || location.getLongitude() > 359D)
{
if (n1.w != null && n1.w.a())
{
n1.b(true);
} else
{
n1.e();
}
}
n1.z = new d();
n1.z.z = 0;
n1.z.y = 0;
n1.z.b = com.tencent.b.b.s.a(location.getLatitude(), 6);
n1.z.c = com.tencent.b.b.s.a(location.getLongitude(), 6);
if (n1.w != null && n1.w.a())
{
n1.z.e = com.tencent.b.b.s.a(n1.w.b().getAccuracy(), 1);
n1.z.d = com.tencent.b.b.s.a(n1.w.b().getAltitude(), 1);
n1.z.f = com.tencent.b.b.s.a(n1.w.b().getSpeed(), 1);
n1.z.g = com.tencent.b.b.s.a(n1.w.b().getBearing(), 1);
n1.z.a = 0;
}
n1.z.x = true;
if (n1.l != 0 && n1.A != null && n1.B == 0)
{
if ((n1.l == 3 || n1.l == 4) && n1.l == n1.A.z)
{
n1.z.i = n1.A.i;
n1.z.j = n1.A.j;
n1.z.k = n1.A.k;
n1.z.l = n1.A.l;
n1.z.m = n1.A.m;
n1.z.n = n1.A.n;
n1.z.o = n1.A.o;
n1.z.p = n1.A.p;
n1.z.z = 3;
}
if (n1.l == 4 && n1.l == n1.A.z && n1.A.w != null)
{
n1.z.w = new ArrayList();
c c1;
for (Iterator iterator = n1.A.w.iterator(); iterator.hasNext(); n1.z.w.add(new c(c1)))
{
c1 = (c)iterator.next();
}
n1.z.z = 4;
}
if (n1.l == 7 && n1.l == n1.A.z)
{
n1.z.z = 7;
n1.z.h = n1.A.h;
n1.z.i = n1.A.i;
if (n1.A.h == 0)
{
n1.z.j = n1.A.j;
n1.z.k = n1.A.k;
n1.z.l = n1.A.l;
n1.z.m = n1.A.m;
n1.z.n = n1.A.n;
n1.z.o = n1.A.o;
n1.z.p = n1.A.p;
} else
{
n1.z.q = n1.A.q;
n1.z.r = n1.A.r;
n1.z.s = n1.A.s;
n1.z.t = n1.A.t;
n1.z.u = n1.A.u;
n1.z.v = n1.A.v;
}
}
}
if (n1.B != 0 || n1.A != null)
{
if (n1.B != 0)
{
n1.z.y = n1.B;
}
if (System.currentTimeMillis() - n1.v >= n1.a && n1.j != null && n1.k == 1)
{
n1.j.onLocationUpdate(n1.z);
n1.v = System.currentTimeMillis();
}
}
}
// GPS data callback: either reports directly (m == 0) or runs a deflection lookup (m == 1).
static void a(n n1, e.a a1)
{
if (a1 != null)
{
n1.w = a1;
if (n1.k == 1 && n1.w != null && n1.w.a())
{
if (n1.m == 0)
{
n1.b(false);
return;
}
if (n1.m == 1 && n1.i != null)
{
com.tencent.b.b.b b1 = n1.i;
double d1 = n1.w.b().getLatitude();
double d2 = n1.w.b().getLongitude();
Context _tmp = n1.b;
b1.a(d1, d2, n1);
return;
}
}
}
}
// Wi-Fi data callback: store and kick off the network request thread.
static void a(n n1, f.b b1)
{
if (b1 != null)
{
n1.y = b1;
n1.d();
}
}
// Cell data callback: store, decrement the warm-up counter C on invalid data, then request.
static void a(n n1, m.b b1)
{
n1.x = b1;
if (n1.e != null && n1.e.b() && n1.e.c())
{
n1.e.a(0L);
return;
}
if (n1.C > 0 && !com.tencent.b.b.s.a(b1.a, b1.b, b1.c, b1.d, b1.e))
{
n1.C = -1 + n1.C;
}
n1.d();
}
// Network-response callback. The "_Lx:" labels and gotos are unstructured decompiler
// control flow: invalid payload -> report error; raw mode (k == 0) -> hand back bytes;
// otherwise consult the response cache (h) and spawn a parser thread (z) if needed.
static void a(n n1, String s1)
{
if (com.tencent.b.b.s.c(s1)) goto _L2; else goto _L1
_L1:
if (n1.C <= 0) goto _L4; else goto _L3
_L3:
n1.C = -1 + n1.C;
_L9:
return;
_L4:
if (n1.k == 0 && n1.j != null)
{
n1.j.onLocationDataUpdate(null, -1);
return;
}
if (n1.k == 1 && n1.j != null)
{
n1.z = new d();
n1.B = 3;
n1.z.y = 3;
n1.z.z = -1;
n1.j.onLocationUpdate(n1.z);
return;
}
continue; /* Loop/switch isn't completed */
_L2:
if (n1.k != 0 || n1.j == null) goto _L6; else goto _L5
_L5:
byte abyte1[] = s1.getBytes();
byte abyte0[] = abyte1;
_L7:
n1.j.onLocationDataUpdate(abyte0, 0);
return;
Exception exception;
exception;
abyte0 = null;
if (true) goto _L7; else goto _L6
_L6:
String s2;
if (n1.h == null)
{
s2 = null;
} else
if (n1.x != null && n1.y != null)
{
s2 = n1.h.b(n1.x.b, n1.x.c, n1.x.d, n1.x.e, n1.y.a());
} else
{
s2 = null;
}
if (s2 != null)
{
n1.a(s2);
return;
}
if (n1.h != null && n1.x != null && n1.y != null)
{
n1.h.a(n1.x.b, n1.x.c, n1.x.d, n1.x.e, n1.y.a());
}
if (!n1.p)
{
if (n1.r != null)
{
n1.r.interrupt();
}
n1.r = null;
n1.r = new z(n1, s1);
n1.r.start();
return;
}
if (true) goto _L9; else goto _L8
_L8:
}
// Parses a JSON geocoding response into result object z; also adjusts the reported
// accuracy from the "bearing" field's second component. The MISSING_BLOCK_LABEL
// breaks and the trailing exception handler are decompiled try/catch remnants.
private void a(String s1)
{
JSONObject jsonobject;
d d1;
z = new d();
jsonobject = new JSONObject(s1);
JSONObject jsonobject1 = jsonobject.getJSONObject("location");
z.a = 1;
z.b = com.tencent.b.b.s.a(jsonobject1.getDouble("latitude"), 6);
z.c = com.tencent.b.b.s.a(jsonobject1.getDouble("longitude"), 6);
z.d = com.tencent.b.b.s.a(jsonobject1.getDouble("altitude"), 1);
z.e = com.tencent.b.b.s.a(jsonobject1.getDouble("accuracy"), 1);
d1 = z;
boolean flag;
String s2;
int i1;
int j1;
int k1;
d d2;
double d3;
double d4;
int i2;
JSONObject jsonobject3;
if (m == 1)
{
flag = true;
} else
{
flag = false;
}
d1.x = flag;
s2 = jsonobject.getString("bearing");
i1 = -100;
j1 = 0;
if (s2 == null)
{
break MISSING_BLOCK_LABEL_193;
}
k1 = s2.split(",").length;
j1 = 0;
if (k1 <= 1)
{
break MISSING_BLOCK_LABEL_193;
}
j1 = Integer.parseInt(s2.split(",")[1]);
if (x != null)
{
i1 = x.f;
}
d2 = z;
d3 = z.e;
if (j1 >= 6)
{
d4 = 40D;
} else
if (j1 == 5)
{
d4 = 60D;
} else
if (j1 == 4)
{
d4 = 70D;
} else
if (j1 == 3)
{
d4 = 90D;
} else
if (j1 == 2)
{
d4 = 110D;
} else
{
int l1;
if (i1 >= -72 && j1 == 0)
{
l1 = 10 * (int)((0.45000000000000001D * d3) / 10D);
} else
if (d3 <= 100D)
{
l1 = 10 * (int)(1.0D + (d3 - 1.0D) / 10D);
} else
if (d3 > 100D && d3 <= 800D)
{
l1 = 10 * (int)((0.84999999999999998D * d3) / 10D);
} else
{
l1 = 10 * (int)((0.80000000000000004D * d3) / 10D);
}
d4 = l1;
}
d2.e = d4;
z.z = 0;
if ((l == 3 || l == 4) && m == 1)
{
JSONObject jsonobject4 = jsonobject.getJSONObject("details").getJSONObject("subnation");
z.a(jsonobject4.getString("name"));
z.m = jsonobject4.getString("town");
z.n = jsonobject4.getString("village");
z.o = jsonobject4.getString("street");
z.p = jsonobject4.getString("street_no");
z.z = 3;
z.h = 0;
}
if (l == 4 && m == 1)
{
JSONArray jsonarray = jsonobject.getJSONObject("details").getJSONArray("poilist");
z.w = a(jsonarray);
z.z = 4;
}
if (l != 7 || m != 1) goto _L2; else goto _L1
_L1:
JSONObject jsonobject2 = jsonobject.getJSONObject("details");
i2 = jsonobject2.getInt("stat");
jsonobject3 = jsonobject2.getJSONObject("subnation");
if (i2 != 0) goto _L4; else goto _L3
_L3:
z.a(jsonobject3.getString("name"));
z.m = jsonobject3.getString("town");
z.n = jsonobject3.getString("village");
z.o = jsonobject3.getString("street");
z.p = jsonobject3.getString("street_no");
_L6:
z.h = i2;
z.z = 7;
_L2:
z.y = 0;
A = new d(z);
B = 0;
if (h != null)
{
h.a(s1);
}
_L7:
if (j != null && k == 1 && (w == null || !w.a()))
{
j.onLocationUpdate(z);
v = System.currentTimeMillis();
}
return;
_L4:
if (i2 != 1) goto _L6; else goto _L5
_L5:
z.i = jsonobject3.getString("nation");
z.q = jsonobject3.getString("admin_level_1");
z.r = jsonobject3.getString("admin_level_2");
z.s = jsonobject3.getString("admin_level_3");
z.t = jsonobject3.getString("locality");
z.u = jsonobject3.getString("sublocality");
z.v = jsonobject3.getString("route");
goto _L6
Exception exception;
exception;
z = new d();
z.z = -1;
z.y = 2;
B = 2;
goto _L7
}
static boolean a(n n1, boolean flag)
{
n1.p = flag;
return flag;
}
// NOTE(review): ignores its argument and always sets t = true — likely a decompilation
// artifact of "t = flag" with a constant-true call site; confirm against bytecode.
static boolean a(boolean flag)
{
t = true;
return true;
}
static f b(n n1)
{
return n1.e;
}
static void b(n n1, int i1)
{
int j1 = 3;
if (i1 == j1)
{
j1 = 4;
}
n1.g = j1;
if (n1.j != null)
{
n1.j.onStatusUpdate(n1.g);
}
}
static void b(n n1, String s1)
{
n1.a(s1);
}
// Builds a result object directly from the raw GPS fix (no network geocoding);
// flag selects the reported status code y (0 = fresh, 1 = fallback).
private void b(boolean flag)
{
if (w != null && w.a())
{
Location location = w.b();
z = new d();
z.b = com.tencent.b.b.s.a(location.getLatitude(), 6);
z.c = com.tencent.b.b.s.a(location.getLongitude(), 6);
z.d = com.tencent.b.b.s.a(location.getAltitude(), 1);
z.e = com.tencent.b.b.s.a(location.getAccuracy(), 1);
z.f = com.tencent.b.b.s.a(location.getSpeed(), 1);
z.g = com.tencent.b.b.s.a(location.getBearing(), 1);
z.a = 0;
z.x = false;
if (!flag)
{
z.y = 0;
} else
{
z.y = 1;
}
z.z = 0;
A = new d(z);
B = 0;
if (System.currentTimeMillis() - v >= a && j != null && k == 1)
{
j.onLocationUpdate(z);
v = System.currentTimeMillis();
}
}
}
static String c(n n1, String s1)
{
n1.E = s1;
return s1;
}
static void c(n n1)
{
n1.d();
}
static boolean c()
{
return t;
}
static y d(n n1)
{
return n1.q;
}
static String d(n n1, String s1)
{
n1.F = s1;
return s1;
}
// Starts the network-request thread (A) once, passing the latest GPS/cell/Wi-Fi data.
private void d()
{
if (s != null)
{
return;
} else
{
s = new A(this, w, x, y);
s.start();
return;
}
}
static int e(n n1)
{
return n1.B;
}
static String e(n n1, String s1)
{
n1.G = s1;
return s1;
}
// Emits an "error" result (y = 1, z = -1) to the callback.
private void e()
{
z = new d();
B = 1;
z.y = 1;
z.z = -1;
z.a = 1;
if (j != null && k == 1)
{
j.onLocationUpdate(z);
}
}
static e.a f(n n1)
{
return n1.w;
}
static String f(n n1, String s1)
{
n1.I = s1;
return s1;
}
static void g(n n1)
{
n1.e();
}
static int h(n n1)
{
return n1.D;
}
static int i(n n1)
{
return n1.m;
}
static void j(n n1)
{
}
static Context k(n n1)
{
return n1.b;
}
static String l(n n1)
{
return n1.E;
}
static String m(n n1)
{
return n1.F;
}
static String n(n n1)
{
return n1.G;
}
static int o(n n1)
{
return n1.g;
}
static m p(n n1)
{
return n1.d;
}
static String q(n n1)
{
return n1.H;
}
static boolean r(n n1)
{
return n1.K;
}
static int s(n n1)
{
return n1.l;
}
static String t(n n1)
{
return n1.I;
}
static String u(n n1)
{
return n1.J;
}
// The following public methods post messages to the internal handler y (field q),
// synchronizing on the byte[] lock o.
public final void a(double d1, double d2)
{
synchronized (o)
{
Message message = q.obtainMessage(6);
Location location = new Location("Deflect");
location.setLatitude(d1);
location.setLongitude(d2);
message.obj = location;
q.sendMessage(message);
}
}
public final void a(int i1)
{
synchronized (o)
{
Message message = q.obtainMessage(4, i1, 0);
q.sendMessage(message);
}
}
public final void a(e.a a1)
{
synchronized (o)
{
Message message = q.obtainMessage(1, a1);
q.sendMessage(message);
}
}
public final void a(f.b b1)
{
synchronized (o)
{
Message message = q.obtainMessage(3, b1);
q.sendMessage(message);
}
}
public final void a(m.b b1)
{
synchronized (o)
{
Message message = q.obtainMessage(2, b1);
q.sendMessage(message);
}
}
// Engine start-up: validates the app key (J), registers the connectivity receiver and
// starts the GPS (c), cell (d) and Wi-Fi (e) providers. Heavily mangled by the
// decompiler (monitor pseudo-ops, MISSING_BLOCK_LABEL jumps) — original was a
// synchronized(n) block with try/catch.
public final boolean a(Context context, b b1)
{
byte abyte0[] = n;
abyte0;
JVM INSTR monitorenter ;
if (context == null || b1 == null)
{
return false;
}
if (com.tencent.b.b.s.a(J))
{
break MISSING_BLOCK_LABEL_33;
}
abyte0;
JVM INSTR monitorexit ;
return false;
q = new y(this);
N = new Handler(Looper.getMainLooper());
b = context;
j = b1;
com.tencent.b.b.t.a().a(b.getApplicationContext());
ConnectivityManager connectivitymanager = (ConnectivityManager)context.getSystemService("connectivity");
if (connectivitymanager == null)
{
break MISSING_BLOCK_LABEL_119;
}
if (connectivitymanager.getActiveNetworkInfo() != null)
{
K = connectivitymanager.getActiveNetworkInfo().isRoaming();
}
b.registerReceiver(P, new IntentFilter("android.net.conn.CONNECTIVITY_CHANGE"));
_L2:
boolean flag;
boolean flag1;
boolean flag2;
k = j.getReqType();
l = j.getReqLevel();
m = j.getReqGeoType();
v = -1L;
if (l == 7)
{
l = 0;
}
L = false;
D = 1;
flag = c.a(this, b);
flag1 = d.a(b, this);
flag2 = e.a(b, this, 1);
h = com.tencent.b.b.c.a();
i = com.tencent.b.b.b.a();
w = null;
x = null;
y = null;
z = null;
A = null;
B = 0;
if (h != null)
{
h.b();
}
C = 1;
if (!flag)
{
break MISSING_BLOCK_LABEL_327;
}
if (m != 0)
{
break MISSING_BLOCK_LABEL_327;
}
abyte0;
JVM INSTR monitorexit ;
return true;
if (flag1 || flag2)
{
return true;
}
return false;
Exception exception;
exception;
throw exception;
Exception exception1;
exception1;
if (true) goto _L2; else goto _L1
_L1:
}
// Validates and stores the app key J (synchronized on lock n).
public final boolean a(String s1, String s2)
{
byte abyte0[] = n;
abyte0;
JVM INSTR monitorenter ;
if (!com.tencent.b.b.a.a().a(s1, s2))
{
break MISSING_BLOCK_LABEL_27;
}
J = s1;
return true;
abyte0;
JVM INSTR monitorexit ;
return false;
Exception exception;
exception;
throw exception;
}
// Engine shutdown: unregisters the receiver and stops the three providers.
// Exceptions are intentionally swallowed (best-effort teardown).
public final void b()
{
Exception exception;
synchronized (n)
{
try
{
if (j != null)
{
j = null;
N.removeCallbacks(O);
b.unregisterReceiver(P);
c.a();
d.a();
e.a();
}
}
// Misplaced declaration of an exception variable
catch (Exception exception) { }
}
}
public final void b(int i1)
{
synchronized (o)
{
Message message = q.obtainMessage(5, i1, 0);
q.sendMessage(message);
}
}
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.compute.v2019_11_01.implementation;
import com.microsoft.azure.management.compute.v2019_11_01.VirtualMachineAgentInstanceView;
import com.microsoft.azure.management.compute.v2019_11_01.MaintenanceRedeployStatus;
import java.util.List;
import com.microsoft.azure.management.compute.v2019_11_01.DiskInstanceView;
import com.microsoft.azure.management.compute.v2019_11_01.VirtualMachineExtensionInstanceView;
import com.microsoft.azure.management.compute.v2019_11_01.VirtualMachineHealthStatus;
import com.microsoft.azure.management.compute.v2019_11_01.BootDiagnosticsInstanceView;
import com.microsoft.azure.management.compute.v2019_11_01.InstanceViewStatus;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
* The instance view of a virtual machine scale set VM.
*/
/** Describes the runtime instance view of a single VM inside a virtual machine scale set. */
public class VirtualMachineScaleSetVMInstanceViewInner {

    /** Update domain the VM is placed in. */
    @JsonProperty(value = "platformUpdateDomain")
    private Integer platformUpdateDomain;

    /** Fault domain the VM is placed in. */
    @JsonProperty(value = "platformFaultDomain")
    private Integer platformFaultDomain;

    /** Thumbprint of the remote-desktop certificate. */
    @JsonProperty(value = "rdpThumbPrint")
    private String rdpThumbPrint;

    /** State of the VM agent running on the machine. */
    @JsonProperty(value = "vmAgent")
    private VirtualMachineAgentInstanceView vmAgent;

    /** Status of any maintenance (redeploy) operation on the machine. */
    @JsonProperty(value = "maintenanceRedeployStatus")
    private MaintenanceRedeployStatus maintenanceRedeployStatus;

    /** Per-disk instance views. */
    @JsonProperty(value = "disks")
    private List<DiskInstanceView> disks;

    /** Per-extension instance views. */
    @JsonProperty(value = "extensions")
    private List<VirtualMachineExtensionInstanceView> extensions;

    /** Health status reported for the VM; populated by the service (write-only on the wire). */
    @JsonProperty(value = "vmHealth", access = JsonProperty.Access.WRITE_ONLY)
    private VirtualMachineHealthStatus vmHealth;

    /**
     * Boot Diagnostics view: lets you inspect console log output and a hypervisor
     * screenshot to diagnose VM status.
     */
    @JsonProperty(value = "bootDiagnostics")
    private BootDiagnosticsInstanceView bootDiagnostics;

    /** Resource status entries. */
    @JsonProperty(value = "statuses")
    private List<InstanceViewStatus> statuses;

    /** Placement group the VM runs in; absent while the VM is deallocated. */
    @JsonProperty(value = "placementGroupId")
    private String placementGroupId;

    /** @return the update domain count */
    public Integer platformUpdateDomain() {
        return this.platformUpdateDomain;
    }

    /**
     * Sets the update domain count.
     * @param platformUpdateDomain the value to set
     * @return this object, for call chaining
     */
    public VirtualMachineScaleSetVMInstanceViewInner withPlatformUpdateDomain(Integer platformUpdateDomain) {
        this.platformUpdateDomain = platformUpdateDomain;
        return this;
    }

    /** @return the fault domain count */
    public Integer platformFaultDomain() {
        return this.platformFaultDomain;
    }

    /**
     * Sets the fault domain count.
     * @param platformFaultDomain the value to set
     * @return this object, for call chaining
     */
    public VirtualMachineScaleSetVMInstanceViewInner withPlatformFaultDomain(Integer platformFaultDomain) {
        this.platformFaultDomain = platformFaultDomain;
        return this;
    }

    /** @return the remote-desktop certificate thumbprint */
    public String rdpThumbPrint() {
        return this.rdpThumbPrint;
    }

    /**
     * Sets the remote-desktop certificate thumbprint.
     * @param rdpThumbPrint the value to set
     * @return this object, for call chaining
     */
    public VirtualMachineScaleSetVMInstanceViewInner withRdpThumbPrint(String rdpThumbPrint) {
        this.rdpThumbPrint = rdpThumbPrint;
        return this;
    }

    /** @return the VM agent instance view */
    public VirtualMachineAgentInstanceView vmAgent() {
        return this.vmAgent;
    }

    /**
     * Sets the VM agent instance view.
     * @param vmAgent the value to set
     * @return this object, for call chaining
     */
    public VirtualMachineScaleSetVMInstanceViewInner withVmAgent(VirtualMachineAgentInstanceView vmAgent) {
        this.vmAgent = vmAgent;
        return this;
    }

    /** @return the maintenance operation status */
    public MaintenanceRedeployStatus maintenanceRedeployStatus() {
        return this.maintenanceRedeployStatus;
    }

    /**
     * Sets the maintenance operation status.
     * @param maintenanceRedeployStatus the value to set
     * @return this object, for call chaining
     */
    public VirtualMachineScaleSetVMInstanceViewInner withMaintenanceRedeployStatus(MaintenanceRedeployStatus maintenanceRedeployStatus) {
        this.maintenanceRedeployStatus = maintenanceRedeployStatus;
        return this;
    }

    /** @return the per-disk instance views */
    public List<DiskInstanceView> disks() {
        return this.disks;
    }

    /**
     * Sets the per-disk instance views.
     * @param disks the value to set
     * @return this object, for call chaining
     */
    public VirtualMachineScaleSetVMInstanceViewInner withDisks(List<DiskInstanceView> disks) {
        this.disks = disks;
        return this;
    }

    /** @return the per-extension instance views */
    public List<VirtualMachineExtensionInstanceView> extensions() {
        return this.extensions;
    }

    /**
     * Sets the per-extension instance views.
     * @param extensions the value to set
     * @return this object, for call chaining
     */
    public VirtualMachineScaleSetVMInstanceViewInner withExtensions(List<VirtualMachineExtensionInstanceView> extensions) {
        this.extensions = extensions;
        return this;
    }

    /** @return the VM health status (read-only; no setter because the service owns it) */
    public VirtualMachineHealthStatus vmHealth() {
        return this.vmHealth;
    }

    /** @return the boot-diagnostics instance view (console log and hypervisor screenshot) */
    public BootDiagnosticsInstanceView bootDiagnostics() {
        return this.bootDiagnostics;
    }

    /**
     * Sets the boot-diagnostics instance view.
     * @param bootDiagnostics the value to set
     * @return this object, for call chaining
     */
    public VirtualMachineScaleSetVMInstanceViewInner withBootDiagnostics(BootDiagnosticsInstanceView bootDiagnostics) {
        this.bootDiagnostics = bootDiagnostics;
        return this;
    }

    /** @return the resource status entries */
    public List<InstanceViewStatus> statuses() {
        return this.statuses;
    }

    /**
     * Sets the resource status entries.
     * @param statuses the value to set
     * @return this object, for call chaining
     */
    public VirtualMachineScaleSetVMInstanceViewInner withStatuses(List<InstanceViewStatus> statuses) {
        this.statuses = statuses;
        return this;
    }

    /** @return the placement group id, or null when the VM is deallocated */
    public String placementGroupId() {
        return this.placementGroupId;
    }

    /**
     * Sets the placement group id.
     * @param placementGroupId the value to set
     * @return this object, for call chaining
     */
    public VirtualMachineScaleSetVMInstanceViewInner withPlacementGroupId(String placementGroupId) {
        this.placementGroupId = placementGroupId;
        return this;
    }
}
| |
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.models.cache.infinispan;
import org.keycloak.models.*;
import org.keycloak.models.cache.CacheRealmProvider;
import org.keycloak.models.cache.infinispan.entities.CachedClient;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.util.*;
/**
* @author <a href="mailto:bill@burkecentral.com">Bill Burke</a>
* @version $Revision: 1 $
*/
public class ClientAdapter implements ClientModel {
// Owning cache session; used to register invalidations and reach the DB delegate.
protected RealmCacheSession cacheSession;
// Realm this client belongs to.
protected RealmModel cachedRealm;
protected RealmCache cache;
// DB-backed delegate; non-null once any write (or confirmed invalidation) occurred.
protected ClientModel updated;
// Immutable cached snapshot served by the read methods until 'updated' exists.
protected CachedClient cached;
/**
 * Creates an adapter that serves reads from {@code cached} until a write forces a
 * switch to the database-backed delegate.
 */
public ClientAdapter(RealmModel cachedRealm, CachedClient cached, RealmCacheSession cacheSession, RealmCache cache) {
    this.cached = cached;
    this.cache = cache;
    this.cacheSession = cacheSession;
    this.cachedRealm = cachedRealm;
}
// Lazily materializes the DB delegate for a write, registering a cache invalidation first.
private void getDelegateForUpdate() {
    if (updated != null) return;
    cacheSession.registerClientInvalidation(cached.getId());
    updated = cacheSession.getDelegate().getClientById(cached.getId(), cachedRealm);
    if (updated == null) throw new IllegalStateException("Not found in database");
}
// Set when this cache entry was invalidated; the next read re-fetches the delegate.
protected boolean invalidated;
public void invalidate() {
invalidated = true;
}
// True when reads must go to the delegate (already materialized, or re-fetched after an
// invalidation). Throws IllegalStateException if an invalidated client vanished from the DB.
protected boolean isUpdated() {
    if (updated == null && invalidated) {
        updated = cacheSession.getDelegate().getClientById(cached.getId(), cachedRealm);
        if (updated == null) throw new IllegalStateException("Not found in database");
    }
    return updated != null;
}
@Override
public void updateClient() {
    // No-op unless a write already materialized the delegate.
    if (updated == null) return;
    updated.updateClient();
}
@Override
public String getId() {
    return isUpdated() ? updated.getId() : cached.getId();
}
public Set<String> getWebOrigins() {
    // Serve from the live delegate when one exists, else from the cached snapshot.
    return isUpdated() ? updated.getWebOrigins() : cached.getWebOrigins();
}
// Write-through: fetch the DB delegate (which also invalidates this cache entry), then update.
public void setWebOrigins(Set<String> webOrigins) {
getDelegateForUpdate();
updated.setWebOrigins(webOrigins);
}
@Override
public ClientTemplateModel getClientTemplate() {
    if (isUpdated()) return updated.getClientTemplate();
    // The cache stores only the template id; resolve it through the cache session.
    String templateId = cached.getClientTemplate();
    return templateId == null ? null : cacheSession.getClientTemplateById(templateId, cachedRealm);
}
// Write-through to the DB delegate.
@Override
public void setClientTemplate(ClientTemplateModel template) {
getDelegateForUpdate();
updated.setClientTemplate(template);
}
@Override
public boolean useTemplateScope() {
    return isUpdated() ? updated.useTemplateScope() : cached.isUseTemplateScope();
}
// Write-through to the DB delegate.
@Override
public void setUseTemplateScope(boolean value) {
getDelegateForUpdate();
updated.setUseTemplateScope(value);
}
@Override
public boolean useTemplateConfig() {
    return isUpdated() ? updated.useTemplateConfig() : cached.isUseTemplateConfig();
}
// Write-through to the DB delegate.
@Override
public void setUseTemplateConfig(boolean value) {
getDelegateForUpdate();
updated.setUseTemplateConfig(value);
}
@Override
public boolean useTemplateMappers() {
    return isUpdated() ? updated.useTemplateMappers() : cached.isUseTemplateMappers();
}
// Write-through to the DB delegate.
@Override
public void setUseTemplateMappers(boolean value) {
getDelegateForUpdate();
updated.setUseTemplateMappers(value);
}
// Write-through to the DB delegate.
public void addWebOrigin(String webOrigin) {
getDelegateForUpdate();
updated.addWebOrigin(webOrigin);
}
// Write-through to the DB delegate.
public void removeWebOrigin(String webOrigin) {
getDelegateForUpdate();
updated.removeWebOrigin(webOrigin);
}
public Set<String> getRedirectUris() {
    return isUpdated() ? updated.getRedirectUris() : cached.getRedirectUris();
}
// Write-through to the DB delegate.
public void setRedirectUris(Set<String> redirectUris) {
getDelegateForUpdate();
updated.setRedirectUris(redirectUris);
}
// Write-through to the DB delegate.
public void addRedirectUri(String redirectUri) {
getDelegateForUpdate();
updated.addRedirectUri(redirectUri);
}
// Write-through to the DB delegate.
public void removeRedirectUri(String redirectUri) {
getDelegateForUpdate();
updated.removeRedirectUri(redirectUri);
}
public boolean isEnabled() {
    return isUpdated() ? updated.isEnabled() : cached.isEnabled();
}
// Write-through to the DB delegate.
public void setEnabled(boolean enabled) {
getDelegateForUpdate();
updated.setEnabled(enabled);
}
@Override
public String getClientAuthenticatorType() {
    return isUpdated() ? updated.getClientAuthenticatorType() : cached.getClientAuthenticatorType();
}
// Write-through to the DB delegate.
@Override
public void setClientAuthenticatorType(String clientAuthenticatorType) {
getDelegateForUpdate();
updated.setClientAuthenticatorType(clientAuthenticatorType);
}
/**
 * Checks a candidate client secret against the stored one.
 *
 * <p>Fixes two defects in the original {@code secret.equals(getSecret())}:
 * a NullPointerException when {@code secret} is null, and a non-constant-time
 * comparison that could leak secret contents through timing differences.
 *
 * @param secret candidate secret; null is rejected
 * @return true only when both secrets are present and byte-for-byte equal
 */
public boolean validateSecret(String secret) {
    String expected = getSecret();
    if (secret == null || expected == null) return false;
    // MessageDigest.isEqual performs a time-constant comparison.
    return MessageDigest.isEqual(
            secret.getBytes(StandardCharsets.UTF_8),
            expected.getBytes(StandardCharsets.UTF_8));
}
public String getSecret() {
    return isUpdated() ? updated.getSecret() : cached.getSecret();
}
// Write-through to the DB delegate.
public void setSecret(String secret) {
getDelegateForUpdate();
updated.setSecret(secret);
}
public String getRegistrationToken() {
    return isUpdated() ? updated.getRegistrationToken() : cached.getRegistrationToken();
}
// Write-through to the DB delegate.
public void setRegistrationToken(String registrationToken) {
getDelegateForUpdate();
updated.setRegistrationToken(registrationToken);
}
public boolean isPublicClient() {
    return isUpdated() ? updated.isPublicClient() : cached.isPublicClient();
}
// Write-through to the DB delegate.
public void setPublicClient(boolean flag) {
getDelegateForUpdate();
updated.setPublicClient(flag);
}
/**
 * Whether this client uses front-channel logout.
 *
 * <p>Bug fix: the original delegated to {@code updated.isPublicClient()} (copy-paste
 * from the method above), so the wrong flag was returned whenever a write had
 * materialized the DB delegate.
 */
public boolean isFrontchannelLogout() {
    if (isUpdated()) return updated.isFrontchannelLogout();
    return cached.isFrontchannelLogout();
}
// Write-through to the DB delegate.
public void setFrontchannelLogout(boolean flag) {
getDelegateForUpdate();
updated.setFrontchannelLogout(flag);
}
@Override
public boolean isFullScopeAllowed() {
    return isUpdated() ? updated.isFullScopeAllowed() : cached.isFullScopeAllowed();
}
// Write-through to the DB delegate.
@Override
public void setFullScopeAllowed(boolean value) {
getDelegateForUpdate();
updated.setFullScopeAllowed(value);
}
public Set<RoleModel> getScopeMappings() {
    if (isUpdated()) return updated.getScopeMappings();
    // Cache stores role ids only; resolve each id through the cache session.
    Set<RoleModel> result = new HashSet<RoleModel>();
    for (String roleId : cached.getScope()) {
        result.add(cacheSession.getRoleById(roleId, getRealm()));
    }
    return result;
}
// Write-through to the DB delegate.
public void addScopeMapping(RoleModel role) {
getDelegateForUpdate();
updated.addScopeMapping(role);
}
// Write-through to the DB delegate.
public void deleteScopeMapping(RoleModel role) {
getDelegateForUpdate();
updated.deleteScopeMapping(role);
}
// Filters the full scope mappings down to realm-level roles of this adapter's realm.
public Set<RoleModel> getRealmScopeMappings() {
    Set<RoleModel> realmRoles = new HashSet<RoleModel>();
    for (RoleModel role : getScopeMappings()) {
        RoleContainerModel container = role.getContainer();
        if (container instanceof RealmModel
                && ((RealmModel) container).getId().equals(cachedRealm.getId())) {
            realmRoles.add(role);
        }
    }
    return realmRoles;
}
// Realm is fixed at construction time; no delegate consultation needed.
public RealmModel getRealm() {
return cachedRealm;
}
public int getNotBefore() {
    return isUpdated() ? updated.getNotBefore() : cached.getNotBefore();
}
// Write-through to the DB delegate.
public void setNotBefore(int notBefore) {
getDelegateForUpdate();
updated.setNotBefore(notBefore);
}
@Override
public String getProtocol() {
    return isUpdated() ? updated.getProtocol() : cached.getProtocol();
}
// Write-through to the DB delegate.
@Override
public void setProtocol(String protocol) {
getDelegateForUpdate();
updated.setProtocol(protocol);
}
// Write-through to the DB delegate.
@Override
public void setAttribute(String name, String value) {
getDelegateForUpdate();
updated.setAttribute(name, value);
}
// Write-through to the DB delegate.
@Override
public void removeAttribute(String name) {
getDelegateForUpdate();
updated.removeAttribute(name);
}
@Override
public String getAttribute(String name) {
    return isUpdated() ? updated.getAttribute(name) : cached.getAttributes().get(name);
}
@Override
public Map<String, String> getAttributes() {
    if (isUpdated()) return updated.getAttributes();
    // Defensive copy so callers cannot mutate the shared cache entry.
    return new HashMap<String, String>(cached.getAttributes());
}
@Override
public Set<ProtocolMapperModel> getProtocolMappers() {
    return isUpdated() ? updated.getProtocolMappers() : cached.getProtocolMappers();
}
// Write-through to the DB delegate; returns the persisted mapper.
@Override
public ProtocolMapperModel addProtocolMapper(ProtocolMapperModel model) {
getDelegateForUpdate();
return updated.addProtocolMapper(model);
}
// Write-through to the DB delegate.
@Override
public void removeProtocolMapper(ProtocolMapperModel mapping) {
getDelegateForUpdate();
updated.removeProtocolMapper(mapping);
}
// Write-through to the DB delegate.
@Override
public void updateProtocolMapper(ProtocolMapperModel mapping) {
getDelegateForUpdate();
updated.updateProtocolMapper(mapping);
}
/**
 * Looks up a protocol mapper by id.
 *
 * <p>Consistency fix: the original iterated {@code cached.getProtocolMappers()}
 * unconditionally, so mappers added or removed through the delegate in this session
 * were invisible. Iterate {@link #getProtocolMappers()}, which is delegate-aware,
 * matching the read pattern used by every other getter in this adapter.
 *
 * @return the matching mapper, or null when none exists
 */
@Override
public ProtocolMapperModel getProtocolMapperById(String id) {
    for (ProtocolMapperModel mapping : getProtocolMappers()) {
        if (mapping.getId().equals(id)) return mapping;
    }
    return null;
}
/**
 * Looks up a protocol mapper by (protocol, name).
 *
 * <p>Consistency fix: like {@code getProtocolMapperById}, the original read the raw
 * cached snapshot and ignored the delegate, returning stale results after a write in
 * this session. Iterate the delegate-aware {@link #getProtocolMappers()} instead.
 *
 * @return the matching mapper, or null when none exists
 */
@Override
public ProtocolMapperModel getProtocolMapperByName(String protocol, String name) {
    for (ProtocolMapperModel mapping : getProtocolMappers()) {
        if (mapping.getProtocol().equals(protocol) && mapping.getName().equals(name)) return mapping;
    }
    return null;
}
@Override
public String getClientId() {
    return isUpdated() ? updated.getClientId() : cached.getClientId();
}
// Write-through to the DB delegate.
@Override
public void setClientId(String clientId) {
getDelegateForUpdate();
updated.setClientId(clientId);
}
@Override
public String getName() {
    return isUpdated() ? updated.getName() : cached.getName();
}
@Override
public void setName(String name) {
getDelegateForUpdate();
updated.setName(name);
}
@Override
public String getDescription() {
if (isUpdated()) return updated.getDescription();
return cached.getDescription();
}
@Override
public void setDescription(String description) {
getDelegateForUpdate();
updated.setDescription(description);
}
@Override
public boolean isSurrogateAuthRequired() {
if (isUpdated()) return updated.isSurrogateAuthRequired();
return cached.isSurrogateAuthRequired();
}
@Override
public void setSurrogateAuthRequired(boolean surrogateAuthRequired) {
getDelegateForUpdate();
updated.setSurrogateAuthRequired(surrogateAuthRequired);
}
@Override
public String getManagementUrl() {
if (isUpdated()) return updated.getManagementUrl();
return cached.getManagementUrl();
}
@Override
public void setManagementUrl(String url) {
getDelegateForUpdate();
updated.setManagementUrl(url);
}
@Override
public String getRootUrl() {
if (isUpdated()) return updated.getRootUrl();
return cached.getRootUrl();
}
@Override
public void setRootUrl(String url) {
getDelegateForUpdate();
updated.setRootUrl(url);
}
@Override
public String getBaseUrl() {
if (isUpdated()) return updated.getBaseUrl();
return cached.getBaseUrl();
}
@Override
public void setBaseUrl(String url) {
getDelegateForUpdate();
updated.setBaseUrl(url);
}
@Override
public List<String> getDefaultRoles() {
if (isUpdated()) return updated.getDefaultRoles();
return cached.getDefaultRoles();
}
@Override
public void addDefaultRole(String name) {
getDelegateForUpdate();
updated.addDefaultRole(name);
}
@Override
public void updateDefaultRoles(String... defaultRoles) {
getDelegateForUpdate();
updated.updateDefaultRoles(defaultRoles);
}
@Override
public void removeDefaultRoles(String... defaultRoles) {
getDelegateForUpdate();
updated.removeDefaultRoles(defaultRoles);
}
@Override
public boolean isBearerOnly() {
if (isUpdated()) return updated.isBearerOnly();
return cached.isBearerOnly();
}
@Override
public void setBearerOnly(boolean only) {
getDelegateForUpdate();
updated.setBearerOnly(only);
}
@Override
public boolean isConsentRequired() {
if (isUpdated()) return updated.isConsentRequired();
return cached.isConsentRequired();
}
@Override
public void setConsentRequired(boolean consentRequired) {
getDelegateForUpdate();
updated.setConsentRequired(consentRequired);
}
@Override
public boolean isStandardFlowEnabled() {
if (isUpdated()) return updated.isStandardFlowEnabled();
return cached.isStandardFlowEnabled();
}
@Override
public void setStandardFlowEnabled(boolean standardFlowEnabled) {
getDelegateForUpdate();
updated.setStandardFlowEnabled(standardFlowEnabled);
}
@Override
public boolean isImplicitFlowEnabled() {
if (isUpdated()) return updated.isImplicitFlowEnabled();
return cached.isImplicitFlowEnabled();
}
@Override
public void setImplicitFlowEnabled(boolean implicitFlowEnabled) {
getDelegateForUpdate();
updated.setImplicitFlowEnabled(implicitFlowEnabled);
}
@Override
public boolean isDirectAccessGrantsEnabled() {
if (isUpdated()) return updated.isDirectAccessGrantsEnabled();
return cached.isDirectAccessGrantsEnabled();
}
@Override
public void setDirectAccessGrantsEnabled(boolean directAccessGrantsEnabled) {
getDelegateForUpdate();
updated.setDirectAccessGrantsEnabled(directAccessGrantsEnabled);
}
@Override
public boolean isServiceAccountsEnabled() {
if (isUpdated()) return updated.isServiceAccountsEnabled();
return cached.isServiceAccountsEnabled();
}
@Override
public void setServiceAccountsEnabled(boolean serviceAccountsEnabled) {
getDelegateForUpdate();
updated.setServiceAccountsEnabled(serviceAccountsEnabled);
}
@Override
public RoleModel getRole(String name) {
// Linear scan of this client's roles for an exact name match; null if absent.
RoleModel found = null;
for (RoleModel candidate : getRoles()) {
if (candidate.getName().equals(name)) {
found = candidate;
break;
}
}
return found;
}
// --- Role management ---------------------------------------------------------
@Override
public RoleModel addRole(String name) {
getDelegateForUpdate();
RoleModel role = updated.addRole(name);
// Invalidate the new role in the cache so stale entries are never served.
cacheSession.registerRoleInvalidation(role.getId());
return role;
}
@Override
public RoleModel addRole(String id, String name) {
getDelegateForUpdate();
RoleModel role = updated.addRole(id, name);
// Same invalidation as the name-only overload.
cacheSession.registerRoleInvalidation(role.getId());
return role;
}
@Override
public boolean removeRole(RoleModel role) {
// Removal routes through the cache session so invalidation is centralized.
return cacheSession.removeRole(cachedRealm, role);
}
@Override
public Set<RoleModel> getRoles() {
// Role listing is delegated to the cache session for this realm/client.
return cacheSession.getClientRoles(cachedRealm, this);
}
// --- Cluster-node registration -----------------------------------------------
@Override
public int getNodeReRegistrationTimeout() {
if (isUpdated()) return updated.getNodeReRegistrationTimeout();
return cached.getNodeReRegistrationTimeout();
}
@Override
public void setNodeReRegistrationTimeout(int timeout) {
getDelegateForUpdate();
updated.setNodeReRegistrationTimeout(timeout);
}
@Override
public Map<String, Integer> getRegisteredNodes() {
// Map of node host -> registration time (units defined by the delegate —
// presumably epoch seconds; confirm against ClientModel's contract).
if (isUpdated()) return updated.getRegisteredNodes();
return cached.getRegisteredNodes();
}
@Override
public void registerNode(String nodeHost, int registrationTime) {
getDelegateForUpdate();
updated.registerNode(nodeHost, registrationTime);
}
@Override
public void unregisterNode(String nodeHost) {
getDelegateForUpdate();
updated.unregisterNode(nodeHost);
}
@Override
public boolean hasScope(RoleModel role) {
// Once modified, defer entirely to the updatable delegate.
if (isUpdated()) return updated.hasScope(role);
// Fast path: full-scope clients see every role, and an explicit scope entry
// for this role id grants immediately.
if (cached.isFullScopeAllowed() || cached.getScope().contains(role.getId())) return true;
// Scope mappings may grant the role indirectly via composite roles.
Set<RoleModel> roles = getScopeMappings();
for (RoleModel mapping : roles) {
if (mapping.hasRole(role)) return true;
}
// Finally, the client's own roles: direct membership first, then composites.
roles = getRoles();
if (roles.contains(role)) return true;
for (RoleModel mapping : roles) {
if (mapping.hasRole(role)) return true;
}
return false;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
// instanceof already evaluates to false for null, so the separate
// null check the original carried was redundant.
if (!(o instanceof ClientModel)) return false;
ClientModel that = (ClientModel) o;
// Identity is determined solely by the model id, across implementations.
return that.getId().equals(getId());
}
@Override
public int hashCode() {
// Consistent with equals(): hash on the same id used for equality.
return getId().hashCode();
}
}
| |
/* -----------------------------------------------------------------------------
* Displayer.java
* -----------------------------------------------------------------------------
*
* Producer : com.parse2.aparse.Parser 2.3
* Produced : Fri Apr 12 10:40:21 MUT 2013
*
* -----------------------------------------------------------------------------
*/
package com.litecoding.smali2java.parser;
import java.util.ArrayList;
public class Displayer implements Visitor
{
public Object visit(Rule_padding rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_optPadding rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_widePadding rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_optWidePadding rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_listSeparator rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_escSymbol rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_qualifier rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_className rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_commentSequence rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_comment rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_emptyLine rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_skipLine rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_fileName rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_accessMode rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_interfaceMode rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_type rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_boolValue rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_intDecValue rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_intHexValue rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_intValue rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_strValue rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_value rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_codeLabel rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_codeRegisterP rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_codeRegisterP64 rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_codeRegisterV rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_codeRegisterV64 rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_codeRegisterVDst rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_codeRegisterV64Dst rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_codeRegister rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_codeRegister64 rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_codeRegisterRet rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_codeRegisterRet64 rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_codeRegisterGroup rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_dirAnnotation rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_dirClass rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_dirEnd rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_dirEndAnnotation rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_dirEndField rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_dirEndMethod rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_dirEndSubannotation rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_dirImplements rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_dirField rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_dirLocal rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_dirMethod rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_dirRegisters rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_dirSubannotation rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_dirSuper rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_dirSource rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_smali rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_todoStubLine rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_classHeader rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_classClass rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_classSuper rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_classSource rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_classImplements rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_methodLocals rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_methodParam rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_methodPrologue rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_methodLine rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_methodRegisters rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_methodLocal rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_annotation rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_annotationVisibility rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_annotationBody rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_classField rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_classMethod rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_classConstructorName rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_classMethodProto rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_methodBody rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_label rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_smaliConstructorName rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_smaliClassRef rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_smaliFieldRef rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_smaliMethodRef rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_HTAB rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_CR rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_LF rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_SP rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_CRLF rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_QUOT rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_HASH rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_COMMA rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_DOT rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_COLON rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_SEMICOLON rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_EQ rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_UNDERSCORE rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_ALPHA rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_DIGIT rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_HEXDIG rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_VCHAR rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdAny rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdAddFloat rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdAddFloat2addr rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdAddDouble rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdAddDouble2addr rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdAddInt rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdAddInt2addr rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdAddIntLit16 rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdAddIntLit8 rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdAddLong rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdAddLong2addr rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdAndInt rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdAndInt2addr rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdAndIntLit16 rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdAndIntLit8 rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdAndLong rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdAndLong2addr rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdAget rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdAgetBoolean rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdAgetByte rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdAgetChar rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdAgetObject rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdAgetShort rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdAgetWide rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdAput rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdAputBoolean rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdAputByte rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdAputChar rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdAputObject rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdAputShort rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdAputWide rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdArrayLength rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdCheckCast rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdCmpgDouble rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdCmpgFloat rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdCmplDouble rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdCmplFloat rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdCmpLong rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdConst rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdConst16 rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdConst4 rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdConstClass rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdConstHigh16 rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdConstString rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdConstStringJumbo rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdConstWide rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdConstWide16 rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdConstWide32 rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdConstWideHigh16 rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdDivFloat rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdDivFloat2addr rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdDivDouble rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdDivDouble2addr rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdDivInt rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdDivInt2addr rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdDivIntLit16 rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdDivIntLit8 rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdDivLong rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdDivLong2addr rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdDoubleToFloat rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdDoubleToInt rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdDoubleToLong rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdFillArrayData rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdFilledNewArray rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdFilledNewArrayRange rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdFloatToDouble rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdFloatToInt rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdFloatToLong rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdGoto rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdGoto16 rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdGoto32 rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdIfEq rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdIfEqz rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdIfGe rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdIfGez rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdIfGt rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdIfGtz rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdIfLe rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdIfLez rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdIfLt rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdIfLtz rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdIfNe rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdIfNez rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdIget rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdIgetBoolean rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdIgetByte rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdIgetChar rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdIgetObject rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdIgetShort rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdIgetWide rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdInstanceOf rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdIntToByte rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdIntToChar rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdIntToDouble rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdIntToFloat rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdIntToLong rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdIntToShort rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdInvokeDirect rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdInvokeDirectRange rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdInvokeInterface rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdInvokeInterfaceRange rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdInvokeStatic rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdInvokeStaticRange rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdInvokeSuper rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdInvokeSuperRange rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdInvokeVirtual rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdInvokeVirtualRange rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdIput rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdIputBoolean rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdIputByte rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdIputChar rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdIputObject rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdIputShort rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdIputWide rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdLongToDouble rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdLongToFloat rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdLongToInt rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdMonitorEnter rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdMonitorExit rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdMove rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdMove16 rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdMoveException rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdMoveFrom16 rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdMoveObject rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdMoveObject16 rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdMoveObjectFrom16 rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdMoveResult rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdMoveResultObject rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdMoveResultWide rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdMoveWide rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdMoveWide16 rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdMoveWideFrom16 rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdMulFloat rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdMulFloat2addr rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdMulDouble rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdMulDouble2addr rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdMulInt rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdMulInt2addr rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdMulIntLit16 rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdMulIntLit8 rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdMulLong rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdMulLong2addr rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdNegDouble rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdNegFloat rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdNegInt rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdNegLong rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdNewArray rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdNewInstance rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdNop rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdNotInt rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdNotLong rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdOrInt rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdOrInt2addr rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdOrIntLit16 rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdOrIntLit8 rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdOrLong rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdOrLong2addr rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdPackedSwitch rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdRemFloat rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdRemFloat2addr rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdRemDouble rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdRemDouble2addr rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdRemInt rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdRemInt2addr rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdRemIntLit16 rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdRemIntLit8 rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdRemLong rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdRemLong2addr rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdReturn rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdReturnObject rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdReturnVoid rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdReturnWide rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdRsubInt rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdRsubIntLit8 rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdSget rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdSgetBoolean rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdSgetByte rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdSgetChar rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdSgetObject rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdSgetShort rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdSgetWide rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdShlInt rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdShlInt2addr rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdShlIntLit8 rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdShlLong rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdShlLong2addr rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdShrInt rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdShrInt2addr rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdShrIntLit8 rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdShrLong rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdShrLong2addr rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdSparseSwitch rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdSput rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdSputBoolean rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdSputByte rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdSputChar rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdSputObject rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdSputShort rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdSputWide rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdSubFloat rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdSubFloat2addr rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdSubDouble rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdSubDouble2addr rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdSubInt rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdSubInt2addr rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdSubLong rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdSubLong2addr rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdThrow rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdUshrInt rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdUshrInt2addr rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdUshrIntLit8 rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdUshrLong rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdUshrLong2addr rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdXorInt rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdXorInt2addr rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdXorIntLit16 rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdXorIntLit8 rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdXorLong rule)
{
return visitRules(rule.rules);
}
public Object visit(Rule_cmdXorLong2addr rule)
{
return visitRules(rule.rules);
}
// Terminal visits are the only producers of output: they echo the terminal's
// matched spelling to standard output. All Rule_* visits above just recurse.
public Object visit(Terminal_StringValue value)
{
System.out.print(value.spelling);
return null;
}
public Object visit(Terminal_NumericValue value)
{
System.out.print(value.spelling);
return null;
}
// Depth-first traversal helper: dispatches each child rule back through this
// visitor. Visitors in this generated hierarchy carry no result, so the
// return value is always null.
private Object visitRules(ArrayList<Rule> rules)
{
for (Rule rule : rules)
rule.accept(this);
return null;
}
}
/* -----------------------------------------------------------------------------
* eof
* -----------------------------------------------------------------------------
*/
| |
/*
Derby - Class com.pivotal.gemfirexd.internal.impl.sql.execute.TriggerInfo
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to you under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/*
* Changes for GemFireXD distributed data platform (some marked by "GemStone changes")
*
* Portions Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package com.pivotal.gemfirexd.internal.impl.sql.execute;
import com.pivotal.gemfirexd.internal.catalog.UUID;
import com.pivotal.gemfirexd.internal.iapi.error.StandardException;
import com.pivotal.gemfirexd.internal.iapi.services.io.ArrayUtil;
import com.pivotal.gemfirexd.internal.iapi.services.io.FormatIdUtil;
import com.pivotal.gemfirexd.internal.iapi.services.io.Formatable;
import com.pivotal.gemfirexd.internal.iapi.services.monitor.Monitor;
import com.pivotal.gemfirexd.internal.iapi.services.sanity.SanityManager;
import com.pivotal.gemfirexd.internal.iapi.sql.dictionary.DataDictionary;
import com.pivotal.gemfirexd.internal.iapi.sql.dictionary.GenericDescriptorList;
import com.pivotal.gemfirexd.internal.iapi.sql.dictionary.TableDescriptor;
import com.pivotal.gemfirexd.internal.iapi.sql.dictionary.TriggerDescriptor;
import com.pivotal.gemfirexd.internal.shared.common.StoredFormatIds;
import java.io.ObjectOutput;
import java.io.ObjectInput;
import java.io.IOException;
import java.util.Enumeration;
import java.util.Vector;
/**
 * This is a simple class used to store the run time information
 * about the triggers declared on a table. Used by DML to figure
 * out what to fire.
 *
 */
public final class TriggerInfo implements Formatable
{
/********************************************************
**
** This class implements Formatable. That means that it
** can write itself to and from a formatted stream. If
** you add more fields to this class, make sure that you
** also write/read them with the writeExternal()/readExternal()
** methods.
**
** If, inbetween releases, you add more fields to this class,
** then you should bump the version number emitted by the getTypeFormatId()
** method. OR, since this is something that is used
** in stored prepared statements, it is ok to change it
** if you make sure that stored prepared statements are
** invalidated across releases.
**
********************************************************/
// Triggers declared on the target table that this DML may fire.
TriggerDescriptor[] triggerArray;
// Names and ids of the columns changed by the DML. Both may be null:
// the constructor accepts a null changedCols, and readExternal() leaves
// columnNames null when the stored array length is zero.
String[] columnNames;
int[] columnIds;
/**
 * Niladic constructor for Formatable
 */
public TriggerInfo() {}
/**
 * Constructor for TriggerInfo
 *
 * @param td the table upon which the trigger is declared
 * @param changedCols the columns that are changed in the dml that is
 * causing the trigger to fire; may be null
 * @param triggers the list of trigger descriptors
 *
 */
public TriggerInfo
(
TableDescriptor td,
int[] changedCols,
GenericDescriptorList triggers
)
{
this.columnIds = changedCols;
if (columnIds != null)
{
/*
** Find the names of all the columns that are
** being changed.
*/
columnNames = new String[columnIds.length];
for (int i = 0; i < columnIds.length; i++)
{
columnNames[i] = td.getColumnDescriptor(columnIds[i]).getColumnName();
}
}
if (SanityManager.DEBUG)
{
SanityManager.ASSERT(triggers != null, "null trigger descriptor list");
SanityManager.ASSERT(triggers.size() > 0, "trigger descriptor list has no elements");
}
/*
** Copy the trigger descriptors into an array of the right type
*/
Enumeration descs = triggers.elements();
int size = triggers.size();
triggerArray = new TriggerDescriptor[size];
for (int i = 0; i < size; i++)
{
triggerArray[i] = (TriggerDescriptor) descs.nextElement();
}
}
/**
 * Do we have a trigger or triggers that meet
 * the criteria
 *
 * @param isBefore true for a before trigger, false
 * for after trigger, null for either
 * @param isRow true for a row trigger, false
 * for statement trigger, null for either
 *
 * @return true if we have a trigger that meets the
 * criteria
 */
boolean hasTrigger(boolean isBefore, boolean isRow)
{
if (triggerArray == null)
{
return false;
}
// GemStone changes BEGIN
// changed to use valueOf()
return hasTrigger(Boolean.valueOf(isBefore),
Boolean.valueOf(isRow));
/* (original code)
return hasTrigger(new Boolean(isBefore), new Boolean(isRow));
*/
// GemStone changes END
}
/**
 * Do we have a trigger or triggers that meet
 * the criteria
 *
 * @param isBefore true for a before trigger, false
 * for after trigger, null for either
 * @param isRow true for a row trigger, false
 * for statement trigger, null for either
 *
 * @return true if we have a trigger that meets the
 * criteria
 */
private boolean hasTrigger(Boolean isBefore, Boolean isRow)
{
if (triggerArray == null)
{
return false;
}
for (int i = 0; i < triggerArray.length; i++)
{
if (((isBefore == null) ||
(triggerArray[i].isBeforeTrigger() == isBefore.booleanValue())) &&
((isRow == null) ||
(triggerArray[i].isRowTrigger() == isRow.booleanValue())))
{
return true;
}
}
return false;
}
TriggerDescriptor[] getTriggerArray()
{
return triggerArray;
}
//////////////////////////////////////////////
//
// FORMATABLE
//
//////////////////////////////////////////////
/**
 * Write this object out
 *
 * @param out write bytes here
 *
 * @exception IOException thrown on error
 */
public void writeExternal(ObjectOutput out) throws IOException
{
ArrayUtil.writeArray(out, triggerArray);
ArrayUtil.writeIntArray(out, columnIds);
ArrayUtil.writeArray(out, columnNames);
}
/**
 * Read this object from a stream of stored objects.
 *
 * @param in read this.
 *
 * @exception IOException thrown on error
 * @exception ClassNotFoundException thrown on error
 */
public void readExternal(ObjectInput in)
throws IOException, ClassNotFoundException
{
triggerArray = new TriggerDescriptor[ArrayUtil.readArrayLength(in)];
ArrayUtil.readArrayItems(in, triggerArray);
columnIds = ArrayUtil.readIntArray(in);
int len = ArrayUtil.readArrayLength(in);
if (len > 0)
{
columnNames = new String[len];
ArrayUtil.readArrayItems(in, columnNames);
}
}
/**
 * Get the formatID which corresponds to this class.
 *
 * @return the formatID of this class
 */
public int getTypeFormatId() { return StoredFormatIds.TRIGGER_INFO_V01_ID; }
//////////////////////////////////////////////////////////////
//
// Misc
//
//////////////////////////////////////////////////////////////
/**
 * Debug-only string form. Guards against null columnNames/columnIds/
 * triggerArray: the constructor permits a null changed-column list and
 * readExternal() can leave columnNames null, so the unguarded loops
 * previously threw NullPointerException in SanityManager.DEBUG builds.
 */
public String toString()
{
if (SanityManager.DEBUG)
{
StringBuilder str = new StringBuilder();
str.append("\nColumn names modified:\t\t(");
if (columnNames != null)
{
for (int i = 0; i < columnNames.length; i++)
{
if (i > 0)
str.append(",");
str.append(columnNames[i]);
}
}
str.append(")");
str.append("\nColumn ids modified:\t\t(");
if (columnIds != null)
{
for (int i = 0; i < columnIds.length; i++)
{
if (i > 0)
str.append(",");
str.append(columnIds[i]);
}
}
str.append(")");
str.append("\nTriggers:");
if (triggerArray != null)
{
for (int i = 0; i < triggerArray.length; i++)
{
str.append("\n"+triggerArray[i]);
}
}
return str.toString();
}
else
{
return "";
}
}
}
| |
/*******************************************************************************
* Copyright (c) 2014, 2015 IBM Corporation
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*******************************************************************************/
package hulo.localization.sensor;
/**
 * Static helpers for sensor signal processing: averaging, windowed
 * statistics, interpolation, and peak detection.
 */
public class SensorUtils {
// Minimum window-vote count required before the trailing region's top
// candidate may be promoted to a local maximum (see findLocalMaximum).
static int peakVoteThrethold = 3;
/** Arithmetic mean of x (NaN for an empty array). */
public static double computeAverage(double[] x){
double total = 0;
for (double v : x) {
total += v;
}
return total / x.length;
}
/** Circular mean of the given angles (radians), via unit-vector averaging. */
public static double computeAngleAverage(double[] angles){
double sumCos = 0;
double sumSin = 0;
for (double a : angles) {
sumCos += Math.cos(a);
sumSin += Math.sin(a);
}
int n = angles.length;
return Math.atan2(sumSin / n, sumCos / n);
}
/** Returns a new array equal to x with the constant c subtracted elementwise. */
public static double[] subtract(double[] x, double c){
double[] out = new double[x.length];
for (int i = 0; i < out.length; i++) {
out[i] = x[i] - c;
}
return out;
}
/** Moving average with the given window; a too-long window collapses to one full-length window. */
public static double[] computeMovingAverage(double[] amps, int winsize){
int count = amps.length - winsize + 1;
if (count < 1) {
winsize = amps.length;
count = 1;
}
double[] averaged = new double[count];
for (int start = 0; start < count; start++) {
double total = 0;
for (int k = start; k < start + winsize; k++) {
total += amps[k];
}
averaged[start] = total / winsize;
}
return averaged;
}
/** Population standard deviation over each sliding window of the signal. */
public static double[] computeSlidingWindowDeviation(double[] amps, int winsize){
int count = amps.length - winsize + 1;
if (count < 1) {
winsize = amps.length;
count = 1;
}
double[] deviations = new double[count];
for (int start = 0; start < count; start++) {
double total = 0;
for (int k = start; k < start + winsize; k++) {
total += amps[k];
}
double mu = total / winsize;
double sq = 0;
for (int k = start; k < start + winsize; k++) {
double d = amps[k] - mu;
sq += d * d;
}
deviations[start] = Math.sqrt(sq / winsize);
}
return deviations;
}
/** 1.0 where the deviation strictly exceeds the threshold, else 0.0. */
public static double[] thresholding(double[] deviations, double sigmaThresh){
double[] flags = new double[deviations.length];
for (int i = 0; i < flags.length; i++) {
flags[i] = deviations[i] > sigmaThresh ? 1 : 0;
}
return flags;
}
/** First difference: dx[i] = x[i+1] - x[i]; result is one shorter than x. */
public static double[] derivative(double[] x){
double[] dx = new double[x.length - 1];
for (int i = 0; i < dx.length; i++) {
dx[i] = x[i + 1] - x[i];
}
return dx;
}
/** Running (prefix) sum of x. */
public static double[] accumulate(double[] x){
double[] cum = new double[x.length];
cum[0] = x[0];
for (int i = 1; i < cum.length; i++) {
cum[i] = cum[i - 1] + x[i];
}
return cum;
}
/**
 * Linearly interpolates between successive strictly-increasing anchor
 * values of a step-like sequence; the tail is extrapolated with the
 * last computed slope.
 */
public static double[] linearInterpolate(double[] x){
int n = x.length;
double[] out = x.clone();
double ref = x[0];
int refIdx = 0;
double slope = 0;
for (int i = 0; i < n; i++) {
if (x[i] > ref) {
slope = (x[i] - ref) / ((double) (i - refIdx));
for (int j = refIdx; j < i; j++) {
out[j] += slope * (double) (j - refIdx);
}
ref = x[i];
refIdx = i;
}
}
for (int i = refIdx; i < n; i++) {
out[i] += slope * (double) (i - refIdx);
}
return out;
}
/**
 * Window-vote local-maximum detection: each sliding window votes for
 * its (first) maximum; a sample covered by every window containing it
 * becomes a local max. The trailing region sees fewer windows, so its
 * top vote-getter is promoted only past peakVoteThrethold and never
 * for the final sample.
 */
public static double[] findLocalMaximum(double[] x, int winsize){
int n = x.length;
double[] isLocMax = new double[n];
int[] votes = new int[n];
for (int start = 0; start + winsize <= n; start++) {
int bestIdx = 0;
double bestVal = -Double.MAX_VALUE;
for (int k = start; k < start + winsize; k++) {
if (bestVal < x[k]) {
bestIdx = k;
bestVal = x[k];
}
}
votes[bestIdx]++;
}
for (int i = 0; i < n - winsize; i++) {
isLocMax[i] = votes[i] >= winsize ? 1 : 0;
}
int topVotes = 0;
int topIdx = 0;
for (int i = n - winsize; i < n; i++) {
if (topVotes < votes[i]) {
topVotes = votes[i];
topIdx = i;
}
}
if (topIdx < n - 1 && topVotes > peakVoteThrethold) {
isLocMax[topIdx] = 1;
}
return isLocMax;
}
/** Sum of all elements. */
public static double sum(double[] steps) {
double total = 0;
for (double d : steps) {
total += d;
}
return total;
}
}
| |
/**
* Vision - Created and owned by James T Saeed (EddieVanHalen98)
*
* RemoteListener.java
* Handles the controls for Remote
*
* File created on 22nd April 2016
*/
package com.evh98.vision.util;
import java.awt.AWTException;
import java.awt.Robot;
import java.awt.event.InputEvent;
import java.awt.event.KeyEvent;
import com.esotericsoftware.kryonet.Connection;
import com.esotericsoftware.kryonet.Listener;
import com.evh98.vision.Vision;
public class RemoteListener extends Listener {
Robot robot;
/*
 * Lookup table from remote command strings to the AWT key code to tap.
 * Replaces the original ~60-branch if/else chain with a single map
 * lookup; the mappings are identical to the original branches.
 * (Fully qualified java.util types so no new imports are required.)
 */
private static final java.util.Map<String, Integer> KEY_MAP = buildKeyMap();
private static java.util.Map<String, Integer> buildKeyMap() {
java.util.Map<String, Integer> m = new java.util.HashMap<String, Integer>();
// Colored shortcut buttons
m.put("green", KeyEvent.VK_ENTER);
m.put("red", KeyEvent.VK_ESCAPE);
m.put("blue", KeyEvent.VK_F);
m.put("yellow", KeyEvent.VK_Q);
// Directional pad mapped onto WASD
m.put("up", KeyEvent.VK_W);
m.put("down", KeyEvent.VK_S);
m.put("right", KeyEvent.VK_D);
m.put("left", KeyEvent.VK_A);
m.put("search", KeyEvent.VK_TAB);
// Text-entry keys
m.put("space", KeyEvent.VK_SPACE);
m.put("backspace", KeyEvent.VK_BACK_SPACE);
m.put("enter", KeyEvent.VK_ENTER);
// YouTube seek controls
m.put("rewind", KeyEvent.VK_LEFT);
m.put("forward", KeyEvent.VK_RIGHT);
// Letters a-z: VK_A..VK_Z are consecutive (the uppercase ASCII codes)
for (char c = 'a'; c <= 'z'; c++) {
m.put(String.valueOf(c), Integer.valueOf(KeyEvent.VK_A + (c - 'a')));
}
// Digits 0-9: VK_0..VK_9 are consecutive (the ASCII digit codes)
for (char c = '0'; c <= '9'; c++) {
m.put(String.valueOf(c), Integer.valueOf(KeyEvent.VK_0 + (c - '0')));
}
return m;
}
/**
 * Creates the listener and the Robot used to synthesize input events.
 * If the platform cannot create a Robot, the stack trace is printed and
 * robot remains null (same best-effort behavior as before).
 */
public RemoteListener() {
try {
robot = new Robot();
} catch (AWTException e) {
e.printStackTrace();
}
}
/**
 * Handles one remote command. String payloads are resolved through
 * KEY_MAP and tapped as a key press/release pair; "play" performs a
 * left mouse click (YouTube play/pause) and "home" returns Vision to
 * the main screen. Unknown commands and non-String payloads are
 * ignored.
 */
public void received(Connection connection, Object object) {
if (object instanceof String) {
String request = (String) object;
Integer keyCode = KEY_MAP.get(request);
if (keyCode != null) {
tap(keyCode.intValue());
}
else if (request.equals("play")) {
robot.mousePress(InputEvent.BUTTON1_DOWN_MASK);
robot.mouseRelease(InputEvent.BUTTON1_DOWN_MASK);
}
else if (request.equals("home")) {
Vision.setScreen(Vision.main_screen);
}
}
}
// Presses and immediately releases a single key.
private void tap(int keyCode) {
robot.keyPress(keyCode);
robot.keyRelease(keyCode);
}
}
| |
package com.bitgirder.mingle.codec;
import com.bitgirder.validation.Inputs;
import com.bitgirder.validation.State;
import com.bitgirder.lang.ObjectReceiver;
import com.bitgirder.lang.Strings;
import com.bitgirder.lang.Lang;
import com.bitgirder.log.CodeLoggers;
import com.bitgirder.io.IoProcessor;
import com.bitgirder.io.IoUtils;
import com.bitgirder.io.IoTestFactory;
import com.bitgirder.io.IoTestSupport;
import com.bitgirder.io.FileWrapper;
import com.bitgirder.io.ProtocolProcessor;
import com.bitgirder.io.ProtocolProcessors;
import com.bitgirder.io.ProtocolProcessorTests;
import com.bitgirder.io.ProtocolCopies;
import com.bitgirder.io.ByteBufferAccumulator;
import com.bitgirder.process.AbstractProcess;
import com.bitgirder.process.AbstractVoidProcess;
import com.bitgirder.process.ProcessExit;
import com.bitgirder.mingle.model.ModelTestInstances;
import com.bitgirder.mingle.model.MingleModels;
import com.bitgirder.mingle.model.MingleIdentifier;
import com.bitgirder.mingle.model.MingleNull;
import com.bitgirder.mingle.model.MingleList;
import com.bitgirder.mingle.model.MingleStruct;
import com.bitgirder.test.Test;
import com.bitgirder.test.TestFailureExpector;
import com.bitgirder.test.InvocationFactory;
import com.bitgirder.test.LabeledTestObject;
import com.bitgirder.test.TestRuntime;
import java.util.List;
import java.util.Map;
import java.util.Iterator;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
/**
 * Base class for MingleCodec test suites. A concrete subclass supplies the
 * codec under test via getCodec(); this class contributes roundtrip,
 * buffer-boundary, and file i/o tests that exercise it.
 */
public
abstract
class AbstractMingleCodecTests
{
// Argument validation (inputs) and internal invariant checks (state).
private final static Inputs inputs = new Inputs();
private final static State state = new State();
// Shorthand for emitting debug log messages.
private static void code( Object... msg ) { CodeLoggers.code( msg ); }
// Runtime handed to process-based tests (e.g. for io support).
private final TestRuntime rt;
protected
AbstractMingleCodecTests( TestRuntime rt )
{
this.rt = inputs.notNull( rt, "rt" );
}
protected final TestRuntime testRuntime() { return rt; }
// Subclasses return the codec instance under test.
protected
abstract
MingleCodec
getCodec()
throws Exception;
// Convenience: a struct decoder created from the codec under test.
protected
final
MingleDecoder< MingleStruct >
structDecoder()
throws Exception
{
return getCodec().createDecoder( MingleStruct.class );
}
// Hook for subclasses to inspect/log an encoded buffer; default no-op.
protected void debugEncoded( ByteBuffer ser ) throws Exception {}
// Encodes ms with the codec under test.
protected
final
ByteBuffer
toByteBuffer( MingleStruct ms )
throws Exception
{
return MingleCodecs.toByteBuffer( getCodec(), ms );
}
// Decodes a struct from bb with the codec under test.
protected
final
MingleStruct
fromByteBuffer( ByteBuffer bb )
throws Exception
{
return
MingleCodecs.fromByteBuffer( getCodec(), bb, MingleStruct.class );
}
/**
 * Encode/decode roundtrip test template. Encodes the configured struct,
 * streams the bytes back through a decoder using the configured
 * transfer-buffer sizes and byte order, and asserts the decoded struct
 * equals the original. T is the concrete subclass (self type) so the
 * setters can chain.
 */
protected
abstract
class CodecRoundtripTest< T extends CodecRoundtripTest< T > >
extends AbstractVoidProcess
implements TestFailureExpector
{
// Struct to roundtrip; must be set before start (checked in startImpl).
private MingleStruct expct;
// Deliberately small defaults so encode/decode spans many buffer fills.
private int encBufSz = 10;
private int decBufSz = 10;
private ByteOrder order = ByteOrder.LITTLE_ENDIAN;
// Optional expected failure (class + message pattern), for negative tests.
private Class< ? extends Throwable > errCls;
private CharSequence errPat;
public Object getInvocationTarget() { return this; }
// Subclasses may contribute extra label tokens; default contributes none.
protected void addLabelToks( List< Object > toks ) {}
public
final
CharSequence
getLabel()
{
List< Object > toks = Lang.newList();
addLabelToks( toks );
toks.addAll(
Lang.< Object >asList(
"encBufSz", encBufSz,
"decBufSz", decBufSz,
"order", order
)
);
return Strings.crossJoin( "=", ",", toks );
}
public
Class< ? extends Throwable >
expectedFailureClass()
{
return errCls;
}
public CharSequence expectedFailurePattern() { return errPat; }
// Unchecked cast is safe by convention: T is the self type.
private T castThis() { return Lang.castUnchecked( this ); }
public
final
T
setStruct( MingleStruct expct )
{
this.expct = inputs.notNull( expct, "expct" );
return castThis();
}
public
final
T
setEncodeBufferSize( int encBufSz )
{
this.encBufSz = inputs.positiveI( encBufSz, "encBufSz" );
return castThis();
}
public
final
T
setByteOrder( ByteOrder order )
{
this.order = inputs.notNull( order, "order" );
return castThis();
}
public
final
T
setDecodeBufferSize( int decBufSz )
{
this.decBufSz = inputs.positiveI( decBufSz, "decBufSz" );
return castThis();
}
public
final
T
expectError( Class< ? extends Throwable > errCls,
CharSequence errPat )
{
this.errCls = inputs.notNull( errCls, "errCls" );
this.errPat = inputs.notNull( errPat, "errPat" );
return castThis();
}
// Transfer buffer of the given size with the configured byte order.
private
ByteBuffer
createXferBuf( int sz )
{
return ByteBuffer.allocate( sz ).order( order );
}
// Final assertion: the decoded struct must equal the configured one;
// then the process exits.
private
void
beginAssert( MingleStruct actual )
throws Exception
{
ModelTestInstances.assertEqual( expct, actual );
exit();
}
private
ProtocolProcessor< ByteBuffer >
createSender( MingleCodec c )
{
return MingleCodecs.createSendProcessor( c, expct );
}
// Receive processor that hands the decoded struct to beginAssert().
private
ProtocolProcessor< ByteBuffer >
getDecodeProcessor( MingleCodec codec )
throws Exception
{
return
MingleCodecs.createReceiveProcessor(
codec,
MingleStruct.class,
new ObjectReceiver< MingleStruct >() {
public void receive( MingleStruct actual )
throws Exception
{
beginAssert( actual );
}
}
);
}
// Phase 2: stream the encoded bytes back through the decoder.
private
void
encoded( ByteBuffer enc,
MingleCodec codec )
throws Exception
{
debugEncoded( enc );
ProtocolCopies.copyByteStream(
ProtocolProcessors.createBufferSend( enc ),
getDecodeProcessor( codec ),
createXferBuf( decBufSz ),
getActivityContext(),
new AbstractTask() { protected void runImpl() {} }
);
}
// Phase 1: encode the struct into an accumulator; on completion,
// hand the accumulated bytes to encoded() for the decode phase.
protected
final
void
startImpl()
throws Exception
{
state.notNull( expct, "expct" );
final MingleCodec codec = getCodec();
final ByteBufferAccumulator acc =
ByteBufferAccumulator.create( 512 );
ProtocolCopies.copyByteStream(
MingleCodecs.createSendProcessor( codec, expct ),
acc,
createXferBuf( encBufSz ),
getActivityContext(),
new AbstractTask() {
protected void runImpl() throws Exception
{
ByteBuffer enc =
ProtocolProcessorTests.toByteBuffer( acc );
encoded( enc, codec );
}
}
);
}
}
/**
 * Roundtrip test with default settings, labeled by the name of the
 * object being roundtripped.
 */
protected
final
class BasicRoundtripTest
extends CodecRoundtripTest< BasicRoundtripTest >
implements LabeledTestObject
{
private final CharSequence objName;
public
BasicRoundtripTest( CharSequence objName )
{
this.objName = inputs.notNull( objName, "objName" );
}
@Override
protected
void
addLabelToks( List< Object > toks )
{
toks.add( "objName" );
toks.add( objName );
}
}
// Adds one roundtrip test per shared standard test struct.
private
void
addStandardBasicRoundtripTests( List< BasicRoundtripTest > l )
{
for ( Map.Entry< MingleIdentifier, MingleStruct > e :
MingleCodecTests.getStandardTestStructs().entrySet() )
{
l.add(
new BasicRoundtripTest( e.getKey().getExternalForm() ).
setStruct( e.getValue() )
);
}
}
// Hook for subclasses to contribute codec-specific roundtrip tests.
protected
void
implAddBasicRoundtripTests( List< BasicRoundtripTest > l )
{}
// Invocation factory: standard roundtrip tests plus subclass additions.
@InvocationFactory
private
List< BasicRoundtripTest >
testBasicRoundtrip()
{
List< BasicRoundtripTest > res = Lang.newList();
addStandardBasicRoundtripTests( res );
implAddBasicRoundtripTests( res );
return res;
}
/**
 * Verifies that a receive processor tolerates an empty buffer arriving
 * mid-stream between two non-empty chunks of an encoded struct.
 */
@Test
private
final
class CodecReceiverSquashesEmptyBufferTest
extends AbstractVoidProcess
{
private final MingleStruct ms =
MingleModels.structBuilder().
setType( "ns1@v1/S1" ).
build();
// Splits the encoding into bytes [0,10), an empty buffer, and [10,end).
private
List< ByteBuffer >
getInputBufs( MingleCodec codec )
throws Exception
{
ByteBuffer bb = MingleCodecs.toByteBuffer( codec, ms );
List< ByteBuffer > res = Lang.newList();
res.add( (ByteBuffer) bb.slice().limit( 10 ) );
res.add( IoUtils.emptyByteBuffer() );
res.add( (ByteBuffer) bb.slice().position( 10 ) );
return res;
}
// Feeds buffers one at a time, recursing until the processor reports
// completion; the last buffer is flagged as end-of-input.
private
void
feed( final Iterator< ByteBuffer > it,
final ProtocolProcessor< ByteBuffer > proc )
throws Exception
{
ProtocolProcessors.process(
proc,
it.next(),
! it.hasNext(), // important that this follows it.next()
getActivityContext(),
new ObjectReceiver< Boolean >() {
public void receive( Boolean b ) throws Exception {
if ( ! b ) feed( it, proc );
}
}
);
}
private
void
checkDecode( MingleStruct ms2 )
throws Exception
{
ModelTestInstances.assertEqual( ms, ms2 );
exit();
}
protected
void
startImpl()
throws Exception
{
MingleCodec codec = getCodec();
feed(
getInputBufs( codec ).iterator(),
MingleCodecs.createReceiveProcessor(
codec.createDecoder( MingleStruct.class ),
new ObjectReceiver< MingleStruct >() {
public void receive( MingleStruct ms2 )
throws Exception
{
checkDecode( ms2 );
}
}
)
);
}
}
/**
 * Verifies that a decoder which has consumed all input bytes but not
 * yet reported completion accepts a final empty buffer with
 * endOfInput == true.
 */
@Test
private
void
testFinalEmptyBuffer()
throws Exception
{
MingleCodec codec = getCodec();
MingleStruct ms = ModelTestInstances.TEST_STRUCT1_INST1;
ByteBuffer bb = MingleCodecs.toByteBuffer( codec, ms );
MingleDecoder< MingleStruct > dec =
codec.createDecoder( MingleStruct.class );
if ( ! dec.readFrom( bb, false ) )
{
bb.position( bb.limit() ); // make it empty
dec.readFrom( bb, true );
}
ModelTestInstances.assertEqual( ms, dec.getResult() );
}
/**
 * Writes a struct to a temp file and reads it back, asserting the
 * decoded struct equals the original.
 */
@Test
private
final
class MingleStructFileRoundtripTest
extends AbstractVoidProcess
{
private final MingleStruct mgVal =
ModelTestInstances.TEST_STRUCT1_INST1;
private
MingleStructFileRoundtripTest()
{
super( IoTestSupport.create( rt ) );
}
// Read-back phase: decode the struct from fw and compare with mgVal.
private
void
readObject( MingleCodec codec,
FileWrapper fw )
{
MingleCodecs.fromFile(
codec,
fw,
MingleStruct.class,
getActivityContext(),
behavior( IoTestSupport.class ).ioProcessor(),
new ObjectReceiver< MingleStruct >() {
public void receive( MingleStruct ms )
throws Exception
{
ModelTestInstances.assertEqual( mgVal, ms );
exit();
}
}
);
}
// Write phase: encode mgVal to a temp file, then trigger read-back.
protected
void
startImpl()
throws Exception
{
final MingleCodec codec = getCodec();
final FileWrapper fw = IoTestFactory.createTempFile();
MingleCodecs.toFile(
codec,
mgVal,
fw,
getActivityContext(),
behavior( IoTestSupport.class ).ioProcessor(),
new AbstractTask() {
protected void runImpl() { readObject( codec, fw ); }
}
);
}
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.protocol;
import static org.apache.kafka.common.protocol.types.Type.BYTES;
import static org.apache.kafka.common.protocol.types.Type.INT16;
import static org.apache.kafka.common.protocol.types.Type.INT32;
import static org.apache.kafka.common.protocol.types.Type.INT64;
import static org.apache.kafka.common.protocol.types.Type.STRING;
import org.apache.kafka.common.protocol.types.ArrayOf;
import org.apache.kafka.common.protocol.types.Field;
import org.apache.kafka.common.protocol.types.Schema;
public class Protocol {
public static final Schema REQUEST_HEADER = new Schema(new Field("api_key", INT16, "The id of the request type."),
new Field("api_version", INT16, "The version of the API."),
new Field("correlation_id",
INT32,
"A user-supplied integer value that will be passed back with the response"),
new Field("client_id",
STRING,
"A user specified identifier for the client making the request."));
public static final Schema RESPONSE_HEADER = new Schema(new Field("correlation_id",
INT32,
"The user-supplied value passed in with the request"));
/* Metadata api */
public static final Schema METADATA_REQUEST_V0 = new Schema(new Field("topics",
new ArrayOf(STRING),
"An array of topics to fetch metadata for. If no topics are specified fetch metadtata for all topics."));
public static final Schema BROKER = new Schema(new Field("node_id", INT32, "The broker id."),
new Field("host", STRING, "The hostname of the broker."),
new Field("port",
INT32,
"The port on which the broker accepts requests."));
public static final Schema PARTITION_METADATA_V0 = new Schema(new Field("partition_error_code",
INT16,
"The error code for the partition, if any."),
new Field("partition_id",
INT32,
"The id of the partition."),
new Field("leader",
INT32,
"The id of the broker acting as leader for this partition."),
new Field("replicas",
new ArrayOf(INT32),
"The set of all nodes that host this partition."),
new Field("isr",
new ArrayOf(INT32),
"The set of nodes that are in sync with the leader for this partition."));
public static final Schema TOPIC_METADATA_V0 = new Schema(new Field("topic_error_code",
INT16,
"The error code for the given topic."),
new Field("topic", STRING, "The name of the topic"),
new Field("partition_metadata",
new ArrayOf(PARTITION_METADATA_V0),
"Metadata for each partition of the topic."));
public static final Schema METADATA_RESPONSE_V0 = new Schema(new Field("brokers",
new ArrayOf(BROKER),
"Host and port information for all brokers."),
new Field("topic_metadata",
new ArrayOf(TOPIC_METADATA_V0)));
public static final Schema[] METADATA_REQUEST = new Schema[] {METADATA_REQUEST_V0};
public static final Schema[] METADATA_RESPONSE = new Schema[] {METADATA_RESPONSE_V0};
/* Produce api */
public static final Schema TOPIC_PRODUCE_DATA_V0 = new Schema(new Field("topic", STRING),
new Field("data",
new ArrayOf(new Schema(new Field("partition",
INT32),
new Field("record_set",
BYTES)))));
public static final Schema PRODUCE_REQUEST_V0 = new Schema(new Field("acks",
INT16,
"The number of nodes that should replicate the produce before returning. -1 indicates the full ISR."),
new Field("timeout",
INT32,
"The time to await a response in ms."),
new Field("topic_data",
new ArrayOf(TOPIC_PRODUCE_DATA_V0)));
public static final Schema PRODUCE_RESPONSE_V0 = new Schema(new Field("responses",
new ArrayOf(new Schema(new Field("topic",
STRING),
new Field("partition_responses",
new ArrayOf(new Schema(new Field("partition",
INT32),
new Field("error_code",
INT16),
new Field("base_offset",
INT64))))))));
public static final Schema[] PRODUCE_REQUEST = new Schema[] {PRODUCE_REQUEST_V0};
public static final Schema[] PRODUCE_RESPONSE = new Schema[] {PRODUCE_RESPONSE_V0};
/* Offset commit api */
/* Per-partition payload of a commit: the offset, a commit timestamp and opaque
 * client metadata stored alongside the offset. */
public static final Schema OFFSET_COMMIT_REQUEST_PARTITION_V0 = new Schema(new Field("partition",
INT32,
"Topic partition id."),
new Field("offset",
INT64,
"Message offset to be committed."),
new Field("timestamp",
INT64,
"Timestamp of the commit"),
new Field("metadata",
STRING,
"Any associated metadata the client wants to keep."));
/* Per-topic grouping of the partition commit entries above. */
public static final Schema OFFSET_COMMIT_REQUEST_TOPIC_V0 = new Schema(new Field("topic",
STRING,
"Topic to commit."),
new Field("partitions",
new ArrayOf(OFFSET_COMMIT_REQUEST_PARTITION_V0),
"Partitions to commit offsets."));
/* v0: just the group id plus per-topic commit data. */
public static final Schema OFFSET_COMMIT_REQUEST_V0 = new Schema(new Field("group_id",
STRING,
"The consumer group id."),
new Field("topics",
new ArrayOf(OFFSET_COMMIT_REQUEST_TOPIC_V0),
"Topics to commit offsets."));
/* v1 adds the group generation id and the consumer id on top of v0. */
public static final Schema OFFSET_COMMIT_REQUEST_V1 = new Schema(new Field("group_id",
STRING,
"The consumer group id."),
new Field("group_generation_id",
INT32,
"The generation of the consumer group."),
new Field("consumer_id",
STRING,
"The consumer id assigned by the group coordinator."),
new Field("topics",
new ArrayOf(OFFSET_COMMIT_REQUEST_TOPIC_V0),
"Topics to commit offsets."));
/* Response: an error code per committed partition, grouped by topic. */
public static final Schema OFFSET_COMMIT_RESPONSE_PARTITION_V0 = new Schema(new Field("partition",
INT32,
"Topic partition id."),
new Field("error_code", INT16));
public static final Schema OFFSET_COMMIT_RESPONSE_TOPIC_V0 = new Schema(new Field("topic", STRING),
new Field("partition_responses",
new ArrayOf(OFFSET_COMMIT_RESPONSE_PARTITION_V0)));
public static final Schema OFFSET_COMMIT_RESPONSE_V0 = new Schema(new Field("responses",
new ArrayOf(OFFSET_COMMIT_RESPONSE_TOPIC_V0)));
/* All request versions; the array index is the version number. */
public static final Schema[] OFFSET_COMMIT_REQUEST = new Schema[] {OFFSET_COMMIT_REQUEST_V0, OFFSET_COMMIT_REQUEST_V1};
/* The response types for both V0 and V1 of OFFSET_COMMIT_REQUEST are the same. */
public static final Schema[] OFFSET_COMMIT_RESPONSE = new Schema[] {OFFSET_COMMIT_RESPONSE_V0, OFFSET_COMMIT_RESPONSE_V0};
/* Offset fetch api */
/* Request names the partitions whose committed offsets should be returned,
 * grouped by topic under a consumer group id. */
public static final Schema OFFSET_FETCH_REQUEST_PARTITION_V0 = new Schema(new Field("partition",
INT32,
"Topic partition id."));
public static final Schema OFFSET_FETCH_REQUEST_TOPIC_V0 = new Schema(new Field("topic",
STRING,
"Topic to fetch offset."),
new Field("partitions",
new ArrayOf(OFFSET_FETCH_REQUEST_PARTITION_V0),
"Partitions to fetch offsets."));
public static final Schema OFFSET_FETCH_REQUEST_V0 = new Schema(new Field("group_id",
STRING,
"The consumer group id."),
new Field("topics",
new ArrayOf(OFFSET_FETCH_REQUEST_TOPIC_V0),
"Topics to fetch offsets."));
/* Response carries, per partition, the last committed offset, the stored
 * metadata and an error code. */
public static final Schema OFFSET_FETCH_RESPONSE_PARTITION_V0 = new Schema(new Field("partition",
INT32,
"Topic partition id."),
new Field("offset",
INT64,
"Last committed message offset."),
new Field("metadata",
STRING,
"Any associated metadata the client wants to keep."),
new Field("error_code", INT16));
public static final Schema OFFSET_FETCH_RESPONSE_TOPIC_V0 = new Schema(new Field("topic", STRING),
new Field("partition_responses",
new ArrayOf(OFFSET_FETCH_RESPONSE_PARTITION_V0)));
public static final Schema OFFSET_FETCH_RESPONSE_V0 = new Schema(new Field("responses",
new ArrayOf(OFFSET_FETCH_RESPONSE_TOPIC_V0)));
/* All schema versions of the offset fetch api; the array index is the version number. */
public static final Schema[] OFFSET_FETCH_REQUEST = new Schema[] {OFFSET_FETCH_REQUEST_V0};
public static final Schema[] OFFSET_FETCH_RESPONSE = new Schema[] {OFFSET_FETCH_RESPONSE_V0};
/* List offset api */
/* Request asks, per partition, for up to max_num_offsets offsets before the
 * given timestamp. */
public static final Schema LIST_OFFSET_REQUEST_PARTITION_V0 = new Schema(new Field("partition",
INT32,
"Topic partition id."),
new Field("timestamp", INT64, "Timestamp."),
new Field("max_num_offsets",
INT32,
"Maximum offsets to return."));
public static final Schema LIST_OFFSET_REQUEST_TOPIC_V0 = new Schema(new Field("topic",
STRING,
"Topic to list offset."),
new Field("partitions",
new ArrayOf(LIST_OFFSET_REQUEST_PARTITION_V0),
"Partitions to list offset."));
/* Top level of the request: the replica id distinguishes follower fetches
 * (broker id) from normal consumers (-1). */
public static final Schema LIST_OFFSET_REQUEST_V0 = new Schema(new Field("replica_id",
INT32,
"Broker id of the follower. For normal consumers, use -1."),
new Field("topics",
new ArrayOf(LIST_OFFSET_REQUEST_TOPIC_V0),
"Topics to list offsets."));
/* Response: per partition, an error code and the list of offsets found. */
public static final Schema LIST_OFFSET_RESPONSE_PARTITION_V0 = new Schema(new Field("partition",
INT32,
"Topic partition id."),
new Field("error_code", INT16),
new Field("offsets",
new ArrayOf(INT64),
"A list of offsets."));
public static final Schema LIST_OFFSET_RESPONSE_TOPIC_V0 = new Schema(new Field("topic", STRING),
new Field("partition_responses",
new ArrayOf(LIST_OFFSET_RESPONSE_PARTITION_V0)));
public static final Schema LIST_OFFSET_RESPONSE_V0 = new Schema(new Field("responses",
new ArrayOf(LIST_OFFSET_RESPONSE_TOPIC_V0)));
/* All schema versions of the list offset api; the array index is the version number. */
public static final Schema[] LIST_OFFSET_REQUEST = new Schema[] {LIST_OFFSET_REQUEST_V0};
public static final Schema[] LIST_OFFSET_RESPONSE = new Schema[] {LIST_OFFSET_RESPONSE_V0};
/* Fetch api */
/* Per-partition fetch target: where to start reading and how much to return. */
public static final Schema FETCH_REQUEST_PARTITION_V0 = new Schema(new Field("partition",
INT32,
"Topic partition id."),
new Field("fetch_offset",
INT64,
"Message offset."),
new Field("max_bytes",
INT32,
"Maximum bytes to fetch."));
public static final Schema FETCH_REQUEST_TOPIC_V0 = new Schema(new Field("topic", STRING, "Topic to fetch."),
new Field("partitions",
new ArrayOf(FETCH_REQUEST_PARTITION_V0),
"Partitions to fetch."));
/* Top level of the request: replica id (-1 for normal consumers), long-poll
 * parameters (max_wait_time / min_bytes) and the per-topic fetch targets. */
public static final Schema FETCH_REQUEST_V0 = new Schema(new Field("replica_id",
INT32,
"Broker id of the follower. For normal consumers, use -1."),
new Field("max_wait_time",
INT32,
"Maximum time in ms to wait for the response."),
new Field("min_bytes",
INT32,
"Minimum bytes to accumulate in the response."),
new Field("topics",
new ArrayOf(FETCH_REQUEST_TOPIC_V0),
"Topics to fetch."));
/* Response: per partition, an error code, the high watermark and the raw
 * record set bytes. */
public static final Schema FETCH_RESPONSE_PARTITION_V0 = new Schema(new Field("partition",
INT32,
"Topic partition id."),
new Field("error_code", INT16),
new Field("high_watermark",
INT64,
"Last committed offset."),
new Field("record_set", BYTES));
public static final Schema FETCH_RESPONSE_TOPIC_V0 = new Schema(new Field("topic", STRING),
new Field("partition_responses",
new ArrayOf(FETCH_RESPONSE_PARTITION_V0)));
public static final Schema FETCH_RESPONSE_V0 = new Schema(new Field("responses",
new ArrayOf(FETCH_RESPONSE_TOPIC_V0)));
/* All schema versions of the fetch api; the array index is the version number. */
public static final Schema[] FETCH_REQUEST = new Schema[] {FETCH_REQUEST_V0};
public static final Schema[] FETCH_RESPONSE = new Schema[] {FETCH_RESPONSE_V0};
/* Consumer metadata api */
/* Request: look up the coordinator for a consumer group by group id. */
public static final Schema CONSUMER_METADATA_REQUEST_V0 = new Schema(new Field("group_id",
STRING,
"The consumer group id."));
/* Response: an error code plus the coordinator broker (host/port). */
public static final Schema CONSUMER_METADATA_RESPONSE_V0 = new Schema(new Field("error_code", INT16),
new Field("coordinator",
BROKER,
"Host and port information for the coordinator for a consumer group."));
/* All schema versions of the consumer metadata api; the array index is the version number. */
public static final Schema[] CONSUMER_METADATA_REQUEST = new Schema[] {CONSUMER_METADATA_REQUEST_V0};
public static final Schema[] CONSUMER_METADATA_RESPONSE = new Schema[] {CONSUMER_METADATA_RESPONSE_V0};
/* Join group api */
/* Request: group membership parameters. An empty consumer_id marks a brand
 * new consumer; otherwise the previously assigned id is echoed back. */
public static final Schema JOIN_GROUP_REQUEST_V0 = new Schema(new Field("group_id",
STRING,
"The consumer group id."),
new Field("session_timeout",
INT32,
"The coordinator considers the consumer dead if it receives no heartbeat after this timeout in ms."),
new Field("topics",
new ArrayOf(STRING),
"An array of topics to subscribe to."),
new Field("consumer_id",
STRING,
"The assigned consumer id or an empty string for a new consumer."),
new Field("partition_assignment_strategy",
STRING,
"The strategy for the coordinator to assign partitions."));
/* Response: the generation, the (possibly newly assigned) consumer id and the
 * partitions assigned to this member, grouped by topic. */
public static final Schema JOIN_GROUP_RESPONSE_TOPIC_V0 = new Schema(new Field("topic", STRING),
new Field("partitions", new ArrayOf(INT32)));
public static final Schema JOIN_GROUP_RESPONSE_V0 = new Schema(new Field("error_code", INT16),
new Field("group_generation_id",
INT32,
"The generation of the consumer group."),
new Field("consumer_id",
STRING,
"The consumer id assigned by the group coordinator."),
new Field("assigned_partitions",
new ArrayOf(JOIN_GROUP_RESPONSE_TOPIC_V0)));
/* All schema versions of the join group api; the array index is the version number. */
public static final Schema[] JOIN_GROUP_REQUEST = new Schema[] {JOIN_GROUP_REQUEST_V0};
public static final Schema[] JOIN_GROUP_RESPONSE = new Schema[] {JOIN_GROUP_RESPONSE_V0};
/* Heartbeat api */
/* Request identifies the member (group id, generation, consumer id); the
 * response is just an error code. */
public static final Schema HEARTBEAT_REQUEST_V0 = new Schema(new Field("group_id", STRING, "The consumer group id."),
new Field("group_generation_id",
INT32,
"The generation of the consumer group."),
new Field("consumer_id",
STRING,
"The consumer id assigned by the group coordinator."));
public static final Schema HEARTBEAT_RESPONSE_V0 = new Schema(new Field("error_code", INT16));
/* All schema versions of the heartbeat api; the array index is the version number. */
public static final Schema[] HEARTBEAT_REQUEST = new Schema[] {HEARTBEAT_REQUEST_V0};
public static final Schema[] HEARTBEAT_RESPONSE = new Schema[] {HEARTBEAT_RESPONSE_V0};
/* an array of all requests and responses with all schema versions */
/* Outer index is the api key id; inner index is the schema version. */
public static final Schema[][] REQUESTS = new Schema[ApiKeys.MAX_API_KEY + 1][];
public static final Schema[][] RESPONSES = new Schema[ApiKeys.MAX_API_KEY + 1][];
/* the latest version of each api */
public static final short[] CURR_VERSION = new short[ApiKeys.MAX_API_KEY + 1];
static {
// Wire each api key to its request/response schema-version arrays. Apis
// with no client-side schemas defined here (leader-and-isr, stop-replica)
// get empty arrays so the loops below still index them safely.
REQUESTS[ApiKeys.PRODUCE.id] = PRODUCE_REQUEST;
REQUESTS[ApiKeys.FETCH.id] = FETCH_REQUEST;
REQUESTS[ApiKeys.LIST_OFFSETS.id] = LIST_OFFSET_REQUEST;
REQUESTS[ApiKeys.METADATA.id] = METADATA_REQUEST;
REQUESTS[ApiKeys.LEADER_AND_ISR.id] = new Schema[] {};
REQUESTS[ApiKeys.STOP_REPLICA.id] = new Schema[] {};
REQUESTS[ApiKeys.OFFSET_COMMIT.id] = OFFSET_COMMIT_REQUEST;
REQUESTS[ApiKeys.OFFSET_FETCH.id] = OFFSET_FETCH_REQUEST;
REQUESTS[ApiKeys.CONSUMER_METADATA.id] = CONSUMER_METADATA_REQUEST;
REQUESTS[ApiKeys.JOIN_GROUP.id] = JOIN_GROUP_REQUEST;
REQUESTS[ApiKeys.HEARTBEAT.id] = HEARTBEAT_REQUEST;
RESPONSES[ApiKeys.PRODUCE.id] = PRODUCE_RESPONSE;
RESPONSES[ApiKeys.FETCH.id] = FETCH_RESPONSE;
RESPONSES[ApiKeys.LIST_OFFSETS.id] = LIST_OFFSET_RESPONSE;
RESPONSES[ApiKeys.METADATA.id] = METADATA_RESPONSE;
RESPONSES[ApiKeys.LEADER_AND_ISR.id] = new Schema[] {};
RESPONSES[ApiKeys.STOP_REPLICA.id] = new Schema[] {};
RESPONSES[ApiKeys.OFFSET_COMMIT.id] = OFFSET_COMMIT_RESPONSE;
RESPONSES[ApiKeys.OFFSET_FETCH.id] = OFFSET_FETCH_RESPONSE;
RESPONSES[ApiKeys.CONSUMER_METADATA.id] = CONSUMER_METADATA_RESPONSE;
RESPONSES[ApiKeys.JOIN_GROUP.id] = JOIN_GROUP_RESPONSE;
RESPONSES[ApiKeys.HEARTBEAT.id] = HEARTBEAT_RESPONSE;
/* set the maximum version of each api */
// Note: an api registered with an empty array ends up with CURR_VERSION -1.
for (ApiKeys api : ApiKeys.values())
CURR_VERSION[api.id] = (short) (REQUESTS[api.id].length - 1);
/* sanity check that we have the same number of request and response versions for each api */
for (ApiKeys api : ApiKeys.values())
if (REQUESTS[api.id].length != RESPONSES[api.id].length)
throw new IllegalStateException(REQUESTS[api.id].length + " request versions for api " + api.name
+ " but " + RESPONSES[api.id].length + " response versions.");
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.math3.special;
import org.apache.commons.math3.TestUtils;
import org.apache.commons.math3.exception.NumberIsTooLargeException;
import org.apache.commons.math3.exception.NumberIsTooSmallException;
import org.apache.commons.math3.util.FastMath;
import org.junit.Assert;
import org.junit.Test;
/**
* @version $Id: GammaTest.java 1414531 2012-11-28 05:39:39Z celestin $
*/
public class GammaTest {
/**
 * Checks {@code Gamma.regularizedGammaP(a, x)} against the expected value and
 * verifies the complement identity P(a, x) + Q(a, x) = 1.
 */
private void testRegularizedGamma(double expected, double a, double x) {
    final double p = Gamma.regularizedGammaP(a, x);
    final double q = Gamma.regularizedGammaQ(a, x);
    TestUtils.assertEquals(expected, p, 10e-15);
    TestUtils.assertEquals(p, 1.0 - q, 10e-15);
}
/** Checks {@code Gamma.logGamma(x)} against the expected value with absolute tolerance 10e-15. */
private void testLogGamma(double expected, double x) {
    TestUtils.assertEquals(expected, Gamma.logGamma(x), 10e-15);
}
@Test
public void testRegularizedGammaNanPositive() {
// NaN in the first argument (a) must propagate to a NaN result.
testRegularizedGamma(Double.NaN, Double.NaN, 1.0);
}
@Test
public void testRegularizedGammaPositiveNan() {
// NaN in the second argument (x) must propagate to a NaN result.
testRegularizedGamma(Double.NaN, 1.0, Double.NaN);
}
@Test
public void testRegularizedGammaNegativePositive() {
// a < 0 is outside the domain: expect NaN.
testRegularizedGamma(Double.NaN, -1.5, 1.0);
}
@Test
public void testRegularizedGammaPositiveNegative() {
// x < 0 is outside the domain: expect NaN.
testRegularizedGamma(Double.NaN, 1.0, -1.0);
}
@Test
public void testRegularizedGammaZeroPositive() {
// a = 0 is outside the domain: expect NaN.
testRegularizedGamma(Double.NaN, 0.0, 1.0);
}
@Test
public void testRegularizedGammaPositiveZero() {
// P(a, 0) = 0 for positive a.
testRegularizedGamma(0.0, 1.0, 0.0);
}
@Test
public void testRegularizedGammaPositivePositive() {
// P(1, 1) = 1 - 1/e.
testRegularizedGamma(0.632120558828558, 1.0, 1.0);
}
@Test
public void testLogGammaNan() {
// NaN input must propagate to a NaN result.
testLogGamma(Double.NaN, Double.NaN);
}
@Test
public void testLogGammaNegative() {
// logGamma is undefined for x <= 0: expect NaN.
testLogGamma(Double.NaN, -1.0);
}
@Test
public void testLogGammaZero() {
// logGamma is undefined for x <= 0: expect NaN.
testLogGamma(Double.NaN, 0.0);
}
@Test
public void testLogGammaPositive() {
// logGamma(3) = ln(2!) = ln 2.
testLogGamma(0.6931471805599457, 3.0);
}
/**
 * Checks digamma against reference values for moderate-to-large positive
 * arguments, one small positive argument and two negative non-integer
 * arguments.
 */
@Test
public void testDigammaLargeArgs() {
double eps = 1e-8;
Assert.assertEquals(4.6001618527380874002, Gamma.digamma(100), eps);
Assert.assertEquals(3.9019896734278921970, Gamma.digamma(50), eps);
Assert.assertEquals(2.9705239922421490509, Gamma.digamma(20), eps);
Assert.assertEquals(2.9958363947076465821, Gamma.digamma(20.5), eps);
Assert.assertEquals(2.2622143570941481605, Gamma.digamma(10.1), eps);
Assert.assertEquals(2.1168588189004379233, Gamma.digamma(8.8), eps);
Assert.assertEquals(1.8727843350984671394, Gamma.digamma(7), eps);
Assert.assertEquals(0.42278433509846713939, Gamma.digamma(2), eps);
Assert.assertEquals(-100.56088545786867450, Gamma.digamma(0.01), eps);
Assert.assertEquals(-4.0390398965921882955, Gamma.digamma(-0.8), eps);
Assert.assertEquals(4.2003210041401844726, Gamma.digamma(-6.3), eps);
}
/**
 * Checks digamma(10^-n) for n = 1..30 against webMathematica reference
 * values with a relative tolerance of 1e-8.
 */
@Test
public void testDigammaSmallArgs() {
    // values for negative powers of 10 from 1 to 30 as computed by webMathematica with 20 digits
    // see functions.wolfram.com
    double[] expected = {-10.423754940411076795, -100.56088545786867450, -1000.5755719318103005,
        -10000.577051183514335, -100000.57719921568107, -1.0000005772140199687e6, -1.0000000577215500408e7,
        -1.0000000057721564845e8, -1.0000000005772156633e9, -1.0000000000577215665e10, -1.0000000000057721566e11,
        -1.0000000000005772157e12, -1.0000000000000577216e13, -1.0000000000000057722e14, -1.0000000000000005772e15, -1e+16,
        -1e+17, -1e+18, -1e+19, -1e+20, -1e+21, -1e+22, -1e+23, -1e+24, -1e+25, -1e+26,
        -1e+27, -1e+28, -1e+29, -1e+30};
    // Bound is n <= 30 so that all 30 reference values are exercised; the
    // previous bound (n < 30) silently skipped the last entry, expected[29].
    for (double n = 1; n <= 30; n++) {
        checkRelativeError(String.format("Test %.0f: ", n), expected[(int) (n - 1)], Gamma.digamma(FastMath.pow(10.0, -n)), 1e-8);
    }
}
/**
 * Checks trigamma against reference values computed with webMathematica.
 * The {@code data} table holds (argument, expected value) pairs.
 */
@Test
public void testTrigamma() {
    final double eps = 1e-8;
    // computed using webMathematica. For example, to compute trigamma($i) = Polygamma(1, $i), use
    //
    // http://functions.wolfram.com/webMathematica/Evaluated.jsp?name=PolyGamma2&plottype=0&vars={%221%22,%22$i%22}&digits=20
    final double[] data = {
        1e-4, 1.0000000164469368793e8,
        1e-3, 1.0000016425331958690e6,
        1e-2, 10001.621213528313220,
        1e-1, 101.43329915079275882,
        1, 1.6449340668482264365,
        2, 0.64493406684822643647,
        3, 0.39493406684822643647,
        4, 0.28382295573711532536,
        5, 0.22132295573711532536,
        10, 0.10516633568168574612,
        20, 0.051270822935203119832,
        50, 0.020201333226697125806,
        100, 0.010050166663333571395
    };
    for (int k = 0; k < data.length; k += 2) {
        final double arg = data[k];
        Assert.assertEquals(String.format("trigamma %.0f", arg), data[k + 1], Gamma.trigamma(arg), eps);
    }
}
/**
* Reference data for the {@link Gamma#logGamma(double)} function. This data
* was generated with the following <a
* href="http://maxima.sourceforge.net/">Maxima</a> script.
*
* <pre>
* kill(all);
*
* fpprec : 64;
* gamln(x) := log(gamma(x));
* x : append(makelist(bfloat(i / 8), i, 1, 80),
* [0.8b0, 1b2, 1b3, 1b4, 1b5, 1b6, 1b7, 1b8, 1b9, 1b10]);
*
* for i : 1 while i <= length(x) do
* print("{", float(x[i]), ",", float(gamln(x[i])), "},");
* </pre>
*/
// Each row is { x, expected logGamma(x) }.
private static final double[][] LOG_GAMMA_REF = {
{ 0.125 , 2.019418357553796 },
{ 0.25 , 1.288022524698077 },
{ 0.375 , .8630739822706475 },
{ 0.5 , .5723649429247001 },
{ 0.625 , .3608294954889402 },
{ 0.75 , .2032809514312954 },
{ 0.875 , .08585870722533433 },
{ 0.890625 , .07353860936979656 },
{ 0.90625 , .06169536624059108 },
{ 0.921875 , .05031670080005688 },
{ 0.9375 , 0.0393909017345823 },
{ 0.953125 , .02890678734595923 },
{ 0.96875 , .01885367233441289 },
{ 0.984375 , .009221337197578781 },
{ 1.0 , 0.0 },
{ 1.015625 , - 0.00881970970573307 },
{ 1.03125 , - .01724677500176807 },
{ 1.046875 , - .02528981394675729 },
{ 1.0625 , - .03295710029357782 },
{ 1.078125 , - .04025658272400143 },
{ 1.09375 , - .04719590272716985 },
{ 1.109375 , - .05378241123619192 },
{ 1.125 , - .06002318412603958 },
{ 1.25 , - .09827183642181316 },
{ 1.375 , - .1177552707410788 },
{ 1.5 , - .1207822376352452 },
{ 1.625 , - .1091741337567954 },
{ 1.75 , - .08440112102048555 },
{ 1.875 , - 0.0476726853991883 },
{ 1.890625 , - .04229320615532515 },
{ 1.90625 , - .03674470657266143 },
{ 1.921875 , - .03102893865389552 },
{ 1.9375 , - .02514761940298887 },
{ 1.953125 , - .01910243184040138 },
{ 1.96875 , - .01289502598016741 },
{ 1.984375 , - .006527019770560387 },
{ 2.0 , 0.0 },
{ 2.015625 , .006684476830232185 },
{ 2.03125 , .01352488366498562 },
{ 2.046875 , .02051972208453692 },
{ 2.0625 , .02766752152285702 },
{ 2.078125 , 0.0349668385135861 },
{ 2.09375 , .04241625596251728 },
{ 2.109375 , .05001438244545164 },
{ 2.125 , .05775985153034387 },
{ 2.25 , .1248717148923966 },
{ 2.375 , .2006984603774558 },
{ 2.5 , .2846828704729192 },
{ 2.625 , .3763336820249054 },
{ 2.75 , .4752146669149371 },
{ 2.875 , .5809359740231859 },
{ 2.890625 , .5946142560817441 },
{ 2.90625 , .6083932548009232 },
{ 2.921875 , .6222723333588501 },
{ 2.9375 , .6362508628423761 },
{ 2.953125 , .6503282221022278 },
{ 2.96875 , .6645037976116387 },
{ 2.984375 , 0.678776983328359 },
{ 3.0 , .6931471805599453 },
{ 3.015625 , .7076137978322324 },
{ 3.03125 , .7221762507608962 },
{ 3.046875 , .7368339619260166 },
{ 3.0625 , 0.751586360749556 },
{ 3.078125 , .7664328833756681 },
{ 3.09375 , .7813729725537568 },
{ 3.109375 , .7964060775242092 },
{ 3.125 , 0.811531653906724 },
{ 3.25 , .9358019311087253 },
{ 3.375 , 1.06569589786406 },
{ 3.5 , 1.200973602347074 },
{ 3.625 , 1.341414578068493 },
{ 3.75 , 1.486815578593417 },
{ 3.875 , 1.6369886482725 },
{ 4.0 , 1.791759469228055 },
{ 4.125 , 1.950965937095089 },
{ 4.25 , 2.114456927450371 },
{ 4.375 , 2.282091222188554 },
{ 4.5 , 2.453736570842442 },
{ 4.625 , 2.62926886637513 },
{ 4.75 , 2.808571418575736 },
{ 4.875 , 2.99153431107781 },
{ 5.0 , 3.178053830347946 },
{ 5.125 , 3.368031956881733 },
{ 5.25 , 3.561375910386697 },
{ 5.375 , 3.757997741998131 },
{ 5.5 , 3.957813967618717 },
{ 5.625 , 4.160745237339519 },
{ 5.75 , 4.366716036622286 },
{ 5.875 , 4.57565441552762 },
{ 6.0 , 4.787491742782046 },
{ 6.125 , 5.002162481906205 },
{ 6.25 , 5.219603986990229 },
{ 6.375 , 5.439756316011858 },
{ 6.5 , 5.662562059857142 },
{ 6.625 , 5.887966185430003 },
{ 6.75 , 6.115915891431546 },
{ 6.875 , 6.346360475557843 },
{ 7.0 , 6.579251212010101 },
{ 7.125 , 6.814541238336996 },
{ 7.25 , 7.05218545073854 },
{ 7.375 , 7.292140407056348 },
{ 7.5 , 7.534364236758733 },
{ 7.625 , 7.778816557302289 },
{ 7.75 , 8.025458396315983 },
{ 7.875 , 8.274252119110479 },
{ 8.0 , 8.525161361065415 },
{ 8.125 , 8.77815096449171 },
{ 8.25 , 9.033186919605123 },
{ 8.375 , 9.290236309282232 },
{ 8.5 , 9.549267257300997 },
{ 8.625 , 9.810248879795765 },
{ 8.75 , 10.07315123968124 },
{ 8.875 , 10.33794530382217 },
{ 9.0 , 10.60460290274525 },
{ 9.125 , 10.87309669270751 },
{ 9.25 , 11.14340011995171 },
{ 9.375 , 11.41548738699336 },
{ 9.5 , 11.68933342079727 },
{ 9.625 , 11.96491384271319 },
{ 9.75 , 12.24220494005076 },
{ 9.875 , 12.52118363918365 },
{ 10.0 , 12.80182748008147 },
{ 0.8 , .1520596783998376 },
{ 100.0 , 359.1342053695754 },
{ 1000.0 , 5905.220423209181 },
{ 10000.0 , 82099.71749644238 },
{ 100000.0 , 1051287.708973657 },
{ 1000000.0 , 1.2815504569147612e+7 },
{ 10000000.0 , 1.511809493694739e+8 },
{ 1.e+8 , 1.7420680661038346e+9 },
{ 1.e+9 , 1.972326582750371e+10 },
{ 1.e+10 , 2.202585092888106e+11 },
};
/**
 * Checks {@code Gamma.logGamma} against {@link #LOG_GAMMA_REF}, allowing a
 * tolerance of a few ulps of the expected value (absolute 1E-15 when the
 * expected value is exactly zero).
 */
@Test
public void testLogGamma() {
    final int maxUlps = 3;
    for (final double[] ref : LOG_GAMMA_REF) {
        final double x = ref[0];
        final double expected = ref[1];
        final double tol = (expected == 0.0) ? 1E-15 : maxUlps * FastMath.ulp(expected);
        Assert.assertEquals(Double.toString(x), expected, Gamma.logGamma(x), tol);
    }
}
@Test
public void testLogGammaPrecondition1() {
// x = 0 is outside the domain of logGamma: the result must be NaN.
Assert.assertTrue(Double.isNaN(Gamma.logGamma(0.0)));
}
@Test
public void testLogGammaPrecondition2() {
// Negative x is outside the domain of logGamma: the result must be NaN.
Assert.assertTrue(Double.isNaN(Gamma.logGamma(-1.0)));
}
/**
* <p>
* Reference values for the {@link Gamma#invGamma1pm1(double)} method.
* These values were generated with the following <a
* href="http://maxima.sourceforge.net/">Maxima</a> script
* </p>
*
* <pre>
* kill(all);
*
* fpprec : 64;
* gam1(x) := 1 / gamma(1 + x) - 1;
* x : makelist(bfloat(i / 8), i, -4, 12);
*
* for i : 1 while i <= length(x) do print("{",
* float(x[i]),
* ",",
* float(gam1(x[i])),
* "},");
* </pre>
*/
// Each row is { x, expected invGamma1pm1(x) }.
private static final double[][] INV_GAMMA1P_M1_REF = {
{ -0.5 , -.4358104164522437 },
{ -0.375 , -.3029021533379859 },
{ -0.25 , -0.183951060901737 },
{ -0.125 , -.08227611018520711 },
{ 0.0 , 0.0 },
{ 0.125 , .06186116458306091 },
{ 0.25 , .1032626513208373 },
{ 0.375 , .1249687649039041 },
{ 0.5 , .1283791670955126 },
{ 0.625 , .1153565546592225 },
{ 0.75 , 0.0880652521310173 },
{ 0.875 , .04882730264547758 },
{ 1.0 , 0.0 },
{ 1.125 , -.05612340925950141 },
{ 1.25 , -.1173898789433302 },
{ 1.375 , -.1818408982517061 },
{ 1.5 , -0.247747221936325 },
};
/**
 * Checks {@code Gamma.invGamma1pm1} against {@link #INV_GAMMA1P_M1_REF},
 * allowing a tolerance of a few ulps of the expected value.
 */
@Test
public void testInvGamma1pm1() {
    final int maxUlps = 3;
    for (final double[] ref : INV_GAMMA1P_M1_REF) {
        final double x = ref[0];
        final double expected = ref[1];
        Assert.assertEquals(Double.toString(x), expected, Gamma.invGamma1pm1(x), maxUlps * FastMath.ulp(expected));
    }
}
@Test(expected = NumberIsTooSmallException.class)
public void testInvGamma1pm1Precondition1() {
// An argument below -0.5 must be rejected as too small.
Gamma.invGamma1pm1(-0.51);
}
@Test(expected = NumberIsTooLargeException.class)
public void testInvGamma1pm1Precondition2() {
// An argument above 1.5 must be rejected as too large.
Gamma.invGamma1pm1(1.51);
}
/**
 * Reference values for the {@link Gamma#logGamma1p(double)} method. Each row
 * is { x, expected logGamma1p(x) }.
 */
private static final double[][] LOG_GAMMA1P_REF = {
{ - 0.5 , .5723649429247001 },
{ - 0.375 , .3608294954889402 },
{ - 0.25 , .2032809514312954 },
{ - 0.125 , .08585870722533433 },
{ 0.0 , 0.0 },
{ 0.125 , - .06002318412603958 },
{ 0.25 , - .09827183642181316 },
{ 0.375 , - .1177552707410788 },
{ 0.5 , - .1207822376352452 },
{ 0.625 , - .1091741337567954 },
{ 0.75 , - .08440112102048555 },
{ 0.875 , - 0.0476726853991883 },
{ 1.0 , 0.0 },
{ 1.125 , .05775985153034387 },
{ 1.25 , .1248717148923966 },
{ 1.375 , .2006984603774558 },
{ 1.5 , .2846828704729192 },
};
/**
 * Checks {@code Gamma.logGamma1p} against {@link #LOG_GAMMA1P_REF},
 * allowing a tolerance of a few ulps of the expected value.
 */
@Test
public void testLogGamma1p() {
    final int maxUlps = 3;
    for (final double[] ref : LOG_GAMMA1P_REF) {
        final double x = ref[0];
        final double expected = ref[1];
        Assert.assertEquals(Double.toString(x), expected, Gamma.logGamma1p(x), maxUlps * FastMath.ulp(expected));
    }
}
@Test(expected = NumberIsTooSmallException.class)
public void testLogGamma1pPrecondition1() {
// An argument below -0.5 must be rejected as too small.
Gamma.logGamma1p(-0.51);
}
@Test(expected = NumberIsTooLargeException.class)
public void testLogGamma1pPrecondition2() {
// An argument above 1.5 must be rejected as too large.
Gamma.logGamma1p(1.51);
}
/**
* Reference data for the {@link Gamma#gamma(double)} function. This
* data was generated with the following <a
* href="http://maxima.sourceforge.net/">Maxima</a> script.
*
* <pre>
* kill(all);
*
* fpprec : 64;
*
* EPSILON : 10**(-fpprec + 1);
* isInteger(x) := abs(x - floor(x)) <= EPSILON * abs(x);
*
* x : makelist(bfloat(i / 8), i, -160, 160);
* x : append(x, makelist(bfloat(i / 2), i, 41, 200));
*
* for i : 1 while i <= length(x) do if not(isInteger(x[i])) then
* print("{", float(x[i]), ",", float(gamma(x[i])), "},");
* </pre>
*/
private static final double[][] GAMMA_REF = {
{ - 19.875 , 4.920331854832504e-18 },
{ - 19.75 , 3.879938752480031e-18 },
{ - 19.625 , 4.323498423815027e-18 },
{ - 19.5 , 5.811045977502237e-18 },
{ - 19.375 , 9.14330910942125e-18 },
{ - 19.25 , 1.735229114436739e-17 },
{ - 19.125 , 4.653521565668223e-17 },
{ - 18.875 , - 9.779159561479603e-17 },
{ - 18.75 , - 7.662879036148061e-17 },
{ - 18.625 , - 8.484865656736991e-17 },
{ - 18.5 , - 1.133153965612936e-16 },
{ - 18.375 , - 1.771516139950367e-16 },
{ - 18.25 , - 3.340316045290721e-16 },
{ - 18.125 , - 8.899859994340475e-16 },
{ - 17.875 , 1.845816367229275e-15 },
{ - 17.75 , 1.436789819277761e-15 },
{ - 17.625 , 1.580306228567265e-15 },
{ - 17.5 , 2.096334836383932e-15 },
{ - 17.375 , 3.255160907158799e-15 },
{ - 17.25 , 6.096076782655566e-15 },
{ - 17.125 , 1.613099623974211e-14 },
{ - 16.875 , - 3.29939675642233e-14 },
{ - 16.75 , - 2.550301929218027e-14 },
{ - 16.625 , - 2.785289727849803e-14 },
{ - 16.5 , - 3.66858596367188e-14 },
{ - 16.375 , - 5.655842076188414e-14 },
{ - 16.25 , - 1.051573245008085e-13 },
{ - 16.125 , - 2.762433106055837e-13 },
{ - 15.875 , 5.567732026462681e-13 },
{ - 15.75 , 4.271755731440195e-13 },
{ - 15.625 , 4.630544172550298e-13 },
{ - 15.5 , 6.053166840058604e-13 },
{ - 15.375 , 9.261441399758529e-13 },
{ - 15.25 , 1.708806523138138e-12 },
{ - 15.125 , 4.454423383515037e-12 },
{ - 14.875 , - 8.838774592009505e-12 },
{ - 14.75 , - 6.728015277018307e-12 },
{ - 14.625 , - 7.235225269609841e-12 },
{ - 14.5 , - 9.382408602090835e-12 },
{ - 14.375 , - 1.423946615212874e-11 },
{ - 14.25 , - 2.605929947785661e-11 },
{ - 14.125 , - 6.737315367566492e-11 },
{ - 13.875 , 1.314767720561414e-10 },
{ - 13.75 , 9.923822533602004e-11 },
{ - 13.625 , 1.058151695680439e-10 },
{ - 13.5 , 1.360449247303171e-10 },
{ - 13.375 , 2.046923259368506e-10 },
{ - 13.25 , 3.713450175594567e-10 },
{ - 13.125 , 9.516457956687671e-10 },
{ - 12.875 , - 1.8242402122789617e-9 },
{ - 12.75 , - 1.3645255983702756e-9 },
{ - 12.625 , - 1.4417316853645984e-9 },
{ - 12.5 , - 1.836606483859281e-9 },
{ - 12.375 , - 2.7377598594053765e-9 },
{ - 12.25 , - 4.9203214826628017e-9 },
{ - 12.125 , - 1.2490351068152569e-8 },
{ - 11.875 , 2.3487092733091633e-8 },
{ - 11.75 , 1.7397701379221012e-8 },
{ - 11.625 , 1.8201862527728055e-8 },
{ - 11.5 , 2.295758104824101e-8 },
{ - 11.375 , 3.3879778260141535e-8 },
{ - 11.25 , 6.027393816261931e-8 },
{ - 11.125 , 1.5144550670134987e-7 },
{ - 10.875 , - 2.7890922620546316e-7 },
{ - 10.75 , - 2.044229912058469e-7 },
{ - 10.625 , - 2.1159665188483867e-7 },
{ - 10.5 , - 2.640121820547716e-7 },
{ - 10.375 , - 3.8538247770911e-7 },
{ - 10.25 , - 6.780818043294673e-7 },
{ - 10.125 , - 1.6848312620525174e-6 },
{ - 9.875 , 3.0331378349844124e-6 },
{ - 9.75 , 2.1975471554628537e-6 },
{ - 9.625 , 2.2482144262764103e-6 },
{ - 9.5 , 2.772127911575102e-6 },
{ - 9.375 , 3.998343206232017e-6 },
{ - 9.25 , 6.95033849437704e-6 },
{ - 9.125 , 1.7058916528281737e-5 },
{ - 8.875 , - 2.9952236120471065e-5 },
{ - 8.75 , - 2.1426084765762826e-5 },
{ - 8.625 , - 2.163906385291045e-5 },
{ - 8.5 , - 2.633521515996347e-5 },
{ - 8.375 , - 3.748446755842515e-5 },
{ - 8.25 , - 6.429063107298763e-5 },
{ - 8.125 , - 1.5566261332057085e-4 },
{ - 7.875 , 2.658260955691807e-4 },
{ - 7.75 , 1.874782417004247e-4 },
{ - 7.625 , 1.8663692573135265e-4 },
{ - 7.5 , 2.238493288596895e-4 },
{ - 7.375 , 3.1393241580181064e-4 },
{ - 7.25 , 5.303977063521479e-4 },
{ - 7.125 , .001264758733229638 },
{ - 6.875 , - .002093380502607298 },
{ - 6.75 , - .001452956373178292 },
{ - 6.625 , - .001423106558701564 },
{ - 6.5 , - .001678869966447671 },
{ - 6.375 , - .002315251566538353 },
{ - 6.25 , - .003845383371053072 },
{ - 6.125 , - .009011405974261174 },
{ - 5.875 , .01439199095542518 },
{ - 5.75 , .009807455518953468 },
{ - 5.625 , .009428080951397862 },
{ - 5.5 , .01091265478190986 },
{ - 5.375 , 0.014759728736682 },
{ - 5.25 , 0.0240336460690817 },
{ - 5.125 , .05519486159234969 },
{ - 4.875 , - 0.0845529468631229 },
{ - 4.75 , - .05639286923398244 },
{ - 4.625 , - .05303295535161297 },
{ - 4.5 , - .06001960130050425 },
{ - 4.375 , - .07933354195966577 },
{ - 4.25 , - .1261766418626789 },
{ - 4.125 , - .2828736656607921 },
{ - 3.875 , .4121956159577241 },
{ - 3.75 , .2678661288614166 },
{ - 3.625 , 0.24527741850121 },
{ - 3.5 , .2700882058522691 },
{ - 3.375 , .3470842460735378 },
{ - 3.25 , .5362507279163854 },
{ - 3.125 , 1.166853870850768 },
{ - 2.875 , - 1.597258011836181 },
{ - 2.75 , - 1.004497983230312 },
{ - 2.625 , - .8891306420668862 },
{ - 2.5 , - .9453087204829419 },
{ - 2.375 , - 1.17140933049819 },
{ - 2.25 , - 1.742814865728253 },
{ - 2.125 , - 3.646418346408649 },
{ - 1.875 , 4.59211678402902 },
{ - 1.75 , 2.762369453883359 },
{ - 1.625 , 2.333967935425576 },
{ - 1.5 , 2.363271801207355 },
{ - 1.375 , 2.782097159933201 },
{ - 1.25 , 3.921333447888569 },
{ - 1.125 , 7.748638986118379 },
{ - 0.875 , - 8.610218970054413 },
{ - 0.75 , - 4.834146544295877 },
{ - 0.625 , - 3.792697895066561 },
{ - 0.5 , - 3.544907701811032 },
{ - 0.375 , - 3.825383594908152 },
{ - 0.25 , - 4.901666809860711 },
{ - 0.125 , - 8.717218859383175 },
{ 0.125 , 7.533941598797612 },
{ 0.25 , 3.625609908221908 },
{ 0.375 , 2.370436184416601 },
{ 0.5 , 1.772453850905516 },
{ 0.625 , 1.434518848090557 },
{ 0.75 , 1.225416702465178 },
{ 0.875 , 1.089652357422897 },
{ 1.0 , 1.0 },
{ 1.125 , .9417426998497015 },
{ 1.25 , 0.906402477055477 },
{ 1.375 , .8889135691562253 },
{ 1.5 , 0.886226925452758 },
{ 1.625 , 0.896574280056598 },
{ 1.75 , .9190625268488832 },
{ 1.875 , .9534458127450348 },
{ 2.0 , 1.0 },
{ 2.125 , 1.059460537330914 },
{ 2.25 , 1.133003096319346 },
{ 2.375 , 1.22225615758981 },
{ 2.5 , 1.329340388179137 },
{ 2.625 , 1.456933205091972 },
{ 2.75 , 1.608359421985546 },
{ 2.875 , 1.78771089889694 },
{ 3.0 , 2.0 },
{ 3.125 , 2.251353641828193 },
{ 3.25 , 2.549256966718529 },
{ 3.375 , 2.902858374275799 },
{ 3.5 , 3.323350970447843 },
{ 3.625 , 3.824449663366426 },
{ 3.75 , 4.422988410460251 },
{ 3.875 , 5.139668834328703 },
{ 4.0 , 6.0 },
{ 4.125 , 7.035480130713102 },
{ 4.25 , 8.28508514183522 },
{ 4.375 , 9.797147013180819 },
{ 4.5 , 11.63172839656745 },
{ 4.625 , 13.86363002970329 },
{ 4.75 , 16.58620653922594 },
{ 4.875 , 19.91621673302373 },
{ 5.0 , 24.0 },
{ 5.125 , 29.02135553919155 },
{ 5.25 , 35.21161185279968 },
{ 5.375 , 42.86251818266609 },
{ 5.5 , 52.34277778455352 },
{ 5.625 , 64.11928888737773 },
{ 5.75 , 78.78448106132322 },
{ 5.875 , 97.09155657349066 },
{ 6.0 , 120.0 },
{ 6.125 , 148.7344471383567 },
{ 6.25 , 184.8609622271983 },
{ 6.375 , 230.3860352318302 },
{ 6.5 , 287.8852778150443 },
{ 6.625 , 360.6709999914997 },
{ 6.75 , 453.0107661026085 },
{ 6.875 , 570.4128948692577 },
{ 7.0 , 720.0 },
{ 7.125 , 910.9984887224346 },
{ 7.25 , 1155.38101391999 },
{ 7.375 , 1468.710974602918 },
{ 7.5 , 1871.254305797788 },
{ 7.625 , 2389.445374943686 },
{ 7.75 , 3057.822671192607 },
{ 7.875 , 3921.588652226146 },
{ 8.0 , 5040.0 },
{ 8.125 , 6490.864232147346 },
{ 8.25 , 8376.512350919926 },
{ 8.375 , 10831.74343769652 },
{ 8.5 , 14034.40729348341 },
{ 8.625 , 18219.5209839456 },
{ 8.75 , 23698.12570174271 },
{ 8.875 , 30882.5106362809 },
{ 9.0 , 40320.0 },
{ 9.125 , 52738.27188619719 },
{ 9.25 , 69106.22689508938 },
{ 9.375 , 90715.85129070834 },
{ 9.5 , 119292.461994609 },
{ 9.625 , 157143.3684865308 },
{ 9.75 , 207358.5998902487 },
{ 9.875 , 274082.281896993 },
{ 10.0 , 362880.0 },
{ 10.125 , 481236.7309615494 },
{ 10.25 , 639232.5987795768 },
{ 10.375 , 850461.1058503906 },
{ 10.5 , 1133278.388948786 },
{ 10.625 , 1512504.921682859 },
{ 10.75 , 2021746.348929925 },
{ 10.875 , 2706562.533732806 },
{ 11.0 , 3628800.0 },
{ 11.125 , 4872521.900985687 },
{ 11.25 , 6552134.137490662 },
{ 11.375 , 8823533.973197803 },
{ 11.5 , 1.1899423083962249e+7 },
{ 11.625 , 1.6070364792880382e+7 },
{ 11.75 , 2.1733773250996688e+7 },
{ 11.875 , 2.943386755434427e+7 },
{ 12.0 , 3.99168e+7 },
{ 12.125 , 5.420680614846578e+7 },
{ 12.25 , 7.371150904676994e+7 },
{ 12.375 , 1.0036769894512501e+8 },
{ 12.5 , 1.3684336546556586e+8 },
{ 12.625 , 1.8681799071723443e+8 },
{ 12.75 , 2.5537183569921107e+8 },
{ 12.875 , 3.4952717720783816e+8 },
{ 13.0 , 4.790016e+8 },
{ 13.125 , 6.572575245501475e+8 },
{ 13.25 , 9.029659858229319e+8 },
{ 13.375 , 1.2420502744459219e+9 },
{ 13.5 , 1.7105420683195732e+9 },
{ 13.625 , 2.3585771328050845e+9 },
{ 13.75 , 3.2559909051649416e+9 },
{ 13.875 , 4.500162406550916e+9 },
{ 14.0 , 6.2270208e+9 },
{ 14.125 , 8.626505009720685e+9 },
{ 14.25 , 1.196429931215385e+10 },
{ 14.375 , 1.66124224207142e+10 },
{ 14.5 , 2.309231792231424e+10 },
{ 14.625 , 3.213561343446927e+10 },
{ 14.75 , 4.476987494601794e+10 },
{ 14.875 , 6.243975339089396e+10 },
{ 15.0 , 8.71782912e+10 },
{ 15.125 , 1.218493832623047e+11 },
{ 15.25 , 1.704912651981923e+11 },
{ 15.375 , 2.388035722977667e+11 },
{ 15.5 , 3.348386098735565e+11 },
{ 15.625 , 4.699833464791132e+11 },
{ 15.75 , 6.603556554537646e+11 },
{ 15.875 , 9.287913316895475e+11 },
{ 16.0 , 1.307674368e+12 },
{ 16.125 , 1.842971921842358e+12 },
{ 16.25 , 2.599991794272433e+12 },
{ 16.375 , 3.671604924078163e+12 },
{ 16.5 , 5.189998453040126e+12 },
{ 16.625 , 7.343489788736144e+12 },
{ 16.75 , 1.040060157339679e+13 },
{ 16.875 , 1.474456239057157e+13 },
{ 17.0 , 2.0922789888e+13 },
{ 17.125 , 2.971792223970803e+13 },
{ 17.25 , 4.224986665692704e+13 },
{ 17.375 , 6.012253063177992e+13 },
{ 17.5 , 8.563497447516206e+13 },
{ 17.625 , 1.220855177377384e+14 },
{ 17.75 , 1.742100763543963e+14 },
{ 17.875 , 2.488144903408952e+14 },
{ 18.0 , 3.55687428096e+14 },
{ 18.125 , 5.08919418355e+14 },
{ 18.25 , 7.288101998319914e+14 },
{ 18.375 , 1.044628969727176e+15 },
{ 18.5 , 1.498612053315336e+15 },
{ 18.625 , 2.151757250127639e+15 },
{ 18.75 , 3.092228855290534e+15 },
{ 18.875 , 4.447559014843502e+15 },
{ 19.0 , 6.402373705728e+15 },
{ 19.125 , 9.224164457684374e+15 },
{ 19.25 , 1.330078614693384e+16 },
{ 19.375 , 1.919505731873686e+16 },
{ 19.5 , 2.772432298633372e+16 },
{ 19.625 , 4.007647878362728e+16 },
{ 19.75 , 5.797929103669752e+16 },
{ 19.875 , 8.39476764051711e+16 },
{ 20.0 , 1.21645100408832e+17 },
{ 20.5 , 5.406242982335075e+17 },
{ 21.0 , 2.43290200817664e+18 },
{ 21.5 , 1.10827981137869e+19 },
{ 22.0 , 5.109094217170944e+19 },
{ 22.5 , 2.382801594464184e+20 },
{ 23.0 , 1.124000727777608e+21 },
{ 23.5 , 5.361303587544415e+21 },
{ 24.0 , 2.585201673888498e+22 },
{ 24.5 , 1.259906343072938e+23 },
{ 25.0 , 6.204484017332395e+23 },
{ 25.5 , 3.086770540528697e+24 },
{ 26.0 , 1.551121004333099e+25 },
{ 26.5 , 7.871264878348176e+25 },
{ 27.0 , 4.032914611266056e+26 },
{ 27.5 , 2.085885192762267e+27 },
{ 28.0 , 1.088886945041835e+28 },
{ 28.5 , 5.736184280096234e+28 },
{ 29.0 , 3.048883446117139e+29 },
{ 29.5 , 1.634812519827427e+30 },
{ 30.0 , 8.841761993739702e+30 },
{ 30.5 , 4.822696933490909e+31 },
{ 31.0 , 2.65252859812191e+32 },
{ 31.5 , 1.470922564714727e+33 },
{ 32.0 , 8.222838654177922e+33 },
{ 32.5 , 4.633406078851391e+34 },
{ 33.0 , 2.631308369336935e+35 },
{ 33.5 , 1.505856975626702e+36 },
{ 34.0 , 8.683317618811885e+36 },
{ 34.5 , 5.044620868349451e+37 },
{ 35.0 , 2.952327990396041e+38 },
{ 35.5 , 1.740394199580561e+39 },
{ 36.0 , 1.033314796638614e+40 },
{ 36.5 , 6.178399408510991e+40 },
{ 37.0 , 3.719933267899013e+41 },
{ 37.5 , 2.255115784106512e+42 },
{ 38.0 , 1.376375309122634e+43 },
{ 38.5 , 8.456684190399419e+43 },
{ 39.0 , 5.230226174666011e+44 },
{ 39.5 , 3.255823413303776e+45 },
{ 40.0 , 2.039788208119745e+46 },
{ 40.5 , 1.286050248254992e+47 },
{ 41.0 , 8.159152832478976e+47 },
{ 41.5 , 5.208503505432716e+48 },
{ 42.0 , 3.345252661316381e+49 },
{ 42.5 , 2.161528954754577e+50 },
{ 43.0 , 1.40500611775288e+51 },
{ 43.5 , 9.186498057706953e+51 },
{ 44.0 , 6.041526306337383e+52 },
{ 44.5 , 3.996126655102524e+53 },
{ 45.0 , 2.658271574788449e+54 },
{ 45.5 , 1.778276361520623e+55 },
{ 46.0 , 1.196222208654802e+56 },
{ 46.5 , 8.091157444918836e+56 },
{ 47.0 , 5.502622159812088e+57 },
{ 47.5 , 3.762388211887259e+58 },
{ 48.0 , 2.586232415111682e+59 },
{ 48.5 , 1.787134400646448e+60 },
{ 49.0 , 1.241391559253607e+61 },
{ 49.5 , 8.667601843135273e+61 },
{ 50.0 , 6.082818640342675e+62 },
{ 50.5 , 4.290462912351959e+63 },
{ 51.0 , 3.041409320171338e+64 },
{ 51.5 , 2.16668377073774e+65 },
{ 52.0 , 1.551118753287382e+66 },
{ 52.5 , 1.115842141929936e+67 },
{ 53.0 , 8.065817517094388e+67 },
{ 53.5 , 5.858171245132164e+68 },
{ 54.0 , 4.274883284060025e+69 },
{ 54.5 , 3.134121616145708e+70 },
{ 55.0 , 2.308436973392413e+71 },
{ 55.5 , 1.70809628079941e+72 },
{ 56.0 , 1.269640335365828e+73 },
{ 56.5 , 9.479934358436728e+73 },
{ 57.0 , 7.109985878048635e+74 },
{ 57.5 , 5.356162912516752e+75 },
{ 58.0 , 4.052691950487721e+76 },
{ 58.5 , 3.079793674697132e+77 },
{ 59.0 , 2.350561331282878e+78 },
{ 59.5 , 1.801679299697822e+79 },
{ 60.0 , 1.386831185456898e+80 },
{ 60.5 , 1.071999183320204e+81 },
{ 61.0 , 8.320987112741391e+81 },
{ 61.5 , 6.485595059087236e+82 },
{ 62.0 , 5.075802138772249e+83 },
{ 62.5 , 3.98864096133865e+84 },
{ 63.0 , 3.146997326038794e+85 },
{ 63.5 , 2.492900600836656e+86 },
{ 64.0 , 1.98260831540444e+87 },
{ 64.5 , 1.582991881531277e+88 },
{ 65.0 , 1.268869321858841e+89 },
{ 65.5 , 1.021029763587673e+90 },
{ 66.0 , 8.247650592082472e+90 },
{ 66.5 , 6.687744951499262e+91 },
{ 67.0 , 5.443449390774431e+92 },
{ 67.5 , 4.447350392747009e+93 },
{ 68.0 , 3.647111091818868e+94 },
{ 68.5 , 3.001961515104231e+95 },
{ 69.0 , 2.48003554243683e+96 },
{ 69.5 , 2.056343637846398e+97 },
{ 70.0 , 1.711224524281413e+98 },
{ 70.5 , 1.429158828303247e+99 },
{ 71.0 , 1.19785716699699e+100 },
{ 71.5 , 1.00755697395379e+101 },
{ 72.0 , 8.50478588567862e+101 },
{ 72.5 , 7.20403236376959e+102 },
{ 73.0 , 6.12344583768861e+103 },
{ 73.5 , 5.22292346373295e+104 },
{ 74.0 , 4.47011546151268e+105 },
{ 74.5 , 3.83884874584372e+106 },
{ 75.0 , 3.30788544151939e+107 },
{ 75.5 , 2.85994231565357e+108 },
{ 76.0 , 2.48091408113954e+109 },
{ 76.5 , 2.15925644831845e+110 },
{ 77.0 , 1.88549470166605e+111 },
{ 77.5 , 1.65183118296361e+112 },
{ 78.0 , 1.45183092028286e+113 },
{ 78.5 , 1.2801691667968e+114 },
{ 79.0 , 1.13242811782063e+115 },
{ 79.5 , 1.00493279593549e+116 },
{ 80.0 , 8.94618213078298e+116 },
{ 80.5 , 7.98921572768712e+117 },
{ 81.0 , 7.15694570462638e+118 },
{ 81.5 , 6.43131866078814e+119 },
{ 82.0 , 5.79712602074737e+120 },
{ 82.5 , 5.24152470854233e+121 },
{ 83.0 , 4.75364333701284e+122 },
{ 83.5 , 4.32425788454742e+123 },
{ 84.0 , 3.94552396972066e+124 },
{ 84.5 , 3.6107553335971e+125 },
{ 85.0 , 3.31424013456535e+126 },
{ 85.5 , 3.05108825688955e+127 },
{ 86.0 , 2.81710411438055e+128 },
{ 86.5 , 2.60868045964056e+129 },
{ 87.0 , 2.42270953836727e+130 },
{ 87.5 , 2.25650859758909e+131 },
{ 88.0 , 2.10775729837953e+132 },
{ 88.5 , 1.97444502289045e+133 },
{ 89.0 , 1.85482642257398e+134 },
{ 89.5 , 1.74738384525805e+135 },
{ 90.0 , 1.65079551609085e+136 },
{ 90.5 , 1.56390854150595e+137 },
{ 91.0 , 1.48571596448176e+138 },
{ 91.5 , 1.41533723006289e+139 },
{ 92.0 , 1.3520015276784e+140 },
{ 92.5 , 1.29503356550754e+141 },
{ 93.0 , 1.24384140546413e+142 },
{ 93.5 , 1.19790604809448e+143 },
{ 94.0 , 1.15677250708164e+144 },
{ 94.5 , 1.12004215496834e+145 },
{ 95.0 , 1.08736615665674e+146 },
{ 95.5 , 1.05843983644508e+147 },
{ 96.0 , 1.03299784882391e+148 },
{ 96.5 , 1.01081004380505e+149 },
{ 97.0 , 9.9167793487095e+149 },
{ 97.5 , 9.75431692271873e+150 },
{ 98.0 , 9.61927596824821e+151 },
{ 98.5 , 9.51045899965076e+152 },
{ 99.0 , 9.42689044888325e+153 },
{ 99.5 , 9.367802114656e+154 },
{ 100.0 , 9.33262154439441e+155 },
};
@Test
public void testGamma() {
    // Compare Gamma.gamma(x) against the precomputed reference table, with a
    // ULP budget that grows with |x| (the result magnitude, and hence the
    // accumulated rounding error, grows rapidly with the argument).
    for (final double[] testCase : GAMMA_REF) {
        final double x = testCase[0];
        final double expected = testCase[1];
        final double absX = FastMath.abs(x);
        final int ulps = absX <= 8.0 ? 3
                : absX <= 20.0 ? 5
                : absX <= 30.0 ? 50
                : absX <= 50.0 ? 180
                : 500;
        Assert.assertEquals(Double.toString(x),
                            expected,
                            Gamma.gamma(x),
                            ulps * FastMath.ulp(expected));
    }
}
@Test
public void testGammaNegativeInteger() {
    // The Gamma function has poles at every non-positive integer; the
    // implementation is expected to report NaN at each of them.
    for (int n = 0; n >= -100; n--) {
        Assert.assertTrue(Integer.toString(n), Double.isNaN(Gamma.gamma(n)));
    }
}
/**
 * Asserts that {@code actual} equals {@code expected} to within the given
 * relative tolerance.
 *
 * <p>The allowed absolute deviation is {@code |tolerance * expected|}. The
 * previous version scaled the tolerance by {@code actual}, which anchors the
 * acceptance window to the value under test: a wildly-too-large actual value
 * would widen its own tolerance and could mask a failure. Relative error must
 * be measured against the trusted reference value.</p>
 *
 * @param msg       assertion message reported on failure
 * @param expected  trusted reference value
 * @param actual    value under test
 * @param tolerance maximum allowed relative error
 */
private void checkRelativeError(String msg, double expected, double actual,
                                double tolerance) {
    Assert.assertEquals(msg, expected, actual, FastMath.abs(tolerance * expected));
}
}
| |
/*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.storagegateway.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
* <p>
* DescribeTapeRecoveryPointsOutput
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/storagegateway-2013-06-30/DescribeTapeRecoveryPoints"
* target="_top">AWS API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DescribeTapeRecoveryPointsResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    private String gatewayARN;

    /**
     * <p>
     * An array of TapeRecoveryPointInfos that are available for the specified gateway.
     * </p>
     */
    private com.amazonaws.internal.SdkInternalList<TapeRecoveryPointInfo> tapeRecoveryPointInfos;

    /**
     * <p>
     * An opaque string that indicates the position at which the virtual tape recovery points that were listed for
     * description ended.
     * </p>
     * <p>
     * Use this marker in your next request to list the next set of virtual tape recovery points in the list. If there
     * are no more recovery points to describe, this field does not appear in the response.
     * </p>
     */
    private String marker;

    /**
     * Sets the gateway ARN.
     *
     * @param gatewayARN
     */
    public void setGatewayARN(String gatewayARN) {
        this.gatewayARN = gatewayARN;
    }

    /**
     * Returns the gateway ARN.
     *
     * @return
     */
    public String getGatewayARN() {
        return gatewayARN;
    }

    /**
     * Fluent variant of {@link #setGatewayARN(String)}.
     *
     * @param gatewayARN
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeTapeRecoveryPointsResult withGatewayARN(String gatewayARN) {
        this.gatewayARN = gatewayARN;
        return this;
    }

    /**
     * Returns the recovery points available for the specified gateway. The backing list is created lazily the first
     * time this accessor is invoked, so the return value is never {@code null}.
     *
     * @return An array of TapeRecoveryPointInfos that are available for the specified gateway.
     */
    public java.util.List<TapeRecoveryPointInfo> getTapeRecoveryPointInfos() {
        if (tapeRecoveryPointInfos == null) {
            tapeRecoveryPointInfos = new com.amazonaws.internal.SdkInternalList<TapeRecoveryPointInfo>();
        }
        return tapeRecoveryPointInfos;
    }

    /**
     * Replaces the recovery-point list with a defensive copy of the supplied collection, or clears it when given
     * {@code null}.
     *
     * @param tapeRecoveryPointInfos
     *        An array of TapeRecoveryPointInfos that are available for the specified gateway.
     */
    public void setTapeRecoveryPointInfos(java.util.Collection<TapeRecoveryPointInfo> tapeRecoveryPointInfos) {
        this.tapeRecoveryPointInfos = (tapeRecoveryPointInfos == null)
                ? null
                : new com.amazonaws.internal.SdkInternalList<TapeRecoveryPointInfo>(tapeRecoveryPointInfos);
    }

    /**
     * <p>
     * Appends the given recovery points to the existing list (if any).
     * </p>
     * <p>
     * <b>NOTE:</b> Use {@link #setTapeRecoveryPointInfos(java.util.Collection)} or
     * {@link #withTapeRecoveryPointInfos(java.util.Collection)} if you want to override the existing values.
     * </p>
     *
     * @param tapeRecoveryPointInfos
     *        An array of TapeRecoveryPointInfos that are available for the specified gateway.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeTapeRecoveryPointsResult withTapeRecoveryPointInfos(TapeRecoveryPointInfo... tapeRecoveryPointInfos) {
        if (this.tapeRecoveryPointInfos == null) {
            this.tapeRecoveryPointInfos = new com.amazonaws.internal.SdkInternalList<TapeRecoveryPointInfo>(tapeRecoveryPointInfos.length);
        }
        java.util.Collections.addAll(this.tapeRecoveryPointInfos, tapeRecoveryPointInfos);
        return this;
    }

    /**
     * Fluent variant of {@link #setTapeRecoveryPointInfos(java.util.Collection)}; overrides any existing values.
     *
     * @param tapeRecoveryPointInfos
     *        An array of TapeRecoveryPointInfos that are available for the specified gateway.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeTapeRecoveryPointsResult withTapeRecoveryPointInfos(java.util.Collection<TapeRecoveryPointInfo> tapeRecoveryPointInfos) {
        setTapeRecoveryPointInfos(tapeRecoveryPointInfos);
        return this;
    }

    /**
     * Sets the pagination marker: an opaque string that indicates the position at which the listed virtual tape
     * recovery points ended. Use it in the next request to fetch the next page; it is absent when there are no more
     * recovery points to describe.
     *
     * @param marker
     */
    public void setMarker(String marker) {
        this.marker = marker;
    }

    /**
     * Returns the pagination marker, or {@code null} when the response carried none (i.e. no further pages).
     *
     * @return the opaque pagination marker
     */
    public String getMarker() {
        return marker;
    }

    /**
     * Fluent variant of {@link #setMarker(String)}.
     *
     * @param marker
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeTapeRecoveryPointsResult withMarker(String marker) {
        this.marker = marker;
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Format intentionally matches the generated SDK convention:
        // "{Field: value,Field: value}" with no trailing comma on the last field.
        final StringBuilder sb = new StringBuilder("{");
        if (getGatewayARN() != null) {
            sb.append("GatewayARN: ").append(getGatewayARN()).append(",");
        }
        if (getTapeRecoveryPointInfos() != null) {
            sb.append("TapeRecoveryPointInfos: ").append(getTapeRecoveryPointInfos()).append(",");
        }
        if (getMarker() != null) {
            sb.append("Marker: ").append(getMarker());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof DescribeTapeRecoveryPointsResult)) {
            return false;
        }
        final DescribeTapeRecoveryPointsResult other = (DescribeTapeRecoveryPointsResult) obj;
        return java.util.Objects.equals(other.getGatewayARN(), this.getGatewayARN())
                && java.util.Objects.equals(other.getTapeRecoveryPointInfos(), this.getTapeRecoveryPointInfos())
                && java.util.Objects.equals(other.getMarker(), this.getMarker());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based accumulation (seed 1, null -> 0)
        // as the hand-rolled prime loop in the generated original, so hash
        // values are unchanged.
        return java.util.Objects.hash(getGatewayARN(), getTapeRecoveryPointInfos(), getMarker());
    }

    @Override
    public DescribeTapeRecoveryPointsResult clone() {
        try {
            return (DescribeTapeRecoveryPointsResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.rollup.action;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.search.MultiSearchRequest;
import org.elasticsearch.action.search.MultiSearchResponse;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.TransportAction;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.BoostingQueryBuilder;
import org.elasticsearch.index.query.ConstantScoreQueryBuilder;
import org.elasticsearch.index.query.DisMaxQueryBuilder;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.RangeQueryBuilder;
import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.index.query.TermsQueryBuilder;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportChannel;
import org.elasticsearch.transport.TransportRequestHandler;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.rollup.RollupField;
import org.elasticsearch.xpack.core.rollup.action.RollupJobCaps;
import org.elasticsearch.xpack.core.rollup.action.RollupSearchAction;
import org.elasticsearch.xpack.rollup.Rollup;
import org.elasticsearch.xpack.rollup.RollupJobIdentifierUtils;
import org.elasticsearch.xpack.rollup.RollupRequestTranslator;
import org.elasticsearch.xpack.rollup.RollupResponseTranslator;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
public class TransportRollupSearchAction extends TransportAction<SearchRequest, SearchResponse> {
// Client used to fan out the translated msearch.
private final Client client;
// Registry needed to round-trip SearchSourceBuilder through the wire format.
private final NamedWriteableRegistry registry;
private final BigArrays bigArrays;
private final ScriptService scriptService;
private final ClusterService clusterService;
private final IndexNameExpressionResolver resolver;
// Bug fix: the logger was keyed to RollupSearchAction.class, which mislabels
// this class's log output and routes it under the wrong logger name for
// per-class log-level configuration. Key it to the declaring class instead.
private static final Logger logger = LogManager.getLogger(TransportRollupSearchAction.class);
/**
 * Injected constructor: wires the dependencies used to translate, execute and
 * un-roll rollup searches, and registers a transport-level handler so the
 * action can also be invoked directly over the transport layer.
 */
@Inject
public TransportRollupSearchAction(TransportService transportService,
                                   ActionFilters actionFilters, Client client, NamedWriteableRegistry registry, BigArrays bigArrays,
                                   ScriptService scriptService, ClusterService clusterService, IndexNameExpressionResolver resolver) {
    super(RollupSearchAction.NAME, actionFilters, transportService.getTaskManager());
    this.client = client;
    this.registry = registry;
    this.bigArrays = bigArrays;
    this.scriptService = scriptService;
    this.clusterService = clusterService;
    this.resolver = resolver;
    // Register a handler under this action's name so other nodes can forward
    // rollup-search requests here. ThreadPool.Names.SAME is sufficient because
    // the handler only kicks off an async msearch and returns immediately.
    transportService.registerRequestHandler(actionName, ThreadPool.Names.SAME, false, true, SearchRequest::new,
        new TransportRollupSearchAction.TransportHandler());
}
/**
 * Entry point: resolves the requested indices, splits them into "live" (raw)
 * and rollup indices, translates the search into a multi-search (one request
 * per target), executes it, and merges the responses back into a single
 * SearchResponse for the caller.
 */
@Override
protected void doExecute(Task task, SearchRequest request, ActionListener<SearchResponse> listener) {
    // Resolve index expressions (wildcards etc.) against current cluster state.
    String[] indices = resolver.concreteIndexNames(clusterService.state(), request.indicesOptions(), request.indices());
    // Partition the concrete indices into live vs. rollup, capturing job caps.
    RollupSearchContext rollupSearchContext = separateIndices(indices, clusterService.state().getMetadata().indices());
    MultiSearchRequest msearch = createMSearchRequest(request, registry, rollupSearchContext);
    client.multiSearch(msearch, ActionListener.wrap(msearchResponse -> {
        // Partial-reduction context: the response translator performs the
        // final unrolling/combining of rollup aggregations.
        InternalAggregation.ReduceContext context = InternalAggregation.ReduceContext.forPartialReduction(
            bigArrays, scriptService, () -> PipelineAggregator.PipelineTree.EMPTY);
        listener.onResponse(processResponses(rollupSearchContext, msearchResponse, context));
    }, listener::onFailure));
}
/**
 * Merges the msearch responses back into a single SearchResponse, choosing the
 * translation strategy based on which kinds of indices were searched: combine
 * (live + rollup), verify/pass-through (live only) or translate (rollup only).
 */
static SearchResponse processResponses(RollupSearchContext rollupContext, MultiSearchResponse msearchResponse,
                                       InternalAggregation.ReduceContext reduceContext) throws Exception {
    final boolean hasLive = rollupContext.hasLiveIndices();
    final boolean hasRollup = rollupContext.hasRollupIndices();
    if (hasLive && hasRollup) {
        // Mixed case: merge the raw and unrolled responses together.
        return RollupResponseTranslator.combineResponses(msearchResponse.getResponses(), reduceContext);
    }
    if (hasLive) {
        // Live only: a single plain search was issued; just sanity-check it.
        assert msearchResponse.getResponses().length == 1;
        return RollupResponseTranslator.verifyResponse(msearchResponse.getResponses()[0]);
    }
    if (hasRollup) {
        // Rollup only: unroll the aggregations into plain-search shape.
        return RollupResponseTranslator.translateResponse(msearchResponse.getResponses(), reduceContext);
    }
    throw new RuntimeException("MSearch response was empty, cannot unroll RollupSearch results");
}
/**
 * Translates the user's SearchRequest into a MultiSearchRequest: the original
 * request (if live indices are present) plus one translated request per
 * matching rollup job. Throws IllegalArgumentException when no indices remain
 * after separation or when the request uses features rollup cannot support.
 */
static MultiSearchRequest createMSearchRequest(SearchRequest request, NamedWriteableRegistry registry, RollupSearchContext context) {
    if (context.hasLiveIndices() == false && context.hasRollupIndices() == false) {
        // Don't support _all on everything right now, for code simplicity
        throw new IllegalArgumentException("Must specify at least one rollup index in _rollup_search API");
    } else if (context.hasLiveIndices() && context.hasRollupIndices() == false) {
        // not best practice, but if the user accidentally only sends "normal" indices we can support that
        logger.debug("Creating msearch with only normal request");
        final SearchRequest originalRequest = new SearchRequest(context.getLiveIndices(), request.source());
        return new MultiSearchRequest().add(originalRequest);
    }
    // Rollup only supports a limited subset of the search API, validate and make sure
    // nothing is set that we can't support
    validateSearchRequest(request);
    // The original request is added as-is (if normal indices exist), minus the rollup indices
    final SearchRequest originalRequest = new SearchRequest(context.getLiveIndices(), request.source());
    MultiSearchRequest msearch = new MultiSearchRequest();
    if (context.hasLiveIndices()) {
        msearch.add(originalRequest);
    }
    // The rollup side never returns hits, only aggregations (size: 0).
    SearchSourceBuilder rolledSearchSource = new SearchSourceBuilder();
    rolledSearchSource.size(0);
    AggregatorFactories.Builder sourceAgg = request.source().aggregations();
    // If there are no aggs in the request, our translation won't create any msearch.
    // So just add an dummy request to the msearch and return. This is a bit silly
    // but maintains how the regular search API behaves
    if (sourceAgg == null || sourceAgg.count() == 0) {
        // Note: we can't apply any query rewriting or filtering on the query because there
        // are no validated caps, so we have no idea what job is intended here. The only thing
        // this affects is doc count, since hits and aggs will both be empty it doesn't really matter.
        msearch.add(new SearchRequest(context.getRollupIndices(), request.source()));
        return msearch;
    }
    // Find our list of "best" job caps
    Set<RollupJobCaps> validatedCaps = new HashSet<>();
    sourceAgg.getAggregatorFactories()
        .forEach(agg -> validatedCaps.addAll(RollupJobIdentifierUtils.findBestJobs(agg, context.getJobCaps())));
    List<String> jobIds = validatedCaps.stream().map(RollupJobCaps::getJobID).collect(Collectors.toList());
    // Translate each top-level agg into its rollup equivalent, wrapped in a
    // filter agg (kept for backwards compatibility; see TODO below).
    for (AggregationBuilder agg : sourceAgg.getAggregatorFactories()) {
        // TODO this filter agg is now redundant given we filter on job ID
        // in the query and the translator doesn't add any clauses anymore
        List<QueryBuilder> filterConditions = new ArrayList<>(5);
        // Translate the agg tree, and collect any potential filtering clauses
        List<AggregationBuilder> translatedAgg = RollupRequestTranslator.translateAggregation(agg, registry);
        BoolQueryBuilder boolQuery = new BoolQueryBuilder();
        filterConditions.forEach(boolQuery::must);
        FilterAggregationBuilder filterAgg = new FilterAggregationBuilder(RollupField.FILTER + "_" + agg.getName(),
            boolQuery);
        translatedAgg.forEach(filterAgg::subAggregation);
        rolledSearchSource.aggregation(filterAgg);
    }
    // Rewrite the user's query to our internal conventions, checking against the validated job caps
    QueryBuilder rewritten = rewriteQuery(request.source().query(), validatedCaps);
    for (String id : jobIds) {
        // Deep-copy the translated source so each job's request can carry its
        // own job-ID filter without sharing mutable builder state.
        SearchSourceBuilder copiedSource;
        try {
            copiedSource = copyWriteable(rolledSearchSource, registry, SearchSourceBuilder::new);
        } catch (IOException e) {
            throw new RuntimeException("Encountered IO exception while trying to build rollup request.", e);
        }
        // filter the rewritten query by JobID
        copiedSource.query(new BoolQueryBuilder()
            .must(rewritten)
            .filter(new TermQueryBuilder(RollupField.formatMetaField(RollupField.ID.getPreferredName()), id))
            // Both versions are acceptable right now since they are compatible at search time
            .filter(new TermsQueryBuilder(RollupField.formatMetaField(RollupField.VERSION_FIELD),
                new long[]{Rollup.ROLLUP_VERSION_V1, Rollup.ROLLUP_VERSION_V2})));
        // And add a new msearch per JobID
        msearch.add(new SearchRequest(context.getRollupIndices(), copiedSource));
    }
    return msearch;
}
/**
 * Lifted from ESTestCase :s Don't reuse this anywhere!
 *
 * Deep-copies a {@link SearchSourceBuilder} by serializing it into a
 * {@link BytesStreamOutput} and reading it back through the supplied
 * {@link Writeable.Reader}. The input stream is wrapped in a
 * {@link NamedWriteableAwareStreamInput} because deserializing the source may
 * require looking up named writeables in the provided registry.
 */
private static SearchSourceBuilder copyWriteable(SearchSourceBuilder original, NamedWriteableRegistry namedWriteableRegistry,
                                                 Writeable.Reader<SearchSourceBuilder> reader) throws IOException {
    try (BytesStreamOutput output = new BytesStreamOutput()) {
        output.setVersion(Version.CURRENT);
        original.writeTo(output);
        try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), namedWriteableRegistry)) {
            in.setVersion(Version.CURRENT);
            return reader.read(in);
        }
    }
}
/**
 * Rejects search features that rollup search cannot honor, throwing
 * IllegalArgumentException for the first unsupported feature found. Rollup
 * documents are aggregate summaries, so hit-oriented features (hits, post
 * filter, suggest, highlight, profile, explain) are all unavailable.
 */
static void validateSearchRequest(SearchRequest request) {
    final SearchSourceBuilder source = request.source();
    if (source.size() != 0) {
        throw new IllegalArgumentException("Rollup does not support returning search hits, please try again " +
            "with [size: 0].");
    }
    if (source.postFilter() != null) {
        throw new IllegalArgumentException("Rollup search does not support post filtering.");
    }
    if (source.suggest() != null) {
        throw new IllegalArgumentException("Rollup search does not support suggestors.");
    }
    if (source.highlighter() != null) {
        throw new IllegalArgumentException("Rollup search does not support highlighting.");
    }
    if (source.profile()) {
        throw new IllegalArgumentException("Rollup search does not support profiling at the moment.");
    }
    // explain() returns a nullable Boolean; reject only an explicit true.
    if (Boolean.TRUE.equals(source.explain())) {
        throw new IllegalArgumentException("Rollup search does not support explaining.");
    }
}
/**
 * Recursively rewrites a user-supplied query into rollup conventions: compound
 * queries (bool, constant_score, boosting, dis_max) are rebuilt around
 * rewritten children, leaf queries (range, term, terms) have their field name
 * mapped to the rollup document naming scheme via {@link #rewriteFieldName},
 * and match_all passes through unchanged.
 *
 * @param builder the query to rewrite; {@code null} is treated as match-all
 * @param jobCaps validated job capabilities used to resolve field names
 * @return the rewritten query
 * @throws IllegalArgumentException if the query type is unsupported, or a
 *         field cannot be resolved against the selected rollup jobs
 */
static QueryBuilder rewriteQuery(QueryBuilder builder, Set<RollupJobCaps> jobCaps) {
    if (builder == null) {
        return new MatchAllQueryBuilder();
    }
    if (builder.getWriteableName().equals(BoolQueryBuilder.NAME)) {
        BoolQueryBuilder rewrittenBool = new BoolQueryBuilder();
        ((BoolQueryBuilder)builder).must().forEach(query -> rewrittenBool.must(rewriteQuery(query, jobCaps)));
        ((BoolQueryBuilder)builder).mustNot().forEach(query -> rewrittenBool.mustNot(rewriteQuery(query, jobCaps)));
        ((BoolQueryBuilder)builder).should().forEach(query -> rewrittenBool.should(rewriteQuery(query, jobCaps)));
        ((BoolQueryBuilder)builder).filter().forEach(query -> rewrittenBool.filter(rewriteQuery(query, jobCaps)));
        return rewrittenBool;
    } else if (builder.getWriteableName().equals(ConstantScoreQueryBuilder.NAME)) {
        return new ConstantScoreQueryBuilder(rewriteQuery(((ConstantScoreQueryBuilder)builder).innerQuery(), jobCaps));
    } else if (builder.getWriteableName().equals(BoostingQueryBuilder.NAME)) {
        return new BoostingQueryBuilder(rewriteQuery(((BoostingQueryBuilder)builder).negativeQuery(), jobCaps),
            rewriteQuery(((BoostingQueryBuilder)builder).positiveQuery(), jobCaps));
    } else if (builder.getWriteableName().equals(DisMaxQueryBuilder.NAME)) {
        DisMaxQueryBuilder rewritten = new DisMaxQueryBuilder();
        ((DisMaxQueryBuilder) builder).innerQueries().forEach(query -> rewritten.add(rewriteQuery(query, jobCaps)));
        return rewritten;
    } else if (builder.getWriteableName().equals(RangeQueryBuilder.NAME)) {
        RangeQueryBuilder range = (RangeQueryBuilder) builder;
        String fieldName = range.fieldName();
        String rewrittenFieldName = rewriteFieldName(jobCaps, RangeQueryBuilder.NAME, fieldName);
        RangeQueryBuilder rewritten = new RangeQueryBuilder(rewrittenFieldName)
            .from(range.from())
            .to(range.to())
            .includeLower(range.includeLower())
            .includeUpper(range.includeUpper());
        // timeZone/format are optional; only copy them when present.
        if (range.timeZone() != null) {
            rewritten.timeZone(range.timeZone());
        }
        if (range.format() != null) {
            rewritten.format(range.format());
        }
        return rewritten;
    } else if (builder.getWriteableName().equals(TermQueryBuilder.NAME)) {
        TermQueryBuilder term = (TermQueryBuilder) builder;
        String fieldName = term.fieldName();
        String rewrittenFieldName = rewriteFieldName(jobCaps, TermQueryBuilder.NAME, fieldName);
        return new TermQueryBuilder(rewrittenFieldName, term.value());
    } else if (builder.getWriteableName().equals(TermsQueryBuilder.NAME)) {
        TermsQueryBuilder terms = (TermsQueryBuilder) builder;
        String fieldName = terms.fieldName();
        // Bug fix: this branch previously passed TermQueryBuilder.NAME, which
        // mislabeled the query type ("term" instead of "terms") in the error
        // messages raised by rewriteFieldName() for unresolvable fields.
        String rewrittenFieldName = rewriteFieldName(jobCaps, TermsQueryBuilder.NAME, fieldName);
        return new TermsQueryBuilder(rewrittenFieldName, terms.values());
    } else if (builder.getWriteableName().equals(MatchAllQueryBuilder.NAME)) {
        // no-op
        return builder;
    } else {
        throw new IllegalArgumentException("Unsupported Query in search request: [" + builder.getWriteableName() + "]");
    }
}
/**
 * Maps a live field name to its rolled-up equivalent (e.g. {@code "foo"} ->
 * {@code "foo.date_histogram.timestamp"}) by consulting the grouping configuration of
 * every rollup job capability that covers the field.
 *
 * @param jobCaps     capabilities of the rollup jobs backing the selected rollup indices
 * @param builderName writeable name of the query builder being rewritten (used in error messages)
 * @param fieldName   the field name used in the original query
 * @return the single, unambiguous rolled-up field name
 * @throws IllegalArgumentException if the field is not part of any job's grouping config,
 *         or if different jobs roll the field up under different names
 */
private static String rewriteFieldName(Set<RollupJobCaps> jobCaps, String builderName, String fieldName) {
    List<String> rewrittenFieldNames = jobCaps.stream()
        // We only care about job caps that have the query's target field
        .filter(caps -> caps.getFieldCaps().keySet().contains(fieldName))
        .flatMap(caps -> {
            RollupJobCaps.RollupFieldCaps fieldCaps = caps.getFieldCaps().get(fieldName);
            return fieldCaps.getAggs().stream()
                // For now, we only allow filtering on grouping fields
                .filter(agg -> {
                    String type = (String) agg.get(RollupField.AGG);
                    // make sure it's one of the three groups
                    return type.equals(TermsAggregationBuilder.NAME)
                        || type.equals(DateHistogramAggregationBuilder.NAME)
                        || type.equals(HistogramAggregationBuilder.NAME);
                })
                // Rewrite the field name to our convention (e.g. "foo" -> "date_histogram.foo.timestamp")
                .map(agg -> {
                    if (agg.get(RollupField.AGG).equals(DateHistogramAggregationBuilder.NAME)) {
                        return RollupField.formatFieldName(fieldName, (String) agg.get(RollupField.AGG), RollupField.TIMESTAMP);
                    } else {
                        return RollupField.formatFieldName(fieldName, (String) agg.get(RollupField.AGG), RollupField.VALUE);
                    }
                });
        })
        // Deduplicate on individual rewritten names, not on whole per-job lists: two jobs
        // that roll the field up under the same name must count as one candidate, so the
        // ambiguity error below does not report the same name repeatedly.
        .distinct()
        .collect(Collectors.toList());

    if (rewrittenFieldNames.isEmpty()) {
        throw new IllegalArgumentException("Field [" + fieldName + "] in [" + builderName
            + "] query is not available in selected rollup indices, cannot query.");
    } else if (rewrittenFieldNames.size() > 1) {
        throw new IllegalArgumentException("Ambiguous field name resolution when mapping to rolled fields. Field name [" +
            fieldName + "] was mapped to: [" + Strings.collectionToDelimitedString(rewrittenFieldNames, ",") + "].");
    } else {
        return rewrittenFieldNames.get(0);
    }
}
/**
 * Splits the concrete indices of a search request into "live" (normal) indices and
 * rollup indices, collecting the rollup job capabilities advertised by the rollup ones.
 *
 * @param indices       concrete index names resolved for the request; must be non-empty
 * @param indexMetadata metadata for the indices in the cluster state
 * @return a context holding the live indices, the rollup indices and their job caps
 * @throws IllegalArgumentException if no index was given, {@code _all} was requested,
 *         or more than one rollup index was selected
 */
static RollupSearchContext separateIndices(String[] indices, ImmutableOpenMap<String, IndexMetadata> indexMetadata) {
    if (indices.length == 0) {
        throw new IllegalArgumentException("Must specify at least one concrete index.");
    }

    List<String> liveIndices = new ArrayList<>();
    List<String> rollupIndices = new ArrayList<>();
    Set<RollupJobCaps> caps = new HashSet<>();

    for (String index : indices) {
        if (index.equals(Metadata.ALL)) {
            throw new IllegalArgumentException("Searching _all via RollupSearch endpoint is not supported at this time.");
        }

        // An index is a rollup index iff rollup caps are recorded in its metadata.
        Optional<RollupIndexCaps> indexCaps = TransportGetRollupCapsAction.findRollupIndexCaps(index, indexMetadata.get(index));

        if (indexCaps.isPresent()) {
            rollupIndices.add(index);
            caps.addAll(indexCaps.get().getJobCaps());
        } else {
            liveIndices.add(index);
        }
    }

    assert liveIndices.size() + rollupIndices.size() > 0;

    if (rollupIndices.size() > 1) {
        throw new IllegalArgumentException("RollupSearch currently only supports searching one rollup index at a time. " +
            "Found the following rollup indices: " + rollupIndices);
    }

    return new RollupSearchContext(liveIndices.toArray(new String[0]), rollupIndices.toArray(new String[0]), caps);
}
/**
 * Transport-layer handler: executes an incoming {@link SearchRequest} and sends the
 * resulting {@link SearchResponse} (or the failure) back over the originating channel.
 */
class TransportHandler implements TransportRequestHandler<SearchRequest> {
    @Override
    public final void messageReceived(final SearchRequest request, final TransportChannel channel, Task task) throws Exception {
        // We already got the task created on the network layer - no need to create it again on the transport layer
        execute(task, request, new ActionListener<SearchResponse>() {
            @Override
            public void onResponse(SearchResponse response) {
                try {
                    channel.sendResponse(response);
                } catch (Exception e) {
                    // Sending the successful response failed; route the exception through
                    // the failure path so the remote caller is still notified.
                    onFailure(e);
                }
            }

            @Override
            public void onFailure(Exception e) {
                try {
                    channel.sendResponse(e);
                } catch (Exception e1) {
                    // Could not even send the error back - nothing more we can do but log.
                    logger.warn(
                        (org.apache.logging.log4j.util.Supplier<?>)
                            () -> new ParameterizedMessage(
                                "Failed to send error response for action [{}] and request [{}]",
                                actionName,
                                request),
                        e1);
                }
            }
        });
    }
}
/**
 * Immutable holder for the result of {@link #separateIndices}: which requested indices
 * are live, which are rollup indices, and the rollup job caps backing the latter.
 */
static class RollupSearchContext {
    private final String[] liveIndices;
    private final String[] rollupIndices;
    private final Set<RollupJobCaps> jobCaps;

    RollupSearchContext(String[] liveIndices, String[] rollupIndices, Set<RollupJobCaps> jobCaps) {
        this.liveIndices = Objects.requireNonNull(liveIndices);
        this.rollupIndices = Objects.requireNonNull(rollupIndices);
        this.jobCaps = Objects.requireNonNull(jobCaps);
    }

    /** @return {@code true} if at least one live (non-rollup) index was requested */
    boolean hasLiveIndices() {
        return liveIndices.length > 0;
    }

    /** @return {@code true} if at least one rollup index was requested */
    boolean hasRollupIndices() {
        return rollupIndices.length > 0;
    }

    /** @return the requested live indices (possibly empty, never {@code null}) */
    String[] getLiveIndices() {
        return liveIndices;
    }

    /** @return the requested rollup indices (possibly empty, never {@code null}) */
    String[] getRollupIndices() {
        return rollupIndices;
    }

    /** @return job capabilities collected from the rollup indices */
    Set<RollupJobCaps> getJobCaps() {
        return jobCaps;
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.distributed;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.IgniteException;
import org.apache.ignite.cache.CacheMode;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.configuration.NearCacheConfiguration;
import org.apache.ignite.internal.IgniteEx;
import org.apache.ignite.internal.util.typedef.CI1;
import org.apache.ignite.internal.util.typedef.CI2;
import org.apache.ignite.internal.util.typedef.PA;
import org.apache.ignite.internal.util.typedef.X;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.apache.ignite.transactions.Transaction;
import org.apache.ignite.transactions.TransactionIsolation;
import org.apache.ignite.transactions.TransactionTimeoutException;
import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL;
import static org.apache.ignite.cache.CacheMode.PARTITIONED;
import static org.apache.ignite.cache.CacheMode.REPLICATED;
import static org.apache.ignite.cache.CacheWriteSynchronizationMode.FULL_SYNC;
import static org.apache.ignite.transactions.TransactionConcurrency.OPTIMISTIC;
import static org.apache.ignite.transactions.TransactionState.ACTIVE;
import static org.apache.ignite.transactions.TransactionState.COMMITTED;
import static org.apache.ignite.transactions.TransactionState.ROLLED_BACK;
import static org.apache.ignite.transactions.TransactionState.SUSPENDED;
/**
*
*/
public class IgniteOptimisticTxSuspendResumeTest extends GridCommonAbstractTest {
    /** Transaction timeout. */
    private static final long TX_TIMEOUT = 200;

    /** Future timeout. */
    private static final int FUT_TIMEOUT = 5000;

    /** If {@code true}, the next node started via {@link #getConfiguration} is a client node. */
    private boolean client = false;

    /**
     * List of closures to execute transaction operation that prohibited in suspended state.
     */
    private static final List<CI1Exc<Transaction>> SUSPENDED_TX_PROHIBITED_OPS = Arrays.asList(
        new CI1Exc<Transaction>() {
            @Override public void applyx(Transaction tx) throws Exception {
                tx.suspend();
            }
        },
        new CI1Exc<Transaction>() {
            @Override public void applyx(Transaction tx) throws Exception {
                tx.close();
            }
        },
        new CI1Exc<Transaction>() {
            @Override public void applyx(Transaction tx) throws Exception {
                tx.commit();
            }
        },
        new CI1Exc<Transaction>() {
            @Override public void applyx(Transaction tx) throws Exception {
                tx.commitAsync().get(FUT_TIMEOUT);
            }
        },
        new CI1Exc<Transaction>() {
            @Override public void applyx(Transaction tx) throws Exception {
                tx.rollback();
            }
        },
        new CI1Exc<Transaction>() {
            @Override public void applyx(Transaction tx) throws Exception {
                tx.rollbackAsync().get(FUT_TIMEOUT);
            }
        },
        new CI1Exc<Transaction>() {
            @Override public void applyx(Transaction tx) throws Exception {
                tx.setRollbackOnly();
            }
        }
    );

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);

        cfg.setClientMode(client);

        return cfg;
    }

    /** {@inheritDoc} */
    @Override protected void beforeTestsStarted() throws Exception {
        super.beforeTestsStarted();

        startGrids(serversNumber());

        // In multi-server mode additionally start two client nodes.
        if (serversNumber() > 1) {
            client = true;

            startGrid(serversNumber());

            startGrid(serversNumber() + 1);

            client = false;
        }

        awaitPartitionMapExchange();
    }

    /** {@inheritDoc} */
    @Override protected void afterTestsStopped() throws Exception {
        stopAllGrids(true);
    }

    /**
     * @return Number of server nodes.
     */
    protected int serversNumber() {
        return 1;
    }

    /**
     * Test for transaction starting in one thread, continuing in another.
     *
     * @throws Exception If failed.
     */
    public void testResumeTxInAnotherThread() throws Exception {
        executeTestForAllCaches(new CI2Exc<Ignite, IgniteCache<Integer, Integer>>() {
            @Override public void applyx(Ignite ignite, final IgniteCache<Integer, Integer> cache) throws Exception {
                for (TransactionIsolation isolation : TransactionIsolation.values()) {
                    final Transaction tx = ignite.transactions().txStart(OPTIMISTIC, isolation);

                    final AtomicInteger cntr = new AtomicInteger(0);

                    cache.put(-1, -1);
                    cache.put(cntr.get(), cntr.getAndIncrement());

                    tx.suspend();

                    assertEquals(SUSPENDED, tx.state());

                    assertNull("Thread already have tx", ignite.transactions().tx());

                    // Writes of the suspended (uncommitted) tx must not be visible here.
                    assertNull(cache.get(-1));
                    assertNull(cache.get(cntr.get()));

                    // Resume/suspend the same tx from several other threads in turn.
                    for (int i = 0; i < 10; i++) {
                        GridTestUtils.runAsync(new Runnable() {
                            @Override public void run() {
                                assertEquals(SUSPENDED, tx.state());

                                tx.resume();

                                assertEquals(ACTIVE, tx.state());

                                cache.put(cntr.get(), cntr.getAndIncrement());

                                tx.suspend();
                            }
                        }).get(FUT_TIMEOUT);
                    }

                    tx.resume();

                    cache.remove(-1);

                    tx.commit();

                    assertEquals(COMMITTED, tx.state());

                    for (int i = 0; i < cntr.get(); i++)
                        assertEquals(i, (int)cache.get(i));

                    assertFalse(cache.containsKey(-1));

                    cache.removeAll();
                }
            }
        });
    }

    /**
     * Test for transaction starting in one thread, continuing in another, and resuming in initiating thread.
     * Cache operations performed for a couple of caches.
     *
     * @throws Exception If failed.
     */
    public void testCrossCacheTxInAnotherThread() throws Exception {
        executeTestForAllCaches(new CI2Exc<Ignite, IgniteCache<Integer, Integer>>() {
            @Override public void applyx(Ignite ignite, final IgniteCache<Integer, Integer> cache) throws Exception {
                for (TransactionIsolation isolation : TransactionIsolation.values()) {
                    final IgniteCache<Integer, Integer> otherCache =
                        ignite.getOrCreateCache(cacheConfiguration(PARTITIONED, 0, false).setName("otherCache"));

                    final Transaction tx = ignite.transactions().txStart(OPTIMISTIC, isolation);

                    final AtomicInteger cntr = new AtomicInteger(0);

                    cache.put(-1, -1);
                    otherCache.put(-1, -1);

                    tx.suspend();

                    for (int i = 0; i < 10; i++) {
                        GridTestUtils.runAsync(new Runnable() {
                            @Override public void run() {
                                tx.resume();

                                assertEquals(ACTIVE, tx.state());

                                cache.put(cntr.get(), cntr.get());
                                otherCache.put(cntr.get(), cntr.getAndIncrement());

                                tx.suspend();
                            }
                        }).get(FUT_TIMEOUT);
                    }

                    tx.resume();

                    cache.remove(-1);
                    otherCache.remove(-1);

                    tx.commit();

                    assertEquals(COMMITTED, tx.state());

                    // Both caches must see all per-iteration writes after commit.
                    for (int i = 0; i < cntr.get(); i++) {
                        assertEquals(i, (int)cache.get(i));
                        assertEquals(i, (int)otherCache.get(i));
                    }

                    assertFalse(cache.containsKey(-1));
                    assertFalse(otherCache.containsKey(-1));

                    cache.removeAll();
                    otherCache.removeAll();
                }
            }
        });
    }

    /**
     * Test for transaction rollback.
     *
     * @throws Exception If failed.
     */
    public void testTxRollback() throws Exception {
        executeTestForAllCaches(new CI2Exc<Ignite, IgniteCache<Integer, Integer>>() {
            @Override public void applyx(Ignite ignite, final IgniteCache<Integer, Integer> cache) throws Exception {
                for (TransactionIsolation isolation : TransactionIsolation.values()) {
                    final Transaction tx = ignite.transactions().txStart(OPTIMISTIC, isolation);

                    cache.put(1, 1);
                    cache.put(2, 2);

                    tx.suspend();

                    assertNull("There is no transaction for current thread", ignite.transactions().tx());

                    assertEquals(SUSPENDED, tx.state());

                    // Roll the tx back from another thread.
                    GridTestUtils.runAsync(new Runnable() {
                        @Override public void run() {
                            tx.resume();

                            assertEquals(ACTIVE, tx.state());

                            cache.put(3, 3);

                            tx.rollback();
                        }
                    }).get(FUT_TIMEOUT);

                    assertTrue(GridTestUtils.waitForCondition(new PA() {
                        @Override public boolean apply() {
                            return tx.state() == ROLLED_BACK;
                        }
                    }, getTestTimeout()));

                    assertEquals(ROLLED_BACK, tx.state());

                    // None of the writes may survive the rollback.
                    assertFalse(cache.containsKey(1));
                    assertFalse(cache.containsKey(2));
                    assertFalse(cache.containsKey(3));

                    cache.removeAll();
                }
            }
        });
    }

    /**
     * Test for starting and suspending transactions, and then resuming and committing in another thread.
     *
     * @throws Exception If failed.
     */
    public void testMultiTxSuspendResume() throws Exception {
        executeTestForAllCaches(new CI2Exc<Ignite, IgniteCache<Integer, Integer>>() {
            @Override public void applyx(Ignite ignite, final IgniteCache<Integer, Integer> cache) throws Exception {
                for (TransactionIsolation isolation : TransactionIsolation.values()) {
                    final List<Transaction> clientTxs = new ArrayList<>();

                    // Start 10 transactions in this thread and leave them suspended.
                    for (int i = 0; i < 10; i++) {
                        Transaction tx = ignite.transactions().txStart(OPTIMISTIC, isolation);

                        cache.put(i, i);

                        tx.suspend();

                        clientTxs.add(tx);
                    }

                    // Each worker thread resumes and commits one of the suspended txs.
                    GridTestUtils.runMultiThreaded(new CI1Exc<Integer>() {
                        public void applyx(Integer idx) throws Exception {
                            Transaction tx = clientTxs.get(idx);

                            assertEquals(SUSPENDED, tx.state());

                            tx.resume();

                            assertEquals(ACTIVE, tx.state());

                            tx.commit();
                        }
                    }, 10, "th-suspend");

                    for (int i = 0; i < 10; i++)
                        assertEquals(i, (int)cache.get(i));

                    cache.removeAll();
                }
            }
        });
    }

    /**
     * Test checking all operations(exception resume) on suspended transaction from the other thread are prohibited.
     *
     * @throws Exception If failed.
     */
    public void testOpsProhibitedOnSuspendedTxFromOtherThread() throws Exception {
        executeTestForAllCaches(new CI2Exc<Ignite, IgniteCache<Integer, Integer>>() {
            @Override public void applyx(Ignite ignite, final IgniteCache<Integer, Integer> cache) throws Exception {
                for (final CI1Exc<Transaction> txOperation : SUSPENDED_TX_PROHIBITED_OPS) {
                    for (TransactionIsolation isolation : TransactionIsolation.values()) {
                        final Transaction tx = ignite.transactions().txStart(OPTIMISTIC, isolation);

                        cache.put(1, 1);

                        tx.suspend();

                        // Every prohibited op must throw when invoked from a foreign thread.
                        multithreaded(new RunnableX() {
                            @Override public void runx() throws Exception {
                                GridTestUtils.assertThrowsWithCause(txOperation, tx, IgniteException.class);
                            }
                        }, 1);

                        tx.resume();
                        tx.close();

                        assertNull(cache.get(1));
                    }
                }
            }
        });
    }

    /**
     * Test checking all operations(exception resume) on suspended transaction are prohibited.
     *
     * @throws Exception If failed.
     */
    public void testOpsProhibitedOnSuspendedTx() throws Exception {
        executeTestForAllCaches(new CI2Exc<Ignite, IgniteCache<Integer, Integer>>() {
            @Override public void applyx(Ignite ignite, final IgniteCache<Integer, Integer> cache) throws Exception {
                for (CI1Exc<Transaction> txOperation : SUSPENDED_TX_PROHIBITED_OPS) {
                    for (TransactionIsolation isolation : TransactionIsolation.values()) {
                        Transaction tx = ignite.transactions().txStart(OPTIMISTIC, isolation);

                        cache.put(1, 1);

                        tx.suspend();

                        // Prohibited op must throw even in the thread that suspended the tx.
                        GridTestUtils.assertThrowsWithCause(txOperation, tx, IgniteException.class);

                        tx.resume();
                        tx.close();

                        assertNull(cache.get(1));
                    }
                }
            }
        });
    }

    /**
     * Test checking timeout on resumed transaction.
     *
     * @throws Exception If failed.
     */
    public void testTxTimeoutOnResumed() throws Exception {
        executeTestForAllCaches(new CI2Exc<Ignite, IgniteCache<Integer, Integer>>() {
            @Override public void applyx(Ignite ignite, final IgniteCache<Integer, Integer> cache) throws Exception {
                for (TransactionIsolation isolation : TransactionIsolation.values()) {
                    final Transaction tx = ignite.transactions().txStart(OPTIMISTIC, isolation, TX_TIMEOUT, 0);

                    cache.put(1, 1);

                    tx.suspend();

                    // Wait until the tx timeout is guaranteed to have expired.
                    long start = U.currentTimeMillis();

                    while(TX_TIMEOUT >= U.currentTimeMillis() - start)
                        Thread.sleep(TX_TIMEOUT * 2);

                    GridTestUtils.assertThrowsWithCause(new Callable<Object>() {
                        @Override public Object call() throws Exception {
                            tx.resume();

                            return null;
                        }
                    }, TransactionTimeoutException.class);

                    assertTrue(GridTestUtils.waitForCondition(new PA() {
                        @Override public boolean apply() {
                            return tx.state() == ROLLED_BACK;
                        }
                    }, getTestTimeout()));

                    assertEquals(ROLLED_BACK, tx.state());

                    tx.close();
                }
            }
        });
    }

    /**
     * Test checking timeout on suspended transaction.
     *
     * @throws Exception If failed.
     */
    public void testTxTimeoutOnSuspend() throws Exception {
        executeTestForAllCaches(new CI2Exc<Ignite, IgniteCache<Integer, Integer>>() {
            @Override public void applyx(Ignite ignite, final IgniteCache<Integer, Integer> cache) throws Exception {
                for (TransactionIsolation isolation : TransactionIsolation.values()) {
                    final Transaction tx = ignite.transactions().txStart(OPTIMISTIC, isolation, TX_TIMEOUT, 0);

                    cache.put(1, 1);

                    // Wait until the tx timeout is guaranteed to have expired.
                    long start = U.currentTimeMillis();

                    while(TX_TIMEOUT >= U.currentTimeMillis() - start)
                        Thread.sleep(TX_TIMEOUT * 2);

                    GridTestUtils.assertThrowsWithCause(new Callable<Object>() {
                        @Override public Object call() throws Exception {
                            tx.suspend();

                            return null;
                        }
                    }, TransactionTimeoutException.class);

                    assertTrue(GridTestUtils.waitForCondition(new PA() {
                        @Override public boolean apply() {
                            return tx.state() == ROLLED_BACK;
                        }
                    }, getTestTimeout()));

                    assertEquals(ROLLED_BACK, tx.state());

                    tx.close();

                    assertNull(cache.get(1));
                }
            }
        });
    }

    /**
     * Test start 1 transaction, suspendTx it. And then start another transaction, trying to write
     * the same key and commit it.
     *
     * @throws Exception If failed.
     */
    public void testSuspendTxAndStartNew() throws Exception {
        executeTestForAllCaches(new CI2Exc<Ignite, IgniteCache<Integer, Integer>>() {
            @Override public void applyx(Ignite ignite, final IgniteCache<Integer, Integer> cache) throws Exception {
                for (TransactionIsolation tx1Isolation : TransactionIsolation.values()) {
                    for (TransactionIsolation tx2Isolation : TransactionIsolation.values()) {
                        Transaction tx1 = ignite.transactions().txStart(OPTIMISTIC, tx1Isolation);

                        cache.put(1, 1);

                        tx1.suspend();

                        assertFalse(cache.containsKey(1));

                        // A fresh tx in the same thread writes the same key and commits.
                        Transaction tx2 = ignite.transactions().txStart(OPTIMISTIC, tx2Isolation);

                        cache.put(1, 2);

                        tx2.commit();

                        assertEquals(2, (int)cache.get(1));

                        // Resumed tx1 still sees its own (uncommitted) value for the key.
                        tx1.resume();

                        assertEquals(1, (int)cache.get(1));

                        tx1.close();

                        cache.removeAll();
                    }
                }
            }
        });
    }

    /**
     * Test start 1 transaction, suspendTx it. And then start another transaction, trying to write
     * the same key.
     *
     * @throws Exception If failed.
     */
    public void testSuspendTxAndStartNewWithoutCommit() throws Exception {
        executeTestForAllCaches(new CI2Exc<Ignite, IgniteCache<Integer, Integer>>() {
            @Override public void applyx(Ignite ignite, final IgniteCache<Integer, Integer> cache) throws Exception {
                for (TransactionIsolation tx1Isolation : TransactionIsolation.values()) {
                    for (TransactionIsolation tx2Isolation : TransactionIsolation.values()) {
                        Transaction tx1 = ignite.transactions().txStart(OPTIMISTIC, tx1Isolation);

                        cache.put(1, 1);

                        tx1.suspend();

                        assertFalse(cache.containsKey(1));

                        Transaction tx2 = ignite.transactions().txStart(OPTIMISTIC, tx2Isolation);

                        cache.put(1, 2);

                        tx2.suspend();

                        assertFalse(cache.containsKey(1));

                        // Each tx observes only its own write while the other is suspended.
                        tx1.resume();

                        assertEquals(1, (int)cache.get(1));

                        tx1.suspend();

                        tx2.resume();

                        assertEquals(2, (int)cache.get(1));

                        tx2.rollback();

                        tx1.resume();
                        tx1.rollback();

                        cache.removeAll();
                    }
                }
            }
        });
    }

    /**
     * Test we can resume and complete transaction if topology changed while transaction is suspended.
     *
     * @throws Exception If failed.
     */
    public void testSuspendTxAndResumeAfterTopologyChange() throws Exception {
        executeTestForAllCaches(new CI2Exc<Ignite, IgniteCache<Integer, Integer>>() {
            @Override public void applyx(Ignite ignite, final IgniteCache<Integer, Integer> cache) throws Exception {
                for (TransactionIsolation isolation : TransactionIsolation.values()) {
                    Transaction tx = ignite.transactions().txStart(OPTIMISTIC, isolation);

                    cache.put(1, 1);

                    tx.suspend();

                    assertEquals(SUSPENDED, tx.state());

                    // A node joins (and later leaves, via try-with-resources) while suspended.
                    try (IgniteEx g = startGrid(serversNumber() + 3)) {
                        tx.resume();

                        assertEquals(ACTIVE, tx.state());

                        assertEquals(1, (int)cache.get(1));

                        tx.commit();

                        assertEquals(1, (int)cache.get(1));
                    }

                    cache.removeAll();
                }
            }
        });
    }

    /**
     * Test for correct exception handling when misuse transaction API - resume active tx.
     *
     * @throws Exception If failed.
     */
    public void testResumeActiveTx() throws Exception {
        executeTestForAllCaches(new CI2Exc<Ignite, IgniteCache<Integer, Integer>>() {
            @Override public void applyx(Ignite ignite, final IgniteCache<Integer, Integer> cache) throws Exception {
                for (TransactionIsolation isolation : TransactionIsolation.values()) {
                    final Transaction tx = ignite.transactions().txStart(OPTIMISTIC, isolation);

                    cache.put(1, 1);

                    try {
                        // Resuming an ACTIVE (non-suspended) tx is illegal.
                        tx.resume();

                        fail("Exception must be thrown");
                    }
                    catch (Throwable e) {
                        assertTrue(X.hasCause(e, IgniteException.class));

                        assertFalse(X.hasCause(e, AssertionError.class));
                    }

                    tx.close();

                    assertFalse(cache.containsKey(1));
                }
            }
        });
    }

    /**
     * @return Cache configurations to test.
     */
    private List<CacheConfiguration<Integer, Integer>> cacheConfigurations() {
        List<CacheConfiguration<Integer, Integer>> cfgs = new ArrayList<>();

        cfgs.add(cacheConfiguration(PARTITIONED, 0, false));
        cfgs.add(cacheConfiguration(PARTITIONED, 1, false));
        cfgs.add(cacheConfiguration(PARTITIONED, 1, true));
        cfgs.add(cacheConfiguration(REPLICATED, 0, false));

        return cfgs;
    }

    /**
     * @param cacheMode Cache mode.
     * @param backups Number of backups.
     * @param nearCache If {@code true} near cache is enabled.
     * @return Cache configuration.
     */
    private CacheConfiguration<Integer, Integer> cacheConfiguration(
        CacheMode cacheMode,
        int backups,
        boolean nearCache) {
        CacheConfiguration<Integer, Integer> ccfg = new CacheConfiguration<>(DEFAULT_CACHE_NAME);

        ccfg.setCacheMode(cacheMode);
        ccfg.setAtomicityMode(TRANSACTIONAL);
        ccfg.setWriteSynchronizationMode(FULL_SYNC);

        if (cacheMode == PARTITIONED)
            ccfg.setBackups(backups);

        if (nearCache)
            ccfg.setNearConfiguration(new NearCacheConfiguration<Integer, Integer>());

        return ccfg;
    }

    /**
     * Runs the given closure for every cache configuration on every node (including
     * client nodes in multi-server mode), creating and destroying the cache around it.
     *
     * @param c Closure.
     * @throws Exception If failed.
     */
    private void executeTestForAllCaches(CI2<Ignite, IgniteCache<Integer, Integer>> c) throws Exception {
        for (CacheConfiguration<Integer, Integer> ccfg : cacheConfigurations()) {
            ignite(0).createCache(ccfg);

            log.info("Run test for cache [cache=" + ccfg.getCacheMode() +
                ", backups=" + ccfg.getBackups() +
                ", near=" + (ccfg.getNearConfiguration() != null) + "]");

            awaitPartitionMapExchange();

            int srvNum = serversNumber();

            if (serversNumber() > 1) {
                ignite(serversNumber() + 1).createNearCache(ccfg.getName(), new NearCacheConfiguration<>());

                srvNum += 2;
            }

            try {
                for (int i = 0; i < srvNum; i++) {
                    Ignite ignite = ignite(i);

                    log.info("Run test for node [node=" + i + ", client=" + ignite.configuration().isClientMode() + ']');

                    c.apply(ignite, ignite.<Integer, Integer>cache(ccfg.getName()));
                }
            }
            finally {
                ignite(0).destroyCache(ccfg.getName());
            }
        }
    }

    /**
     * Closure with 2 parameters that can throw any exception.
     *
     * @param <E1> Type of first closure parameter.
     * @param <E2> Type of second closure parameter.
     */
    public static abstract class CI2Exc<E1, E2> implements CI2<E1, E2> {
        /**
         * Closure body.
         *
         * @param e1 First closure argument.
         * @param e2 Second closure argument.
         * @throws Exception If failed.
         */
        public abstract void applyx(E1 e1, E2 e2) throws Exception;

        /** {@inheritDoc} */
        @Override public void apply(E1 e1, E2 e2) {
            try {
                applyx(e1, e2);
            }
            catch (Exception e) {
                throw new RuntimeException(e);
            }
        }
    }

    /**
     * Closure that can throw any exception.
     *
     * @param <T> Type of closure parameter.
     */
    public static abstract class CI1Exc<T> implements CI1<T> {
        /**
         * Closure body.
         *
         * @param o Closure argument.
         * @throws Exception If failed.
         */
        public abstract void applyx(T o) throws Exception;

        /** {@inheritDoc} */
        @Override public void apply(T o) {
            try {
                applyx(o);
            }
            catch (Exception e) {
                throw new RuntimeException(e);
            }
        }
    }

    /**
     * Runnable that can throw any exception.
     */
    public static abstract class RunnableX implements Runnable {
        /**
         * Closure body.
         *
         * @throws Exception If failed.
         */
        public abstract void runx() throws Exception;

        /** {@inheritDoc} */
        @Override public void run() {
            try {
                runx();
            }
            catch (Exception e) {
                throw new RuntimeException(e);
            }
        }
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.indices;
import org.elasticsearch.action.ActionRequestBuilder;
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequestBuilder;
import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequestBuilder;
import org.elasticsearch.action.admin.indices.alias.exists.AliasesExistRequestBuilder;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequestBuilder;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequestBuilder;
import org.elasticsearch.action.admin.indices.exists.types.TypesExistsRequestBuilder;
import org.elasticsearch.action.admin.indices.flush.FlushRequestBuilder;
import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequestBuilder;
import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequestBuilder;
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequestBuilder;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequestBuilder;
import org.elasticsearch.action.admin.indices.segments.IndicesSegmentsRequestBuilder;
import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequestBuilder;
import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequestBuilder;
import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequestBuilder;
import org.elasticsearch.action.search.MultiSearchRequestBuilder;
import org.elasticsearch.action.search.MultiSearchResponse;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.function.Function;
import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.Matchers.startsWith;
public class IndicesOptionsIntegrationIT extends ESIntegTestCase {
/** Registers {@link TestPlugin} on every node of the test cluster. */
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
    return Arrays.asList(TestPlugin.class);
}
/**
 * When one of several requested indices ("test2") does not exist: default and strict
 * options must fail every API, lenient options must succeed, and once the missing index
 * is created even strict options must succeed.
 */
public void testSpecifiedIndexUnavailableMultipleIndices() throws Exception {
    assertAcked(prepareCreate("test1"));

    // Verify defaults
    verify(search("test1", "test2"), true);
    verify(msearch(null, "test1", "test2"), true);
    verify(clearCache("test1", "test2"), true);
    verify(_flush("test1", "test2"),true);
    verify(segments("test1", "test2"), true);
    verify(stats("test1", "test2"), true);
    verify(forceMerge("test1", "test2"), true);
    verify(refreshBuilder("test1", "test2"), true);
    verify(validateQuery("test1", "test2"), true);
    verify(aliasExists("test1", "test2"), true);
    verify(typesExists("test1", "test2"), true);
    verify(getAliases("test1", "test2"), true);
    verify(getFieldMapping("test1", "test2"), true);
    verify(getMapping("test1", "test2"), true);
    verify(getSettings("test1", "test2"), true);

    // Strict expansion: a missing concrete index must fail every API.
    IndicesOptions options = IndicesOptions.strictExpandOpen();
    verify(search("test1", "test2").setIndicesOptions(options), true);
    verify(msearch(options, "test1", "test2"), true);
    verify(clearCache("test1", "test2").setIndicesOptions(options), true);
    verify(_flush("test1", "test2").setIndicesOptions(options),true);
    verify(segments("test1", "test2").setIndicesOptions(options), true);
    verify(stats("test1", "test2").setIndicesOptions(options), true);
    verify(forceMerge("test1", "test2").setIndicesOptions(options), true);
    verify(refreshBuilder("test1", "test2").setIndicesOptions(options), true);
    verify(validateQuery("test1", "test2").setIndicesOptions(options), true);
    verify(aliasExists("test1", "test2").setIndicesOptions(options), true);
    verify(typesExists("test1", "test2").setIndicesOptions(options), true);
    verify(getAliases("test1", "test2").setIndicesOptions(options), true);
    verify(getFieldMapping("test1", "test2").setIndicesOptions(options), true);
    verify(getMapping("test1", "test2").setIndicesOptions(options), true);
    verify(getSettings("test1", "test2").setIndicesOptions(options), true);

    // Lenient expansion: the missing index is ignored, so every API succeeds.
    options = IndicesOptions.lenientExpandOpen();
    verify(search("test1", "test2").setIndicesOptions(options), false);
    verify(msearch(options, "test1", "test2").setIndicesOptions(options), false);
    verify(clearCache("test1", "test2").setIndicesOptions(options), false);
    verify(_flush("test1", "test2").setIndicesOptions(options), false);
    verify(segments("test1", "test2").setIndicesOptions(options), false);
    verify(stats("test1", "test2").setIndicesOptions(options), false);
    verify(forceMerge("test1", "test2").setIndicesOptions(options), false);
    verify(refreshBuilder("test1", "test2").setIndicesOptions(options), false);
    verify(validateQuery("test1", "test2").setIndicesOptions(options), false);
    verify(aliasExists("test1", "test2").setIndicesOptions(options), false);
    verify(typesExists("test1", "test2").setIndicesOptions(options), false);
    verify(getAliases("test1", "test2").setIndicesOptions(options), false);
    verify(getFieldMapping("test1", "test2").setIndicesOptions(options), false);
    verify(getMapping("test1", "test2").setIndicesOptions(options), false);
    verify(getSettings("test1", "test2").setIndicesOptions(options), false);

    // Once "test2" exists, even strict expansion succeeds.
    options = IndicesOptions.strictExpandOpen();
    assertAcked(prepareCreate("test2"));
    verify(search("test1", "test2").setIndicesOptions(options), false);
    verify(msearch(options, "test1", "test2").setIndicesOptions(options), false);
    verify(clearCache("test1", "test2").setIndicesOptions(options), false);
    verify(_flush("test1", "test2").setIndicesOptions(options),false);
    verify(segments("test1", "test2").setIndicesOptions(options), false);
    verify(stats("test1", "test2").setIndicesOptions(options), false);
    verify(forceMerge("test1", "test2").setIndicesOptions(options), false);
    verify(refreshBuilder("test1", "test2").setIndicesOptions(options), false);
    verify(validateQuery("test1", "test2").setIndicesOptions(options), false);
    verify(aliasExists("test1", "test2").setIndicesOptions(options), false);
    verify(typesExists("test1", "test2").setIndicesOptions(options), false);
    verify(getAliases("test1", "test2").setIndicesOptions(options), false);
    verify(getFieldMapping("test1", "test2").setIndicesOptions(options), false);
    verify(getMapping("test1", "test2").setIndicesOptions(options), false);
    verify(getSettings("test1", "test2").setIndicesOptions(options), false);
}
/**
 * APIs addressing a concretely named index that exists but is closed:
 * strict forbid-closed options must fail, adding ignore_unavailable must
 * succeed, and once the index is reopened the strict options must succeed too.
 */
public void testSpecifiedIndexUnavailableSingleIndexThatIsClosed() throws Exception {
assertAcked(prepareCreate("test1"));
// we need to wait until all shards are allocated since recovery from
// gateway will fail unless the majority of the replicas was allocated
// pre-closing. with lots of replicas this will fail.
ensureGreen();
assertAcked(client().admin().indices().prepareClose("test1"));
// Phase 1: closed index + strict/forbid-closed options -> every API must fail.
IndicesOptions options = IndicesOptions.strictExpandOpenAndForbidClosed();
verify(search("test1").setIndicesOptions(options), true);
verify(msearch(options, "test1"), true);
verify(clearCache("test1").setIndicesOptions(options), true);
verify(_flush("test1").setIndicesOptions(options),true);
verify(segments("test1").setIndicesOptions(options), true);
verify(stats("test1").setIndicesOptions(options), true);
verify(forceMerge("test1").setIndicesOptions(options), true);
verify(refreshBuilder("test1").setIndicesOptions(options), true);
verify(validateQuery("test1").setIndicesOptions(options), true);
verify(aliasExists("test1").setIndicesOptions(options), true);
verify(typesExists("test1").setIndicesOptions(options), true);
verify(getAliases("test1").setIndicesOptions(options), true);
verify(getFieldMapping("test1").setIndicesOptions(options), true);
verify(getMapping("test1").setIndicesOptions(options), true);
verify(getSettings("test1").setIndicesOptions(options), true);
// Phase 2: same options but with ignore_unavailable=true (first argument) -> the
// closed index is skipped, so every API must succeed.
options = IndicesOptions.fromOptions(true, options.allowNoIndices(), options.expandWildcardsOpen(),
options.expandWildcardsClosed(), options);
verify(search("test1").setIndicesOptions(options), false);
verify(msearch(options, "test1"), false);
verify(clearCache("test1").setIndicesOptions(options), false);
verify(_flush("test1").setIndicesOptions(options),false);
verify(segments("test1").setIndicesOptions(options), false);
verify(stats("test1").setIndicesOptions(options), false);
verify(forceMerge("test1").setIndicesOptions(options), false);
verify(refreshBuilder("test1").setIndicesOptions(options), false);
verify(validateQuery("test1").setIndicesOptions(options), false);
verify(aliasExists("test1").setIndicesOptions(options), false);
verify(typesExists("test1").setIndicesOptions(options), false);
verify(getAliases("test1").setIndicesOptions(options), false);
verify(getFieldMapping("test1").setIndicesOptions(options), false);
verify(getMapping("test1").setIndicesOptions(options), false);
verify(getSettings("test1").setIndicesOptions(options), false);
// Phase 3: reopen the index -> the strict options from phase 1 must now succeed.
assertAcked(client().admin().indices().prepareOpen("test1"));
ensureYellow();
options = IndicesOptions.strictExpandOpenAndForbidClosed();
verify(search("test1").setIndicesOptions(options), false);
verify(msearch(options, "test1"), false);
verify(clearCache("test1").setIndicesOptions(options), false);
verify(_flush("test1").setIndicesOptions(options),false);
verify(segments("test1").setIndicesOptions(options), false);
verify(stats("test1").setIndicesOptions(options), false);
verify(forceMerge("test1").setIndicesOptions(options), false);
verify(refreshBuilder("test1").setIndicesOptions(options), false);
verify(validateQuery("test1").setIndicesOptions(options), false);
verify(aliasExists("test1").setIndicesOptions(options), false);
verify(typesExists("test1").setIndicesOptions(options), false);
verify(getAliases("test1").setIndicesOptions(options), false);
verify(getFieldMapping("test1").setIndicesOptions(options), false);
verify(getMapping("test1").setIndicesOptions(options), false);
verify(getSettings("test1").setIndicesOptions(options), false);
}
/**
 * APIs addressing a concretely named index that does not exist yet:
 * strict options must fail, ignore_unavailable must succeed, and after the
 * index is created the strict options must succeed as well.
 */
public void testSpecifiedIndexUnavailableSingleIndex() throws Exception {
// Phase 1: index is missing + strict options -> every API must fail.
IndicesOptions options = IndicesOptions.strictExpandOpenAndForbidClosed();
verify(search("test1").setIndicesOptions(options), true);
verify(msearch(options, "test1"), true);
verify(clearCache("test1").setIndicesOptions(options), true);
verify(_flush("test1").setIndicesOptions(options),true);
verify(segments("test1").setIndicesOptions(options), true);
verify(stats("test1").setIndicesOptions(options), true);
verify(forceMerge("test1").setIndicesOptions(options), true);
verify(refreshBuilder("test1").setIndicesOptions(options), true);
verify(validateQuery("test1").setIndicesOptions(options), true);
verify(aliasExists("test1").setIndicesOptions(options), true);
verify(typesExists("test1").setIndicesOptions(options), true);
verify(getAliases("test1").setIndicesOptions(options), true);
verify(getFieldMapping("test1").setIndicesOptions(options), true);
verify(getMapping("test1").setIndicesOptions(options), true);
verify(getSettings("test1").setIndicesOptions(options), true);
// Phase 2: ignore_unavailable=true (first argument) -> the missing index is
// skipped, so every API must succeed.
options = IndicesOptions.fromOptions(true, options.allowNoIndices(), options.expandWildcardsOpen(),
options.expandWildcardsClosed(), options);
verify(search("test1").setIndicesOptions(options), false);
verify(msearch(options, "test1"), false);
verify(clearCache("test1").setIndicesOptions(options), false);
verify(_flush("test1").setIndicesOptions(options),false);
verify(segments("test1").setIndicesOptions(options), false);
verify(stats("test1").setIndicesOptions(options), false);
verify(forceMerge("test1").setIndicesOptions(options), false);
verify(refreshBuilder("test1").setIndicesOptions(options), false);
verify(validateQuery("test1").setIndicesOptions(options), false);
verify(aliasExists("test1").setIndicesOptions(options), false);
verify(typesExists("test1").setIndicesOptions(options), false);
verify(getAliases("test1").setIndicesOptions(options), false);
verify(getFieldMapping("test1").setIndicesOptions(options), false);
verify(getMapping("test1").setIndicesOptions(options), false);
verify(getSettings("test1").setIndicesOptions(options), false);
// Phase 3: create the index -> the strict options must now succeed.
assertAcked(prepareCreate("test1"));
options = IndicesOptions.strictExpandOpenAndForbidClosed();
verify(search("test1").setIndicesOptions(options), false);
verify(msearch(options, "test1"), false);
verify(clearCache("test1").setIndicesOptions(options), false);
verify(_flush("test1").setIndicesOptions(options),false);
verify(segments("test1").setIndicesOptions(options), false);
verify(stats("test1").setIndicesOptions(options), false);
verify(forceMerge("test1").setIndicesOptions(options), false);
verify(refreshBuilder("test1").setIndicesOptions(options), false);
verify(validateQuery("test1").setIndicesOptions(options), false);
verify(aliasExists("test1").setIndicesOptions(options), false);
verify(typesExists("test1").setIndicesOptions(options), false);
verify(getAliases("test1").setIndicesOptions(options), false);
verify(getFieldMapping("test1").setIndicesOptions(options), false);
verify(getMapping("test1").setIndicesOptions(options), false);
verify(getSettings("test1").setIndicesOptions(options), false);
}
/**
 * Snapshot/restore addressing a concrete index ("test2") that does not exist:
 * default and strict options must fail, lenient options must succeed, and
 * once the index is created strict options must succeed.
 */
public void testSpecifiedIndexUnavailableSnapshotRestore() throws Exception {
createIndex("test1");
ensureGreen("test1");
waitForRelocation();
AcknowledgedResponse putRepositoryResponse = client().admin().cluster().preparePutRepository("dummy-repo")
.setType("fs").setSettings(Settings.builder().put("location", randomRepoPath())).get();
assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true));
client().admin().cluster().prepareCreateSnapshot("dummy-repo", "snap1").setWaitForCompletion(true).get();
// "test2" does not exist -> default options fail for both snapshot and restore.
verify(snapshot("snap2", "test1", "test2"), true);
verify(restore("snap1", "test1", "test2"), true);
// Explicit strict options: still failing.
IndicesOptions options = IndicesOptions.strictExpandOpen();
verify(snapshot("snap2", "test1", "test2").setIndicesOptions(options), true);
verify(restore("snap1", "test1", "test2").setIndicesOptions(options), true);
// Lenient options skip the missing index -> both operations succeed.
options = IndicesOptions.lenientExpandOpen();
verify(snapshot("snap2", "test1", "test2").setIndicesOptions(options), false);
verify(restore("snap2", "test1", "test2").setIndicesOptions(options), false);
// Once "test2" exists, strict options succeed too.
options = IndicesOptions.strictExpandOpen();
createIndex("test2");
//TODO: temporary work-around for #5531
ensureGreen("test2");
waitForRelocation();
verify(snapshot("snap3", "test1", "test2").setIndicesOptions(options), false);
verify(restore("snap3", "test1", "test2").setIndicesOptions(options), false);
}
/**
 * Wildcard expression handling across the APIs: empty expressions (meaning
 * {@code _all}), a single wildcard with one matching index, two wildcards with
 * one matching index, and explicit {@code allow_no_indices=true} options.
 */
public void testWildcardBehaviour() throws Exception {
// Verify defaults for wildcards, when specifying no indices (*, _all, /)
String[] indices = Strings.EMPTY_ARRAY;
verify(search(indices), false);
verify(msearch(null, indices), false);
verify(clearCache(indices), false);
verify(_flush(indices), false);
verify(segments(indices), false);
verify(stats(indices), false);
verify(forceMerge(indices), false);
verify(refreshBuilder(indices), false);
// validate query is expected to fail here because its defaults differ from the other APIs.
verify(validateQuery(indices), true);
verify(aliasExists(indices), false);
verify(typesExists(indices), false);
verify(getAliases(indices), false);
verify(getFieldMapping(indices), false);
verify(getMapping(indices), false);
verify(getSettings(indices), false);
// Now force allow_no_indices=true
IndicesOptions options = IndicesOptions.fromOptions(false, true, true, false);
verify(search(indices).setIndicesOptions(options), false);
verify(msearch(options, indices).setIndicesOptions(options), false);
verify(clearCache(indices).setIndicesOptions(options), false);
verify(_flush(indices).setIndicesOptions(options), false);
verify(segments(indices).setIndicesOptions(options), false);
verify(stats(indices).setIndicesOptions(options), false);
verify(forceMerge(indices).setIndicesOptions(options), false);
verify(refreshBuilder(indices).setIndicesOptions(options), false);
verify(validateQuery(indices).setIndicesOptions(options), false);
verify(aliasExists(indices).setIndicesOptions(options), false);
verify(typesExists(indices).setIndicesOptions(options), false);
verify(getAliases(indices).setIndicesOptions(options), false);
verify(getFieldMapping(indices).setIndicesOptions(options), false);
verify(getMapping(indices).setIndicesOptions(options), false);
verify(getSettings(indices).setIndicesOptions(options), false);
assertAcked(prepareCreate("foobar"));
client().prepareIndex("foobar", "type", "1").setSource("k", "v").setRefreshPolicy(IMMEDIATE).get();
// Verify defaults for wildcards, with one wildcard expression and one existing index
indices = new String[]{"foo*"};
verify(search(indices), false, 1);
verify(msearch(null, indices), false, 1);
verify(clearCache(indices), false);
verify(_flush(indices), false);
verify(segments(indices), false);
verify(stats(indices), false);
verify(forceMerge(indices), false);
verify(refreshBuilder(indices), false);
verify(validateQuery(indices), false);
verify(aliasExists(indices), false);
verify(typesExists(indices), false);
verify(getAliases(indices), false);
verify(getFieldMapping(indices), false);
verify(getMapping(indices), false);
// Fixed: previously this re-applied the lenient `options` from the section
// above, so getSettings never actually exercised the default indices options
// this section is meant to verify.
verify(getSettings(indices), false);
// Verify defaults for wildcards, with two wildcard expression and one existing index
indices = new String[]{"foo*", "bar*"};
verify(search(indices), false, 1);
verify(msearch(null, indices), false, 1);
verify(clearCache(indices), false);
verify(_flush(indices), false);
verify(segments(indices), false);
verify(stats(indices), false);
verify(forceMerge(indices), false);
verify(refreshBuilder(indices), false);
// "bar*" matches nothing, and validate query's defaults make that an error.
verify(validateQuery(indices), true);
verify(aliasExists(indices), false);
verify(typesExists(indices), false);
verify(getAliases(indices), false);
verify(getFieldMapping(indices), false);
verify(getMapping(indices), false);
// Fixed: same stray setIndicesOptions(options) as above — defaults are intended here.
verify(getSettings(indices), false);
// Now force allow_no_indices=true
options = IndicesOptions.fromOptions(false, true, true, false);
verify(search(indices).setIndicesOptions(options), false, 1);
verify(msearch(options, indices).setIndicesOptions(options), false, 1);
verify(clearCache(indices).setIndicesOptions(options), false);
verify(_flush(indices).setIndicesOptions(options), false);
verify(segments(indices).setIndicesOptions(options), false);
verify(stats(indices).setIndicesOptions(options), false);
verify(forceMerge(indices).setIndicesOptions(options), false);
verify(refreshBuilder(indices).setIndicesOptions(options), false);
verify(validateQuery(indices).setIndicesOptions(options), false);
verify(aliasExists(indices).setIndicesOptions(options), false);
verify(typesExists(indices).setIndicesOptions(options), false);
verify(getAliases(indices).setIndicesOptions(options), false);
verify(getFieldMapping(indices).setIndicesOptions(options), false);
verify(getMapping(indices).setIndicesOptions(options), false);
verify(getSettings(indices).setIndicesOptions(options), false);
}
/**
 * Wildcard handling for snapshot/restore: with allow_no_indices=false a
 * non-matching wildcard fails, strict-open defaults succeed, and once both
 * wildcards match, allow_no_indices=false succeeds as well.
 */
public void testWildcardBehaviourSnapshotRestore() throws Exception {
createIndex("foobar");
ensureGreen("foobar");
waitForRelocation();
AcknowledgedResponse putRepositoryResponse = client().admin().cluster().preparePutRepository("dummy-repo")
.setType("fs").setSettings(Settings.builder().put("location", randomRepoPath())).get();
assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true));
client().admin().cluster().prepareCreateSnapshot("dummy-repo", "snap1").setWaitForCompletion(true).get();
// allow_no_indices=false (second argument) and "bar*" matches nothing -> fail.
IndicesOptions options = IndicesOptions.fromOptions(false, false, true, false);
verify(snapshot("snap2", "foo*", "bar*").setIndicesOptions(options), true);
verify(restore("snap1", "foo*", "bar*").setIndicesOptions(options), true);
// strictExpandOpen allows wildcards that resolve to nothing -> succeed.
options = IndicesOptions.strictExpandOpen();
verify(snapshot("snap2", "foo*", "bar*").setIndicesOptions(options), false);
verify(restore("snap2", "foo*", "bar*").setIndicesOptions(options), false);
assertAcked(prepareCreate("barbaz"));
//TODO: temporary work-around for #5531
ensureGreen("barbaz");
waitForRelocation();
// Both wildcards now match -> allow_no_indices=false succeeds.
options = IndicesOptions.fromOptions(false, false, true, false);
verify(snapshot("snap3", "foo*", "bar*").setIndicesOptions(options), false);
verify(restore("snap3", "foo*", "bar*").setIndicesOptions(options), false);
// "baz*" still matches nothing -> fails again under allow_no_indices=false.
options = IndicesOptions.fromOptions(false, false, true, false);
verify(snapshot("snap4", "foo*", "baz*").setIndicesOptions(options), true);
verify(restore("snap3", "foo*", "baz*").setIndicesOptions(options), true);
}
/**
 * With lenient options, searching only missing indices returns zero hits
 * instead of failing, and an index-less search still matches everything.
 */
public void testAllMissingLenient() throws Exception {
createIndex("test1");
client().prepareIndex("test1", "type", "1").setSource("k", "v").setRefreshPolicy(IMMEDIATE).get();
// Single missing index: lenient -> empty result rather than an error.
SearchResponse response = client().prepareSearch("test2")
.setIndicesOptions(IndicesOptions.lenientExpandOpen())
.setQuery(matchAllQuery())
.execute().actionGet();
assertHitCount(response, 0L);
// Multiple missing indices behave the same way.
response = client().prepareSearch("test2","test3").setQuery(matchAllQuery())
.setIndicesOptions(IndicesOptions.lenientExpandOpen())
.execute().actionGet();
assertHitCount(response, 0L);
//you should still be able to run empty searches without things blowing up
response = client().prepareSearch()
.setIndicesOptions(IndicesOptions.lenientExpandOpen())
.setQuery(matchAllQuery())
.execute().actionGet();
assertHitCount(response, 1L);
}
/**
 * With the default (strict) options, searching missing indices throws
 * {@link IndexNotFoundException}, while an index-less search still works.
 */
public void testAllMissingStrict() throws Exception {
createIndex("test1");
expectThrows(IndexNotFoundException.class, () ->
client().prepareSearch("test2")
.setQuery(matchAllQuery())
.execute().actionGet());
// The same applies when several of the requested indices are missing.
expectThrows(IndexNotFoundException.class, () ->
client().prepareSearch("test2","test3")
.setQuery(matchAllQuery())
.execute().actionGet());
//you should still be able to run empty searches without things blowing up
client().prepareSearch().setQuery(matchAllQuery()).execute().actionGet();
}
// For now don't handle closed indices
/**
 * Search over a mix of open and closed indices: the default forbid-closed
 * options fail, while ignore_unavailable options and wildcard/empty
 * expressions succeed.
 */
public void testCloseApiSpecifiedIndices() throws Exception {
createIndex("test1", "test2");
ensureGreen();
verify(search("test1", "test2"), false);
assertAcked(client().admin().indices().prepareClose("test2").get());
// Default options forbid closed indices -> the search must fail.
verify(search("test1", "test2"), true);
// ignore_unavailable=true skips the closed index.
IndicesOptions options = IndicesOptions.fromOptions(true, true, true, false, IndicesOptions.strictExpandOpenAndForbidClosed());
verify(search("test1", "test2").setIndicesOptions(options), false);
// Empty and wildcard expressions never fail on closed indices.
verify(search(), false);
verify(search("t*"), false);
}
/**
 * Open/close with wildcards: by default a no-op when nothing matches, but
 * with allow_no_indices=false a second (now redundant) open/close fails.
 */
public void testOpenCloseApiWildcards() throws Exception {
createIndex("foo", "foobar", "bar", "barbaz");
ensureGreen();
// if there are no indices to open/close and allow_no_indices=true (default), the open/close is a no-op
verify(client().admin().indices().prepareClose("bar*"), false);
verify(client().admin().indices().prepareClose("bar*"), false);
verify(client().admin().indices().prepareClose("foo*"), false);
verify(client().admin().indices().prepareClose("foo*"), false);
verify(client().admin().indices().prepareClose("_all"), false);
verify(client().admin().indices().prepareOpen("bar*"), false);
verify(client().admin().indices().prepareOpen("_all"), false);
verify(client().admin().indices().prepareOpen("_all"), false);
// if there are no indices to open/close throw an exception
// (close only expands to open indices; open only expands to closed ones,
// so the second invocation of each request finds nothing to act on).
IndicesOptions openIndicesOptions = IndicesOptions.fromOptions(false, false, false, true);
IndicesOptions closeIndicesOptions = IndicesOptions.fromOptions(false, false, true, false);
verify(client().admin().indices().prepareClose("bar*").setIndicesOptions(closeIndicesOptions), false);
verify(client().admin().indices().prepareClose("bar*").setIndicesOptions(closeIndicesOptions), true);
verify(client().admin().indices().prepareClose("foo*").setIndicesOptions(closeIndicesOptions), false);
verify(client().admin().indices().prepareClose("foo*").setIndicesOptions(closeIndicesOptions), true);
verify(client().admin().indices().prepareClose("_all").setIndicesOptions(closeIndicesOptions), true);
verify(client().admin().indices().prepareOpen("bar*").setIndicesOptions(openIndicesOptions), false);
verify(client().admin().indices().prepareOpen("_all").setIndicesOptions(openIndicesOptions), false);
verify(client().admin().indices().prepareOpen("_all").setIndicesOptions(openIndicesOptions), true);
}
/**
 * Deleting a concretely named index: fails for a missing name and succeeds
 * (actually removing the index) for an existing one.
 */
public void testDeleteIndex() throws Exception {
createIndex("foobar");
// "foo" does not exist -> strict delete must fail and leave "foobar" intact.
verify(client().admin().indices().prepareDelete("foo"), true);
assertThat(client().admin().indices().prepareExists("foobar").get().isExists(), equalTo(true));
verify(client().admin().indices().prepareDelete("foobar"), false);
assertThat(client().admin().indices().prepareExists("foobar").get().isExists(), equalTo(false));
}
/**
 * Deleting by wildcard/_all: never fails (even with nothing to delete) and
 * removes exactly the matching indices.
 */
public void testDeleteIndexWildcard() throws Exception {
// _all with no indices at all is a successful no-op.
verify(client().admin().indices().prepareDelete("_all"), false);
createIndex("foo", "foobar", "bar", "barbaz");
// "foo*" removes foo and foobar only.
verify(client().admin().indices().prepareDelete("foo*"), false);
assertThat(client().admin().indices().prepareExists("foo").get().isExists(), equalTo(false));
assertThat(client().admin().indices().prepareExists("foobar").get().isExists(), equalTo(false));
assertThat(client().admin().indices().prepareExists("bar").get().isExists(), equalTo(true));
assertThat(client().admin().indices().prepareExists("barbaz").get().isExists(), equalTo(true));
// Repeating with no matches is still fine; _all removes the rest.
verify(client().admin().indices().prepareDelete("foo*"), false);
verify(client().admin().indices().prepareDelete("_all"), false);
assertThat(client().admin().indices().prepareExists("foo").get().isExists(), equalTo(false));
assertThat(client().admin().indices().prepareExists("foobar").get().isExists(), equalTo(false));
assertThat(client().admin().indices().prepareExists("bar").get().isExists(), equalTo(false));
assertThat(client().admin().indices().prepareExists("barbaz").get().isExists(), equalTo(false));
}
/** Adding an alias to a concretely named index succeeds and is visible afterwards. */
public void testPutAlias() throws Exception {
createIndex("foobar");
verify(client().admin().indices().prepareAliases().addAlias("foobar", "foobar_alias"), false);
assertThat(client().admin().indices().prepareAliasesExist("foobar_alias").setIndices("foobar").get().exists(), equalTo(true));
}
/**
 * Adding an alias by wildcard attaches it to every matching index, and "*"
 * attaches it to all indices.
 */
public void testPutAliasWildcard() throws Exception {
createIndex("foo", "foobar", "bar", "barbaz");
// "foo*" -> alias only on foo and foobar.
verify(client().admin().indices().prepareAliases().addAlias("foo*", "foobar_alias"), false);
assertThat(client().admin().indices().prepareAliasesExist("foobar_alias").setIndices("foo").get().exists(), equalTo(true));
assertThat(client().admin().indices().prepareAliasesExist("foobar_alias").setIndices("foobar").get().exists(), equalTo(true));
assertThat(client().admin().indices().prepareAliasesExist("foobar_alias").setIndices("bar").get().exists(), equalTo(false));
assertThat(client().admin().indices().prepareAliasesExist("foobar_alias").setIndices("barbaz").get().exists(), equalTo(false));
// "*" -> alias on every index.
verify(client().admin().indices().prepareAliases().addAlias("*", "foobar_alias"), false);
assertThat(client().admin().indices().prepareAliasesExist("foobar_alias").setIndices("foo").get().exists(), equalTo(true));
assertThat(client().admin().indices().prepareAliasesExist("foobar_alias").setIndices("foobar").get().exists(), equalTo(true));
assertThat(client().admin().indices().prepareAliasesExist("foobar_alias").setIndices("bar").get().exists(), equalTo(true));
assertThat(client().admin().indices().prepareAliasesExist("foobar_alias").setIndices("barbaz").get().exists(), equalTo(true));
}
/**
 * Put-mapping across name/wildcard/_all expressions: fails when nothing
 * matches, applies the mapping to every matching index otherwise, and also
 * works on a closed index.
 */
public void testPutMapping() throws Exception {
// No indices exist yet -> concrete name and _all both fail.
verify(client().admin().indices().preparePutMapping("foo").setType("type1").setSource("field", "type=text"), true);
verify(client().admin().indices().preparePutMapping("_all").setType("type1").setSource("field", "type=text"), true);
for (String index : Arrays.asList("foo", "foobar", "bar", "barbaz")) {
assertAcked(prepareCreate(index));
}
// Concrete name applies the mapping to exactly that index.
verify(client().admin().indices().preparePutMapping("foo").setType("type").setSource("field", "type=text"), false);
assertThat(client().admin().indices().prepareGetMappings("foo").get().mappings().get("foo").get("type"), notNullValue());
// Wildcard applies it to every matching index.
verify(client().admin().indices().preparePutMapping("b*").setType("type").setSource("field", "type=text"), false);
assertThat(client().admin().indices().prepareGetMappings("bar").get().mappings().get("bar").get("type"), notNullValue());
assertThat(client().admin().indices().prepareGetMappings("barbaz").get().mappings().get("barbaz").get("type"), notNullValue());
// _all and an empty expression both target every index.
verify(client().admin().indices().preparePutMapping("_all").setType("type").setSource("field", "type=text"), false);
assertThat(client().admin().indices().prepareGetMappings("foo").get().mappings().get("foo").get("type"), notNullValue());
assertThat(client().admin().indices().prepareGetMappings("foobar").get().mappings().get("foobar").get("type"), notNullValue());
assertThat(client().admin().indices().prepareGetMappings("bar").get().mappings().get("bar").get("type"), notNullValue());
assertThat(client().admin().indices().prepareGetMappings("barbaz").get().mappings().get("barbaz").get("type"), notNullValue());
verify(client().admin().indices().preparePutMapping().setType("type").setSource("field", "type=text"), false);
assertThat(client().admin().indices().prepareGetMappings("foo").get().mappings().get("foo").get("type"), notNullValue());
assertThat(client().admin().indices().prepareGetMappings("foobar").get().mappings().get("foobar").get("type"), notNullValue());
assertThat(client().admin().indices().prepareGetMappings("bar").get().mappings().get("bar").get("type"), notNullValue());
assertThat(client().admin().indices().prepareGetMappings("barbaz").get().mappings().get("barbaz").get("type"), notNullValue());
// A wildcard matching nothing fails for put-mapping.
verify(client().admin().indices().preparePutMapping("c*").setType("type").setSource("field", "type=text"), true);
// Put-mapping also works against a closed index.
assertAcked(client().admin().indices().prepareClose("barbaz").get());
verify(client().admin().indices().preparePutMapping("barbaz").setType("type").setSource("field", "type=text"), false);
assertThat(client().admin().indices().prepareGetMappings("barbaz").get().mappings().get("barbaz").get("type"), notNullValue());
}
/**
 * Test plugin registering the index settings used by {@code testUpdateSettings}:
 * {@code index.a} and {@code index.c} are dynamic, {@code index.e} is not.
 */
public static final class TestPlugin extends Plugin {
// Dynamic, index-scoped settings: updatable while the index is open.
private static final Setting<String> INDEX_A =
new Setting<>("index.a", "", Function.identity(), Property.Dynamic, Property.IndexScope);
private static final Setting<String> INDEX_C =
new Setting<>("index.c", "", Function.identity(), Property.Dynamic, Property.IndexScope);
// Non-dynamic setting: updating it on an open index is rejected (see testUpdateSettings).
private static final Setting<String> INDEX_E =
new Setting<>("index.e", "", Function.identity(), Property.IndexScope);
@Override
public List<Setting<?>> getSettings() {
return Arrays.asList(INDEX_A, INDEX_C, INDEX_E);
}
}
/**
 * Update-settings across name/wildcard/_all expressions: fails when nothing
 * matches, applies to every match (including closed indices), and rejects
 * non-dynamic settings on open indices.
 */
public void testUpdateSettings() throws Exception {
// No indices exist yet -> both requests fail.
verify(client().admin().indices().prepareUpdateSettings("foo").setSettings(Settings.builder().put("a", "b")), true);
verify(client().admin().indices().prepareUpdateSettings("_all").setSettings(Settings.builder().put("a", "b")), true);
createIndex("foo", "foobar", "bar", "barbaz");
ensureGreen();
// Close everything: settings updates still apply to closed indices.
assertAcked(client().admin().indices().prepareClose("_all").get());
verify(client().admin().indices().prepareUpdateSettings("foo").setSettings(Settings.builder().put("a", "b")), false);
verify(client().admin().indices().prepareUpdateSettings("bar*").setSettings(Settings.builder().put("a", "b")), false);
verify(client().admin().indices().prepareUpdateSettings("_all").setSettings(Settings.builder().put("c", "d")), false);
// Setting names are normalized under the "index." prefix.
GetSettingsResponse settingsResponse = client().admin().indices().prepareGetSettings("foo").get();
assertThat(settingsResponse.getSetting("foo", "index.a"), equalTo("b"));
settingsResponse = client().admin().indices().prepareGetSettings("bar*").get();
assertThat(settingsResponse.getSetting("bar", "index.a"), equalTo("b"));
assertThat(settingsResponse.getSetting("barbaz", "index.a"), equalTo("b"));
settingsResponse = client().admin().indices().prepareGetSettings("_all").get();
assertThat(settingsResponse.getSetting("foo", "index.c"), equalTo("d"));
assertThat(settingsResponse.getSetting("foobar", "index.c"), equalTo("d"));
assertThat(settingsResponse.getSetting("bar", "index.c"), equalTo("d"));
assertThat(settingsResponse.getSetting("barbaz", "index.c"), equalTo("d"));
assertAcked(client().admin().indices().prepareOpen("_all").get());
// NOTE(review): this try/catch accepts either a clean update or the specific
// "non dynamic settings" rejection; index.e is registered as non-dynamic in
// TestPlugin, so the exception path is the one exercised on an open index.
try {
verify(client().admin().indices().prepareUpdateSettings("barbaz").setSettings(Settings.builder().put("e", "f")), false);
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), startsWith("Can't update non dynamic settings [[index.e]] for open indices [[barbaz"));
}
// A wildcard matching nothing fails for update-settings.
verify(client().admin().indices().prepareUpdateSettings("baz*").setSettings(Settings.builder().put("a", "b")), true);
}
/** Builds a match-all search request over {@code indices}. */
private static SearchRequestBuilder search(String... indices) {
SearchRequestBuilder builder = client().prepareSearch(indices);
return builder.setQuery(matchAllQuery());
}
/**
 * Builds a multi-search request wrapping a single match-all search over
 * {@code indices}; {@code options} are applied only when non-null.
 */
private static MultiSearchRequestBuilder msearch(IndicesOptions options, String... indices) {
SearchRequestBuilder innerSearch = client().prepareSearch(indices).setQuery(matchAllQuery());
MultiSearchRequestBuilder builder = client().prepareMultiSearch();
if (options != null) {
builder.setIndicesOptions(options);
}
return builder.add(innerSearch);
}
/** Builds a clear-cache request for {@code indices}. */
private static ClearIndicesCacheRequestBuilder clearCache(String... indices) {
return client().admin().indices().prepareClearCache(indices);
}
/** Builds a flush request for {@code indices}. */
private static FlushRequestBuilder _flush(String... indices) {
return client().admin().indices().prepareFlush(indices);
}
/** Builds a segments request for {@code indices}. */
private static IndicesSegmentsRequestBuilder segments(String... indices) {
return client().admin().indices().prepareSegments(indices);
}
/** Builds an indices-stats request for {@code indices}. */
private static IndicesStatsRequestBuilder stats(String... indices) {
return client().admin().indices().prepareStats(indices);
}
/** Builds a force-merge request for {@code indices}. */
private static ForceMergeRequestBuilder forceMerge(String... indices) {
return client().admin().indices().prepareForceMerge(indices);
}
/** Builds a refresh request for {@code indices}. */
private static RefreshRequestBuilder refreshBuilder(String... indices) {
return client().admin().indices().prepareRefresh(indices);
}
/** Builds a validate-query request for {@code indices}. */
private static ValidateQueryRequestBuilder validateQuery(String... indices) {
return client().admin().indices().prepareValidateQuery(indices);
}
/** Builds an aliases-exist request for the placeholder alias "dummy" against {@code indices}. */
private static AliasesExistRequestBuilder aliasExists(String... indices) {
AliasesExistRequestBuilder builder = client().admin().indices().prepareAliasesExist("dummy");
return builder.addIndices(indices);
}
/** Builds a types-exist request for the placeholder type "dummy" against {@code indices}. */
private static TypesExistsRequestBuilder typesExists(String... indices) {
TypesExistsRequestBuilder builder = client().admin().indices().prepareTypesExists(indices);
return builder.setTypes("dummy");
}
/** Builds a get-aliases request for the placeholder alias "dummy" against {@code indices}. */
private static GetAliasesRequestBuilder getAliases(String... indices) {
GetAliasesRequestBuilder builder = client().admin().indices().prepareGetAliases("dummy");
return builder.addIndices(indices);
}
/** Builds a get-field-mappings request for {@code indices}. */
private static GetFieldMappingsRequestBuilder getFieldMapping(String... indices) {
return client().admin().indices().prepareGetFieldMappings(indices);
}
/** Builds a get-mappings request for {@code indices}. */
private static GetMappingsRequestBuilder getMapping(String... indices) {
return client().admin().indices().prepareGetMappings(indices);
}
/** Builds a get-settings request for {@code indices}. */
private static GetSettingsRequestBuilder getSettings(String... indices) {
return client().admin().indices().prepareGetSettings(indices);
}
/** Builds a blocking create-snapshot request named {@code name} in "dummy-repo" covering {@code indices}. */
private static CreateSnapshotRequestBuilder snapshot(String name, String... indices) {
CreateSnapshotRequestBuilder builder = client().admin().cluster().prepareCreateSnapshot("dummy-repo", name);
return builder.setWaitForCompletion(true).setIndices(indices);
}
/**
 * Builds a blocking restore request for snapshot {@code name} from "dummy-repo",
 * renaming each restored index to "&lt;original&gt;-copy-&lt;name&gt;" to avoid clashes.
 */
private static RestoreSnapshotRequestBuilder restore(String name, String... indices) {
RestoreSnapshotRequestBuilder builder = client().admin().cluster().prepareRestoreSnapshot("dummy-repo", name);
builder.setWaitForCompletion(true);
builder.setRenamePattern("(.+)").setRenameReplacement("$1-copy-" + name);
return builder.setIndices(indices);
}
/** Shorthand for {@link #verify(ActionRequestBuilder, boolean, long)} with an expected hit count of zero. */
static void verify(ActionRequestBuilder requestBuilder, boolean fail) {
verify(requestBuilder, fail, 0);
}
/**
 * Executes the request and asserts the expected outcome. When {@code fail} is
 * true the request must throw {@code IndexNotFoundException} or
 * {@code IndexClosedException} (for multi-search: the single inner response is
 * a failure). When false it must succeed; searches additionally assert
 * {@code expectedCount} hits.
 */
static void verify(ActionRequestBuilder requestBuilder, boolean fail, long expectedCount) {
if (requestBuilder instanceof MultiSearchRequestBuilder) {
// Multi-search never throws; per-item failures are reported in the response.
MultiSearchResponse multiSearchResponse = ((MultiSearchRequestBuilder) requestBuilder).get();
assertThat(multiSearchResponse.getResponses().length, equalTo(1));
if (fail) {
assertThat(multiSearchResponse.getResponses()[0].isFailure(), is(true));
assertThat(multiSearchResponse.getResponses()[0].getResponse(), nullValue());
} else {
assertThat(multiSearchResponse.getResponses()[0].getResponse(), notNullValue());
}
return;
}
if (fail) {
try {
requestBuilder.get();
fail("IndexNotFoundException or IndexClosedException was expected");
} catch (IndexNotFoundException | IndexClosedException expected) {}
} else if (requestBuilder instanceof SearchRequestBuilder) {
assertHitCount(((SearchRequestBuilder) requestBuilder).get(), expectedCount);
} else {
requestBuilder.get();
}
}
}
| |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.personalize.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
* Provides a summary of the properties of a solution version. For a complete listing, call the
* <a>DescribeSolutionVersion</a> API.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/SolutionVersionSummary" target="_top">AWS
* API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class SolutionVersionSummary implements Serializable, Cloneable, StructuredPojo {
// Backing fields for the summary attributes; all are object types and remain
// null until set via the corresponding setter/wither.
/**
 * <p>
 * The Amazon Resource Name (ARN) of the solution version.
 * </p>
 */
private String solutionVersionArn;
/**
 * <p>
 * The status of the solution version.
 * </p>
 * <p>
 * A solution version can be in one of the following states:
 * </p>
 * <ul>
 * <li>
 * <p>
 * CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED
 * </p>
 * </li>
 * </ul>
 */
private String status;
/**
 * <p>
 * The date and time (in Unix time) that this version of a solution was created.
 * </p>
 */
private java.util.Date creationDateTime;
/**
 * <p>
 * The date and time (in Unix time) that the solution version was last updated.
 * </p>
 */
private java.util.Date lastUpdatedDateTime;
/**
 * <p>
 * If a solution version fails, the reason behind the failure.
 * </p>
 */
private String failureReason;
/**
* <p>
* The Amazon Resource Name (ARN) of the solution version.
* </p>
*
* @param solutionVersionArn
* The Amazon Resource Name (ARN) of the solution version.
*/
public void setSolutionVersionArn(String solutionVersionArn) {
this.solutionVersionArn = solutionVersionArn;
}
/**
* <p>
* The Amazon Resource Name (ARN) of the solution version.
* </p>
*
* @return The Amazon Resource Name (ARN) of the solution version.
*/
public String getSolutionVersionArn() {
return this.solutionVersionArn;
}
/**
* <p>
* The Amazon Resource Name (ARN) of the solution version.
* </p>
*
* @param solutionVersionArn
* The Amazon Resource Name (ARN) of the solution version.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public SolutionVersionSummary withSolutionVersionArn(String solutionVersionArn) {
setSolutionVersionArn(solutionVersionArn);
return this;
}
/**
* <p>
* The status of the solution version.
* </p>
* <p>
* A solution version can be in one of the following states:
* </p>
* <ul>
* <li>
* <p>
* CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED
* </p>
* </li>
* </ul>
*
* @param status
* The status of the solution version.</p>
* <p>
* A solution version can be in one of the following states:
* </p>
* <ul>
* <li>
* <p>
* CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED
* </p>
* </li>
*/
public void setStatus(String status) {
this.status = status;
}
/**
* <p>
* The status of the solution version.
* </p>
* <p>
* A solution version can be in one of the following states:
* </p>
* <ul>
* <li>
* <p>
* CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED
* </p>
* </li>
* </ul>
*
* @return The status of the solution version.</p>
* <p>
* A solution version can be in one of the following states:
* </p>
* <ul>
* <li>
* <p>
* CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED
* </p>
* </li>
*/
public String getStatus() {
return this.status;
}
/**
* <p>
* The status of the solution version.
* </p>
* <p>
* A solution version can be in one of the following states:
* </p>
* <ul>
* <li>
* <p>
* CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED
* </p>
* </li>
* </ul>
*
* @param status
* The status of the solution version.</p>
* <p>
* A solution version can be in one of the following states:
* </p>
* <ul>
* <li>
* <p>
* CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED
* </p>
* </li>
* @return Returns a reference to this object so that method calls can be chained together.
*/
public SolutionVersionSummary withStatus(String status) {
setStatus(status);
return this;
}
/**
* <p>
* The date and time (in Unix time) that this version of a solution was created.
* </p>
*
* @param creationDateTime
* The date and time (in Unix time) that this version of a solution was created.
*/
public void setCreationDateTime(java.util.Date creationDateTime) {
this.creationDateTime = creationDateTime;
}
/**
* <p>
* The date and time (in Unix time) that this version of a solution was created.
* </p>
*
* @return The date and time (in Unix time) that this version of a solution was created.
*/
public java.util.Date getCreationDateTime() {
return this.creationDateTime;
}
/**
* <p>
* The date and time (in Unix time) that this version of a solution was created.
* </p>
*
* @param creationDateTime
* The date and time (in Unix time) that this version of a solution was created.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public SolutionVersionSummary withCreationDateTime(java.util.Date creationDateTime) {
setCreationDateTime(creationDateTime);
return this;
}
/**
* <p>
* The date and time (in Unix time) that the solution version was last updated.
* </p>
*
* @param lastUpdatedDateTime
* The date and time (in Unix time) that the solution version was last updated.
*/
public void setLastUpdatedDateTime(java.util.Date lastUpdatedDateTime) {
this.lastUpdatedDateTime = lastUpdatedDateTime;
}
/**
* <p>
* The date and time (in Unix time) that the solution version was last updated.
* </p>
*
* @return The date and time (in Unix time) that the solution version was last updated.
*/
public java.util.Date getLastUpdatedDateTime() {
return this.lastUpdatedDateTime;
}
/**
* <p>
* The date and time (in Unix time) that the solution version was last updated.
* </p>
*
* @param lastUpdatedDateTime
* The date and time (in Unix time) that the solution version was last updated.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public SolutionVersionSummary withLastUpdatedDateTime(java.util.Date lastUpdatedDateTime) {
setLastUpdatedDateTime(lastUpdatedDateTime);
return this;
}
/**
* <p>
* If a solution version fails, the reason behind the failure.
* </p>
*
* @param failureReason
* If a solution version fails, the reason behind the failure.
*/
public void setFailureReason(String failureReason) {
this.failureReason = failureReason;
}
/**
* <p>
* If a solution version fails, the reason behind the failure.
* </p>
*
* @return If a solution version fails, the reason behind the failure.
*/
public String getFailureReason() {
return this.failureReason;
}
/**
* <p>
* If a solution version fails, the reason behind the failure.
* </p>
*
* @param failureReason
* If a solution version fails, the reason behind the failure.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public SolutionVersionSummary withFailureReason(String failureReason) {
setFailureReason(failureReason);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getSolutionVersionArn() != null)
sb.append("SolutionVersionArn: ").append(getSolutionVersionArn()).append(",");
if (getStatus() != null)
sb.append("Status: ").append(getStatus()).append(",");
if (getCreationDateTime() != null)
sb.append("CreationDateTime: ").append(getCreationDateTime()).append(",");
if (getLastUpdatedDateTime() != null)
sb.append("LastUpdatedDateTime: ").append(getLastUpdatedDateTime()).append(",");
if (getFailureReason() != null)
sb.append("FailureReason: ").append(getFailureReason());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof SolutionVersionSummary == false)
return false;
SolutionVersionSummary other = (SolutionVersionSummary) obj;
if (other.getSolutionVersionArn() == null ^ this.getSolutionVersionArn() == null)
return false;
if (other.getSolutionVersionArn() != null && other.getSolutionVersionArn().equals(this.getSolutionVersionArn()) == false)
return false;
if (other.getStatus() == null ^ this.getStatus() == null)
return false;
if (other.getStatus() != null && other.getStatus().equals(this.getStatus()) == false)
return false;
if (other.getCreationDateTime() == null ^ this.getCreationDateTime() == null)
return false;
if (other.getCreationDateTime() != null && other.getCreationDateTime().equals(this.getCreationDateTime()) == false)
return false;
if (other.getLastUpdatedDateTime() == null ^ this.getLastUpdatedDateTime() == null)
return false;
if (other.getLastUpdatedDateTime() != null && other.getLastUpdatedDateTime().equals(this.getLastUpdatedDateTime()) == false)
return false;
if (other.getFailureReason() == null ^ this.getFailureReason() == null)
return false;
if (other.getFailureReason() != null && other.getFailureReason().equals(this.getFailureReason()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getSolutionVersionArn() == null) ? 0 : getSolutionVersionArn().hashCode());
hashCode = prime * hashCode + ((getStatus() == null) ? 0 : getStatus().hashCode());
hashCode = prime * hashCode + ((getCreationDateTime() == null) ? 0 : getCreationDateTime().hashCode());
hashCode = prime * hashCode + ((getLastUpdatedDateTime() == null) ? 0 : getLastUpdatedDateTime().hashCode());
hashCode = prime * hashCode + ((getFailureReason() == null) ? 0 : getFailureReason().hashCode());
return hashCode;
}
@Override
public SolutionVersionSummary clone() {
try {
return (SolutionVersionSummary) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
@com.amazonaws.annotation.SdkInternalApi
@Override
public void marshall(ProtocolMarshaller protocolMarshaller) {
com.amazonaws.services.personalize.model.transform.SolutionVersionSummaryMarshaller.getInstance().marshall(this, protocolMarshaller);
}
}
| |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.dmn.engine.impl;
import java.util.List;
import java.util.Set;
import org.flowable.common.engine.api.FlowableIllegalArgumentException;
import org.flowable.common.engine.api.query.CacheAwareQuery;
import org.flowable.common.engine.impl.interceptor.CommandContext;
import org.flowable.common.engine.impl.interceptor.CommandExecutor;
import org.flowable.common.engine.impl.query.AbstractQuery;
import org.flowable.dmn.api.DmnHistoricDecisionExecution;
import org.flowable.dmn.api.DmnHistoricDecisionExecutionQuery;
import org.flowable.dmn.engine.impl.persistence.entity.HistoricDecisionExecutionEntity;
import org.flowable.dmn.engine.impl.util.CommandContextUtil;
/**
 * Query object for historic DMN decision executions.
 *
 * <p>Each filter method stores its criterion and returns {@code this} so calls can be chained;
 * mandatory criteria reject {@code null} with a {@link FlowableIllegalArgumentException}.
 *
 * @author Tijs Rademakers
 */
public class HistoricDecisionExecutionQueryImpl extends AbstractQuery<DmnHistoricDecisionExecutionQuery, DmnHistoricDecisionExecution>
        implements DmnHistoricDecisionExecutionQuery, CacheAwareQuery<HistoricDecisionExecutionEntity> {

    private static final long serialVersionUID = 1L;

    protected String id;
    protected Set<String> ids;
    protected String decisionDefinitionId;
    protected String deploymentId;
    protected String decisionKey;
    protected String instanceId;
    protected String executionId;
    protected String activityId;
    protected String scopeType;
    protected String processInstanceIdWithChildren;
    protected String caseInstanceIdWithChildren;
    protected Boolean failed;
    protected String tenantId;
    protected String tenantIdLike;
    protected boolean withoutTenantId;

    public HistoricDecisionExecutionQueryImpl() {
    }

    public HistoricDecisionExecutionQueryImpl(CommandContext commandContext) {
        super(commandContext);
    }

    public HistoricDecisionExecutionQueryImpl(CommandExecutor commandExecutor) {
        super(commandExecutor);
    }

    /**
     * Rejects a {@code null} filter value with an error message naming the offending parameter.
     * Centralizes the null check that was previously duplicated in every mandatory filter method.
     */
    private static void checkNotNull(Object value, String parameterName) {
        if (value == null) {
            throw new FlowableIllegalArgumentException(parameterName + " is null");
        }
    }

    @Override
    public DmnHistoricDecisionExecutionQuery id(String id) {
        this.id = id;
        return this;
    }

    @Override
    public DmnHistoricDecisionExecutionQuery ids(Set<String> ids) {
        this.ids = ids;
        return this;
    }

    @Override
    public DmnHistoricDecisionExecutionQuery decisionDefinitionId(String decisionDefinitionId) {
        checkNotNull(decisionDefinitionId, "decisionDefinitionId");
        this.decisionDefinitionId = decisionDefinitionId;
        return this;
    }

    @Override
    public DmnHistoricDecisionExecutionQuery deploymentId(String deploymentId) {
        checkNotNull(deploymentId, "deploymentId");
        this.deploymentId = deploymentId;
        return this;
    }

    @Override
    public DmnHistoricDecisionExecutionQuery decisionKey(String decisionKey) {
        checkNotNull(decisionKey, "decisionKey");
        this.decisionKey = decisionKey;
        return this;
    }

    @Override
    public DmnHistoricDecisionExecutionQuery instanceId(String instanceId) {
        checkNotNull(instanceId, "instanceId");
        this.instanceId = instanceId;
        return this;
    }

    @Override
    public DmnHistoricDecisionExecutionQuery executionId(String executionId) {
        checkNotNull(executionId, "executionId");
        this.executionId = executionId;
        return this;
    }

    @Override
    public DmnHistoricDecisionExecutionQuery activityId(String activityId) {
        checkNotNull(activityId, "activityId");
        this.activityId = activityId;
        return this;
    }

    @Override
    public DmnHistoricDecisionExecutionQuery scopeType(String scopeType) {
        checkNotNull(scopeType, "scopeType");
        this.scopeType = scopeType;
        return this;
    }

    @Override
    public DmnHistoricDecisionExecutionQuery processInstanceIdWithChildren(String processInstanceId) {
        this.processInstanceIdWithChildren = processInstanceId;
        return this;
    }

    @Override
    public DmnHistoricDecisionExecutionQuery caseInstanceIdWithChildren(String caseInstanceId) {
        this.caseInstanceIdWithChildren = caseInstanceId;
        return this;
    }

    @Override
    public DmnHistoricDecisionExecutionQuery failed(Boolean failed) {
        checkNotNull(failed, "failed");
        this.failed = failed;
        return this;
    }

    @Override
    public DmnHistoricDecisionExecutionQuery tenantId(String tenantId) {
        checkNotNull(tenantId, "tenantId");
        this.tenantId = tenantId;
        return this;
    }

    @Override
    public DmnHistoricDecisionExecutionQuery tenantIdLike(String tenantIdLike) {
        // Bug fix: the error message previously said "tenantId is null" for this parameter.
        checkNotNull(tenantIdLike, "tenantIdLike");
        this.tenantIdLike = tenantIdLike;
        return this;
    }

    @Override
    public DmnHistoricDecisionExecutionQuery withoutTenantId() {
        this.withoutTenantId = true;
        return this;
    }

    // sorting ////////////////////////////////////////////

    @Override
    public DmnHistoricDecisionExecutionQuery orderByStartTime() {
        return orderBy(HistoricDecisionExecutionQueryProperty.START_TIME);
    }

    @Override
    public DmnHistoricDecisionExecutionQuery orderByEndTime() {
        return orderBy(HistoricDecisionExecutionQueryProperty.END_TIME);
    }

    @Override
    public DmnHistoricDecisionExecutionQuery orderByTenantId() {
        return orderBy(HistoricDecisionExecutionQueryProperty.TENANT_ID);
    }

    // results ////////////////////////////////////////////

    @Override
    public long executeCount(CommandContext commandContext) {
        return CommandContextUtil.getHistoricDecisionExecutionEntityManager().findHistoricDecisionExecutionCountByQueryCriteria(this);
    }

    @Override
    public List<DmnHistoricDecisionExecution> executeList(CommandContext commandContext) {
        return CommandContextUtil.getHistoricDecisionExecutionEntityManager().findHistoricDecisionExecutionsByQueryCriteria(this);
    }

    // getters ////////////////////////////////////////////

    @Override
    public String getId() {
        return id;
    }

    public Set<String> getIds() {
        return ids;
    }

    public String getDecisionDefinitionId() {
        return decisionDefinitionId;
    }

    public String getDeploymentId() {
        return deploymentId;
    }

    public String getDecisionKey() {
        return decisionKey;
    }

    public String getInstanceId() {
        return instanceId;
    }

    public String getExecutionId() {
        return executionId;
    }

    public String getActivityId() {
        return activityId;
    }

    public String getScopeType() {
        return scopeType;
    }

    public String getProcessInstanceIdWithChildren() {
        return processInstanceIdWithChildren;
    }

    public String getCaseInstanceIdWithChildren() {
        return caseInstanceIdWithChildren;
    }

    public Boolean getFailed() {
        return failed;
    }

    public String getTenantId() {
        return tenantId;
    }

    public String getTenantIdLike() {
        return tenantIdLike;
    }

    public boolean isWithoutTenantId() {
        return withoutTenantId;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.util;
import java.lang.reflect.Array;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.text.DecimalFormat;
import java.text.DecimalFormatSymbols;
import java.util.IdentityHashMap;
import java.util.Locale;
import java.util.Map;
/**
 * Estimates the size (memory representation) of Java objects.
 * <p>
 * This class uses assumptions that were discovered for the Hotspot
 * virtual machine. If you use a non-OpenJDK/Oracle-based JVM,
 * the measurements may be slightly wrong.
 * <p>
 * NOTE(review): the HotSpot diagnostic probe in the static initializer below is
 * deliberately commented out (to stay compatible with environments that lack
 * {@code java.lang.management}), so on 64-bit JVMs this class always assumes
 * uncompressed 8-byte references — sizes may be overestimated on JVMs that
 * actually run with compressed oops.
 *
 * @see #shallowSizeOf(Object)
 * @see #shallowSizeOfInstance(Class)
 *
 * @lucene.internal
 */
public final class RamUsageEstimator {

  /** One kilobyte bytes. */
  public static final long ONE_KB = 1024;

  /** One megabyte bytes. */
  public static final long ONE_MB = ONE_KB * ONE_KB;

  /** One gigabyte bytes.*/
  public static final long ONE_GB = ONE_KB * ONE_MB;

  /** No instantiation. */
  private RamUsageEstimator() {}

  /**
   * Number of bytes used to represent a {@code boolean} in binary form
   * @deprecated use {@code 1} instead.
   */
  @Deprecated
  public final static int NUM_BYTES_BOOLEAN = 1;

  /**
   * Number of bytes used to represent a {@code byte} in binary form
   * @deprecated use {@code 1} instead.
   */
  @Deprecated
  public final static int NUM_BYTES_BYTE = 1;

  /**
   * Number of bytes used to represent a {@code char} in binary form
   * @deprecated use {@code 2} instead.
   */
  @Deprecated
  public final static int NUM_BYTES_CHAR = 2;

  /**
   * Number of bytes used to represent a {@code short} in binary form
   * @deprecated use {@code 2} instead.
   */
  @Deprecated
  public final static int NUM_BYTES_SHORT = 2;

  /**
   * Number of bytes used to represent an {@code int} in binary form
   * @deprecated use {@code 4} instead.
   */
  @Deprecated
  public final static int NUM_BYTES_INT = 4;

  /**
   * Number of bytes used to represent a {@code float} in binary form
   * @deprecated use {@code 4} instead.
   */
  @Deprecated
  public final static int NUM_BYTES_FLOAT = 4;

  /**
   * Number of bytes used to represent a {@code long} in binary form
   * @deprecated use {@code 8} instead.
   */
  @Deprecated
  public final static int NUM_BYTES_LONG = 8;

  /**
   * Number of bytes used to represent a {@code double} in binary form
   * @deprecated use {@code 8} instead.
   */
  @Deprecated
  public final static int NUM_BYTES_DOUBLE = 8;

  /**
   * True, iff compressed references (oops) are enabled by this JVM
   */
  public final static boolean COMPRESSED_REFS_ENABLED;

  /**
   * Number of bytes this JVM uses to represent an object reference.
   */
  public final static int NUM_BYTES_OBJECT_REF;

  /**
   * Number of bytes to represent an object header (no fields, no alignments).
   */
  public final static int NUM_BYTES_OBJECT_HEADER;

  /**
   * Number of bytes to represent an array header (no content, but with alignments).
   */
  public final static int NUM_BYTES_ARRAY_HEADER;

  /**
   * A constant specifying the object alignment boundary inside the JVM. Objects will
   * always take a full multiple of this constant, possibly wasting some space.
   */
  public final static int NUM_BYTES_OBJECT_ALIGNMENT;

  /**
   * Sizes of primitive classes.
   */
  private static final Map<Class<?>,Integer> primitiveSizes = new IdentityHashMap<>();
  static {
    primitiveSizes.put(boolean.class, 1);
    primitiveSizes.put(byte.class, 1);
    primitiveSizes.put(char.class, 2);
    primitiveSizes.put(short.class, 2);
    primitiveSizes.put(int.class, 4);
    primitiveSizes.put(float.class, 4);
    primitiveSizes.put(double.class, 8);
    primitiveSizes.put(long.class, 8);
  }

  /**
   * JVMs typically cache small longs. This tries to find out what the range is.
   */
  static final long LONG_CACHE_MIN_VALUE, LONG_CACHE_MAX_VALUE;
  static final int LONG_SIZE;

  /** For testing only */
  static final boolean JVM_IS_HOTSPOT_64BIT;

  static final String MANAGEMENT_FACTORY_CLASS = "java.lang.management.ManagementFactory";
  static final String HOTSPOT_BEAN_CLASS = "com.sun.management.HotSpotDiagnosticMXBean";

  /**
   * Initialize constants and try to collect information about the JVM internals.
   */
  static {
    if (Constants.JRE_IS_64BIT) {
      // Try to get compressed oops and object alignment (the default seems to be 8 on Hotspot);
      // (this only works on 64 bit, on 32 bits the alignment and reference size is fixed):
      boolean compressedOops = false;
      int objectAlignment = 8;
      boolean isHotspot = false;
      /**
       * I'm just going to comment this bit out. Doesn't look too important!
       * This is because we can't use ManagementFactory on Google App Engine
      try {
        final Class<?> beanClazz = Class.forName(HOTSPOT_BEAN_CLASS);
        // we use reflection for this, because the management factory is not part
        // of Java 8's compact profile:
        final Object hotSpotBean = Class.forName(MANAGEMENT_FACTORY_CLASS)
          .getMethod("getPlatformMXBean", Class.class)
          .invoke(null, beanClazz);
        if (hotSpotBean != null) {
          isHotspot = true;
          final Method getVMOptionMethod = beanClazz.getMethod("getVMOption", String.class);
          try {
            final Object vmOption = getVMOptionMethod.invoke(hotSpotBean, "UseCompressedOops");
            compressedOops = Boolean.parseBoolean(
              vmOption.getClass().getMethod("getValue").invoke(vmOption).toString()
            );
          } catch (ReflectiveOperationException | RuntimeException e) {
            isHotspot = false;
          }
          try {
            final Object vmOption = getVMOptionMethod.invoke(hotSpotBean, "ObjectAlignmentInBytes");
            objectAlignment = Integer.parseInt(
              vmOption.getClass().getMethod("getValue").invoke(vmOption).toString()
            );
          } catch (ReflectiveOperationException | RuntimeException e) {
            isHotspot = false;
          }
        }
      } catch (ReflectiveOperationException | RuntimeException e) {
        isHotspot = false;
      }
      */
      // Because the probe above is disabled, these remain at their pessimistic defaults:
      // not Hotspot, no compressed oops, 8-byte alignment.
      JVM_IS_HOTSPOT_64BIT = isHotspot;
      COMPRESSED_REFS_ENABLED = compressedOops;
      NUM_BYTES_OBJECT_ALIGNMENT = objectAlignment;
      // reference size is 4, if we have compressed oops:
      NUM_BYTES_OBJECT_REF = COMPRESSED_REFS_ENABLED ? 4 : 8;
      // "best guess" based on reference size:
      NUM_BYTES_OBJECT_HEADER = 8 + NUM_BYTES_OBJECT_REF;
      // array header is NUM_BYTES_OBJECT_HEADER + NUM_BYTES_INT, but aligned (object alignment):
      NUM_BYTES_ARRAY_HEADER = (int) alignObjectSize(NUM_BYTES_OBJECT_HEADER + 4);
    } else {
      JVM_IS_HOTSPOT_64BIT = false;
      COMPRESSED_REFS_ENABLED = false;
      NUM_BYTES_OBJECT_ALIGNMENT = 8;
      NUM_BYTES_OBJECT_REF = 4;
      NUM_BYTES_OBJECT_HEADER = 8;
      // For 32 bit JVMs, no extra alignment of array header:
      NUM_BYTES_ARRAY_HEADER = NUM_BYTES_OBJECT_HEADER + 4;
    }

    // get min/max value of cached Long class instances:
    // NOTE: the == on boxed Longs is deliberate — it is an identity comparison that
    // probes whether Long.valueOf returns the same cached instance for this value.
    long longCacheMinValue = 0;
    while (longCacheMinValue > Long.MIN_VALUE
        && Long.valueOf(longCacheMinValue - 1) == Long.valueOf(longCacheMinValue - 1)) {
      longCacheMinValue -= 1;
    }
    long longCacheMaxValue = -1;
    while (longCacheMaxValue < Long.MAX_VALUE
        && Long.valueOf(longCacheMaxValue + 1) == Long.valueOf(longCacheMaxValue + 1)) {
      longCacheMaxValue += 1;
    }
    LONG_CACHE_MIN_VALUE = longCacheMinValue;
    LONG_CACHE_MAX_VALUE = longCacheMaxValue;
    LONG_SIZE = (int) shallowSizeOfInstance(Long.class);
  }

  /**
   * Aligns an object size to be the next multiple of {@link #NUM_BYTES_OBJECT_ALIGNMENT}.
   */
  public static long alignObjectSize(long size) {
    // Round up: add (alignment - 1), then truncate back to the alignment boundary.
    size += (long) NUM_BYTES_OBJECT_ALIGNMENT - 1L;
    return size - (size % NUM_BYTES_OBJECT_ALIGNMENT);
  }

  /**
   * Return the size of the provided {@link Long} object, returning 0 if it is
   * cached by the JVM and its shallow size otherwise.
   */
  public static long sizeOf(Long value) {
    // Cached instances are shared by the JVM, so they cost nothing extra per use.
    if (value >= LONG_CACHE_MIN_VALUE && value <= LONG_CACHE_MAX_VALUE) {
      return 0;
    }
    return LONG_SIZE;
  }

  /** Returns the size in bytes of the byte[] object. */
  public static long sizeOf(byte[] arr) {
    return alignObjectSize((long) NUM_BYTES_ARRAY_HEADER + arr.length);
  }

  /** Returns the size in bytes of the boolean[] object. */
  public static long sizeOf(boolean[] arr) {
    return alignObjectSize((long) NUM_BYTES_ARRAY_HEADER + arr.length);
  }

  /** Returns the size in bytes of the char[] object. */
  public static long sizeOf(char[] arr) {
    return alignObjectSize((long) NUM_BYTES_ARRAY_HEADER + (long) 2 * arr.length);
  }

  /** Returns the size in bytes of the short[] object. */
  public static long sizeOf(short[] arr) {
    return alignObjectSize((long) NUM_BYTES_ARRAY_HEADER + (long) 2 * arr.length);
  }

  /** Returns the size in bytes of the int[] object. */
  public static long sizeOf(int[] arr) {
    return alignObjectSize((long) NUM_BYTES_ARRAY_HEADER + (long) 4 * arr.length);
  }

  /** Returns the size in bytes of the float[] object. */
  public static long sizeOf(float[] arr) {
    return alignObjectSize((long) NUM_BYTES_ARRAY_HEADER + (long) 4 * arr.length);
  }

  /** Returns the size in bytes of the long[] object. */
  public static long sizeOf(long[] arr) {
    return alignObjectSize((long) NUM_BYTES_ARRAY_HEADER + (long) 8 * arr.length);
  }

  /** Returns the size in bytes of the double[] object. */
  public static long sizeOf(double[] arr) {
    return alignObjectSize((long) NUM_BYTES_ARRAY_HEADER + (long) 8 * arr.length);
  }

  /** Returns the shallow size in bytes of the Object[] object. */
  // Use this method instead of #shallowSizeOf(Object) to avoid costly reflection
  public static long shallowSizeOf(Object[] arr) {
    return alignObjectSize((long) NUM_BYTES_ARRAY_HEADER + (long) NUM_BYTES_OBJECT_REF * arr.length);
  }

  /**
   * Estimates a "shallow" memory usage of the given object. For arrays, this will be the
   * memory taken by array storage (no subreferences will be followed). For objects, this
   * will be the memory taken by the fields.
   *
   * JVM object alignments are also applied.
   */
  public static long shallowSizeOf(Object obj) {
    if (obj == null) return 0;
    final Class<?> clz = obj.getClass();
    if (clz.isArray()) {
      return shallowSizeOfArray(obj);
    } else {
      return shallowSizeOfInstance(clz);
    }
  }

  /**
   * Returns the shallow instance size in bytes an instance of the given class would occupy.
   * This works with all conventional classes and primitive types, but not with arrays
   * (the size then depends on the number of elements and varies from object to object).
   *
   * @see #shallowSizeOf(Object)
   * @throws IllegalArgumentException if {@code clazz} is an array class.
   */
  public static long shallowSizeOfInstance(Class<?> clazz) {
    if (clazz.isArray())
      throw new IllegalArgumentException("This method does not work with array classes.");
    if (clazz.isPrimitive())
      return primitiveSizes.get(clazz);

    long size = NUM_BYTES_OBJECT_HEADER;

    // Walk type hierarchy: inherited fields from every superclass also occupy the instance.
    for (;clazz != null; clazz = clazz.getSuperclass()) {
      final Class<?> target = clazz;
      // doPrivileged so the reflective field listing works under a security manager.
      final Field[] fields = AccessController.doPrivileged(new PrivilegedAction<Field[]>() {
        @Override
        public Field[] run() {
          return target.getDeclaredFields();
        }
      });
      for (Field f : fields) {
        // Static fields live on the class, not the instance — skip them.
        if (!Modifier.isStatic(f.getModifiers())) {
          size = adjustForField(size, f);
        }
      }
    }
    return alignObjectSize(size);
  }

  /**
   * Return shallow size of any <code>array</code>.
   */
  private static long shallowSizeOfArray(Object array) {
    long size = NUM_BYTES_ARRAY_HEADER;
    final int len = Array.getLength(array);
    if (len > 0) {
      Class<?> arrayElementClazz = array.getClass().getComponentType();
      if (arrayElementClazz.isPrimitive()) {
        size += (long) len * primitiveSizes.get(arrayElementClazz);
      } else {
        // Shallow: reference arrays are counted as references only; referents are not followed.
        size += (long) NUM_BYTES_OBJECT_REF * len;
      }
    }
    return alignObjectSize(size);
  }

  /**
   * Accumulates the size contribution of one field into the running shallow-size total.
   * <code>sizeSoFar</code> is the object's size measured so far. <code>f</code> is the field being probed.
   *
   * <p>The field's unaligned representation size (primitive width, or one object reference)
   * is simply added to <code>sizeSoFar</code>; no per-field offsets, packing, or alignment
   * are modeled (see the TODO below).
   */
  static long adjustForField(long sizeSoFar, final Field f) {
    final Class<?> type = f.getType();
    final int fsize = type.isPrimitive() ? primitiveSizes.get(type) : NUM_BYTES_OBJECT_REF;
    // TODO: No alignments based on field type/ subclass fields alignments?
    return sizeSoFar + fsize;
  }

  /**
   * Returns <code>size</code> in human-readable units (GB, MB, KB or bytes).
   */
  public static String humanReadableUnits(long bytes) {
    return humanReadableUnits(bytes,
        new DecimalFormat("0.#", DecimalFormatSymbols.getInstance(Locale.ROOT)));
  }

  /**
   * Returns <code>size</code> in human-readable units (GB, MB, KB or bytes).
   */
  public static String humanReadableUnits(long bytes, DecimalFormat df) {
    // Pick the largest unit that yields a value >= 1.
    if (bytes / ONE_GB > 0) {
      return df.format((float) bytes / ONE_GB) + " GB";
    } else if (bytes / ONE_MB > 0) {
      return df.format((float) bytes / ONE_MB) + " MB";
    } else if (bytes / ONE_KB > 0) {
      return df.format((float) bytes / ONE_KB) + " KB";
    } else {
      return bytes + " bytes";
    }
  }

  /**
   * Return the size of the provided array of {@link Accountable}s by summing
   * up the shallow size of the array and the
   * {@link Accountable#ramBytesUsed() memory usage} reported by each
   * {@link Accountable}.
   */
  public static long sizeOf(Accountable[] accountables) {
    long size = shallowSizeOf(accountables);
    for (Accountable accountable : accountables) {
      if (accountable != null) {
        size += accountable.ramBytesUsed();
      }
    }
    return size;
  }
}
| |
/**
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2011 Eric Haddad Koenig
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.all.ultrapeer.services;
import static com.all.shared.messages.MessEngineConstants.FRIENDSHIP_REQUEST_RESULT_TYPE;
import static com.all.shared.messages.MessEngineConstants.FRIENDSHIP_REQUEST_TYPE;
import static com.all.shared.messages.MessEngineConstants.FRIENDSHIP_RESPONSE_TYPE;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import org.junit.Before;
import org.junit.Test;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.mockito.Spy;
import com.all.messengine.Message;
import com.all.messengine.impl.StubMessEngine;
import com.all.shared.messages.FriendshipRequestStatus;
import com.all.shared.model.AllMessage;
import com.all.shared.model.ContactInfo;
import com.all.shared.model.ContactRequest;
import com.all.ultrapeer.util.AllServerProxy;
/**
 * Unit tests for {@code FriendshipService}: exercises friendship request,
 * reciprocal-invitation and friendship response messages routed through a
 * stubbed message engine, with the server backend mocked out.
 */
public class TestFriendshipService {
  @SuppressWarnings("deprecation")
  @InjectMocks
  private FriendshipService service = new FriendshipService();
  // Spied stub engine: records every message the service sends so tests can
  // fetch them back by type and count them.
  @Spy
  private StubMessEngine testEngine = new StubMessEngine();
  @Mock
  private AllServerProxy userBackend;
  @Mock
  private ContactInfo requester;
  @Mock
  private ContactInfo requested;
  // Fresh request between requester (id 1) and requested (id 2), rebuilt in init().
  private ContactRequest contactRequest;
  private long requesterId;
  private long requestedId;

  @Before
  public void init() throws Exception {
    MockitoAnnotations.initMocks(this);
    testEngine.setup(service);
    contactRequest = new ContactRequest(requester, requested);
    contactRequest.setId(100L);
    requesterId = 1L;
    when(requester.getId()).thenReturn(requesterId);
    requestedId = 2L;
    when(requested.getId()).thenReturn(requestedId);
  }

  @Test
  public void shouldProcessFriendshipRequest() throws Exception {
    long contactRequestId = 5L;
    // Backend answers with the id of the newly created contact request.
    when(userBackend.getForObject(anyString(), eq(String.class), eq(requesterId), eq(requestedId))).thenReturn(
        "" + contactRequestId);
    testEngine.send(new AllMessage<ContactRequest>(FRIENDSHIP_REQUEST_TYPE, contactRequest));
    verify(userBackend).getForObject(anyString(), eq(String.class), eq(requesterId), eq(requestedId));
    // Both the forwarded request and the result message must have been sent.
    assertNotNull(testEngine.getMessage(FRIENDSHIP_REQUEST_TYPE));
    assertNotNull(testEngine.getMessage(FRIENDSHIP_REQUEST_RESULT_TYPE));
  }

  @Test
  public void shouldCreateFriendshipResponseAlertInCaseOfReciprocalInvitations() throws Exception {
    // No id yet: the request has not been persisted before being processed.
    contactRequest.setId(null);
    when(userBackend.getForObject(anyString(), eq(String.class), eq(requesterId), eq(requestedId))).thenReturn(
        FriendshipRequestStatus.RECIPROCAL_INVITATION.toString());
    testEngine.send(new AllMessage<ContactRequest>(FRIENDSHIP_REQUEST_TYPE, contactRequest));
    verify(userBackend).getForObject(anyString(), eq(String.class), eq(requesterId), eq(requestedId));
    Message<?> responseMessage = testEngine.getMessage(FRIENDSHIP_REQUEST_RESULT_TYPE);
    assertNotNull(responseMessage);
    assertEquals(FriendshipRequestStatus.RECIPROCAL_INVITATION.toString(), responseMessage.getBody());
    // NOTE(review): the assertion expects 2 sent messages, but the original
    // comment claimed 4 ("the request, 2 FriendshipResponseAlerts and the
    // friendship request result response") — confirm the intended count.
    assertEquals(2, testEngine.sentMessagesCount());
  }

  @Test
  public void shouldCreateFriendshipResponseAlertInCaseOfRequesterInvitation() throws Exception {
    contactRequest.setId(null);
    when(userBackend.getForObject(anyString(), eq(String.class), eq(requesterId), eq(requestedId))).thenReturn(
        FriendshipRequestStatus.REACTIVATED_FRIENDSHIP.toString());
    testEngine.send(new AllMessage<ContactRequest>(FRIENDSHIP_REQUEST_TYPE, contactRequest));
    verify(userBackend).getForObject(anyString(), eq(String.class), eq(requesterId), eq(requestedId));
    Message<?> responseMessage = testEngine.getMessage(FRIENDSHIP_REQUEST_RESULT_TYPE);
    assertNotNull(responseMessage);
    assertEquals(FriendshipRequestStatus.REACTIVATED_FRIENDSHIP.toString(), responseMessage.getBody());
    // NOTE(review): the assertion expects 2 sent messages, but the original
    // comment claimed 3 — confirm the intended count.
    assertEquals(2, testEngine.sentMessagesCount());
  }

  @Test
  public void shouldProcessFriendshipResponse() throws Exception {
    contactRequest.accept();
    testEngine.send(new AllMessage<ContactRequest>(FRIENDSHIP_RESPONSE_TYPE, contactRequest));
    // Response handling appears to be asynchronous; the sleep gives it time to
    // reach the backend. TODO(review): consider Mockito's verify-with-timeout
    // instead of a fixed sleep to reduce flakiness.
    Thread.sleep(300);
    verify(userBackend).put(anyString(), eq("true"), eq(contactRequest.getId()));
  }
}
| |
/*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.fileEditor.impl;
import com.intellij.ide.ui.UISettings;
import com.intellij.ide.ui.UISettingsListener;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.components.AbstractProjectComponent;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.fileEditor.*;
import com.intellij.openapi.fileEditor.ex.FileEditorManagerEx;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.project.DumbAwareRunnable;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.startup.StartupManager;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.InvalidDataException;
import com.intellij.openapi.util.JDOMExternalizable;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileManager;
import com.intellij.util.ArrayUtilRt;
import com.intellij.util.messages.MessageBusConnection;
import org.jdom.Element;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
import java.util.List;
/**
 * Per-project history of files opened in editors, together with the last
 * {@link FileEditorState} recorded for each editor/provider pair, so that a
 * file can be reopened with its previous state and selected provider. The
 * history is persisted through {@link JDOMExternalizable}; entries are kept
 * oldest-first, i.e. the most recently used file is the last list element.
 */
public final class EditorHistoryManager extends AbstractProjectComponent implements JDOMExternalizable {
  private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.fileEditor.impl.EditorHistoryManager");
  // Raw persisted XML; parsing is deferred until the project is initialized
  // (see readExternal() and projectOpened()), after which this is nulled out.
  private Element myElement;

  public static EditorHistoryManager getInstance(final Project project){
    return project.getComponent(EditorHistoryManager.class);
  }

  /**
   * State corresponding to the most recent file is the last
   */
  private final ArrayList<HistoryEntry> myEntriesList = new ArrayList<HistoryEntry>();

  /** Invoked by reflection */
  EditorHistoryManager(final Project project, final UISettings uiSettings){
    super(project);
    // Shrink the history whenever the user lowers the recent-files limit.
    uiSettings.addUISettingsListener(new MyUISettingsListener(), project);
  }

  public void projectOpened(){
    MessageBusConnection connection = myProject.getMessageBus().connect();
    connection.subscribe(FileEditorManagerListener.Before.FILE_EDITOR_MANAGER, new MyEditorManagerBeforeListener());
    connection.subscribe(FileEditorManagerListener.FILE_EDITOR_MANAGER, new MyEditorManagerListener());
    // Parse the previously stored history only after startup; see readExternal()
    // for why parsing cannot happen earlier.
    StartupManager.getInstance(myProject).runWhenProjectIsInitialized(
      new DumbAwareRunnable() {
        public void run() {
          // myElement may be null if node that corresponds to this manager does not exist
          if (myElement != null) {
            final List children = myElement.getChildren(HistoryEntry.TAG);
            myElement = null;
            //noinspection unchecked
            for (final Element e : (Iterable<Element>)children) {
              try {
                myEntriesList.add(new HistoryEntry(myProject, e));
              }
              catch (InvalidDataException e1) {
                // OK here: a stale/unreadable entry is silently dropped
              }
              catch (ProcessCanceledException e1) {
                // OK here
              }
              catch (Exception anyException) {
                LOG.error(anyException);
              }
            }
            trimToSize();
          }
        }
      }
    );
  }

  @NotNull
  public String getComponentName(){
    return "editorHistoryManager";
  }

  private void fileOpenedImpl(@NotNull final VirtualFile file) {
    fileOpenedImpl(file, null, null);
  }

  /**
   * Makes file most recent one
   *
   * @param fallbackEditor editor to use when the editor manager reports no
   *        editors / no selected editor for the file
   * @param fallbackProvider provider matching {@code fallbackEditor}
   */
  private void fileOpenedImpl(@NotNull final VirtualFile file,
                              @Nullable final FileEditor fallbackEditor,
                              @Nullable FileEditorProvider fallbackProvider)
  {
    ApplicationManager.getApplication().assertIsDispatchThread();
    // don't add files that cannot be found via VFM (light & etc.)
    if (VirtualFileManager.getInstance().findFileByUrl(file.getUrl()) == null) return;
    final FileEditorManagerEx editorManager = FileEditorManagerEx.getInstanceEx(myProject);
    final Pair<FileEditor[], FileEditorProvider[]> editorsWithProviders = editorManager.getEditorsWithProviders(file);
    FileEditor[] editors = editorsWithProviders.getFirst();
    FileEditorProvider[] oldProviders = editorsWithProviders.getSecond();
    if (editors.length <= 0 && fallbackEditor != null) {
      editors = new FileEditor[] { fallbackEditor };
    }
    if (oldProviders.length <= 0 && fallbackProvider != null) {
      oldProviders = new FileEditorProvider[] { fallbackProvider };
    }
    if (editors.length <= 0) {
      LOG.error("No editors for file " + file.getPresentableUrl());
    }
    FileEditor selectedEditor = editorManager.getSelectedEditor(file);
    if (selectedEditor == null) {
      selectedEditor = fallbackEditor;
    }
    LOG.assertTrue(selectedEditor != null);
    final int selectedProviderIndex = ArrayUtilRt.find(editors, selectedEditor);
    LOG.assertTrue(selectedProviderIndex != -1);
    final HistoryEntry entry = getEntry(file);
    if(entry != null){
      // Already known: just move the entry to the most-recent (last) position.
      myEntriesList.remove(entry);
      myEntriesList.add(entry);
    }
    else {
      // New file: snapshot the full state of every editor showing it.
      final FileEditorState[] states=new FileEditorState[editors.length];
      final FileEditorProvider[] providers=new FileEditorProvider[editors.length];
      for (int i = states.length - 1; i >= 0; i--) {
        final FileEditorProvider provider = oldProviders [i];
        LOG.assertTrue(provider != null);
        providers[i] = provider;
        states[i] = editors[i].getState(FileEditorStateLevel.FULL);
      }
      myEntriesList.add(new HistoryEntry(file, providers, states, providers[selectedProviderIndex]));
      trimToSize();
    }
  }

  public void updateHistoryEntry(@Nullable final VirtualFile file, final boolean changeEntryOrderOnly) {
    updateHistoryEntry(file, null, null, changeEntryOrderOnly);
  }

  /**
   * Updates the history entry for {@code file}: when {@code changeEntryOrderOnly}
   * is {@code false} the stored per-provider editor states are refreshed; when
   * {@code true} the entry is instead moved to the most-recent position. In both
   * cases the selected provider is re-recorded. A file with no entry yet is
   * added via {@link #fileOpenedImpl}.
   */
  private void updateHistoryEntry(@Nullable final VirtualFile file,
                                  @Nullable final FileEditor fallbackEditor,
                                  @Nullable FileEditorProvider fallbackProvider,
                                  final boolean changeEntryOrderOnly)
  {
    if (file == null) {
      return;
    }
    final FileEditorManagerEx editorManager = FileEditorManagerEx.getInstanceEx(myProject);
    final Pair<FileEditor[], FileEditorProvider[]> editorsWithProviders = editorManager.getEditorsWithProviders(file);
    FileEditor[] editors = editorsWithProviders.getFirst();
    FileEditorProvider[] providers = editorsWithProviders.getSecond();
    if (editors.length <= 0 && fallbackEditor != null) {
      editors = new FileEditor[] {fallbackEditor};
      providers = new FileEditorProvider[] {fallbackProvider};
    }
    if (editors.length == 0) {
      // obviously not opened in any editor at the moment,
      // makes no sense to put the file in the history
      return;
    }
    final HistoryEntry entry = getEntry(file);
    if(entry == null){
      // Size of entry list can be less than number of opened editors (some entries can be removed)
      if (file.isValid()) {
        // the file could have been deleted, so the isValid() check is essential
        fileOpenedImpl(file, fallbackEditor, fallbackProvider);
      }
      return;
    }
    if (!changeEntryOrderOnly) { // update entry state
      //LOG.assertTrue(editors.length > 0);
      for (int i = editors.length - 1; i >= 0; i--) {
        final FileEditor editor = editors [i];
        final FileEditorProvider provider = providers [i];
        if (!editor.isValid()) {
          // this can happen for example if file extension was changed
          // and this method was called during corresponding myEditor close up
          continue;
        }
        // Store the new state only when it actually differs from the old one.
        final FileEditorState oldState = entry.getState(provider);
        final FileEditorState newState = editor.getState(FileEditorStateLevel.FULL);
        if (!newState.equals(oldState)) {
          entry.putState(provider, newState);
        }
      }
    }
    final Pair <FileEditor, FileEditorProvider> selectedEditorWithProvider = editorManager.getSelectedEditorWithProvider(file);
    if (selectedEditorWithProvider != null) {
      //LOG.assertTrue(selectedEditorWithProvider != null);
      entry.mySelectedProvider = selectedEditorWithProvider.getSecond ();
      LOG.assertTrue(entry.mySelectedProvider != null);
      if(changeEntryOrderOnly){
        // Bump the entry to the most-recent (last) position.
        myEntriesList.remove(entry);
        myEntriesList.add(entry);
      }
    }
  }

  /**
   * Removes all entries that correspond to invalid files
   */
  private void validateEntries(){
    for(int i=myEntriesList.size()-1; i>=0; i--){
      final HistoryEntry entry = myEntriesList.get(i);
      if(!entry.myFile.isValid()){
        myEntriesList.remove(i);
      }
    }
  }

  /**
   * @return array of valid files that are in the history. The greater is index the more recent the file is.
   */
  public VirtualFile[] getFiles(){
    validateEntries();
    final VirtualFile[] result = new VirtualFile[myEntriesList.size()];
    for(int i=myEntriesList.size()-1; i>=0 ;i--){
      result[i] = myEntriesList.get(i).myFile;
    }
    return result;
  }

  /** @return whether {@code f} is (still) present in the history. */
  public boolean hasBeenOpen(@NotNull VirtualFile f) {
    for (HistoryEntry each : myEntriesList) {
      if (Comparing.equal(each.myFile, f)) return true;
    }
    return false;
  }

  /**
   * Removes specified <code>file</code> from history. The method does
   * nothing if <code>file</code> is not in the history.
   *
   * @exception java.lang.IllegalArgumentException if <code>file</code>
   * is <code>null</code>
   */
  public void removeFile(@NotNull final VirtualFile file){
    final HistoryEntry entry = getEntry(file);
    if(entry != null){
      myEntriesList.remove(entry);
    }
  }

  /** @return the stored state for the given file/provider pair, or {@code null} if absent. */
  public FileEditorState getState(final VirtualFile file, final FileEditorProvider provider) {
    validateEntries();
    final HistoryEntry entry = getEntry(file);
    return entry != null ? entry.getState(provider) : null;
  }

  /**
   * @return may be null
   */
  public FileEditorProvider getSelectedProvider(final VirtualFile file) {
    validateEntries();
    final HistoryEntry entry = getEntry(file);
    return entry != null ? entry.mySelectedProvider : null;
  }

  // Linear scan from the most-recent end; prunes invalid entries first.
  private HistoryEntry getEntry(final VirtualFile file){
    validateEntries();
    for (int i = myEntriesList.size() - 1; i >= 0; i--) {
      final HistoryEntry entry = myEntriesList.get(i);
      if(file.equals(entry.myFile)){
        return entry;
      }
    }
    return null;
  }

  /**
   * If total number of files in history more then <code>UISettings.RECENT_FILES_LIMIT</code>
   * then removes the oldest ones to fit the history to new size.
   */
  private void trimToSize(){
    // NOTE(review): keeps up to RECENT_FILES_LIMIT + 1 entries — presumably one
    // extra slot for the file currently being opened; confirm intent.
    final int limit = UISettings.getInstance().RECENT_FILES_LIMIT + 1;
    while(myEntriesList.size()>limit){
      myEntriesList.remove(0);
    }
  }

  public void readExternal(final Element element) {
    // We have to delay XML processing because history entries require
    // FileEditorStates to be created via the corresponding EditorProviders,
    // and those are not accessible before initComponent() is called.
    myElement = (Element)element.clone();
  }

  public void writeExternal(final Element element){
    // update history before saving
    final VirtualFile[] openFiles = FileEditorManager.getInstance(myProject).getOpenFiles();
    for (int i = openFiles.length - 1; i >= 0; i--) {
      final VirtualFile file = openFiles[i];
      if(getEntry(file) != null){ // we have to update only files that are in history
        updateHistoryEntry(file, false);
      }
    }
    for (final HistoryEntry entry : myEntriesList) {
      entry.writeExternal(element, myProject);
    }
  }

  /**
   * Updates history
   */
  private final class MyEditorManagerListener extends FileEditorManagerAdapter{
    public void fileOpened(@NotNull final FileEditorManager source, @NotNull final VirtualFile file){
      fileOpenedImpl(file);
    }
    public void selectionChanged(@NotNull final FileEditorManagerEvent event){
      // Snapshot the state of the editor being left, then just bump the newly
      // selected file to the most-recent position (order-only update).
      updateHistoryEntry(event.getOldFile(), event.getOldEditor(), event.getOldProvider(), false);
      updateHistoryEntry(event.getNewFile(), true);
    }
  }

  private final class MyEditorManagerBeforeListener extends FileEditorManagerListener.Before.Adapter {
    @Override
    public void beforeFileClosed(@NotNull FileEditorManager source, @NotNull VirtualFile file) {
      // Capture the final editor state before the file is closed.
      updateHistoryEntry(file, false);
    }
  }

  /**
   * Cuts/extends history length
   */
  private final class MyUISettingsListener implements UISettingsListener{
    public void uiSettingsChanged(final UISettings source) {
      trimToSize();
    }
  }
}
| |
// Copyright 2020 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.bazel.rules.ninja.actions;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.devtools.build.lib.actions.AbstractAction;
import com.google.devtools.build.lib.actions.ActionCacheAwareAction;
import com.google.devtools.build.lib.actions.ActionEnvironment;
import com.google.devtools.build.lib.actions.ActionExecutionContext;
import com.google.devtools.build.lib.actions.ActionOwner;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.actions.Artifact.NinjaMysteryArtifact;
import com.google.devtools.build.lib.actions.ArtifactResolver;
import com.google.devtools.build.lib.actions.ArtifactRoot;
import com.google.devtools.build.lib.actions.CommandLines;
import com.google.devtools.build.lib.actions.CommandLines.CommandLineLimits;
import com.google.devtools.build.lib.actions.EnvironmentalExecException;
import com.google.devtools.build.lib.actions.ExecException;
import com.google.devtools.build.lib.actions.RunfilesSupplier;
import com.google.devtools.build.lib.actions.SpawnResult;
import com.google.devtools.build.lib.analysis.actions.SpawnAction;
import com.google.devtools.build.lib.bazel.rules.ninja.parser.NinjaTarget;
import com.google.devtools.build.lib.cmdline.PackageIdentifier;
import com.google.devtools.build.lib.cmdline.RepositoryName;
import com.google.devtools.build.lib.collect.nestedset.NestedSet;
import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder;
import com.google.devtools.build.lib.packages.semantics.BuildLanguageOptions;
import com.google.devtools.build.lib.rules.cpp.CppIncludeExtractionContext;
import com.google.devtools.build.lib.server.FailureDetails;
import com.google.devtools.build.lib.server.FailureDetails.FailureDetail;
import com.google.devtools.build.lib.server.FailureDetails.NinjaAction.Code;
import com.google.devtools.build.lib.skyframe.TrackSourceDirectoriesFlag;
import com.google.devtools.build.lib.util.DependencySet;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.devtools.build.lib.vfs.Root;
import java.io.IOException;
import java.util.List;
import javax.annotation.Nullable;
/** Generic class for Ninja actions. Corresponds to the {@link NinjaTarget} in the Ninja file. */
public class NinjaAction extends SpawnAction implements ActionCacheAwareAction {
private static final String MNEMONIC = "NinjaGenericAction";
private final Root sourceRoot;
@Nullable private final Artifact depFile;
private final ImmutableMap<PathFragment, Artifact> allowedDerivedInputs;
private final ArtifactRoot derivedOutputRoot;
private final NestedSet<Artifact> originalInputs; // This does not include order-only inputs.
private final NestedSet<Artifact> orderOnlyInputs;
/**
 * Creates a Ninja action.
 *
 * <p>Builds an index of the derived (non-source) inputs keyed by exec path so
 * entries read back from the depfile can be matched to predeclared artifacts,
 * and remembers the original and order-only input sets so order-only inputs
 * can be excluded from change detection after execution (see afterExecute()).
 *
 * @param depFile dependency file produced by the action, or {@code null} if
 *     none is declared
 */
public NinjaAction(
    ActionOwner owner,
    Root sourceRoot,
    NestedSet<Artifact> tools,
    NestedSet<Artifact> inputs,
    NestedSet<Artifact> orderOnlyInputs,
    List<? extends Artifact> outputs,
    CommandLines commandLines,
    ActionEnvironment env,
    ImmutableMap<String, String> executionInfo,
    CharSequence progressMessage,
    RunfilesSupplier runfilesSupplier,
    boolean executeUnconditionally,
    @Nullable Artifact depFile,
    ArtifactRoot derivedOutputRoot) {
  super(
      /* owner= */ owner,
      /* tools= */ tools,
      /* inputs= */ inputs,
      /* outputs= */ outputs,
      /* primaryOutput= */ Iterables.getFirst(outputs, null),
      /* resourceSetOrBuilder= */ AbstractAction.DEFAULT_RESOURCE_SET,
      /* commandLines= */ commandLines,
      /* commandLineLimits= */ CommandLineLimits.UNLIMITED,
      /* isShellCommand= */ true,
      /* env= */ env,
      /* executionInfo= */ executionInfo,
      /* progressMessage= */ progressMessage,
      /* runfilesSupplier= */ runfilesSupplier,
      /* mnemonic= */ MNEMONIC,
      /* executeUnconditionally= */ executeUnconditionally,
      /* extraActionInfoSupplier= */ null,
      /* resultConsumer= */ null,
      /*stripOutputPaths=*/ false);
  this.sourceRoot = sourceRoot;
  this.depFile = depFile;
  // Only derived inputs are indexed; depfile entries that are source files are
  // handled separately during depfile processing.
  ImmutableMap.Builder<PathFragment, Artifact> allowedDerivedInputsBuilder =
      ImmutableMap.builder();
  for (Artifact input : inputs.toList()) {
    if (!input.isSourceArtifact()) {
      allowedDerivedInputsBuilder.put(input.getExecPath(), input);
    }
  }
  this.allowedDerivedInputs = allowedDerivedInputsBuilder.buildOrThrow();
  this.derivedOutputRoot = derivedOutputRoot;
  this.originalInputs = inputs;
  this.orderOnlyInputs = orderOnlyInputs;
}
@Override
protected void beforeExecute(ActionExecutionContext actionExecutionContext) throws ExecException {
  // Nothing to validate when source directories are tracked.
  if (TrackSourceDirectoriesFlag.trackSourceDirectories()) {
    return;
  }
  checkInputsForDirectories(
      actionExecutionContext.getEventHandler(), actionExecutionContext.getMetadataProvider());
}
@Override
protected void afterExecute(
    ActionExecutionContext actionExecutionContext, List<SpawnResult> spawnResults)
    throws EnvironmentalExecException {
  checkOutputsForDirectories(actionExecutionContext);
  if (depFile != null) {
    // A depfile recalculates all dependencies, including order-only ones.
    updateInputsFromDepfile(actionExecutionContext);
    return;
  }
  // No dep file: input discovery may have folded order-only inputs into the
  // action's inputs. Restore the original input set so order-only inputs do
  // not cause reruns. This is skipped when there are no order-only inputs,
  // because then discoversInputs() returns false and updateInputs() would
  // throw an error.
  if (!orderOnlyInputs.isEmpty()) {
    updateInputs(originalInputs);
  }
}
/**
 * Re-derives this action's input set from the .d (depfile) emitted by the spawn.
 *
 * <p>Each depfile path is resolved, in order of preference, to: a predeclared derived input
 * (from {@code allowedDerivedInputs}), a "mystery" artifact under the derived output root for
 * undeclared generated files, or a source artifact via the artifact resolver. Any path that
 * resolves to none of these is an error.
 *
 * @throws EnvironmentalExecException if the depfile cannot be read/parsed, or if a dependency
 *     is neither a source input nor a (pre)declared generated input
 */
private void updateInputsFromDepfile(ActionExecutionContext actionExecutionContext)
    throws EnvironmentalExecException {
  // Repository discovery below depends on the exec-root layout flavor in use.
  boolean siblingRepositoryLayout =
      actionExecutionContext
          .getOptions()
          .getOptions(BuildLanguageOptions.class)
          .experimentalSiblingRepositoryLayout;
  CppIncludeExtractionContext scanningContext =
      actionExecutionContext.getContext(CppIncludeExtractionContext.class);
  ArtifactResolver artifactResolver = scanningContext.getArtifactResolver();
  Path execRoot = actionExecutionContext.getExecRoot();
  try {
    DependencySet depSet =
        new DependencySet(execRoot).read(actionExecutionContext.getInputPath(depFile));
    NestedSetBuilder<Artifact> inputsBuilder = NestedSetBuilder.stableOrder();
    for (Path inputPath : depSet.getDependencies()) {
      PathFragment execRelativePath;
      // This branch needed in case the depfile contains an absolute path to a source file.
      if (sourceRoot.contains(inputPath)) {
        execRelativePath = inputPath.asFragment().relativeTo(sourceRoot.asPath().asFragment());
      } else {
        execRelativePath = inputPath.asFragment().relativeTo(execRoot.asFragment());
      }
      Artifact inputArtifact = null;
      if (allowedDerivedInputs.containsKey(execRelativePath)) {
        // Predeclared generated input.
        inputArtifact = allowedDerivedInputs.get(execRelativePath);
      }
      if (inputArtifact == null) {
        RepositoryName repository =
            PackageIdentifier.discoverFromExecPath(
                    execRelativePath, false, siblingRepositoryLayout)
                .getRepository();
        if (execRelativePath.startsWith(derivedOutputRoot.getExecPath())) {
          // This input is a generated file which was not declared in the original inputs for
          // this action.
          inputArtifact = new NinjaMysteryArtifact(derivedOutputRoot, execRelativePath);
        } else {
          // Source file input. May still resolve to null if the resolver doesn't know the path.
          inputArtifact = artifactResolver.resolveSourceArtifact(execRelativePath, repository);
        }
      }
      if (inputArtifact == null) {
        throw new EnvironmentalExecException(
            createFailureDetail(
                String.format(
                    "depfile-declared dependency '%s' is invalid: it must either be "
                        + "a source input, or a pre-declared generated input",
                    execRelativePath),
                Code.INVALID_DEPFILE_DECLARED_DEPENDENCY));
      }
      inputsBuilder.add(inputArtifact);
    }
    updateInputs(inputsBuilder.build());
  } catch (IOException e) {
    // Some kind of IO or parse exception--wrap & rethrow it to stop the build.
    String message = "error while parsing .d file: " + e.getMessage();
    throw new EnvironmentalExecException(
        e, createFailureDetail(message, Code.D_FILE_PARSE_FAILURE));
  }
}
@Override
public boolean discoversInputs() {
  // Discovery is needed either to parse a depfile or to fold in order-only inputs.
  if (depFile != null) {
    return true;
  }
  return !orderOnlyInputs.isEmpty();
}
@Override
public NestedSet<Artifact> getAllowedDerivedInputs() {
  // Delegates to the current input set rather than the allowedDerivedInputs field kept by the
  // constructor. NOTE(review): presumably intentional (the field is only a lookup map for
  // depfile resolution), but worth confirming against the base class contract.
  return getInputs();
}
@Override
public NestedSet<Artifact> discoverInputs(ActionExecutionContext actionExecutionContext) {
  // Merge the order-only inputs into the regular input set for execution; afterExecute() later
  // restores the original inputs (or applies the depfile) so they don't affect change-pruning.
  NestedSetBuilder<Artifact> merged = NestedSetBuilder.stableOrder();
  merged.addTransitive(getInputs());
  merged.addTransitive(orderOnlyInputs);
  NestedSet<Artifact> combined = merged.build();
  updateInputs(combined);
  return combined;
}
@Override
protected NestedSet<Artifact> getOriginalInputs() {
  // The inputs as declared at construction time, before any order-only/depfile adjustments.
  return originalInputs;
}
/** Builds a FailureDetail carrying a NinjaAction-specific error code and message. */
private static FailureDetail createFailureDetail(String message, Code detailedCode) {
  FailureDetails.NinjaAction.Builder ninjaDetail =
      FailureDetails.NinjaAction.newBuilder().setCode(detailedCode);
  return FailureDetail.newBuilder().setMessage(message).setNinjaAction(ninjaDetail).build();
}
/**
 * NinjaAction relies on the action cache entry's file list to avoid re-running input discovery
 * after a shutdown.
 */
@Override
public boolean storeInputsExecPathsInActionCache() {
  // Only actions that actually discover inputs need their exec paths persisted.
  return discoversInputs();
}
}
| |
package org.apache.rya.indexing.mongodb.geo;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.ArrayList;
import java.util.List;
import org.apache.log4j.Logger;
import org.apache.rya.api.domain.RyaStatement;
import org.apache.rya.api.resolver.RyaToRdfConversions;
import org.apache.rya.indexing.accumulo.geo.GeoParseUtils;
import org.apache.rya.indexing.mongodb.IndexingMongoDBStorageStrategy;
import org.bson.Document;
import org.eclipse.rdf4j.model.Statement;
import org.eclipse.rdf4j.query.MalformedQueryException;
import com.mongodb.BasicDBObject;
import com.mongodb.BasicDBObjectBuilder;
import com.mongodb.DBCollection;
import com.mongodb.DBObject;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.Point;
import com.vividsolutions.jts.geom.Polygon;
import com.vividsolutions.jts.io.ParseException;
import com.vividsolutions.jts.io.WKTReader;
/**
 * MongoDB storage strategy for geospatial RDF statements: serializes WKT geometries into
 * GeoJSON-style documents under the {@code location} field and builds geo queries against it.
 */
public class GeoMongoDBStorageStrategy extends IndexingMongoDBStorageStrategy {
    private static final Logger LOG = Logger.getLogger(GeoMongoDBStorageStrategy.class);

    /** Document field under which the geometry is stored and indexed. */
    private static final String GEO = "location";

    /** Supported geo predicates and the MongoDB operator keyword each maps to. */
    public enum GeoQueryType {
        INTERSECTS {
            @Override
            public String getKeyword() {
                return "$geoIntersects";
            }
        },
        WITHIN {
            @Override
            public String getKeyword() {
                return "$geoWithin";
            }
        },
        EQUALS {
            // NOTE(review): EQUALS maps to "$near", but getQuery() never actually uses this
            // keyword for EQUALS (it builds a $centerSphere/$geoWithin query or an exact shape
            // match instead) -- confirm whether this mapping is intentional.
            @Override
            public String getKeyword() {
                return "$near";
            }
        },
        NEAR {
            @Override
            public String getKeyword() {
                return "$near";
            }
        };

        /** Returns the MongoDB query operator keyword for this predicate. */
        public abstract String getKeyword();
    }

    /** Immutable description of a geo query: predicate, geometry, and optional distance bounds. */
    public static class GeoQuery {
        private final GeoQueryType queryType;
        private final Geometry geo;
        private final Double maxDistance;
        private final Double minDistance;

        /** Creates a query with no distance bounds (both default to 0, meaning "unset"). */
        public GeoQuery(final GeoQueryType queryType, final Geometry geo) {
            this(queryType, geo, 0, 0);
        }

        /**
         * @param queryType the geo predicate to apply
         * @param geo the geometry to compare against
         * @param maxDistance maximum distance for NEAR queries; 0 means unbounded
         * @param minDistance minimum distance for NEAR queries; 0 means unbounded
         */
        public GeoQuery(final GeoQueryType queryType, final Geometry geo, final double maxDistance,
                final double minDistance) {
            this.queryType = queryType;
            this.geo = geo;
            this.maxDistance = maxDistance;
            this.minDistance = minDistance;
        }

        public GeoQueryType getQueryType() {
            return queryType;
        }

        public Geometry getGeo() {
            return geo;
        }

        public Double getMaxDistance() {
            return maxDistance;
        }

        public Double getMinDistance() {
            return minDistance;
        }
    }

    /** Radius used for point-equality queries expressed as a tiny $centerSphere circle. */
    private final Double maxDistance;

    public GeoMongoDBStorageStrategy(final Double maxDistance) {
        this.maxDistance = maxDistance;
    }

    @Override
    public void createIndices(final DBCollection coll) {
        // 2dsphere supports GeoJSON geometries ($geoWithin, $geoIntersects, $near).
        coll.createIndex(new BasicDBObject(GEO, "2dsphere"));
    }

    /**
     * Translates a {@link GeoQuery} into a MongoDB query document against the location field.
     *
     * @throws MalformedQueryException if WITHIN is applied to a non-Polygon or NEAR to a
     *         non-Point geometry
     */
    public DBObject getQuery(final GeoQuery queryObj) throws MalformedQueryException {
        final Geometry geo = queryObj.getGeo();
        final GeoQueryType queryType = queryObj.getQueryType();
        if (queryType == GeoQueryType.WITHIN && !(geo instanceof Polygon)) {
            //They can also be applied to MultiPolygons, but those are not supported either.
            throw new MalformedQueryException("Mongo Within operations can only be performed on Polygons.");
        } else if(queryType == GeoQueryType.NEAR && !(geo instanceof Point)) {
            //They can also be applied to Point, but those are not supported either.
            throw new MalformedQueryException("Mongo near operations can only be performed on Points.");
        }

        BasicDBObject query;
        if (queryType.equals(GeoQueryType.EQUALS)) {
            if (geo.getNumPoints() == 1) {
                // Point equality is approximated by a $centerSphere circle of radius maxDistance.
                // The legacy $centerSphere shape is [centerPoint, radius], mixing a point
                // (List<Double>) and a number, hence List<Object>.
                final List<Object> circle = new ArrayList<>();
                circle.add(getPoint(geo));
                circle.add(maxDistance);
                final BasicDBObject polygon = new BasicDBObject("$centerSphere", circle);
                query = new BasicDBObject(GEO, new BasicDBObject(GeoQueryType.WITHIN.getKeyword(), polygon));
            } else {
                // Multi-point equality: match the stored GeoJSON document exactly.
                query = new BasicDBObject(GEO, getCorrespondingPoints(geo));
            }
        } else if(queryType.equals(GeoQueryType.NEAR)) {
            final BasicDBObject geoDoc = new BasicDBObject("$geometry", getDBPoint(geo));
            // 0 is treated as "bound not set" -- see GeoQuery's two-arg constructor.
            if(queryObj.getMaxDistance() != 0) {
                geoDoc.append("$maxDistance", queryObj.getMaxDistance());
            }
            if(queryObj.getMinDistance() != 0) {
                geoDoc.append("$minDistance", queryObj.getMinDistance());
            }
            query = new BasicDBObject(GEO, new BasicDBObject(queryType.getKeyword(), geoDoc));
        } else {
            final BasicDBObject geoDoc = new BasicDBObject("$geometry", getCorrespondingPoints(geo));
            query = new BasicDBObject(GEO, new BasicDBObject(queryType.getKeyword(), geoDoc));
        }
        return query;
    }

    /**
     * Serializes a RyaStatement; if its object is parseable WKT, a GeoJSON representation is
     * appended under the location field. Returns {@code null} when the geometry cannot be parsed.
     */
    @Override
    public DBObject serialize(final RyaStatement ryaStatement) {
        // if the object is wkt, then try to index it
        // write the statement data to the fields
        try {
            final Statement statement = RyaToRdfConversions.convertStatement(ryaStatement);
            final Geometry geo = (new WKTReader()).read(GeoParseUtils.getWellKnownText(statement));
            if (geo == null) {
                // Defensive: WKTReader normally throws rather than returning null.
                LOG.error("Failed to parse geo statement: " + statement.toString());
                return null;
            }
            final BasicDBObject base = (BasicDBObject) super.serialize(ryaStatement);
            if (geo.getNumPoints() > 1) {
                base.append(GEO, getCorrespondingPoints(geo));
            } else {
                base.append(GEO, getDBPoint(geo));
            }
            return base;
        } catch(final ParseException e) {
            LOG.error("Could not create geometry for statement " + ryaStatement, e);
            return null;
        }
    }

    /**
     * Converts a multi-point geometry into a GeoJSON document: Polygons become a "Polygon"
     * (3-dimensional coordinate array: exterior ring first, then holes; rings are closed loops),
     * anything else becomes a "LineString".
     */
    public Document getCorrespondingPoints(final Geometry geo) {
        //Polygons must be a 3 dimensional array.
        //polygons must be a closed loop
        final Document geoDoc = new Document();
        if (geo instanceof Polygon) {
            final Polygon poly = (Polygon) geo;
            final List<List<List<Double>>> polygonRings = new ArrayList<>();

            // outer shell of the polygon
            final List<List<Double>> ring = new ArrayList<>();
            for (final Coordinate coord : poly.getExteriorRing().getCoordinates()) {
                ring.add(getPoint(coord));
            }
            polygonRings.add(ring);

            // each hole in the polygon
            for (int ii = 0; ii < poly.getNumInteriorRing(); ii++) {
                final List<List<Double>> holeCoords = new ArrayList<>();
                for (final Coordinate coord : poly.getInteriorRingN(ii).getCoordinates()) {
                    holeCoords.add(getPoint(coord));
                }
                polygonRings.add(holeCoords);
            }
            geoDoc.append("coordinates", polygonRings)
                  .append("type", "Polygon");
        } else {
            final List<List<Double>> points = getPoints(geo);
            geoDoc.append("coordinates", points)
                  .append("type", "LineString");
        }
        return geoDoc;
    }

    /** Converts every coordinate of the geometry to an [x, y] pair. */
    private List<List<Double>> getPoints(final Geometry geo) {
        final List<List<Double>> points = new ArrayList<>();
        for (final Coordinate coord : geo.getCoordinates()) {
            points.add(getPoint(coord));
        }
        return points;
    }

    /** Wraps a single-point geometry as a GeoJSON "Point" document. */
    public Document getDBPoint(final Geometry geo) {
        return new Document()
            .append("coordinates", getPoint(geo))
            .append("type", "Point");
    }

    /** Converts a JTS coordinate to an [x, y] list. */
    private List<Double> getPoint(final Coordinate coord) {
        final List<Double> point = new ArrayList<>();
        point.add(coord.x);
        point.add(coord.y);
        return point;
    }

    /** Converts the geometry's first coordinate to an [x, y] list. */
    private List<Double> getPoint(final Geometry geo) {
        final List<Double> point = new ArrayList<>();
        point.add(geo.getCoordinate().x);
        point.add(geo.getCoordinate().y);
        return point;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import org.apache.logging.log4j.Logger;
import org.apache.geode.CancelException;
import org.apache.geode.DataSerializer;
import org.apache.geode.cache.CacheException;
import org.apache.geode.cache.EntryExistsException;
import org.apache.geode.cache.EntryNotFoundException;
import org.apache.geode.cache.Operation;
import org.apache.geode.cache.TransactionDataNotColocatedException;
import org.apache.geode.distributed.DistributedMember;
import org.apache.geode.distributed.internal.DM;
import org.apache.geode.distributed.internal.DirectReplyProcessor;
import org.apache.geode.distributed.internal.DistributionManager;
import org.apache.geode.distributed.internal.InternalDistributedSystem;
import org.apache.geode.distributed.internal.ReplyException;
import org.apache.geode.distributed.internal.ReplyMessage;
import org.apache.geode.distributed.internal.ReplyProcessor21;
import org.apache.geode.distributed.internal.ReplySender;
import org.apache.geode.distributed.internal.membership.InternalDistributedMember;
import org.apache.geode.internal.Assert;
import org.apache.geode.internal.ByteArrayDataInput;
import org.apache.geode.internal.InternalDataSerializer;
import org.apache.geode.internal.NanoTimer;
import org.apache.geode.internal.Version;
import org.apache.geode.internal.cache.DistributedPutAllOperation.EntryVersionsList;
import org.apache.geode.internal.cache.DistributedRemoveAllOperation.RemoveAllEntryData;
import org.apache.geode.internal.cache.partitioned.RemoveAllPRMessage;
import org.apache.geode.internal.cache.tier.sockets.ClientProxyMembershipID;
import org.apache.geode.internal.cache.tier.sockets.VersionedObjectList;
import org.apache.geode.internal.cache.versions.VersionTag;
import org.apache.geode.internal.i18n.LocalizedStrings;
import org.apache.geode.internal.logging.LogService;
import org.apache.geode.internal.logging.log4j.LogMarker;
import org.apache.geode.internal.offheap.annotations.Released;
/**
* A Replicate Region removeAll message. Meant to be sent only to the peer who hosts transactional
* data.
*
* @since GemFire 8.1
*/
public class RemoteRemoveAllMessage extends RemoteOperationMessageWithDirectReply {
  private static final Logger logger = LogService.getLogger();

  // Entries to remove; only the first removeAllDataCount slots are populated.
  private RemoveAllEntryData[] removeAllData;

  private int removeAllDataCount = 0;

  /**
   * An additional object providing context for the operation, e.g., for BridgeServer notification
   */
  ClientProxyMembershipID bridgeContext;

  // NOTE(review): not read anywhere in this class as visible here -- possibly vestigial.
  private transient InternalDistributedSystem internalDs;

  // Whether this message may duplicate an earlier attempt (set by distribute() on retry).
  private boolean posDup;

  // Flag bit (in the compressed short) indicating that bridgeContext was serialized.
  protected static final short HAS_BRIDGE_CONTEXT = UNRESERVED_FLAGS_START;

  private EventID eventId;

  private Object callbackArg;

  /** Appends an entry; assumes the removeAllData array was pre-sized by the caller. */
  public void addEntry(RemoveAllEntryData entry) {
    this.removeAllData[this.removeAllDataCount++] = entry;
  }

  @Override
  public boolean isSevereAlertCompatible() {
    // allow forced-disconnect processing for all cache op messages
    return true;
  }

  /** Returns the number of entries carried by this message. */
  public int getSize() {
    return removeAllDataCount;
  }

  /*
   * this is similar to send() but it selects an initialized replicate that is used to proxy the
   * message
   *
   */
  public static boolean distribute(EntryEventImpl event, RemoveAllEntryData[] data, int dataCount) {
    boolean successful = false;
    DistributedRegion r = (DistributedRegion) event.getRegion();
    Collection replicates = r.getCacheDistributionAdvisor().adviseInitializedReplicates();
    if (replicates.isEmpty()) {
      return false;
    }
    if (replicates.size() > 1) {
      // Shuffle so repeated calls spread proxy work across replicates.
      ArrayList l = new ArrayList(replicates);
      Collections.shuffle(l);
      replicates = l;
    }
    int attempts = 0;
    for (Iterator<InternalDistributedMember> it = replicates.iterator(); it.hasNext();) {
      InternalDistributedMember replicate = it.next();
      try {
        attempts++;
        // Any attempt after the first may duplicate work already applied remotely.
        final boolean posDup = (attempts > 1);
        RemoveAllResponse response = send(replicate, event, data, dataCount, false,
            DistributionManager.SERIAL_EXECUTOR, posDup);
        response.waitForCacheException();
        VersionedObjectList result = response.getResponse();
        // Set successful version tags in RemoveAllEntryData.
        List successfulKeys = result.getKeys();
        List<VersionTag> versions = result.getVersionTags();
        for (RemoveAllEntryData removeAllEntry : data) {
          Object key = removeAllEntry.getKey();
          if (successfulKeys.contains(key)) {
            int index = successfulKeys.indexOf(key);
            removeAllEntry.versionTag = versions.get(index);
          }
        }
        return true;
      } catch (TransactionDataNotColocatedException enfe) {
        // Must propagate so the caller can surface the colocation failure.
        throw enfe;
      } catch (CancelException e) {
        event.getRegion().getCancelCriterion().checkCancelInProgress(e);
      } catch (CacheException e) {
        if (logger.isDebugEnabled()) {
          logger.debug("RemoteRemoveAllMessage caught CacheException during distribution", e);
        }
        successful = true; // not a cancel-exception, so don't complain any more about it
      } catch (RemoteOperationException e) {
        // The replicate may have gone away; fall through and try the next one.
        if (logger.isTraceEnabled(LogMarker.DM)) {
          logger.trace(LogMarker.DM,
              "RemoteRemoveAllMessage caught an unexpected exception during distribution", e);
        }
      }
    }
    return successful;
  }

  RemoteRemoveAllMessage(EntryEventImpl event, Set recipients, DirectReplyProcessor p,
      RemoveAllEntryData[] removeAllData, int removeAllDataCount, boolean useOriginRemote,
      int processorType, boolean possibleDuplicate) {
    super(recipients, event.getRegion().getFullPath(), p);
    this.resetRecipients();
    if (recipients != null) {
      setRecipients(recipients);
    }
    this.processor = p;
    this.processorId = p == null ? 0 : p.getProcessorId();
    if (p != null && this.isSevereAlertCompatible()) {
      p.enableSevereAlertProcessing();
    }
    this.removeAllData = removeAllData;
    this.removeAllDataCount = removeAllDataCount;
    this.posDup = possibleDuplicate;
    this.eventId = event.getEventId();
    this.callbackArg = event.getCallbackArgument();
  }

  /** Required no-arg constructor for DataSerializable deserialization. */
  public RemoteRemoveAllMessage() {}

  /*
   * Sends a LocalRegion RemoteRemoveAllMessage to the recipient
   *
   * @param recipient the member to which the message is sent
   *
   * @param r the LocalRegion for which the op was performed
   *
   * @return the processor used to await acknowledgement that the message was sent, or null to
   * indicate that no acknowledgement will be sent
   *
   * @throws ForceReattemptException if the peer is no longer available
   */
  public static RemoveAllResponse send(DistributedMember recipient, EntryEventImpl event,
      RemoveAllEntryData[] removeAllData, int removeAllDataCount, boolean useOriginRemote,
      int processorType, boolean possibleDuplicate) throws RemoteOperationException {
    // Assert.assertTrue(recipient != null, "RemoteRemoveAllMessage NULL recipient"); recipient can
    // be null for event notifications
    Set recipients = Collections.singleton(recipient);
    RemoveAllResponse p = new RemoveAllResponse(event.getRegion().getSystem(), recipients);
    RemoteRemoveAllMessage msg = new RemoteRemoveAllMessage(event, recipients, p, removeAllData,
        removeAllDataCount, useOriginRemote, processorType, possibleDuplicate);
    msg.setTransactionDistributed(event.getRegion().getCache().getTxManager().isDistributed());
    Set failures = event.getRegion().getDistributionManager().putOutgoing(msg);
    if (failures != null && failures.size() > 0) {
      throw new RemoteOperationException(
          LocalizedStrings.RemotePutMessage_FAILED_SENDING_0.toLocalizedString(msg));
    }
    return p;
  }

  public void setBridgeContext(ClientProxyMembershipID contx) {
    Assert.assertTrue(contx != null);
    this.bridgeContext = contx;
  }

  public int getDSFID() {
    return REMOTE_REMOVE_ALL_MESSAGE;
  }

  // Wire format (must mirror toData): eventId, callbackArg, optional bridgeContext (gated by
  // the HAS_BRIDGE_CONTEXT flag), entry count as unsigned VL, the entries, then an optional
  // per-entry version-tag list.
  @Override
  public final void fromData(DataInput in) throws IOException, ClassNotFoundException {
    super.fromData(in);
    this.eventId = (EventID) DataSerializer.readObject(in);
    this.callbackArg = DataSerializer.readObject(in);
    this.posDup = (flags & POS_DUP) != 0;
    if ((flags & HAS_BRIDGE_CONTEXT) != 0) {
      this.bridgeContext = DataSerializer.readObject(in);
    }
    this.removeAllDataCount = (int) InternalDataSerializer.readUnsignedVL(in);
    this.removeAllData = new RemoveAllEntryData[removeAllDataCount];
    if (this.removeAllDataCount > 0) {
      final Version version = InternalDataSerializer.getVersionForDataStreamOrNull(in);
      final ByteArrayDataInput bytesIn = new ByteArrayDataInput();
      for (int i = 0; i < this.removeAllDataCount; i++) {
        this.removeAllData[i] = new RemoveAllEntryData(in, this.eventId, i, version, bytesIn);
      }
      boolean hasTags = in.readBoolean();
      if (hasTags) {
        EntryVersionsList versionTags = EntryVersionsList.create(in);
        for (int i = 0; i < this.removeAllDataCount; i++) {
          this.removeAllData[i].versionTag = versionTags.get(i);
        }
      }
    }
  }

  @Override
  public void toData(DataOutput out) throws IOException {
    super.toData(out);
    DataSerializer.writeObject(this.eventId, out);
    DataSerializer.writeObject(this.callbackArg, out);
    if (this.bridgeContext != null) {
      DataSerializer.writeObject(this.bridgeContext, out);
    }
    InternalDataSerializer.writeUnsignedVL(this.removeAllDataCount, out);
    if (this.removeAllDataCount > 0) {
      EntryVersionsList versionTags = new EntryVersionsList(removeAllDataCount);
      boolean hasTags = false;
      for (int i = 0; i < this.removeAllDataCount; i++) {
        if (!hasTags && removeAllData[i].versionTag != null) {
          hasTags = true;
        }
        // Version tags are serialized separately (as a list at the end), so strip each entry's
        // tag while the entry itself is written, then restore it.
        VersionTag<?> tag = removeAllData[i].versionTag;
        versionTags.add(tag);
        removeAllData[i].versionTag = null;
        this.removeAllData[i].toData(out);
        this.removeAllData[i].versionTag = tag;
      }
      out.writeBoolean(hasTags);
      if (hasTags) {
        InternalDataSerializer.invokeToData(versionTags, out);
      }
    }
  }

  // Encodes posDup and bridgeContext presence into the header flags read back by fromData().
  @Override
  protected short computeCompressedShort() {
    short flags = super.computeCompressedShort();
    if (this.posDup)
      flags |= POS_DUP;
    if (this.bridgeContext != null)
      flags |= HAS_BRIDGE_CONTEXT;
    return flags;
  }

  @Override
  public EventID getEventID() {
    return this.eventId;
  }

  @Override
  protected boolean operateOnRegion(DistributionManager dm, LocalRegion r, long startTime)
      throws RemoteOperationException {
    final boolean sendReply;
    InternalDistributedMember eventSender = getSender();
    try {
      sendReply = doLocalRemoveAll(r, eventSender);
    } catch (RemoteOperationException e) {
      sendReply(getSender(), getProcessorId(), dm, new ReplyException(e), r, startTime);
      return false;
    }
    if (sendReply) {
      sendReply(getSender(), getProcessorId(), dm, null, r, startTime);
    }
    // Always false: either an error reply was sent above, or doLocalRemoveAll() already sent a
    // RemoveAllReplyMessage itself.
    return false;
  }

  /* we need a event with content for waitForNodeOrCreateBucket() */
  /**
   * This method is called by both operateOnLocalRegion() when processing a remote msg or by
   * sendMsgByBucket() when processing a msg targeted to local Jvm. LocalRegion Note: It is very
   * important that this message does NOT cause any deadlocks as the sender will wait indefinitely
   * for the acknowledgment
   *
   * @param r the local region (a DistributedRegion) the removeAll is applied to
   * @param eventSender the endpoint server who received request from client
   * @return If succeeds, return true, otherwise, throw exception
   */
  public boolean doLocalRemoveAll(final LocalRegion r, final InternalDistributedMember eventSender)
      throws EntryExistsException, RemoteOperationException {
    final DistributedRegion dr = (DistributedRegion) r;
    // create a base event and a op for RemoveAllMessage distributed btw redundant buckets
    @Released
    EntryEventImpl baseEvent = EntryEventImpl.create(r, Operation.REMOVEALL_DESTROY, null, null,
        this.callbackArg, false, eventSender, true);
    try {
      baseEvent.setCausedByMessage(this);
      // set baseEventId to the first entry's event id. We need the thread id for DACE
      baseEvent.setEventId(this.eventId);
      if (this.bridgeContext != null) {
        baseEvent.setContext(this.bridgeContext);
      }
      baseEvent.setPossibleDuplicate(this.posDup);
      if (logger.isDebugEnabled()) {
        logger.debug(
            "RemoteRemoveAllMessage.doLocalRemoveAll: eventSender is {}, baseEvent is {}, msg is {}",
            eventSender, baseEvent, this);
      }
      final DistributedRemoveAllOperation op =
          new DistributedRemoveAllOperation(baseEvent, removeAllDataCount, false);
      try {
        final VersionedObjectList versions =
            new VersionedObjectList(removeAllDataCount, true, dr.concurrencyChecksEnabled);
        // Apply each entry's destroy inside the region's bulk-op synchronization.
        dr.syncBulkOp(new Runnable() {
          @SuppressWarnings("synthetic-access")
          public void run() {
            InternalDistributedMember myId = r.getDistributionManager().getDistributionManagerId();
            for (int i = 0; i < removeAllDataCount; ++i) {
              @Released
              EntryEventImpl ev = RemoveAllPRMessage.getEventFromEntry(r, myId, eventSender, i,
                  removeAllData, false, bridgeContext, posDup, false);
              try {
                ev.setRemoveAllOperation(op);
                if (logger.isDebugEnabled()) {
                  logger.debug("invoking basicDestroy with {}", ev);
                }
                try {
                  dr.basicDestroy(ev, true, null);
                } catch (EntryNotFoundException ignore) {
                  // removeAll tolerates missing entries; version tracking below still records it.
                }
                removeAllData[i].versionTag = ev.getVersionTag();
                versions.addKeyAndVersion(removeAllData[i].key, ev.getVersionTag());
              } finally {
                ev.release();
              }
            }
          }
        }, baseEvent.getEventId());
        if (getTXUniqId() != TXManagerImpl.NOTX || dr.getConcurrencyChecksEnabled()) {
          dr.getDataView().postRemoveAll(op, versions, dr);
        }
        // Reply directly with the collected version tags (see operateOnRegion()).
        RemoveAllReplyMessage.send(getSender(), this.processorId,
            getReplySender(r.getDistributionManager()), versions, this.removeAllData,
            this.removeAllDataCount);
        return false;
      } finally {
        op.freeOffHeapResources();
      }
    } finally {
      baseEvent.release();
    }
  }

  // override reply processor type from PartitionMessage
  RemoteOperationResponse createReplyProcessor(LocalRegion r, Set recipients, Object key) {
    return new RemoveAllResponse(r.getSystem(), recipients);
  }

  // override reply message type from PartitionMessage
  @Override
  protected void sendReply(InternalDistributedMember member, int procId, DM dm, ReplyException ex,
      LocalRegion r, long startTime) {
    ReplyMessage.send(member, procId, ex, getReplySender(dm), r != null && r.isInternalRegion());
  }

  @Override
  protected void appendFields(StringBuffer buff) {
    super.appendFields(buff);
    buff.append("; removeAllDataCount=").append(removeAllDataCount);
    if (this.bridgeContext != null) {
      buff.append("; bridgeContext=").append(this.bridgeContext);
    }
    for (int i = 0; i < removeAllDataCount; i++) {
      buff.append("; entry" + i + ":")
          .append(removeAllData[i] == null ? "null" : removeAllData[i].getKey());
    }
  }

  public static class RemoveAllReplyMessage extends ReplyMessage {
    /** Result of the RemoveAll operation */
    private VersionedObjectList versions;

    @Override
    public boolean getInlineProcess() {
      return true;
    }

    private RemoveAllReplyMessage(int processorId, VersionedObjectList versionList,
        RemoveAllEntryData[] removeAllData, int removeAllCount) {
      super();
      this.versions = versionList;
      setProcessorId(processorId);
    }

    /** Send an ack */
    public static void send(InternalDistributedMember recipient, int processorId, ReplySender dm,
        VersionedObjectList versions, RemoveAllEntryData[] removeAllData, int removeAllDataCount) {
      Assert.assertTrue(recipient != null, "RemoveAllReplyMessage NULL reply message");
      RemoveAllReplyMessage m =
          new RemoveAllReplyMessage(processorId, versions, removeAllData, removeAllDataCount);
      m.setRecipient(recipient);
      dm.putOutgoing(m);
    }

    /**
     * Processes this message. This method is invoked by the receiver of the message.
     *
     * @param dm the distribution manager that is processing the message.
     */
    @Override
    public void process(final DM dm, final ReplyProcessor21 rp) {
      final long startTime = getTimestamp();
      if (rp == null) {
        // NOTE(review): debug-level log guarded by a trace-level check -- looks inconsistent;
        // confirm intended level.
        if (logger.isTraceEnabled(LogMarker.DM)) {
          logger.debug("RemoveAllReplyMessage processor not found");
        }
        return;
      }
      if (rp instanceof RemoveAllResponse) {
        RemoveAllResponse processor = (RemoveAllResponse) rp;
        processor.setResponse(this);
      }
      rp.process(this);

      if (logger.isTraceEnabled(LogMarker.DM)) {
        logger.trace(LogMarker.DM, "{} Processed {}", rp, this);
      }
      dm.getStats().incReplyMessageTime(NanoTimer.getTime() - startTime);
    }

    @Override
    public int getDSFID() {
      return REMOTE_REMOVE_ALL_REPLY_MESSAGE;
    }

    /** Required no-arg constructor for DataSerializable deserialization. */
    public RemoveAllReplyMessage() {}

    @Override
    public void fromData(DataInput in) throws IOException, ClassNotFoundException {
      super.fromData(in);
      this.versions = (VersionedObjectList) DataSerializer.readObject(in);
    }

    @Override
    public void toData(DataOutput out) throws IOException {
      super.toData(out);
      DataSerializer.writeObject(this.versions, out);
    }

    @Override
    public String toString() {
      StringBuffer sb = new StringBuffer();
      sb.append("RemoveAllReplyMessage ").append(" processorid=").append(this.processorId)
          .append(" returning versionTags=").append(this.versions);
      return sb.toString();
    }
  }

  /**
   * A processor to capture the value returned by {@link RemoteRemoveAllMessage}
   */
  public static class RemoveAllResponse extends RemoteOperationResponse {
    private VersionedObjectList versions;

    public RemoveAllResponse(InternalDistributedSystem ds, Set recipients) {
      super(ds, recipients, false);
    }

    public void setResponse(RemoveAllReplyMessage removeAllReplyMessage) {
      if (removeAllReplyMessage.versions != null) {
        this.versions = removeAllReplyMessage.versions;
        // Fill in the sender's id where the reply left version member ids null.
        this.versions.replaceNullIDs(removeAllReplyMessage.getSender());
      }
    }

    public VersionedObjectList getResponse() {
      return this.versions;
    }
  }
}
| |
/*
**The MIT License (MIT)
**Copyright (c) <2014> <CIn-UFPE>
**
**Permission is hereby granted, free of charge, to any person obtaining a copy
**of this software and associated documentation files (the "Software"), to deal
**in the Software without restriction, including without limitation the rights
**to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
**copies of the Software, and to permit persons to whom the Software is
**furnished to do so, subject to the following conditions:
**
**The above copyright notice and this permission notice shall be included in
**all copies or substantial portions of the Software.
**
**THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
**IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
**FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
**AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
**LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
**OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
**THE SOFTWARE.
*/
package org.caboclo.clients;
import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.UniformInterfaceException;
import com.sun.jersey.api.client.WebResource;
import com.sun.jersey.api.client.config.ClientConfig;
import com.sun.jersey.api.client.config.DefaultClientConfig;
import com.sun.jersey.client.urlconnection.HTTPSProperties;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.security.SecureRandom;
import java.security.cert.CertificateException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLSession;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.caboclo.service.MultiPartDownload;
import org.caboclo.service.MultiPartUpload;
import org.caboclo.service.RemoteFile;
import org.caboclo.util.Credentials;
import org.javaswift.joss.client.factory.AccountConfig;
import org.javaswift.joss.client.factory.AccountFactory;
import org.javaswift.joss.client.factory.AuthenticationMethod;
import org.javaswift.joss.model.Account;
import org.javaswift.joss.model.Container;
import org.javaswift.joss.model.StoredObject;
/**
 * {@link ApiClient} implementation backed by an OpenStack Swift object store.
 *
 * <p>Authentication and simple object upload/download go through the JOSS
 * library ({@code Account}/{@code Container}/{@code StoredObject}); operations
 * JOSS does not cover here (directory-style listings, manifest creation for
 * multi-part uploads, recursive delete) are issued as raw REST calls with a
 * Jersey {@code Client} against the storage URL returned by the auth service.
 *
 * <p>SECURITY: TLS certificate and hostname verification are disabled for
 * every connection this class makes (see {@link #configureClientSSLDisabled()}
 * and the {@code setDisableSslValidation(true)} call in
 * {@link #authenticate(Map)}). Development/testing use only.
 */
public class OpenStackClient extends ApiClient {
    /** Swift container that holds the regular backup objects. */
    private Container container;
    /** Container that holds the chunk objects produced by multi-part uploads. */
    private Container segmentContainer;
    /** Authentication parameters last passed to {@link #authenticate(Map)}. */
    private Map<String, String> parameters;
    /** Storage endpoint returned by the auth service ("X-Storage-Url" header). */
    private String storageURL;
    /** Auth token returned by the auth service ("X-Auth-Token" header). */
    private String authToken;
    /** Jersey client used for the raw REST calls (listing, manifest, delete). */
    private Client client;
    /** Set true once {@link #authenticate(Map)} has fully succeeded. */
    private boolean isAuthenticated;
    public static final String USER_NAME = "user_name";
    public static final String PASSWORD = "password";
    public static final String AUTH_URL = "auth_url";
    // NOTE(review): "salvation" is almost certainly a typo for "validation".
    // The literal value cannot be changed safely here: persisted
    // configurations may already use this key.
    public static final String DISABLE_SSL_SALVATION = "disable_ssl_salvation";
    public static final String AUTHENTICATION_METHOD = "authentication_method";
    // NOTE(review): hard-coded test credentials; these should not ship in a
    // production build.
    public static final String TEMP_USER_NAME = "account:bruno";
    public static final String TEMP_PASSWORD = "password123";
    public static final String TEMP_AUTH_URL = "https://192.168.242.129";
    /** Size of each chunk of a multi-part upload. */
    private static final int CHUNK_SIZE = 1024 * 1024; //1MB TODO- Increase to 5MB

    /** Names of the keys {@link #authenticate(Map)} expects in its parameter map. */
    @Override
    public String[] getAuthenticationParameters() {
        return new String[]{USER_NAME, PASSWORD, AUTH_URL, DISABLE_SSL_SALVATION, AUTHENTICATION_METHOD};
    }

    /**
     * Returns the value previously supplied for {@code parameter}, or null.
     * Throws NullPointerException if called before {@link #authenticate(Map)}.
     */
    @Override
    public String getAuthenticationParameter(String parameter) {
        return parameters.get(parameter);
    }

    /**
     * Authenticates against the Swift endpoint described by {@code parameters}
     * and prepares the two containers this client uses.
     *
     * <p>Side effects on success: creates "mainContainer" and "segments" if
     * absent, persists the credentials via {@link Credentials}, and caches the
     * auth token / storage URL for the raw REST helpers.
     *
     * @param parameters map keyed by {@link #USER_NAME}, {@link #PASSWORD} and
     *                   {@link #AUTH_URL}
     * @return true on success; false if the credentials are rejected or any
     *         step of container setup / persistence fails
     */
    @Override
    public boolean authenticate(Map<String, String> parameters) {
        this.parameters = parameters;
        Credentials cred = new Credentials();
        // Probe the credentials with a direct auth request first; on failure,
        // also drop any credentials persisted by a previous session.
        if (!hasValidCredentials()){
            cred.removeCredentials("openstack");
            return false;
        }
        try {
            AccountConfig config = new AccountConfig();
            config.setUsername(parameters.get(USER_NAME));
            config.setPassword(parameters.get(PASSWORD));
            config.setAuthUrl(parameters.get(AUTH_URL)); //+ "/auth/v1.0");
            // SECURITY: certificate validation is disabled unconditionally.
            config.setDisableSslValidation(true);
            config.setAuthenticationMethod(AuthenticationMethod.BASIC);
            Account account = new AccountFactory(config).createAccount();
            // Lazily create the container that holds regular objects.
            this.container = account.getContainer("mainContainer");
            if (!this.container.exists()) {
                this.container.create();
                this.container.makePublic();//TODO - Maybe this is not necessary
                System.out.println("Container " + this.container.getName() + " created");
            }
            // ...and the container that holds multi-part upload chunks.
            this.segmentContainer = account.getContainer("segments");
            if (!this.segmentContainer.exists()) {
                this.segmentContainer.create();
            }
            //Remove old credentials, if they exist
            cred.removeCredentials("openstack");
            // Persist the new credentials encoded as "user@password@url".
            // NOTE(review): an '@' inside any of the three fields would corrupt
            // this encoding — see the split("@") in isAuthenticated().
            String keys = parameters.get(USER_NAME) + "@" + parameters.get(PASSWORD) + "@" + parameters.get(AUTH_URL);
            cred.saveCredentials("openstack", keys);
        } catch (Exception ex) {
            // NOTE(review): the cause is dropped here; any failure (network,
            // container creation, persistence) is reported as a bare "false".
            return false;
        }
        // Cache the auth token / storage URL used by the raw REST helpers.
        updateCredentials();
        this.isAuthenticated = true;
        return true;
    }

    /**
     * Builds a Jersey client configuration that accepts ANY server certificate
     * and ANY hostname.
     *
     * <p>SECURITY: this defeats TLS entirely (trust-all manager plus an
     * always-true hostname verifier) and additionally installs the permissive
     * socket factory as the JVM-wide default via
     * {@code HttpsURLConnection.setDefaultSSLSocketFactory}, affecting every
     * HTTPS connection in the process. Development use only.
     */
    private ClientConfig configureClientSSLDisabled() {
        TrustManager[] certs = new TrustManager[]{
            new X509TrustManager() {
                @Override
                public void checkClientTrusted(java.security.cert.X509Certificate[] xcs, String string) throws CertificateException {
                    // accept everything
                }
                @Override
                public void checkServerTrusted(java.security.cert.X509Certificate[] xcs, String string) throws CertificateException {
                    // accept everything
                }
                @Override
                public java.security.cert.X509Certificate[] getAcceptedIssuers() {
                    return null;
                }
            }};
        SSLContext ctx = null;
        try {
            ctx = SSLContext.getInstance("TLS");
            ctx.init(null, certs, new SecureRandom());
        } catch (java.security.GeneralSecurityException ex) {
            // Swallowed: ctx stays null and the JVM default factory is kept.
        }
        if (ctx != null) {
            HttpsURLConnection.setDefaultSSLSocketFactory(ctx.getSocketFactory());
        }
        ClientConfig config = new DefaultClientConfig();
        try {
            config.getProperties().put(HTTPSProperties.PROPERTY_HTTPS_PROPERTIES, new HTTPSProperties(
                    new HostnameVerifier() {
                        @Override
                        public boolean verify(String hostname, SSLSession session) {
                            return true; // accept any hostname
                        }
                    },
                    ctx
            ));
        } catch (Exception e) {
            // Swallowed: the client falls back to default HTTPS handling.
        }
        return config;
    }

    /**
     * Performs a TempAuth v1.0 request and caches the returned auth token and
     * storage URL for later raw REST calls.
     *
     * @throws RuntimeException if the auth service answers anything but 200
     */
    private void updateCredentials() throws UniformInterfaceException, RuntimeException {
        //TODO - In production version, this method should be called -> Client client = Client.create();
        this.client = Client.create(configureClientSSLDisabled());
        WebResource webResource = client.resource(parameters.get(AUTH_URL) + "/auth/v1.0?format=json");
        ClientResponse response = webResource
                .header("X-Auth-Key", parameters.get(PASSWORD))
                .header("X-Auth-User", parameters.get(USER_NAME))
                .accept("application/json")
                .get(ClientResponse.class);
        if (response.getStatus() != 200) {
            throw new RuntimeException("Failed : HTTP error code : " + response.getStatus());
        }
        // NOTE(review): throws NullPointerException if either header is absent
        // from a 200 response — TODO confirm the service always sends both.
        this.authToken = response.getHeaders().get("X-Auth-Token").get(0);
        this.storageURL = response.getHeaders().get("X-Storage-Url").get(0);
    }

    /**
     * No-op: Swift has no real directories; "folders" exist only as object
     * name prefixes, so there is nothing to create.
     */
    @Override
    public void makedir(String path) throws IOException {
        //nothing
    }

    /**
     * Uploads {@code file} to the main container under object name
     * {@code path} (a single leading '/' is stripped).
     */
    @Override
    public void putFile(File file, String path) throws IOException {
        if (path.startsWith("/") && path.length() > 1) {
            path = path.substring(1);
        }
        StoredObject object = container.getObject(path);
        object.uploadObject(file);
        object.reload();
    }

    /**
     * Downloads the object named {@code path} (leading '/' stripped) from the
     * main container into the local {@code file}.
     */
    @Override
    public void getFile(File file, String path) throws IOException {
        if (path.startsWith("/") && path.length() > 1) {
            path = path.substring(1);
        }
        StoredObject object = container.getObject(path);
        object.downloadObject(file);
        object.reload();
    }

    /**
     * Reports whether this client is usable. If the in-memory flag is not set
     * (or the stored credentials stopped working), falls back to the
     * credentials persisted on disk ("user@password@url") and attempts a full
     * re-authentication with them.
     */
    @Override
    public boolean isAuthenticated() {
        if (this.isAuthenticated && hasValidCredentials()) {
            return true;
        } else {
            System.out.println("check credentials file");
            Credentials cred = new Credentials();
            String keys = cred.checkCredentialsFile("openstack");
            if (keys != null && !keys.isEmpty()) {
                // Decode the "user@password@url" encoding written by authenticate().
                String[] access_keys = keys.split("@");
                Map<String, String> params = new HashMap<>();
                params.put(USER_NAME, access_keys[0]);
                params.put(PASSWORD, access_keys[1]);
                params.put(AUTH_URL, access_keys[2]);
                this.isAuthenticated = authenticate(params);
                return this.isAuthenticated;
            } else {
                System.out.println("No credentials in file");
                return false;
            }
        }
    }

    //TODO - Adapt to the new getChildren
    /**
     * Lists the immediate children of {@code folderName} via a raw Swift
     * container listing ({@code delimiter=/}), returning each child path with
     * a leading '/'. Entries with a "hash" field are objects; entries with a
     * "subdir" field are pseudo-directories.
     *
     * @return the child paths, or null if the JSON response cannot be parsed
     */
    public ArrayList<String> getChildrenOld(String folderName) throws IOException {
        ArrayList<String> result = new ArrayList<>();
        try {
            if (folderName.startsWith("/") && folderName.length() > 1) {
                folderName = folderName.substring(1);
            }
            String relativePath;
            if (folderName == null || folderName.equals("") || folderName.equals("/")) {
                // Container root: no prefix needed.
                relativePath = "/"
                        + this.container.getName()
                        + "?delimiter=/&format=json";
            } else {
                // Ensure the prefix ends with exactly one '/'.
                relativePath = "/"
                        + this.container.getName()
                        + "?prefix=" + folderName
                        + (folderName.charAt(folderName.length() - 1) == '/' ? "" : "/")
                        + "&delimiter=/&format=json";
            }
            WebResource webResource = client.resource(this.storageURL + relativePath);
            ClientResponse response = webResource
                    .header("X-Auth-Token", this.authToken)
                    .accept("application/json")
                    .get(ClientResponse.class);
            if (response.getStatus() != 200) {
                throw new RuntimeException("Failed : HTTP error code : " + response.getStatus());
            }
            String output = response.getEntity(String.class);
            JSONArray children = new JSONArray(output);
            for (int i = 0; i < children.length(); i++) {
                JSONObject element = children.getJSONObject(i);
                if (!element.isNull("hash")) {
                    result.add("/" + element.getString("name"));       // plain object
                } else if (!element.isNull("subdir")) {
                    result.add("/" + element.getString("subdir"));     // pseudo-directory
                }
            }
            return result;
        } catch (JSONException ex) {
            Logger.getLogger(OpenStackClient.class.getName()).log(Level.SEVERE, null, ex);
        }
        return null;
    }

    /**
     * Returns true if {@code folderName} exists as a pseudo-directory, i.e.
     * the delimiter listing for its parent prefix reports a matching "subdir"
     * entry (with or without trailing '/').
     */
    @Override
    public boolean isFolder(String folderName) throws IOException {
        if (folderName.startsWith("/") && folderName.length() > 1) {
            folderName = folderName.substring(1);
        }
        try {
            // Query with any trailing '/' removed so the delimiter listing can
            // report the name itself as a subdir.
            String relativePath = "/"
                    + this.container.getName()
                    + "?prefix="
                    + (folderName.charAt(folderName.length() - 1) == '/'
                            ? folderName.substring(0, folderName.length() - 1)
                            : folderName)
                    + "&delimiter=/&format=json";
            WebResource webResource = client.resource(this.storageURL + relativePath);
            ClientResponse response = webResource
                    .header("X-Auth-Token", this.authToken)
                    .accept("application/json")
                    .get(ClientResponse.class);
            if (response.getStatus() != 200) {
                throw new RuntimeException("Failed : HTTP error code : " + response.getStatus());
            }
            String output = response.getEntity(String.class);
            JSONArray children = new JSONArray(output);
            for (int i = 0; i < children.length(); i++) {
                JSONObject element = children.getJSONObject(i);
                if (!element.isNull("subdir")) {
                    if (element.get("subdir").equals(folderName)
                            || element.get("subdir").equals(folderName + "/")) {
                        return true;
                    }
                }
            }
        } catch (JSONException ex) {
            Logger.getLogger(OpenStackClient.class.getName()).log(Level.SEVERE, null, ex);
        }
        return false;
    }

    /**
     * Splits {@code f} into consecutive chunk files of (roughly) {@code size}
     * bytes, written next to the original as {@code <path>0}, {@code <path>1},
     * ... The original file is left in place.
     *
     * <p>NOTE(review): the streams are not closed when an exception escapes
     * the copy loop (no finally / try-with-resources), so chunk files can be
     * leaked. Also, if {@code read} ever returns -1 on a non-final part the
     * {@code while (total &lt; size)} loop would spin — presumably the chunk
     * arithmetic guarantees EOF only on the last part; TODO confirm.
     *
     * @return true on success, false if {@code size &lt;= 0} or any I/O error
     *         occurs (the error is printed, not rethrown)
     */
    public boolean split(File f, long size) {
        if (size <= 0)
            return false;
        try {
            // Number of whole chunks, plus one for a trailing partial chunk.
            int parts = ((int) (f.length() / size));
            long flength = 0;
            if (f.length() % size > 0)
                parts++;
            File[] fparts = new File[parts];
            FileInputStream fis = new FileInputStream(f);
            FileOutputStream fos = null;
            for (int i = 0; i < fparts.length; i++) {
                // Chunk i is named "<original path><i>".
                fparts[i] = new File(f.getPath() + i);
                fos = new FileOutputStream(fparts[i]);
                int read = 0;
                long total = 0;
                byte[] buff = new byte[1024];
                int origbuff = buff.length;
                while (total < size) {
                    read = fis.read(buff);
                    if (read != -1) {
                        //Probable mistake buff = invertBuffer(buff, 0, read);
                        total += read;
                        flength += read;
                        fos.write(buff, 0, read);
                    }
                    // Last part: stop as soon as a short (or EOF) read occurs.
                    if (i == fparts.length - 1 && read < origbuff)
                        break;
                }
                fos.flush();
                fos.close();
                fos = null;
            }
            fis.close();
            // f.delete();
            f = fparts[0];
            System.out.println("Length Readed (KB): " + flength / 1024.0);
            return true;
        } catch (Exception ex) {
            // Diagnostics only; callers just see "false".
            System.out.println(ex);
            System.out.println(ex.getLocalizedMessage());
            System.out.println(ex.getStackTrace()[0].getLineNumber());
            ex.printStackTrace();
            return false;
        }
    }

    /**
     * Prepares a multi-part upload: computes the chunk count, records the
     * bookkeeping values on the {@link MultiPartUpload}, and splits the file
     * into CHUNK_SIZE pieces on disk (see {@link #split(File, long)}).
     *
     * @param file     local file to upload
     * @param fileName destination object name on the server
     */
    @Override
    public MultiPartUpload initializeUpload(File file, String fileName) {
        MultiPartUpload mpu = new MultiPartUpload(file, 0);
        // Directory of the source file, with trailing separator.
        String localPath = file.getAbsolutePath().substring(0,file.getAbsolutePath().lastIndexOf(File.separator) + 1);
        System.out.println("\t\tStarting multipart file sending: " + file.getAbsolutePath());
        //Hold the files to be sent
        int parts = ((int) (file.length() / CHUNK_SIZE));
        if (file.length() % CHUNK_SIZE > 0)
        {
            parts++;
        }
        mpu.putObject("numOfChunks", parts);
        mpu.putObject("currentChunk", 0);
        mpu.putObject("fileName", file.getName());
        mpu.putObject("localPath", localPath);
        mpu.putObject("ServerPath", fileName);
        //Split the file into multiple files
        this.split(file, CHUNK_SIZE);
        return mpu;
    }

    /**
     * Uploads the next pending chunk ("&lt;fileName&gt;&lt;index&gt;") to the
     * segments container and advances the chunk counter; marks the upload
     * finished once all chunks are sent.
     */
    @Override
    public void sendNextPart(MultiPartUpload mpu) {
        Integer currentComponent = (Integer) mpu.getObject("currentChunk");
        Integer numChunks = (Integer) mpu.getObject("numOfChunks");
        String localPath = mpu.getObject("localPath").toString();
        if(currentComponent < numChunks)
        {
            String fileName = mpu.getObject("fileName").toString();
            fileName = fileName + currentComponent;
            StoredObject object = segmentContainer.getObject(fileName);
            object.uploadObject(new File(localPath + fileName));
            mpu.putObject("currentChunk", ++currentComponent);
        }
        else
        {
            mpu.setFinished();
        }
    }

    /**
     * Completes a multi-part upload by PUTting a zero-byte Dynamic Large
     * Object manifest whose "X-Object-Manifest" header points at the chunk
     * prefix in the segments container; Swift then serves the chunks as one
     * concatenated object.
     *
     * @throws RuntimeException if the server does not answer 201 Created
     */
    @Override
    public void finalizeUpload(MultiPartUpload mpu) {
        InputStream is = new ByteArrayInputStream("".getBytes());
        WebResource webResource = client.resource(this.storageURL + "/" +this.container.getName() + "/" + (String) mpu.getObject("ServerPath"));
        System.out.println("Server addr: " + this.storageURL + "/" +this.container.getName() + "/" + (String) mpu.getObject("ServerPath"));
        ClientResponse response = webResource
                .header("X-Auth-Token", this.authToken)
                .header("X-Object-Manifest", "segments/" + mpu.getObject("fileName"))
                .put(ClientResponse.class, is);
        if (response.getStatus() != 201) {
            throw new RuntimeException("Failed : HTTP error code : " + response.getStatus());
        }
        System.out.print("Object Uploaded");
    }

    // (Removed: a large commented-out sample main() and scratch code that
    // exercised authenticate / multi-part upload / download against a local
    // test server; it embedded the same plaintext test credentials already
    // present in the TEMP_* constants above.)

    /** Not implemented yet; see {@link #getChildrenOld(String)} for the old variant. */
    @Override
    public List<RemoteFile> getChildren(String folderName) throws IOException {
        throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
    }

    /**
     * Probes the auth endpoint with the current parameters and reports whether
     * it answers 200. Same request as {@link #updateCredentials()} but without
     * caching the token/URL or throwing on failure.
     */
    private boolean hasValidCredentials() {
        this.client = Client.create(configureClientSSLDisabled());
        WebResource webResource = client.resource(parameters.get(AUTH_URL) + "/auth/v1.0?format=json");
        ClientResponse response = webResource
                .header("X-Auth-Key", parameters.get(PASSWORD))
                .header("X-Auth-User", parameters.get(USER_NAME))
                .accept("application/json")
                .get(ClientResponse.class);
        if (response.getStatus() == 200) {
            return true;
        } else {
            return false;
        }
    }

    /** Multi-part download is not implemented for this backend. */
    @Override
    public MultiPartDownload initializeDownload(File file, RemoteFile remoteFile){
        throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
    }

    /** Multi-part download is not implemented for this backend. */
    @Override
    public void getNextPart(MultiPartDownload mpd){
        throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
    }

    /**
     * Deletes every object whose name starts with {@code remoteFolder} (no
     * delimiter in the query, so the listing is recursive) from the main
     * container.
     *
     * @throws IOException wrapping any JSON parse failure of the listing
     */
    @Override
    public void removeFolder(String remoteFolder) throws IOException {
        // NOTE(review): this list is never used; left in place to keep the
        // code byte-identical.
        ArrayList<String> result = new ArrayList<>();
        try {
            if (remoteFolder.startsWith("/") && remoteFolder.length() > 1) {
                remoteFolder = remoteFolder.substring(1);
            }
            String relativePath;
            if (remoteFolder == null || remoteFolder.equals("") || remoteFolder.equals("/")) {
                relativePath = "/"
                        + this.container.getName()
                        + "?delimiter=/&format=json";
            } else {
                relativePath = "/"
                        + this.container.getName()
                        + "?prefix=" + remoteFolder
                        + (remoteFolder.charAt(remoteFolder.length() - 1) == '/' ? "" : "/")
                        + "&format=json";
            }
            WebResource webResource = client.resource(this.storageURL + relativePath);
            ClientResponse response = webResource
                    .header("X-Auth-Token", this.authToken)
                    .accept("application/json")
                    .get(ClientResponse.class);
            if (response.getStatus() != 200) {
                throw new RuntimeException("Failed : HTTP error code : " + response.getStatus());
            }
            String output = response.getEntity(String.class);
            JSONArray children = new JSONArray(output);
            for (int i = 0; i < children.length(); i++) {
                JSONObject element = children.getJSONObject(i);
                if (!element.isNull("hash")) {
                    container.getObject(element.getString("name")).delete();
                } else if (!element.isNull("subdir")) {
                    container.getObject(element.getString("subdir")).delete();
                }
            }
        } catch (JSONException ex) {
            Logger.getLogger(OpenStackClient.class.getName()).log(Level.SEVERE, null, ex);
            throw new IOException("Error while removing directory "+remoteFolder);
        }
    }
}
| |
/**
* Licensed to Apereo under one or more contributor license
* agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Apereo licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at the following location:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.jasig.portlet.calendar.adapter;
import java.net.URL;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import javax.portlet.PortletRequest;
import net.fortuna.ical4j.model.Calendar;
import net.fortuna.ical4j.model.Component;
import net.fortuna.ical4j.model.Period;
import net.fortuna.ical4j.model.PeriodList;
import net.fortuna.ical4j.model.Property;
import net.fortuna.ical4j.model.PropertyList;
import net.fortuna.ical4j.model.component.VEvent;
import net.fortuna.ical4j.model.property.DtEnd;
import net.fortuna.ical4j.model.property.DtStart;
import net.fortuna.ical4j.model.property.Duration;
import net.fortuna.ical4j.model.property.RRule;
import net.sf.ehcache.Cache;
import net.sf.ehcache.Element;
import org.apache.commons.httpclient.Credentials;
import org.apache.commons.httpclient.auth.AuthScope;
import org.apache.commons.httpclient.protocol.Protocol;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jasig.portlet.calendar.CalendarConfiguration;
import org.jasig.portlet.calendar.caching.DefaultCacheKeyGeneratorImpl;
import org.jasig.portlet.calendar.caching.ICacheKeyGenerator;
import org.jasig.portlet.calendar.credentials.DefaultCredentialsExtractorImpl;
import org.jasig.portlet.calendar.credentials.ICredentialsExtractor;
import org.jasig.portlet.calendar.url.DefaultUrlCreatorImpl;
import org.jasig.portlet.calendar.url.IUrlCreator;
import org.joda.time.Interval;
import org.osaf.caldav4j.CalDAVCollection;
import org.osaf.caldav4j.CalDAVConstants;
import org.osaf.caldav4j.exceptions.CalDAV4JException;
import org.osaf.caldav4j.methods.CalDAV4JMethodFactory;
import org.osaf.caldav4j.methods.HttpClient;
/**
* Implementation of {@link ICalendarAdapter} that uses CalDAV for retrieving ical-based {@link
* CalendarEventSet}s.
*
* <p>Useful background articles: <a
* href="http://blogs.nologin.es/rickyepoderi/index.php?/archives/14-Introducing-CalDAV-Part-I.html"/>
* <a
* href="http://blogs.nologin.es/rickyepoderi/index.php?/archives/15-Introducing-CalDAV-Part-II.html"/>
*
* <p>URL format varies per provider (see links above). For Google an URL would be
* https://www.google.com/calendar/dav/CALENDERID/events where CALENDARID is obtained from the
* properties of the calendars, such as jameswennmacher@gmail.com for a user's personal calendar, or
* en.usa%23holiday%40group.v.calendar.google.com for the US Holidays calendar. Google uses Basic
* Authentication (or oAuth 2.0 though this is not implemented yet).
*
* @author Jen Bourey, jennifer.bourey@gmail.com
* @version $Header: CalDavCalendarAdapter.java Exp $
*/
public class CalDavCalendarAdapter extends AbstractCalendarAdapter implements ICalendarAdapter {
    protected final Log log = LogFactory.getLog(this.getClass());
    /** Cache of {@code CalendarEventSet}s keyed by the generated cache key. */
    private Cache cache;
    /** Builds the CalDAV URL from the portlet configuration; pluggable. */
    private IUrlCreator urlCreator = new DefaultUrlCreatorImpl();
    /** Pulls HTTP credentials out of the portlet request; pluggable. */
    private ICredentialsExtractor credentialsExtractor = new DefaultCredentialsExtractorImpl();
    /** Produces cache keys from configuration + interval + request; pluggable. */
    private ICacheKeyGenerator cacheKeyGenerator = new DefaultCacheKeyGeneratorImpl();
    /** Prefix that namespaces this adapter's cache keys. */
    private String cacheKeyPrefix = "default";

    /** Injects the ehcache instance backing {@link #getEvents}. */
    public void setCache(Cache cache) {
        this.cache = cache;
    }

    /** Overrides the default URL-creation strategy. */
    public void setUrlCreator(IUrlCreator urlCreator) {
        this.urlCreator = urlCreator;
    }

    /** Overrides the default credentials-extraction strategy. */
    public void setCredentialsExtractor(ICredentialsExtractor credentialsExtractor) {
        this.credentialsExtractor = credentialsExtractor;
    }

    /** Overrides the default cache-key-generation strategy. */
    public void setCacheKeyGenerator(ICacheKeyGenerator cacheKeyGenerator) {
        this.cacheKeyGenerator = cacheKeyGenerator;
    }

    /** Sets the prefix used to namespace this adapter's cache keys. */
    public void setCacheKeyPrefix(String cacheKeyPrefix) {
        this.cacheKeyPrefix = cacheKeyPrefix;
    }

    /**
     * Returns the set of events for the configured CalDAV calendar within
     * {@code interval}, serving from the cache when a cached entry exists for
     * the generated key; otherwise fetches the whole calendar, expands its
     * events over the interval, and caches the result.
     *
     * @throws CalendarException if the calendar cannot be retrieved or parsed
     */
    public CalendarEventSet getEvents(
            CalendarConfiguration calendarConfiguration, Interval interval, PortletRequest request)
            throws CalendarException {
        Set<VEvent> events = new HashSet<VEvent>();
        String url = this.urlCreator.constructUrl(calendarConfiguration, interval, request);
        if (log.isDebugEnabled()) {
            log.debug("generated url: " + url);
        }
        // try to get the cached calendar; the key includes the URL so distinct
        // calendars/users never collide
        String key =
                cacheKeyGenerator.getKey(
                        calendarConfiguration, interval, request, cacheKeyPrefix.concat(".").concat(url));
        Element cachedElement = this.cache.get(key);
        CalendarEventSet eventSet;
        if (cachedElement == null) {
            Credentials credentials = credentialsExtractor.getCredentials(request);
            // retrieve calendars for the current user
            net.fortuna.ical4j.model.Calendar calendar = retrieveCalendar(url, interval, credentials);
            // extract events from the calendars
            events.addAll(convertCalendarToEvents(calendar, interval));
            log.debug("contentProcessor found " + events.size() + " events");
            // save the calendar event set to the cache
            eventSet = insertCalendarEventSetIntoCache(this.cache, key, events);
        } else {
            eventSet = (CalendarEventSet) cachedElement.getValue();
        }
        return eventSet;
    }

    // Have not gotten queries working so this method returns all events in
    // the calendar.
    /**
     * Fetches the entire calendar at {@code url} over CalDAV.
     *
     * <p>{@code interval} is currently unused because time-range queries are
     * not working (see the commented-out GenerateQuery code below); the whole
     * collection is downloaded and filtered later in
     * {@link #convertCalendarToEvents}.
     *
     * @param credentials optional; when present they are registered for any
     *                    auth scope and preemptive authentication is enabled
     * @throws CalendarException wrapping any CalDAV or unexpected failure
     */
    protected final net.fortuna.ical4j.model.Calendar retrieveCalendar(
            String url, Interval interval, Credentials credentials) {
        try {
            // construct a HostConfiguration from the server URL
            URL hostUrl = new URL(url);
            int port = hostUrl.getPort();
            if (port == -1) {
                // no explicit port in the URL: fall back to the scheme default
                port = hostUrl.getDefaultPort();
            }
            HttpClient httpClient = new HttpClient();
            httpClient
                    .getHostConfiguration()
                    .setHost(hostUrl.getHost(), port, Protocol.getProtocol(hostUrl.getProtocol()));
            if (log.isDebugEnabled()) {
                log.debug("connecting to calDAV host " + httpClient.getHostConfiguration().getHost());
            }
            if (credentials != null) {
                httpClient.getState().setCredentials(AuthScope.ANY, credentials);
                httpClient.getParams().setAuthenticationPreemptive(true);
            }
            String calendarPath = hostUrl.getPath();
            CalDAVCollection calDAVCollection =
                    new CalDAVCollection(
                            calendarPath,
                            httpClient.getHostConfiguration(),
                            new CalDAV4JMethodFactory(),
                            CalDAVConstants.PROC_ID_DEFAULT);
            // log.debug(calDAVCollection.testConnection(httpClient));
            if (log.isDebugEnabled()) {
                log.debug(calDAVCollection.getCalendarCollectionRoot());
            }
            // Haven't gotten queries working
            // GenerateQuery gq = new GenerateQuery();
            // gq.setTimeRange(new net.fortuna.ical4j.model.Date(interval
            // .getStart().toDate()), new net.fortuna.ical4j.model.Date(
            // interval.getEnd().toDate()));
            // CalendarQuery query = gq.generate();
            // Haven't gotten queries working even for single calendar.
            // Also I don't think Google supports viewing
            // or querying multiple calendars at once.
            // List<Calendar> cals = calDAVCollection.queryCalendars(client, query);
            // return cals.get(0);
            // Fetch the collection root itself ("" = relative path).
            Calendar cal = calDAVCollection.getCalendar(httpClient, "");
            if (log.isDebugEnabled()) {
                log.debug(cal);
            }
            return cal;
        } catch (CalDAV4JException e) {
            log.error("CalDAV exception: ", e);
            throw new CalendarException(e);
        } catch (Exception e) {
            log.error(e);
            throw new CalendarException("Unknown exception while retrieving calendar", e);
        }
    }

    /**
     * Expands the VEVENTs of {@code calendar} over {@code interval}: each
     * recurrence instance inside the interval becomes its own VEvent whose
     * DTSTART/DTEND are set to that instance's period, with all other
     * (non-date) properties copied from the source event.
     *
     * @return one VEvent per recurrence instance; empty set for a null calendar
     */
    protected final Set<VEvent> convertCalendarToEvents(
            net.fortuna.ical4j.model.Calendar calendar, Interval interval) throws CalendarException {
        Period period =
                new Period(
                        new net.fortuna.ical4j.model.DateTime(interval.getStartMillis()),
                        new net.fortuna.ical4j.model.DateTime(interval.getEndMillis()));
        Set<VEvent> events = new HashSet<VEvent>();
        // if the calendar is null, return empty set
        if (calendar == null) {
            log.info("calendar empty, returning empty set");
            return Collections.emptySet();
        }
        // retrieve the list of events for this calendar within the
        // specified time period
        for (Iterator<Component> i = calendar.getComponents().iterator(); i.hasNext(); ) {
            Component component = i.next();
            if (component.getName().equals("VEVENT")) {
                VEvent event = (VEvent) component;
                if (log.isTraceEnabled()) {
                    log.trace("processing event " + event.getSummary().getValue());
                }
                // calculate the recurrence set for this event
                // for the specified time period
                PeriodList periods = event.calculateRecurrenceSet(period);
                // add each recurrence instance to the event list
                for (Iterator<Period> iter = periods.iterator(); iter.hasNext(); ) {
                    Period eventper = iter.next();
                    PropertyList props = event.getProperties();
                    // create a new property list, setting the date
                    // information to this event period
                    PropertyList newprops = new PropertyList();
                    newprops.add(new DtStart(eventper.getStart()));
                    newprops.add(new DtEnd(eventper.getEnd()));
                    for (Iterator<Property> iter2 = props.iterator(); iter2.hasNext(); ) {
                        Property prop = iter2.next();
                        // only add non-date-related properties (recurrence
                        // rules are dropped too: each instance is concrete)
                        if (!(prop instanceof DtStart)
                                && !(prop instanceof DtEnd)
                                && !(prop instanceof Duration)
                                && !(prop instanceof RRule)) newprops.add(prop);
                    }
                    // create the new event from our property list
                    VEvent newevent = new VEvent(newprops);
                    events.add(newevent);
                    if (log.isTraceEnabled()) {
                        log.trace("added event " + newevent);
                    }
                }
            }
        }
        return events;
    }
}
| |
package com.foobnix.android.utils;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.List;
import android.app.Activity;
import android.text.Html;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.CheckBox;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ListAdapter;
import android.widget.ListView;
import android.widget.TextView;
/**
 * Static helper methods for looking up, configuring and clearing Android
 * views by resource id. All lookup helpers accept either an {@link Activity}
 * or a {@link View} as the search root (see {@link #find(Object, int)}).
 */
public class Views {

    /**
     * Enables or disables focusability and clickability of {@code view} in one
     * call.
     *
     * @return the same view, for chaining
     */
    public static View activate(View view, boolean isActive) {
        view.setFocusable(isActive);
        view.setFocusableInTouchMode(isActive);
        view.setClickable(isActive);
        return view;
    }

    /**
     * Resolves {@code resId} against an {@link Activity} or a {@link View}.
     *
     * @param o the search root; anything else yields null
     * @return the found view, or null when {@code o} is neither an Activity
     *         nor a View, or the id is not present
     */
    public static View find(final Object o, final int resId) {
        if (o instanceof Activity) {
            return ((Activity) o).findViewById(resId);
        }
        if (o instanceof View) {
            return ((View) o).findViewById(resId);
        }
        return null;
    }

    /**
     * Resolves every id in {@code resIds} via {@link #find(Object, int)}.
     * Entries for unresolvable ids are null.
     */
    public static List<View> findAll(final Object av, final int... resIds) {
        List<View> res = new ArrayList<View>(resIds.length);
        for (int id : resIds) {
            res.add(find(av, id));
        }
        return res;
    }

    /**
     * Looks {@code viewId} up on the activity when one is given, otherwise on
     * the fallback view {@code v}.
     */
    public static View findView(final Activity a, final View v, final int viewId) {
        if (a != null) {
            return a.findViewById(viewId);
        }
        return v.findViewById(viewId);
    }

    /**
     * Finds {@code resId} under {@code v} and attaches {@code onClick} when it
     * is non-null. Throws NullPointerException if the id is absent.
     *
     * @return the resolved view
     */
    public static View click(final View v, final int resId, final OnClickListener onClick) {
        View findViewById = v.findViewById(resId);
        if (onClick != null) {
            findViewById.setOnClickListener(onClick);
        }
        return findViewById;
    }

    /**
     * Finds {@code resId} on the activity and attaches {@code onClick} when it
     * is non-null. Throws NullPointerException if the id is absent.
     *
     * @return the resolved view
     */
    public static View click(final Activity a, final int resId, final OnClickListener onClick) {
        View findViewById = a.findViewById(resId);
        if (onClick != null) {
            findViewById.setOnClickListener(onClick);
        }
        return findViewById;
    }

    /** Finds {@code resId} and casts it to {@link TextView}. */
    public static TextView text(final Object view, final int resId) {
        return (TextView) find(view, resId);
    }

    /** Finds {@code resId} and casts it to {@link EditText}. */
    public static EditText editText(final Object view, final int resId) {
        return (EditText) find(view, resId);
    }

    /** Finds {@code resId} and casts it to {@link Button}. */
    public static Button button(final Object view, final int resId) {
        return (Button) find(view, resId);
    }

    /** Finds {@code resId} and casts it to {@link ImageView}. */
    public static ImageView image(final Object view, final int resId) {
        return (ImageView) find(view, resId);
    }

    /**
     * Finds the TextView for {@code resId} and sets its text. Throws
     * NullPointerException if the id is absent (contrast
     * {@link #textIfFind(Object, int, String)}).
     */
    public static TextView text(final Object view, final int resId, final String text) {
        TextView textView = (TextView) find(view, resId);
        textView.setText(text);
        return textView;
    }

    /** Finds the TextView for {@code resId} and sets its text from a string resource. */
    public static TextView text(final Object view, final int resId, final int msgId) {
        TextView textView = (TextView) find(view, resId);
        textView.setText(msgId);
        return textView;
    }

    /**
     * Sets {@code htmlText} on the TextView for {@code resId}, rendering it as
     * HTML only when it appears to contain markup ('&lt;'); null clears the text.
     */
    public static TextView htmlText(final Object view, final int resId, final String htmlText) {
        TextView textView = (TextView) find(view, resId);
        if (htmlText == null) {
            textView.setText("");
        } else if (htmlText.contains("<")) {
            textView.setText(Html.fromHtml(htmlText));
        } else {
            textView.setText(htmlText);
        }
        return textView;
    }

    /**
     * Null-safe variant of {@link #text(Object, int, String)}: silently does
     * nothing when the id cannot be resolved.
     *
     * @return the TextView, or null when not found
     */
    public static TextView textIfFind(Object view, final int resId, final String text) {
        TextView textView = (TextView) find(view, resId);
        if (textView != null) {
            textView.setText(text);
        }
        return textView;
    }

    /** Sets visibility {@link View#GONE} on every resolvable id in {@code resIds}. */
    public static void gone(final Object view, final int... resIds) {
        for (int resId : resIds) {
            View viewById = find(view, resId);
            if (viewById != null) {
                viewById.setVisibility(View.GONE);
            }
        }
    }

    /**
     * Clears the selected state of each id in {@code resIds}. Throws
     * NullPointerException if any id is absent.
     */
    public static void unselect(final Object view, final int... resIds) {
        for (int resId : resIds) {
            View viewById = find(view, resId);
            viewById.setSelected(false);
        }
    }

    /** Clears the selected state of every view in the list. */
    public static void unselect(final List<View> viewIds) {
        for (View view : viewIds) {
            view.setSelected(false);
        }
    }

    /** Clears the selected state of every given view. */
    public static void unselect(final View... viewIds) {
        for (View view : viewIds) {
            view.setSelected(false);
        }
    }

    /**
     * Clears the checked state of the given views.
     *
     * <p>Fix: this method previously duplicated {@link #unselect(View...)} —
     * it only called {@code setSelected(false)} and never touched the checked
     * state its name promises (contrast {@link #uncheckedChilds}). It now also
     * unchecks any {@link CheckBox} it is handed; the old selection-clearing
     * side effect is kept for backward compatibility.
     */
    public static void unchecked(final View... viewIds) {
        for (View view : viewIds) {
            view.setSelected(false);
            if (view instanceof CheckBox) {
                ((CheckBox) view).setChecked(false);
            }
        }
    }

    /** Clears the selected state of every direct child of {@code layout}. */
    public static void unselectChilds(final LinearLayout layout) {
        for (int i = 0; i < layout.getChildCount(); i++) {
            layout.getChildAt(i).setSelected(false);
        }
    }

    /** Unchecks every direct child of {@code layout} that is a {@link CheckBox}. */
    public static void uncheckedChilds(final LinearLayout layout) {
        for (int i = 0; i < layout.getChildCount(); i++) {
            View childAt = layout.getChildAt(i);
            if (childAt instanceof CheckBox) {
                ((CheckBox) childAt).setChecked(false);
            }
        }
    }

    /**
     * Reflectively assigns {@code value} to the public field named
     * {@code field} on every direct child of {@code layout}.
     *
     * @throws RuntimeException wrapping any reflection failure (missing field,
     *                          type mismatch, access violation)
     */
    public static void forAllChilds(final LinearLayout layout, String field, Object value) {
        for (int i = 0; i < layout.getChildCount(); i++) {
            View childAt = layout.getChildAt(i);
            try {
                Field field2 = childAt.getClass().getField(field);
                field2.setAccessible(true);
                field2.set(childAt, value);
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }
    }

    /**
     * Recursively collects all visible descendants of {@code layout} whose
     * runtime class is exactly {@code type} (subclasses are intentionally NOT
     * matched — the comparison is {@code getClass().equals(type)}). Visible
     * ViewGroups are descended into rather than collected themselves.
     */
    public static <T> List<T> findChilds(final ViewGroup layout, Class<T> type) {
        List<T> childs = new ArrayList<T>();
        for (int i = 0; i < layout.getChildCount(); i++) {
            View childAt = layout.getChildAt(i);
            if (childAt.getVisibility() == View.VISIBLE && childAt instanceof ViewGroup) {
                childs.addAll(findChilds((ViewGroup) childAt, type));
            } else if (childAt.getVisibility() == View.VISIBLE && childAt.getClass().equals(type)) {
                // Class.cast replaces the previous unchecked (T) cast; same
                // behavior for exact-class matches, no compiler warning.
                childs.add(type.cast(childAt));
            }
        }
        return childs;
    }

    /**
     * Resizes {@code listView} so every adapter row is visible at once
     * (useful when the list is embedded in a ScrollView). No-op when the list
     * has no adapter.
     *
     * <p>NOTE(review): each row is measured with {@code measure(0, 0)}, i.e.
     * fully UNSPECIFIED specs, so heights of rows that depend on the list's
     * width (multi-line text) may be underestimated — known limitation of this
     * common idiom.
     */
    public static void setListViewHeightBasedOnChildren(final ListView listView) {
        ListAdapter listAdapter = listView.getAdapter();
        if (listAdapter == null) {
            // pre-condition: nothing to measure without an adapter
            return;
        }
        int totalHeight = 0;
        for (int i = 0; i < listAdapter.getCount(); i++) {
            View listItem = listAdapter.getView(i, null, listView);
            listItem.measure(0, 0);
            totalHeight += listItem.getMeasuredHeight();
        }
        ViewGroup.LayoutParams params = listView.getLayoutParams();
        // Row heights plus one divider between each adjacent pair of rows.
        params.height = totalHeight + (listView.getDividerHeight() * (listAdapter.getCount() - 1));
        listView.setLayoutParams(params);
    }
}
| |
/**
*
*/
package com.skht777.chatwork.impl;
import java.net.MalformedURLException;
import java.net.URL;
import java.sql.Timestamp;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Consumer;
import java.util.function.Function;
import org.json.JSONArray;
import org.json.JSONObject;
import com.skht777.chatwork.NumberedRoomClient;
import com.skht777.chatwork.api.Contact;
import com.skht777.chatwork.api.File;
import com.skht777.chatwork.api.Files;
import com.skht777.chatwork.api.Me;
import com.skht777.chatwork.api.Member;
import com.skht777.chatwork.api.Message;
import com.skht777.chatwork.api.MessageId;
import com.skht777.chatwork.api.MyStatus;
import com.skht777.chatwork.api.MyTask;
import com.skht777.chatwork.api.NumberedRoom;
import com.skht777.chatwork.api.NumberedRooms;
import com.skht777.chatwork.api.RoomAccount;
import com.skht777.chatwork.api.Task;
import com.skht777.chatwork.api.UserAccount;
import com.skht777.chatwork.parameter.Role;
import com.skht777.chatwork.parameter.Status;
import com.skht777.chatwork.parameter.Type;
/**
 * Mutable, JSON-backed implementation shared by every Chatwork API response
 * type. An instance wraps the raw {@link JSONObject} and is populated by
 * chaining the private {@code setXxx()} initializers that correspond to the
 * endpoint which produced the JSON; it is then handed to callers through the
 * matching read-only interface. Only fields touched by that chain are
 * meaningful.
 *
 * @author skht777
 */
class ResponseImpl implements Message, Task, MyTask, File, NumberedRoom, MyStatus, Member, Me {
    /**
     * JSON field names used by the Chatwork API. {@link #toString()} returns
     * the lower-case form, which is the literal key in the response body.
     */
    private enum ResponseList {
        ACCOUNT,
        ACCOUNT_ID,
        ADMIN,
        ASSIGNED_BY_ACCOUNT,
        AVATAR_IMAGE_URL,
        BODY,
        CHATWORK_ID,
        DEPARTMENT,
        DESCRIPTION,
        DOWNLOAD_URL,
        FACEBOOK,
        FILE_ID,
        FILE_NUM,
        FILENAME,
        FILESIZE,
        ICON_PATH,
        INTRODUCTION,
        LAST_UPDATE_TIME,
        LIMIT_TIME,
        MAIL,
        MEMBER,
        MENTION_NUM,
        MENTION_ROOM_NUM,
        MESSAGE_ID,
        MESSAGE_NUM,
        MYTASK_NUM,
        MYTASK_ROOM_NUM,
        NAME,
        ORGANIZATION_ID,
        ORGANIZATION_NAME,
        READONLY,
        ROLE,
        ROOM,
        ROOM_ID,
        SEND_TIME,
        SKYPE,
        STATUS,
        STICKY,
        TASK_ID,
        TASK_IDS,
        TASK_NUM,
        TEL_EXTENSION,
        TEL_MOBILE,
        TEL_ORGANIZATION,
        TITLE,
        TWITTER,
        TYPE,
        UNREAD_NUM,
        UNREAD_ROOM_NUM,
        UPDATE_TIME,
        URL;
        @Override
        public String toString() {
            // Locale.ROOT keeps key generation independent of the default
            // locale (e.g. the Turkish locale maps 'I' to a dotless 'ı').
            return super.toString().toLowerCase(Locale.ROOT);
        }
    }
    // The fields below are populated lazily by the setXxx() chains; only the
    // subset relevant to the interface a caller received is meaningful.
    private int messageId;
    private UserAccount account;
    private String body;
    private int taskId;
    private LocalDate limitDate;
    private Status status;
    private UserAccount assignedByAccount;
    private RoomAccount room;
    private int fileId;
    private String fileName;
    private int fileSize;
    private LocalDateTime uploadTime;
    private Type type;
    private boolean sticky;
    private int messageNum;
    private int fileNum;
    private int taskNum;
    private LocalDateTime lastUpdateTime;
    private URL iconPath;
    private int roomId;
    private String name;
    private int unreadNum;
    private int mentionNum;
    private int mytaskNum;
    private Role role;
    private int chatworkId;
    private int organizationId;
    private String organizationName;
    private String department;
    private int accountId;
    private URL avatarImageUrl;
    private String title;
    private URL url;
    private String introduction;
    private String mail;
    private String telOrganization;
    private String telExtension;
    private String telMobile;
    private String skype;
    private String facebook;
    private String twitter;
    private int unreadRoomNum;
    private int mentionRoomNum;
    private int mytaskRoomNum;
    private String description;
    private URL downloadUrl;
    private LocalDateTime sendTime;
    private LocalDateTime updateTime;
    /** Raw JSON object that all typed accessors read from. */
    private JSONObject element;
    /** Parses a raw response body into its JSON representation. */
    private ResponseImpl(String response) {
        element = new JSONObject(response);
    }
    /** Wraps an already-parsed JSON element (e.g. one item of an array). */
    private ResponseImpl(JSONObject element) {
        this.element = element;
    }
    // ---- typed accessors over the backing JSON ----
    private int getInt(ResponseList response) {
        return element.getInt(response.toString());
    }
    private String getString(ResponseList response) {
        return element.getString(response.toString());
    }
    private URL getURL(ResponseList response) {
        try {
            return new URL(getString(response));
        } catch (MalformedURLException e) {
            // Best effort: a malformed URL is logged and surfaced as null.
            e.printStackTrace();
            return null;
        }
    }
    private LocalDateTime getDateTime(ResponseList response) {
        // The API sends Unix time in *seconds*, while Timestamp expects
        // milliseconds. The raw value was previously passed through unscaled
        // (and truncated to int), which produced dates in January 1970.
        return new Timestamp(element.getLong(response.toString()) * 1000L).toLocalDateTime();
    }
    private boolean getBoolean(ResponseList response) {
        return element.getBoolean(response.toString());
    }
    private JSONObject getJSONObject(ResponseList response) {
        return element.getJSONObject(response.toString());
    }
    private <T extends Enum<T>> T getEnum(ResponseList response, Class<T> e) {
        // Locale.ROOT for the same reason as ResponseList#toString().
        return Enum.valueOf(e, getString(response).toUpperCase(Locale.ROOT));
    }
    // ---- construction helpers ----
    /** Parses {@code response} and runs the given initializer chain on it. */
    private static ResponseImpl createResponse(String response, Consumer<ResponseImpl> initializer) {
        return createResponse(new JSONObject(response), initializer);
    }
    private static ResponseImpl createResponse(JSONObject response, Consumer<ResponseImpl> initializer) {
        ResponseImpl res = new ResponseImpl(response);
        initializer.accept(res);
        return res;
    }
    /** Maps every element of a JSON array string through {@code func}. */
    private static <T> List<T> parseJSONArray(String response, Function<JSONObject, T> func) {
        JSONArray array = new JSONArray(response);
        List<T> list = new ArrayList<>();
        array.forEach((o) -> list.add(func.apply((JSONObject) o)));
        return list;
    }
    /** Collects an array of integer ids into a list. */
    private static List<Integer> parseJSONArray(JSONArray response) {
        List<Integer> list = new ArrayList<>();
        response.forEach(o -> list.add((Integer) o));
        return list;
    }
    // ---- partial views reused by several endpoints ----
    private static ResponseImpl getUserAccount(JSONObject response) {
        return createResponse(response, (res) -> res
                .setAccountId()
                .setName()
                .setAvatarImageUrl());
    }
    private static ResponseImpl getRoomAccount(JSONObject response) {
        return createResponse(response, (res) -> res
                .setRoomId()
                .setName()
                .setIconPath());
    }
    private static ResponseImpl getTaskBase(JSONObject response) {
        return createResponse(response, (res) -> res
                .setTaskId()
                .setAssignedByAccount()
                .setMessageId()
                .setBody()
                .setLimitDate()
                .setStatus());
    }
    private static ResponseImpl getUser(JSONObject response) {
        return getUserAccount(response)
                .setChatworkId()
                .setOrganizationId()
                .setOrganizationName()
                .setDepartment();
    }
    // ---- endpoint-level factories ----
    static Me getMyAccount(String response) {
        return ((ResponseImpl) getContact(new JSONObject(response)))
                .setTitle()
                .setUrl()
                .setIntroduction()
                .setMail()
                .setTelOrganization()
                .setTelExtension()
                .setTelMobile()
                .setSkype()
                .setFacebook()
                .setTwitter();
    }
    private static MyTask getMyTask(JSONObject response) {
        return getTaskBase(response).setRoom();
    }
    static List<MyTask> getMyTasks(String response) {
        return parseJSONArray(response, o -> getMyTask(o));
    }
    static MyStatus getMyStatus(String response) {
        return createResponse(response, (res) -> res
                .setUnreadNum()
                .setMentionNum()
                .setMytaskNum()
                .setUnreadRoomNum()
                .setMentionRoomNum()
                .setMytaskRoomNum());
    }
    private static Contact getContact(JSONObject response) {
        return getUser(response).setRoomId();
    }
    static List<Contact> getContacts(String response) {
        return parseJSONArray(response, o -> getContact(o));
    }
    private static NumberedRooms getRoom(JSONObject response) {
        return getRoomAccount(response)
                .setRole()
                .setUnreadNum()
                .setMentionNum()
                .setMytaskNum()
                .setType()
                .setSticky()
                .setMessageNum()
                .setFileNum()
                .setTaskNum()
                .setLastUpdateTime();
    }
    static List<NumberedRooms> getRooms(String response) {
        return parseJSONArray(response, o -> getRoom(o));
    }
    static NumberedRoomClient createRoom(String response, APIToken token) {
        return new NumberedRoomClientImpl(token, new ResponseImpl(response).setRoomId().getRoomId());
    }
    static NumberedRoom getNumberdRoom(String response) {
        return ((ResponseImpl) getRoom(new JSONObject(response))).setDescription();
    }
    /** Parses a member-update response: role name to the ids holding it. */
    static Map<Role, List<Integer>> editMembers(String response) {
        JSONObject obj = new JSONObject(response);
        Map<Role, List<Integer>> map = new HashMap<>();
        map.put(Role.ADMIN, parseJSONArray(obj.getJSONArray(ResponseList.ADMIN.toString())));
        map.put(Role.MEMBER, parseJSONArray(obj.getJSONArray(ResponseList.MEMBER.toString())));
        map.put(Role.READONLY, parseJSONArray(obj.getJSONArray(ResponseList.READONLY.toString())));
        return map;
    }
    static Member getMember(JSONObject response) {
        return getUser(response).setRole();
    }
    static List<Member> getMembers(String response) {
        return parseJSONArray(response, o -> getMember(o));
    }
    private static Message getMessage(JSONObject response) {
        return createResponse(response, (res) -> res
                .setMessageId()
                .setAccount()
                .setBody()
                .setSendTime()
                .setUpdateTime());
    }
    static MessageId createMessage(String response) {
        return createResponse(response, (res) -> res.setMessageId());
    }
    static List<Message> getMessages(String response) {
        // Was getMessage(response): the lambda ignored the array element and
        // re-parsed the whole JSON *array* string as a JSONObject for every
        // item, which throws at runtime. Build each message from its element.
        return parseJSONArray(response, o -> getMessage(o));
    }
    static Message getMessage(String response) {
        return getMessage(new JSONObject(response));
    }
    static List<Task> getTasks(String response) {
        // Was getTask(response): same array-element bug as getMessages();
        // build each task from its own JSON element.
        return parseJSONArray(response, o -> getTaskBase(o).setAccount());
    }
    static List<Integer> createTask(String response) {
        return parseJSONArray(new JSONObject(response).getJSONArray(ResponseList.TASK_IDS.toString()));
    }
    static Task getTask(String response) {
        return getTaskBase(new JSONObject(response)).setAccount();
    }
    static File getFile(String response) {
        return ((ResponseImpl) getFile(new JSONObject(response))).setDownloadUrl();
    }
    private static Files getFile(JSONObject response) {
        return createResponse(response, (res) -> res
                .setFileId()
                .setAccount()
                .setMessageId()
                .setFileName()
                .setFileSize()
                .setUploadTime());
    }
    static List<Files> getFiles(String response) {
        // Was getFile(response) (the String overload): same array-element bug
        // as getMessages(); use the per-element JSONObject overload.
        return parseJSONArray(response, o -> getFile(o));
    }
    // ---- fluent field initializers (each reads one key from `element`) ----
    private ResponseImpl setMessageId() { this.messageId = getInt(ResponseList.MESSAGE_ID); return this; }
    private ResponseImpl setAccount() { this.account = ResponseImpl.getUserAccount(getJSONObject(ResponseList.ACCOUNT)); return this; }
    private ResponseImpl setBody() { this.body = getString(ResponseList.BODY); return this; }
    private ResponseImpl setTaskId() { this.taskId = getInt(ResponseList.TASK_ID); return this; }
    private ResponseImpl setLimitDate() { this.limitDate = getDateTime(ResponseList.LIMIT_TIME).toLocalDate(); return this; }
    private ResponseImpl setStatus() { this.status = getEnum(ResponseList.STATUS, Status.class); return this; }
    private ResponseImpl setAssignedByAccount() { this.assignedByAccount = ResponseImpl.getUserAccount(getJSONObject(ResponseList.ASSIGNED_BY_ACCOUNT)); return this; }
    private ResponseImpl setRoom() { this.room = ResponseImpl.getRoomAccount(getJSONObject(ResponseList.ROOM)); return this; }
    private ResponseImpl setFileId() { this.fileId = getInt(ResponseList.FILE_ID); return this; }
    private ResponseImpl setFileName() { this.fileName = getString(ResponseList.FILENAME); return this; }
    private ResponseImpl setFileSize() { this.fileSize = getInt(ResponseList.FILESIZE); return this; }
    private ResponseImpl setUploadTime() { this.uploadTime = getDateTime(ResponseList.UPLOAD_TIME); return this; }
    private ResponseImpl setType() { this.type = getEnum(ResponseList.TYPE, Type.class); return this; }
    private ResponseImpl setSticky() { this.sticky = getBoolean(ResponseList.STICKY); return this; }
    private ResponseImpl setMessageNum() { this.messageNum = getInt(ResponseList.MESSAGE_NUM); return this; }
    private ResponseImpl setFileNum() { this.fileNum = getInt(ResponseList.FILE_NUM); return this; }
    private ResponseImpl setTaskNum() { this.taskNum = getInt(ResponseList.TASK_NUM); return this; }
    private ResponseImpl setLastUpdateTime() { this.lastUpdateTime = getDateTime(ResponseList.LAST_UPDATE_TIME); return this; }
    private ResponseImpl setIconPath() { this.iconPath = getURL(ResponseList.ICON_PATH); return this; }
    private ResponseImpl setRoomId() { this.roomId = getInt(ResponseList.ROOM_ID); return this; }
    private ResponseImpl setName() { this.name = getString(ResponseList.NAME); return this; }
    private ResponseImpl setUnreadNum() { this.unreadNum = getInt(ResponseList.UNREAD_NUM); return this; }
    private ResponseImpl setMentionNum() { this.mentionNum = getInt(ResponseList.MENTION_NUM); return this; }
    private ResponseImpl setMytaskNum() { this.mytaskNum = getInt(ResponseList.MYTASK_NUM); return this; }
    private ResponseImpl setRole() { this.role = getEnum(ResponseList.ROLE, Role.class); return this; }
    private ResponseImpl setChatworkId() { this.chatworkId = getInt(ResponseList.CHATWORK_ID); return this; }
    private ResponseImpl setOrganizationId() { this.organizationId = getInt(ResponseList.ORGANIZATION_ID); return this; }
    private ResponseImpl setOrganizationName() { this.organizationName = getString(ResponseList.ORGANIZATION_NAME); return this; }
    private ResponseImpl setDepartment() { this.department = getString(ResponseList.DEPARTMENT); return this; }
    private ResponseImpl setAccountId() { this.accountId = getInt(ResponseList.ACCOUNT_ID); return this; }
    private ResponseImpl setAvatarImageUrl() { this.avatarImageUrl = getURL(ResponseList.AVATAR_IMAGE_URL); return this; }
    private ResponseImpl setTitle() { this.title = getString(ResponseList.TITLE); return this; }
    private ResponseImpl setUrl() { this.url = getURL(ResponseList.URL); return this; }
    private ResponseImpl setIntroduction() { this.introduction = getString(ResponseList.INTRODUCTION); return this; }
    private ResponseImpl setMail() { this.mail = getString(ResponseList.MAIL); return this; }
    private ResponseImpl setTelOrganization() { this.telOrganization = getString(ResponseList.TEL_ORGANIZATION); return this; }
    private ResponseImpl setTelExtension() { this.telExtension = getString(ResponseList.TEL_EXTENSION); return this; }
    private ResponseImpl setTelMobile() { this.telMobile = getString(ResponseList.TEL_MOBILE); return this; }
    private ResponseImpl setSkype() { this.skype = getString(ResponseList.SKYPE); return this; }
    private ResponseImpl setFacebook() { this.facebook = getString(ResponseList.FACEBOOK); return this; }
    private ResponseImpl setTwitter() { this.twitter = getString(ResponseList.TWITTER); return this; }
    private ResponseImpl setUnreadRoomNum() { this.unreadRoomNum = getInt(ResponseList.UNREAD_ROOM_NUM); return this; }
    private ResponseImpl setMentionRoomNum() { this.mentionRoomNum = getInt(ResponseList.MENTION_ROOM_NUM); return this; }
    private ResponseImpl setMytaskRoomNum() { this.mytaskRoomNum = getInt(ResponseList.MYTASK_ROOM_NUM); return this; }
    private ResponseImpl setDescription() { this.description = getString(ResponseList.DESCRIPTION); return this; }
    private ResponseImpl setDownloadUrl() { this.downloadUrl = getURL(ResponseList.DOWNLOAD_URL); return this; }
    private ResponseImpl setSendTime() { this.sendTime = getDateTime(ResponseList.SEND_TIME); return this; }
    private ResponseImpl setUpdateTime() { this.updateTime = getDateTime(ResponseList.UPDATE_TIME); return this; }
    // ---- interface accessors (plain reads of the fields above) ----
    @Override public int getMessageId() { return messageId; }
    @Override public UserAccount getAccount() { return account; }
    @Override public String getBody() { return body; }
    @Override public int getTaskId() { return taskId; }
    @Override public LocalDate getLimitDate() { return limitDate; }
    @Override public Status getStatus() { return status; }
    @Override public UserAccount getAssignedByAccount() { return assignedByAccount; }
    @Override public RoomAccount getRoom() { return room; }
    @Override public int getFileId() { return fileId; }
    @Override public String getFileName() { return fileName; }
    @Override public int getFileSize() { return fileSize; }
    @Override public LocalDateTime getUploadTime() { return uploadTime; }
    @Override public Type getType() { return type; }
    @Override public boolean isSticky() { return sticky; }
    @Override public int getMessageNum() { return messageNum; }
    @Override public int getFileNum() { return fileNum; }
    @Override public int getTaskNum() { return taskNum; }
    @Override public LocalDateTime getLastUpdateTime() { return lastUpdateTime; }
    @Override public URL getIconPath() { return iconPath; }
    @Override public int getRoomId() { return roomId; }
    @Override public String getName() { return name; }
    @Override public int getUnreadNum() { return unreadNum; }
    @Override public int getMentionNum() { return mentionNum; }
    @Override public int getMytaskNum() { return mytaskNum; }
    @Override public Role getRole() { return role; }
    @Override public int getChatworkId() { return chatworkId; }
    @Override public int getOrganizationId() { return organizationId; }
    @Override public String getOrganizationName() { return organizationName; }
    @Override public String getDepartment() { return department; }
    @Override public int getAccountId() { return accountId; }
    @Override public URL getAvatarImageUrl() { return avatarImageUrl; }
    @Override public String getTitle() { return title; }
    @Override public URL getUrl() { return url; }
    @Override public String getIntroduction() { return introduction; }
    @Override public String getMail() { return mail; }
    @Override public String getTelOrganization() { return telOrganization; }
    @Override public String getTelExtension() { return telExtension; }
    @Override public String getTelMobile() { return telMobile; }
    @Override public String getSkype() { return skype; }
    @Override public String getFacebook() { return facebook; }
    @Override public String getTwitter() { return twitter; }
    @Override public int getUnreadRoomNum() { return unreadRoomNum; }
    @Override public int getMentionRoomNum() { return mentionRoomNum; }
    @Override public int getMytaskRoomNum() { return mytaskRoomNum; }
    @Override public String getDescription() { return description; }
    @Override public URL getDownloadUrl() { return downloadUrl; }
    @Override public LocalDateTime getSendTime() { return sendTime; }
    @Override public LocalDateTime getUpdateTime() { return updateTime; }
}
| |
/*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package com.pivotal.gemfirexd.internal.engine.management;
import java.lang.management.ManagementFactory;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import javax.management.MBeanServer;
import javax.management.MBeanServerConnection;
import javax.management.ObjectName;
import javax.management.openmbean.CompositeData;
import com.gemstone.gemfire.Statistics;
import com.gemstone.gemfire.distributed.internal.DistributionConfig;
import com.gemstone.gemfire.distributed.internal.membership.InternalDistributedMember;
import com.gemstone.gemfire.internal.cache.CachePerfStats;
import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
import com.gemstone.gemfire.management.EvictionAttributesData;
import com.gemstone.gemfire.management.internal.MBeanJMXAdapter;
import com.gemstone.gemfire.management.internal.ManagementConstants;
import com.gemstone.gemfire.management.internal.beans.RegionMBeanCompositeDataFactory;
import com.pivotal.gemfirexd.TestUtil;
import com.pivotal.gemfirexd.internal.engine.GemFireXDQueryObserverHolder;
import com.pivotal.gemfirexd.internal.engine.GfxdConstants;
import com.pivotal.gemfirexd.internal.engine.Misc;
import com.pivotal.gemfirexd.internal.engine.diag.MemoryAnalyticsVTI;
import com.pivotal.gemfirexd.internal.engine.distributed.GfxdConnectionWrapper;
import com.pivotal.gemfirexd.internal.engine.management.TableMXBean.TableMetadata;
import com.pivotal.gemfirexd.internal.engine.management.impl.GfxdManagementTestBase;
import com.pivotal.gemfirexd.internal.engine.management.impl.InternalManagementService;
import com.pivotal.gemfirexd.internal.engine.management.impl.ManagementUtils;
import com.pivotal.gemfirexd.internal.engine.management.impl.TableMBean;
import com.pivotal.gemfirexd.internal.impl.sql.catalog.SYSTABLESRowFactory;
import io.snappydata.test.dunit.AvailablePortHelper;
import io.snappydata.test.dunit.Host;
import io.snappydata.test.dunit.VM;
public class GfxdTableMBeanDUnit extends GfxdManagementTestBase {
private static final long serialVersionUID = 1L;
// Server groups the test members join; tables are placed on grp1/grp2,
// the client joins grp3.
static final String grp1 = "GRP1";
static final String grp2 = "GRP2";
static final String grp3 = "GRP3";
// DML volumes driven against each table before the MBean stats are checked.
static final int numberOfInserts = 10;
static final int numberOfDeletes = 2;
static final int numberOfUpdates = 2 ;
// Schema / table names and the policies the MBeans are expected to report.
static final String SCHEMA = "TRADE";
static final String TABLE = "CUSTOMERS";
static final String PARTITION_TABLE = "PARTITION_CUSTOMERS";
static final String POLICY = "REPLICATE";
static final String PARTITION_POLICY = "PERSISTENT_PARTITION";
static final String PARTITION_SCHEME = "PARTITION BY PRIMARY KEY";
static final String PERSISTENCE_SCHEME = "SYNCHRONOUS";
// Column names shared by all three test tables.
static final String column1 = "CID";
static final String column2 = "CUST_NAME";
static final String column3 = "SINCE";
static final String column4 = "ADDR";
static final String column5 = "TID";
// Table used to exercise ALTER TABLE / DROP TABLE MBean updates.
static final String ALTER_TABLE = "ALTER_CUSTOMERS";
final static int redundancyValue = 2;
static final String redundancyClause = "REDUNDANCY "+ redundancyValue;
// Populated in the constructor; holds the groups a table MBean may report.
private static List<String> grps = new ArrayList<String>();
// DDL for the replicated, persistent CUSTOMERS table on grp1/grp2.
static final String statemetTextForReplicatedTable = "create table "
+ GfxdTableMBeanDUnit.SCHEMA + "." + GfxdTableMBeanDUnit.TABLE
+ " ("+ column1 +" int not null, "
+ column2 + " varchar(100), "+ column3 +" date, "+ column4 +" varchar(100), "+ column5 + " int ) "
+ GfxdTableMBeanDUnit.POLICY + " SERVER GROUPS ("
+ GfxdTableMBeanDUnit.grp1 + ", " + GfxdTableMBeanDUnit.grp2
+ ") PERSISTENT " + GfxdTableMBeanDUnit.PERSISTENCE_SCHEME ;
// DDL for the partitioned, redundant, persistent PARTITION_CUSTOMERS table.
static final String statemetTextForPartitionTable = "create table "
+ GfxdTableMBeanDUnit.SCHEMA + "." + GfxdTableMBeanDUnit.PARTITION_TABLE
+ " ("+ column1 +" int not null, " + column2 + " varchar(100), "+ column3 +" date, "+ column4 +" varchar(100), "+ column5 + " int , PRIMARY KEY ("+ column1+ ") ) " +
GfxdTableMBeanDUnit.PARTITION_SCHEME +
" " + redundancyClause +
" SERVER GROUPS (" + GfxdTableMBeanDUnit.grp1 + ", " + GfxdTableMBeanDUnit.grp2+ ")" +
" PERSISTENT " + GfxdTableMBeanDUnit.PERSISTENCE_SCHEME ;
// DDL for the table later altered (column4 dropped) and finally dropped.
static final String statementTextForAlterTable = "create table "
+ GfxdTableMBeanDUnit.SCHEMA + "." + GfxdTableMBeanDUnit.ALTER_TABLE
+ " ("+ column1 +" int not null, " + column2 + " varchar(100), "+ column3 +" date, "+ column4 +" varchar(100), "+ column5 + " int ) " ;
// Platform MBean server of this JVM; used by helpers run on server VMs.
public static MBeanServer mbeanServer = ManagementFactory
.getPlatformMBeanServer();
/**
 * Creates the DUnit test instance.
 *
 * @param name name of the test method to run (JUnit 3 convention)
 */
public GfxdTableMBeanDUnit(String name) {
    super(name);
    // `grps` is static but was appended to on *every* construction; JUnit
    // builds one instance per test method, so the list accumulated
    // duplicates. Populate it only once.
    if (grps.isEmpty()) {
        grps.add(GfxdMemberMBeanDUnit.grp1);
        grps.add(GfxdMemberMBeanDUnit.grp2);
        grps.add("DEFAULT");
    }
}
/**
 * End-to-end test: starts two data servers (the second doubles as the JMX
 * manager) plus one accessor client, creates/alters/drops the test tables
 * through the client, and verifies the table MBeans on the manager VM via
 * {@link #createTableForVerification}.
 *
 * @throws Exception on any setup, SQL, or verification failure
 */
public void testAggregateMemberstats() throws Exception {
try {
Properties serverInfo = new Properties();
// Fast statistics sampling so MBean values refresh quickly in the test.
serverInfo.setProperty("gemfire.enable-time-statistics", "true");
serverInfo.setProperty("gemfirexd.tableAnalyticsUpdateIntervalSeconds", "1");
serverInfo.setProperty("statistic-sample-rate", "100");
serverInfo.setProperty("statistic-sampling-enabled", "true");
startServerVMs(1, 0, GfxdMemberMBeanDUnit.grp1, serverInfo);
// The second server is started as an embedded JMX manager; no external
// agent is needed for this test (port 0 = pick any free port).
serverInfo.setProperty("jmx-manager", "true");
serverInfo.setProperty("jmx-manager-start", "true");
serverInfo.setProperty("jmx-manager-port", "0");
startServerVMs(1, 0, GfxdMemberMBeanDUnit.grp2, serverInfo);
Properties info = new Properties();
// host-data=false: the client is a pure accessor, it stores no table data.
info.setProperty("host-data", "false");
info.setProperty("gemfire.enable-time-statistics", "true");
info.setProperty("gemfirexd.tableAnalyticsUpdateIntervalSeconds", "1");
// start a client, register the driver.
startClientVMs(1, 0, GfxdTableMBeanDUnit.grp3, info);
// enable StatementStats for all connections in this VM
System.setProperty(GfxdConstants.GFXD_ENABLE_STATS, "true");
// check that stats are enabled with System property set
Connection conn = TestUtil.getConnection(info);
createTableForVerification(conn, true, 1);
conn.close();
stopVMNums(1, -1);
} finally {
// Always clear the query observer so later tests start clean.
GemFireXDQueryObserverHolder.clearInstance();
}
}
/**
 * Creates the three test tables, drives inserts/deletes/updates plus an
 * ALTER and a DROP through {@code conn}, and invokes the static
 * {@code verify*Mbeans} checks on the manager server VM after each stage.
 *
 * <p>NOTE(review): the {@code enableStats} and {@code numTimesSampled}
 * parameters are currently unused by this method.
 *
 * @param conn            client connection used for all DDL/DML
 * @param enableStats     unused
 * @param numTimesSampled unused
 * @throws Exception on any SQL or remote-verification failure
 */
private void createTableForVerification(Connection conn,
final boolean enableStats, final int numTimesSampled) throws Exception {
System.out.println("serverVMs= " + this.serverVMs.size());
// serverVMs.get(1) is the second server started by the caller, which runs
// the JMX manager — all verifications execute there.
final VM serverVM = this.serverVMs.get(1);
final Statement stmt = conn.createStatement();
final String createSchemaOrder = "create schema "+ SCHEMA;
stmt.execute(createSchemaOrder);
System.out.println("statemetTextForReplicatedTable= " + GfxdTableMBeanDUnit.statemetTextForReplicatedTable);
System.out.println("statemetTextForPartitionTable= " + GfxdTableMBeanDUnit.statemetTextForPartitionTable);
System.out.println("statementTextForAlterTable= " + GfxdTableMBeanDUnit.statementTextForAlterTable);
logInfo("Executing statemetTextForReplicatedTable");
stmt.execute(GfxdTableMBeanDUnit.statemetTextForReplicatedTable);
logInfo("Executing statemetTextForPartitionTable");
stmt.execute(GfxdTableMBeanDUnit.statemetTextForPartitionTable);
logInfo("Executing statementTextForAlterTable");
stmt.execute(GfxdTableMBeanDUnit.statementTextForAlterTable);
// Populate the replicated table.
logInfo("Executing psInsertCust");
PreparedStatement psInsertCust = conn.prepareStatement("insert into "
+ SCHEMA+ "."+ TABLE +" values (?,?,?,?,?)");
for (int i = 0; i < GfxdTableMBeanDUnit.numberOfInserts; i++) {
psInsertCust.setInt(1, i);
psInsertCust.setString(2, "XXXX" + i);
java.sql.Date since = new java.sql.Date(System.currentTimeMillis());
psInsertCust.setDate(3, since);
psInsertCust.setString(4, "XXXX" + i);
psInsertCust.setInt(5, i);
psInsertCust.executeUpdate();
}
// Delete a few rows so the MBean delete counters are non-zero.
logInfo("Executing psDeleteCust");
PreparedStatement psDeleteCust = conn.prepareStatement("delete from "
+ SCHEMA+ "."+ TABLE + " where "+ column1 +" = ?");
for (int i = 0; i < GfxdTableMBeanDUnit.numberOfDeletes; i++) {
psDeleteCust.setInt(1, i);
psDeleteCust.executeUpdate();
}
// Same insert/delete workload against the partitioned table.
logInfo("Executing psInsertPartitionCust");
PreparedStatement psInsertPartitionCust = conn.prepareStatement("insert into "
+ SCHEMA+ "."+ PARTITION_TABLE +" values (?,?,?,?,?)");
for (int i = 0; i < GfxdTableMBeanDUnit.numberOfInserts; i++) {
psInsertPartitionCust.setInt(1, i);
psInsertPartitionCust.setString(2, "XXXX" + i);
java.sql.Date since = new java.sql.Date(System.currentTimeMillis());
psInsertPartitionCust.setDate(3, since);
psInsertPartitionCust.setString(4, "XXXX" + i);
psInsertPartitionCust.setInt(5, i);
psInsertPartitionCust.executeUpdate();
}
logInfo("Executing psDeletePartition");
PreparedStatement psDeletePartition = conn.prepareStatement("delete from "
+ SCHEMA + "." + PARTITION_TABLE + " where "+ column1 +" = ?");
for (int i = 0; i < GfxdTableMBeanDUnit.numberOfDeletes; i++) {
psDeletePartition.setInt(1, i);
psDeletePartition.executeUpdate();
}
// Updates on the replicated table feed the update counter.
logInfo("Executing psUpdateCust");
PreparedStatement psUpdateCust = conn.prepareStatement("update " + GfxdTableMBeanDUnit.SCHEMA + "." + GfxdTableMBeanDUnit.TABLE +
" set " + GfxdTableMBeanDUnit.column4 + " = 'new address' where " + GfxdTableMBeanDUnit.column1 + " > (?)" );
for(int i = 0; i < GfxdTableMBeanDUnit.numberOfUpdates; i++ ){
psUpdateCust.setInt(1, GfxdTableMBeanDUnit.numberOfInserts - i);
psUpdateCust.executeUpdate();
}
//alter table: drop column4 so the MBean definition shrinks to 4 columns
logInfo("Executing psAlterTable");
PreparedStatement psAlterTable = conn.prepareStatement("alter table " + GfxdTableMBeanDUnit.SCHEMA + "." + GfxdTableMBeanDUnit.ALTER_TABLE +
" drop column " + GfxdTableMBeanDUnit.column4 );
psAlterTable.executeUpdate();
//verify replicated table attributes
logInfo("Executing verifyReplicateTableMbeans");
serverVM.invoke(this.getClass(), "verifyReplicateTableMbeans");
//verify partitioned table attributes
logInfo("Executing verifyPartitionTableMbeans");
serverVM.invoke(this.getClass(), "verifyPartitionTableMbeans");
//verify alter Table
logInfo("Executing verifyAlterTableMbeans");
serverVM.invoke(this.getClass(), "verifyAlterTableMbeans");
//drop the altered table; its MBean should be unregistered
PreparedStatement psDropTable = conn.prepareStatement("drop table " + GfxdTableMBeanDUnit.SCHEMA + "." + GfxdTableMBeanDUnit.ALTER_TABLE );
psDropTable.executeUpdate();
//verify drop Table
logInfo("Executing verifyDropTableMbeans");
serverVM.invoke(this.getClass(), "verifyDropTableMbeans");
psInsertCust.close();
psDeleteCust.close();
psInsertPartitionCust.close();
psDeletePartition.close();
psUpdateCust.close();
psAlterTable.close();
psDropTable.close();
stmt.close();
logInfo("done verifyTableMbeans");
}
/**
 * Runs on the manager server VM: waits until the TableMXBean for the dropped
 * TRADE.ALTER_CUSTOMERS table has been unregistered, then asserts it is gone.
 */
public static void verifyDropTableMbeans(){
    final InternalManagementService managementService = InternalManagementService
        .getInstance(Misc.getMemStore());
    GemFireCacheImpl gemCache = Misc.getGemFireCache();
    InternalDistributedMember selfMember = gemCache.getDistributedSystem()
        .getDistributedMember();
    String memberId = MBeanJMXAdapter.getMemberNameOrId(selfMember);
    final ObjectName tableMBeanName = ManagementUtils.getTableMBeanName(
        "DEFAULT", memberId, SCHEMA + "." + GfxdTableMBeanDUnit.ALTER_TABLE );
    logInfo("verifyDropTableMbeans distrObjectName=" + tableMBeanName);
    // Poll until the MBean disappears; the final `false` means a timeout
    // does not throw here — the assertNull below is the actual check.
    waitForCriterion(new WaitCriterion() {
        public String description() {
            return "Waiting for the partition verifyDropTableMbeans to get reflected at managing node";
        }
        public boolean done() {
            return managementService.getMBeanInstance(tableMBeanName, TableMXBean.class) == null;
        }
    }, ManagementConstants.REFRESH_TIME * 5, 500, false);
    logInfo("verifyDropTableMbeans did not get MBean for =" + tableMBeanName);
    // The table was dropped, so no MBean may remain registered for it.
    assertNull(managementService.getMBeanInstance(tableMBeanName, TableMXBean.class));
}
/**
 * Runs on the manager server VM: waits for the TableMXBean of
 * TRADE.ALTER_CUSTOMERS and checks that, after the ALTER TABLE, its
 * definition has 4 columns and no longer contains the dropped ADDR column.
 */
public static void verifyAlterTableMbeans(){
final InternalManagementService service = InternalManagementService
.getInstance(Misc.getMemStore());
GemFireCacheImpl cache = Misc.getGemFireCache();
InternalDistributedMember thisMember = cache.getDistributedSystem()
.getDistributedMember();
String memberNameOrId = MBeanJMXAdapter.getMemberNameOrId(thisMember);
final ObjectName distrObjectName = ManagementUtils.getTableMBeanName(
"DEFAULT", memberNameOrId, SCHEMA + "." + GfxdTableMBeanDUnit.ALTER_TABLE );
logInfo("verifyAlterTableMbeans distrObjectName=" + distrObjectName);
// Poll until the MBean exists; the final `true` makes a timeout fail here.
waitForCriterion(new WaitCriterion() {
public String description() {
return "Waiting for the partition verifyAlterTableMbeans to get reflected at managing node";
}
public boolean done() {
TableMXBean bean = service.getMBeanInstance(distrObjectName,
TableMXBean.class);
boolean done = (bean != null );
return done;
}
}, ManagementConstants.REFRESH_TIME * 5, 500, true);
logInfo("verifyPartitionTableMbeans got MBean for =" + distrObjectName);
TableMXBean bean = service.getMBeanInstance(distrObjectName,
TableMXBean.class);
List<String> definition = bean.getDefinition();
logInfo("verifyAlterTableMbeans From bean Name=" + definition.size() + " definition = " + definition);
// 5 original columns minus the dropped one.
// NOTE(review): arguments are reversed w.r.t. the JUnit convention
// assertEquals(expected, actual); pass/fail is unaffected.
assertEquals(definition.size(), 4 );
//verify that column4 is not present in definition
boolean colmn4Validated = false;
for(String def : definition){
logInfo("verifyAlterTableMbeans def="+ def );
if (def.contains(column4)){
colmn4Validated = true;
}
}
//column4 should not be present
assertFalse(colmn4Validated);
}
/**
 * Verifies the TableMXBean registered for the partitioned test table on this
 * member: waits for the bean to show up (and to have recorded at least one
 * delete), cross-checks its insert/update/delete counters against the
 * region's CachePerfStats, then validates schema, partitioning scheme,
 * policy, server groups, entry/key sizes, table name and the 5-column
 * definition exposed by the bean.
 */
public static void verifyPartitionTableMbeans() {
final InternalManagementService service = InternalManagementService
.getInstance(Misc.getMemStore());
// NOTE(review): serverGroupsToUse is built but never read in this method
Set<String> serverGroupsToUse = new HashSet<String>();
serverGroupsToUse.add(GfxdTableMBeanDUnit.grp1);
serverGroupsToUse.add(GfxdTableMBeanDUnit.grp2);
GemFireCacheImpl cache = Misc.getGemFireCache();
InternalDistributedMember thisMember = cache.getDistributedSystem()
.getDistributedMember();
String memberNameOrId = MBeanJMXAdapter.getMemberNameOrId(thisMember);
// ObjectName of the table MBean for this member in server group grp2
final ObjectName distrObjectName = ManagementUtils.getTableMBeanName(
GfxdMemberMBeanDUnit.grp2, memberNameOrId, SCHEMA + "." + PARTITION_TABLE);
logInfo("verifyPartitionTableMbeans distrObjectName=" + distrObjectName);
// Block until the MBean is registered and has observed deletes; fails the
// test on timeout (last argument true)
waitForCriterion(new WaitCriterion() {
public String description() {
return "Waiting for the partition tablembean to get reflected at managing node";
}
public boolean done() {
TableMXBean bean = service.getMBeanInstance(distrObjectName,
TableMXBean.class);
boolean done = (bean != null && bean.getDeletes() > 0);
return done;
}
}, ManagementConstants.REFRESH_TIME * 5, 500, true);
logInfo("verifyPartitionTableMbeans got MBean for =" + distrObjectName);
TableMXBean bean = service.getMBeanInstance(distrObjectName,
TableMXBean.class);
// Text id of the CachePerfStats instance for this partitioned region
// (PR-specific specifier distinguishes it from replicated-region stats)
String statsTxtId = CachePerfStats.REGIONPERFSTATS_TEXTID_PREFIX
+ CachePerfStats.REGIONPERFSTATS_TEXTID_PR_SPECIFIER
+ ((TableMBean) bean).getRegion().getName();
logInfo("partition statsTxtId= "+statsTxtId);
int inserts = 0;
int updates = 0;
int deletes = 0;
Statistics[] regionPerfStats = cache.getDistributedSystem().findStatisticsByTextId(statsTxtId);
if(regionPerfStats.length > 0){
logInfo("regionPerfStats size=" + regionPerfStats.length);
// this loop should not run for long, so restrict it to 5 iterations
// NOTE(review): each iteration overwrites inserts/updates/deletes, so only
// the last inspected Statistics instance is compared below — confirm intended
for (int i = 0; i < regionPerfStats.length && i < 5 ; i++) {
inserts = regionPerfStats[i].get("creates").intValue();
updates = regionPerfStats[i].get("puts").intValue();
deletes = regionPerfStats[i].get("destroys").intValue();
}
}else{
logInfo("verifyPartitionTableMbeans did not get regionPerfStats. Fail the test" );
fail();
}
// Bean counters must agree with the raw region statistics
logInfo("From partition bean Inserts=" + bean.getInserts() + " and actual =" + inserts);
assertEquals(bean.getInserts() , inserts );
logInfo("From partition bean updates=" + bean.getUpdates() + " and actual =" + updates);
assertEquals(bean.getUpdates(), updates);
logInfo("From partition bean deletes=" + bean.getDeletes() + " and actual =" + deletes);
assertEquals(bean.getDeletes(), deletes);
// Static table metadata exposed by the bean
logInfo("From partition bean parentschema=" + bean.getParentSchema() + " and actual =" + GfxdTableMBeanDUnit.SCHEMA);
assertEquals(bean.getParentSchema(), GfxdTableMBeanDUnit.SCHEMA );
logInfo("From partition bean partitioningScheme=" + bean.getPartitioningScheme() + " and actual =" + GfxdTableMBeanDUnit.PARTITION_SCHEME );
assertEquals(bean.getPartitioningScheme(), GfxdTableMBeanDUnit.PARTITION_SCHEME );
logInfo("From partition bean policy=" + bean.getPolicy() + " and actual =" + GfxdTableMBeanDUnit.PARTITION_POLICY);
assertEquals( bean.getPolicy(), GfxdTableMBeanDUnit.PARTITION_POLICY);
// Both configured server groups must be reported (order not assumed)
assertTrue(grps.contains(bean.getServerGroups()[0]));
assertTrue(grps.contains(bean.getServerGroups()[1]));
// Sizes are environment-dependent; only assert they are non-negative
logInfo("From partition bean entrySize=" + bean.getEntrySize() );
assertTrue(bean.getEntrySize() >= 0 );
logInfo("From partition bean keySize=" + bean.getKeySize() );
assertTrue(bean.getKeySize() >= 0 );
logInfo("From partition bean Name=" + bean.getName() + " and actual =" + GfxdTableMBeanDUnit.PARTITION_TABLE);
assertEquals(bean.getName(), GfxdTableMBeanDUnit.PARTITION_TABLE );
//verify table definition and check the presence of columns as the table has 5 columns
List<String> definition = bean.getDefinition();
logInfo("From partition bean Name=" + definition.size() + " definition = " + definition);
assertEquals(definition.size(), 5 );
//verify each column now
boolean colmn1Validated = false;
boolean colmn2Validated = false;
boolean colmn3Validated = false;
boolean colmn4Validated = false;
boolean colmn5Validated = false;
for(String def : definition){
logInfo("partition def="+ def );
if(def.contains(column1)){
colmn1Validated = true;
}else if (def.contains(column2)){
colmn2Validated = true;
}else if (def.contains(column3)){
colmn3Validated = true;
}else if (def.contains(column4)){
colmn4Validated = true;
}else if (def.contains(column5)){
colmn5Validated = true;
}
}
// every expected column must have appeared in the definition
assertTrue(colmn1Validated);
assertTrue(colmn2Validated);
assertTrue(colmn3Validated);
assertTrue(colmn4Validated);
assertTrue(colmn5Validated);
}
/**
 * Verifies the TableMXBean registered for the replicated test table on this
 * member: waits for the bean's insert/delete counters to match the known
 * workload, checks row counts and static metadata, compares the bean's
 * update count against the region's CachePerfStats, validates the 5-column
 * definition, compares the bean's fetched metadata against a direct query
 * of SYS.SYSTABLES, and finally checks the eviction attributes exposed by
 * the bean against the region's actual attributes.
 */
public static void verifyReplicateTableMbeans() {
// speed up the bean's background analytics refresh for this test
System.setProperty("gemfirexd.tableAnalyticsUpdateIntervalSeconds", "1");
final InternalManagementService service = InternalManagementService
.getInstance(Misc.getMemStore());
// NOTE(review): serverGroupsToUse is built but never read in this method
Set<String> serverGroupsToUse = new HashSet<String>();
serverGroupsToUse.add(GfxdTableMBeanDUnit.grp1);
serverGroupsToUse.add(GfxdTableMBeanDUnit.grp2);
GemFireCacheImpl cache = Misc.getGemFireCache();
InternalDistributedMember thisMember = cache.getDistributedSystem()
.getDistributedMember();
String memberNameOrId = MBeanJMXAdapter.getMemberNameOrId(thisMember);
final ObjectName distrObjectName = ManagementUtils.getTableMBeanName(
GfxdMemberMBeanDUnit.grp2, memberNameOrId,GfxdTableMBeanDUnit.SCHEMA + "." + GfxdTableMBeanDUnit.TABLE );
logInfo("verifyReplicateTableMbeans distrObjectName=" + distrObjectName);
// Block until the bean reflects the exact insert/delete workload performed
// by the test; fails the test on timeout (last argument true)
waitForCriterion(new WaitCriterion() {
public String description() {
return "Waiting for the tablembean to get reflected at managing node";
}
public boolean done() {
TableMXBean bean = service.getMBeanInstance(distrObjectName,
TableMXBean.class);
boolean done = (bean != null && bean.getInserts() == GfxdTableMBeanDUnit.numberOfInserts &&
bean.getDeletes() == GfxdTableMBeanDUnit.numberOfDeletes);
return done;
}
}, ManagementConstants.REFRESH_TIME * 5, 500, true);
logInfo("verifyReplicateTableMbeans got Mbeanfor =" + distrObjectName);
TableMXBean bean = service.getMBeanInstance(distrObjectName,
TableMXBean.class);
logInfo("From replicated bean Inserts=" + bean.getInserts() + " and actual =" + GfxdTableMBeanDUnit.numberOfInserts);
assertEquals(bean.getInserts() , GfxdTableMBeanDUnit.numberOfInserts );
// surviving rows = inserts minus deletes
logInfo("From replicated bean numberOfrows=" + bean.getInserts() + " and actual =" + (GfxdTableMBeanDUnit.numberOfInserts - GfxdTableMBeanDUnit.numberOfDeletes ));
assertEquals(bean.getNumberOfRows(), GfxdTableMBeanDUnit.numberOfInserts - GfxdTableMBeanDUnit.numberOfDeletes );
logInfo("From replicated bean parentschema=" + bean.getParentSchema() + " and actual =" + GfxdTableMBeanDUnit.SCHEMA);
assertEquals(bean.getParentSchema(), GfxdTableMBeanDUnit.SCHEMA );
// replicated tables have no partitioning scheme
logInfo("From replicated bean partitioningScheme=" + bean.getPartitioningScheme() + " and actual =" + "NA");
assertEquals(bean.getPartitioningScheme(), "NA" );
logInfo("From replicated bean policy=" + bean.getPolicy() + " and actual =" + "PERSISTENT_REPLICATE");
assertTrue( bean.getPolicy().contains(GfxdTableMBeanDUnit.POLICY.toUpperCase()) );
assertTrue(grps.contains(bean.getServerGroups()[0]) );
assertTrue(grps.contains(bean.getServerGroups()[1]) );
int inserts = 0;
int updates = 0;
int deletes = 0;
// replicated-region stats text id carries no PR specifier
String statsTxtId = CachePerfStats.REGIONPERFSTATS_TEXTID_PREFIX + ((TableMBean) bean).getRegion().getName();
logInfo("for replicated table statsTxtId= " + statsTxtId);
Statistics[] regionPerfStats = cache.getDistributedSystem().findStatisticsByTextId(statsTxtId);
if(regionPerfStats.length > 0){
logInfo("replicated regionPerfStats size=" + regionPerfStats.length);
// reads only the first Statistics instance (loop body always breaks)
for (int i = 0; i < regionPerfStats.length ;) {
inserts = regionPerfStats[i].get("creates").intValue();
updates = regionPerfStats[i].get("puts").intValue();
deletes = regionPerfStats[i].get("destroys").intValue();
break;
}
logInfo("For replciated table from regionstats inserts=" + inserts);
logInfo("For replciated table from regionstats updates=" + updates);
logInfo("For replciated table from regionstats deletes=" + deletes);
}
// "puts" presumably counts creates as well, so updates = puts - creates;
// TODO confirm against CachePerfStats semantics
logInfo("From replicated bean updates=" + bean.getUpdates() + " and actual =" + Math.abs(updates - inserts));
assertEquals(bean.getUpdates(), Math.abs(updates - inserts));
logInfo("From replicated bean deletes=" + bean.getDeletes() + " and actual =" + GfxdTableMBeanDUnit.numberOfDeletes);
assertEquals(bean.getDeletes(), GfxdTableMBeanDUnit.numberOfDeletes);
// Sizes are environment-dependent; only assert they are non-negative
logInfo("From replicated bean entrySize=" + bean.getEntrySize() );
assertEquals(true, bean.getEntrySize() >= 0 ? true : false);
logInfo("From replicated bean keySize=" + bean.getKeySize() );
assertEquals(true, bean.getKeySize() >= 0 ? true : false);
logInfo("From replicated bean Name=" + bean.getName() + " and actual =" + GfxdTableMBeanDUnit.TABLE);
assertEquals(bean.getName(), GfxdTableMBeanDUnit.TABLE );
//verify table definition and check the presence of colmns as the table has 5 columns
List<String> definition = bean.getDefinition();
logInfo("From replicated bean Name=" + definition.size() + " definition = " + definition);
assertEquals(definition.size(), 5 );
//verify each column now
boolean colmn1Validated = false;
boolean colmn2Validated = false;
boolean colmn3Validated = false;
boolean colmn4Validated = false;
boolean colmn5Validated = false;
for(String def : definition){
logInfo("replicated def="+ def );
if(def.contains(column1)){
colmn1Validated = true;
}else if (def.contains(column2)){
colmn2Validated = true;
}else if (def.contains(column3)){
colmn3Validated = true;
}else if (def.contains(column4)){
colmn4Validated = true;
}else if (def.contains(column5)){
colmn5Validated = true;
}
}
assertTrue(colmn1Validated);
assertTrue(colmn2Validated);
assertTrue(colmn3Validated);
assertTrue(colmn4Validated);
assertTrue(colmn5Validated);
//validate operations
// Compare bean.fetchMetadata() with a TableMetadata rebuilt from a direct
// (node-local) query of SYS.SYSTABLES for this table
GfxdConnectionWrapper connWrapper = InternalManagementService.getAnyInstance().getConnectionWrapperForTEST();
ResultSet resultSet = null;
try {
PreparedStatement metadataStatement = connWrapper.getConnectionOrNull().prepareStatement("<local>SELECT * FROM SYS."
+GfxdConstants.SYS_TABLENAME_STRING+" WHERE TABLENAME=? and TABLESCHEMANAME=?");
metadataStatement.setString(1, bean.getName());
metadataStatement.setString(2, bean.getParentSchema());
resultSet = metadataStatement.executeQuery();
TableMetadata tableMetadata = bean.fetchMetadata();
String tableMetadataStr = tableMetadata.toString() ;
logInfo("replicated tableMetadataStr="+tableMetadataStr);
if(resultSet.next()){
TableMetadata tableMetadataFromResultSet = new TableMetadata(
resultSet.getString(SYSTABLESRowFactory.SYSTABLES_TABLEID),
resultSet.getString(SYSTABLESRowFactory.SYSTABLES_TABLENAME),
resultSet.getString(SYSTABLESRowFactory.SYSTABLES_TABLETYPE),
resultSet.getString(SYSTABLESRowFactory.SYSTABLES_SCHEMAID),
resultSet.getString(SYSTABLESRowFactory.SYSTABLES_SCHEMANAME),
resultSet.getString(SYSTABLESRowFactory.SYSTABLES_LOCKGRANULARITY),
resultSet.getString(SYSTABLESRowFactory.SYSTABLES_SERVERGROUPS),
resultSet.getString(SYSTABLESRowFactory.SYSTABLES_DATAPOLICY),
resultSet.getString(SYSTABLESRowFactory.SYSTABLES_PARTITIONATTRS),
resultSet.getString(SYSTABLESRowFactory.SYSTABLES_RESOLVER),
resultSet.getString(SYSTABLESRowFactory.SYSTABLES_EXPIRATIONATTRS),
resultSet.getString(SYSTABLESRowFactory.SYSTABLES_EVICTIONATTRS),
resultSet.getString(SYSTABLESRowFactory.SYSTABLES_DISKATTRS),
resultSet.getString(SYSTABLESRowFactory.SYSTABLES_LOADER),
resultSet.getString(SYSTABLESRowFactory.SYSTABLES_WRITER),
resultSet.getString(SYSTABLESRowFactory.SYSTABLES_LISTENERS),
resultSet.getString(SYSTABLESRowFactory.SYSTABLES_ASYNCLISTENERS),
resultSet.getString(SYSTABLESRowFactory.SYSTABLES_GATEWAYENABLED),
resultSet.getString(SYSTABLESRowFactory.SYSTABLES_SENDERIDS),
resultSet.getString(SYSTABLESRowFactory.SYSTABLES_OFFHEAPENABLED ),
resultSet.getString(SYSTABLESRowFactory.SYSTABLES_ROW_LEVEL_SECURITY_ENABLED));
logInfo("replicated tableMetadataFromResultSet="+tableMetadataFromResultSet.toString());
assertTrue(tableMetadataStr.toString().equals(tableMetadataFromResultSet.toString()) );
}
resultSet.close();
}catch (SQLException e) {
// NOTE(review): metadata-verification failures are only logged, not failed
logInfo("Exception while verifying table meta data ="+ e);
}finally{
try {
if(resultSet != null){
resultSet.close();
}
} catch (SQLException e) {
// ignored: best-effort close on the cleanup path
}
connWrapper.close();
}
//verify eviction attributes
EvictionAttributesData evictionAttributesDataFromMbean = bean.showEvictionAttributes();
EvictionAttributesData evictionAttributesData = RegionMBeanCompositeDataFactory.getEvictionAttributesData(((TableMBean) bean).getRegion().getAttributes());
assertTrue(evictionAttributesDataFromMbean.getAction().equals(evictionAttributesData.getAction() ));
assertTrue(evictionAttributesDataFromMbean.getAlgorithm().equals(evictionAttributesData.getAlgorithm()));
assertEquals(evictionAttributesDataFromMbean.getMaximum(),evictionAttributesData.getMaximum());
}
/**
 * Scans the MemoryAnalytics VTI for the replicated test table and returns
 * the entry size it reports. Returns 0.0 when no matching row is found;
 * fails the test when the VTI metadata itself cannot be read.
 *
 * @return the reported entry size for SCHEMA.TABLE, or 0.0 if absent
 */
static double getEntrySize(){
MemoryAnalyticsVTI vti = new MemoryAnalyticsVTI(true);
int entryCol = 0;
int tableCol = 0;
try {
// Locate the TABLE_NAME and ENTRY_SIZE columns by name (1-based indices)
ResultSetMetaData md = vti.getMetaData();
final int columnCount = md.getColumnCount();
for (int col = 1; col <= columnCount; col++) {
String colName = md.getColumnName(col);
if (colName.equals(MemoryAnalyticsVTI.TABLE_NAME)) {
tableCol = col;
} else if (colName.equals(MemoryAnalyticsVTI.ENTRY_SIZE)) {
entryCol = col;
}
if (tableCol != 0 && entryCol != 0) {
break; // both columns found — stop scanning metadata
}
}
// Walk the VTI rows until the row for the test table shows up
final String wanted = GfxdTableMBeanDUnit.SCHEMA + "."
+ GfxdTableMBeanDUnit.TABLE;
while (vti.next()) {
try {
String tableName = vti.getString(tableCol);
logInfo("replicated mbean tableName from VTI =" + tableName);
if (tableName.equals(wanted)) {
String entrySizeIndexStr = vti.getString(entryCol);
logInfo("replicated mbean entrySizeIndexStr from VTI ="
+ entrySizeIndexStr);
return Double.parseDouble(entrySizeIndexStr);
}
} catch (Exception e) {
// a single unreadable row should not abort the whole scan
logInfo("replicated mbean exception in memoryAnalyticsVTI =" + e);
}
}
} catch (SQLException e) {
logInfo("Did not get memoryAnalyticsVTI " + e);
fail();
}
return 0.0;
}
/* static double getKeySize(){
MemoryAnalyticsVTI memoryAnalyticsVTI = new MemoryAnalyticsVTI(true);
int keySizeIndex = 0 ;
int tableNameIndex = 0 ;
try {
ResultSetMetaData metaData = memoryAnalyticsVTI.getMetaData();
int columnCount = metaData.getColumnCount();
for (int i = 1; i <= columnCount; i++) {
logInfo("replicated memoryAnalyticsVTI metaData.getColumnName(i) " + metaData.getColumnName(i));
if (metaData.getColumnName(i).equals(MemoryAnalyticsVTI.TABLE_NAME)) {
tableNameIndex = i;
} else if (metaData.getColumnName(i).equals(MemoryAnalyticsVTI.KEY_SIZE)) {
keySizeIndex = i;
}
if(tableNameIndex !=0 && keySizeIndex != 0 ){
break;
}
}
while (memoryAnalyticsVTI.next()) {
try {
String tableName = memoryAnalyticsVTI.getString(tableNameIndex);
logInfo("replicated mbean tableName from VTI =" + tableName);
if (tableName.equals(GfxdTableMBeanDUnit.SCHEMA + "."
+ GfxdTableMBeanDUnit.TABLE)) {
String keySizeIndexStr = memoryAnalyticsVTI.getString(keySizeIndex);
logInfo("replicated mbean keySizeIndexStr from VTI ="
+ keySizeIndexStr);
return Double.parseDouble(keySizeIndexStr);
}
} catch (Exception e) {
logInfo("replicated mbean exception in memoryAnalyticsVTI =" + e);
}
}
} catch (SQLException e) {
logInfo("Did not get memoryAnalyticsVTI " + e);
fail();
}
return 0.0;
}*/
/**
 * Assembles a CREATE TABLE statement from the base DDL plus optional clauses.
 * <p>
 * The policy clause takes precedence over the partition-by clause; the
 * redundancy and persistence clauses are appended only when supplied.
 * Bug fix: the original ternaries for redundancy/persistence had identical
 * branches ({@code x != null ? x : x}), so a null clause was concatenated
 * into the SQL as the literal string "null"; a null clause now falls back
 * to the empty string. Existing callers pass non-null clauses and are
 * unaffected.
 *
 * @param CREATE_TABLE_SQL  base "CREATE TABLE ... (columns)" DDL
 * @param SPACE             separator inserted between clauses
 * @param partitionByClause fallback placement clause used when policyClause is null
 * @param policyClause      explicit placement/policy clause; wins when non-null
 * @param persistenceClause optional persistence/eviction clause, may be null
 * @param redundancyClause  optional REDUNDANCY clause, may be null
 * @return the assembled SQL statement
 */
private String getSqlQuery(String CREATE_TABLE_SQL , String SPACE, String partitionByClause,
String policyClause, String persistenceClause, String redundancyClause) {
return CREATE_TABLE_SQL + SPACE +
(policyClause != null ? policyClause : partitionByClause) + SPACE +
(redundancyClause != null ? redundancyClause : "") + SPACE +
(persistenceClause != null ? persistenceClause : "");
}
/**
 * Builds distributed-system properties that make this member a JMX manager
 * listening on a random free TCP port.
 *
 * @param startManager whether the JMX manager should start immediately
 * @return properties carrying the jmx-manager configuration
 */
static Properties getManagerConfig(boolean startManager) {
final Properties config = new Properties();
final int jmxPort = AvailablePortHelper.getRandomAvailableTCPPort();
config.setProperty(DistributionConfig.JMX_MANAGER_NAME, "true");
config.setProperty(DistributionConfig.JMX_MANAGER_START_NAME,
Boolean.toString(startManager));
config.setProperty(DistributionConfig.JMX_MANAGER_PORT_NAME,
Integer.toString(jmxPort));
return config;
}
/**
 * Commits the shared test JDBC connection when it exists and runs with
 * auto-commit disabled; otherwise does nothing.
 *
 * @throws SQLException if reading the auto-commit flag or committing fails
 */
static void commitTx() throws SQLException {
final Connection conn = TestUtil.jdbcConn;
if (conn == null || conn.getAutoCommit()) {
return; // nothing to do: no connection, or auto-commit flushes already
}
conn.commit();
}
/**
 * Inserts {@code numOfInserts} rows with ids {@code start} through
 * {@code start + numOfInserts - 1} into the given table via server 1, then
 * commits the transaction on VM 0 (auto-commit is false by default for the
 * test framework).
 *
 * @param start         first row id to insert
 * @param numOfInserts  number of rows to insert
 * @param fullTableName schema-qualified table name
 */
void doInserts(int start, int numOfInserts, String fullTableName) throws Exception {
final int end = start + numOfInserts;
for (int id = start; id < end; id++) {
final int size = (int)(Math.random()*200); // random SIZE column value
final String sql = "INSERT INTO "+fullTableName+" VALUES ('"+id
+"', 'Spring"+id+"','TAG"+id+"',"+size+",'city"+id+"')";
logInfo("doInserts: insertTableSql :: "+sql);
serverSQLExecute(1, sql);
}
// auto-commit is false by default for test — flush explicitly on VM 0
Host.getHost(0).getVM(0).invoke(GfxdTableMBeanDUnit.class, "commitTx");
}
/**
 * End-to-end test of the table MBean's index operations over a remote JMX
 * connection: creates a partitioned 5-column table with three secondary
 * indexes (plus the implicit primary-key index), loads 100 rows, then checks
 * that listIndexInfo reports all 4 indexes with the right columns/order and
 * that listIndexStats reports row counts and entry sizes for the 3 local
 * indexes. Finally drops one index and verifies both lists shrink.
 */
public void testGetIndexInfo() throws Exception {
final String SPACE = " ";
final String TABLE_SCHEMA = "APP";
final String TABLE_NAME = "BOOKS";
final String CREATE_TABLE_SQL = "CREATE TABLE "+TABLE_SCHEMA+"."+TABLE_NAME+" (" +
"ID VARCHAR(10) NOT NULL, " +
"NAME VARCHAR(25), " +
"TAG VARCHAR(25), " +
"SIZE BIGINT, " +
"LOCATION VARCHAR(25)" +
", CONSTRAINT BOOK_PK PRIMARY KEY (ID) " +
")";
final String INDEX_BY_NAME = "CREATE INDEX INDEX_NAME ON " + TABLE_SCHEMA +"." + TABLE_NAME + "(NAME)";
final String INDEX_BY_LOCATION = "CREATE INDEX INDEX_LOCATION ON " + TABLE_SCHEMA +"." + TABLE_NAME
+ "(LOCATION)";
final String INDEX_BY_TAG = "CREATE INDEX INDEX_TAG ON " + TABLE_SCHEMA +"." + TABLE_NAME
+ "(TAG)";
String partitionByClause = "PARTITION BY PRIMARY KEY";
int redundancyValue = 2;
String redundancyClause = "REDUNDANCY "+redundancyValue;
String persistenceClause = "EVICTION BY LRUHEAPPERCENT EVICTACTION OVERFLOW PERSISTENT ASYNCHRONOUS";
boolean serverVmStarted = false;
try {
// partitionByClause doubles as the policy clause here
String createTableSql = getSqlQuery(CREATE_TABLE_SQL , SPACE, partitionByClause, partitionByClause, persistenceClause, redundancyClause);
startVMs(0, 1, AvailablePortHelper.getRandomAvailableTCPPort(), ManagementUtils.DEFAULT_SERVER_GROUP,
getManagerConfig(true));
serverVmStarted = true;
serverSQLExecute(1, createTableSql);
String fullTableName = TestUtil.getFullTableName(TABLE_SCHEMA, TABLE_NAME);
int inserts = 100;
serverSQLExecute(1, INDEX_BY_LOCATION);
serverSQLExecute(1, INDEX_BY_NAME);
serverSQLExecute(1, INDEX_BY_TAG);
doInserts(1,inserts, fullTableName);
// Connect to the server VM's JMX manager from this (client) side
VM serverVM = Host.getHost(0).getVM(0);
ConnectionEndpoint jmxHostPort = (ConnectionEndpoint) serverVM.invoke(GfxdTableMBeanDUnit.class,
"retrieveJmxHostPort");
MBeanServerConnection mbsc = startJmxClient(jmxHostPort.host, jmxHostPort.port);
String memberNameOrId = (String) serverVM.invoke(GfxdTableMBeanDUnit.class, "getMemberNameOrId");
ObjectName tableMBeanName = ManagementUtils.getTableMBeanName(ManagementUtils.DEFAULT_SERVER_GROUP,
memberNameOrId, TestUtil.getFullTableName(TABLE_SCHEMA, TABLE_NAME));
long rowCount;
double entrySize;
logInfo("Validating index info for all indexes ");
// 3 explicit indexes + the generated primary-key index; the "2__<table>__ID"
// form presumably matches the system's generated PK index name — TODO confirm
Set<String> indexNames = new HashSet<String>();
String pkIndexName = "2__" + TABLE_NAME + "__ID";
indexNames.add("INDEX_LOCATION");
indexNames.add("INDEX_TAG");
indexNames.add("INDEX_NAME");
indexNames.add(pkIndexName);
// Wait until all 4 indexes are visible through the MBean
waitForCriterion(new WaitCriterion() {
@Override
public boolean done() {
try {
CompositeData[] data = (CompositeData[])mbsc.invoke(tableMBeanName,
"listIndexInfo", null, null);
return data != null && data.length == 4;
} catch (Exception e) {
throw new AssertionError(e.getMessage(), e);
}
}
@Override
public String description() {
return "waiting for index information to show up in metadata";
}
}, 30000, 500, true);
CompositeData[] data = (CompositeData[]) mbsc.invoke(tableMBeanName, "listIndexInfo", null, null);
assertEquals(4, data.length);
for (CompositeData indexInfo : data) {
String indexName = (String) indexInfo.get("indexName");
logInfo("Validating index info for " + indexName);
assertTrue(indexNames.contains(indexName));
// NOTE(review): receiver/argument look swapped — "INDEX_LOCATION".contains(indexName)
// is true when indexName is a SUBSTRING of the literal; works for these four
// names but .equals(indexName) appears intended
if ("INDEX_LOCATION".contains(indexName)) {
assertEquals("+LOCATION", indexInfo.get("columnsAndOrder"));
} else if ("INDEX_TAG".contains(indexName)) {
assertEquals("+TAG", indexInfo.get("columnsAndOrder"));
} else if ("INDEX_NAME".contains(indexName)) {
assertEquals("+NAME", indexInfo.get("columnsAndOrder"));
} else {
assertEquals(pkIndexName, indexName);
assertEquals("+ID", indexInfo.get("columnsAndOrder"));
}
assertTrue(((String) indexInfo.get("indexType")).contains("LOCAL")
|| indexInfo.get("indexType").equals("PRIMARY KEY"));
}
// Stats are reported for the 3 explicit (local) indexes only
CompositeData[] indexStats = (CompositeData[]) mbsc.invoke(tableMBeanName, "listIndexStats", null, null);
assertEquals(3, indexStats.length);
for(CompositeData indexStat : indexStats) {
String indexName = (String) indexStat.get("indexName");
logInfo("Validating index stats for " + indexName
+ " rowCount = " + indexStat.get("rowCount") + " entrySize " + indexStat.get("entrySize"));
logInfo(indexName + " index entry size " + indexStat.get("entrySize"));
logInfo(indexName + " index key size " + indexStat.get("keySize"));
assertTrue(indexNames.contains(indexName));
rowCount=(Long)indexStat.get("rowCount");
assertEquals(inserts,(int)rowCount);
entrySize = (Double) indexStat.get("entrySize");
assertTrue(entrySize > 0.0d);
}
// Dropping one index must shrink both the info and stats lists
serverSQLExecute(1, "DROP INDEX " + TABLE_SCHEMA + ".INDEX_LOCATION");
data = (CompositeData[]) mbsc.invoke(tableMBeanName, "listIndexInfo", null, null);
assertEquals(3, data.length);
indexStats = (CompositeData[]) mbsc.invoke(tableMBeanName, "listIndexStats", null, null);
assertEquals(2, indexStats.length);
} finally {
stopJmxClient();
if (serverVmStarted) {
stopVMNum(-1);
}
}
}
/**
 * Test-lifecycle teardown; no extra cleanup is needed here, so this simply
 * delegates to the superclass.
 */
@Override
public void tearDown2() throws Exception {
super.tearDown2();
}
}
| |
/*
* ######
* ######
* ############ ####( ###### #####. ###### ############ ############
* ############# #####( ###### #####. ###### ############# #############
* ###### #####( ###### #####. ###### ##### ###### ##### ######
* ###### ###### #####( ###### #####. ###### ##### ##### ##### ######
* ###### ###### #####( ###### #####. ###### ##### ##### ######
* ############# ############# ############# ############# ##### ######
* ############ ############ ############# ############ ##### ######
* ######
* #############
* ############
*
* Adyen Java API Library
*
* Copyright (c) 2021 Adyen B.V.
* This file is open source and available under the MIT license.
* See the LICENSE file for more info.
*/
package com.adyen.model.marketpay.notification;
import com.fasterxml.jackson.annotation.JsonValue;
import com.google.gson.annotations.SerializedName;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import static com.adyen.util.Util.toIndentedString;
/**
* NotificationConfigurationDetails
*/
/**
 * NotificationConfigurationDetails
 * <p>
 * Configuration of a MarketPay notification endpoint: target URL,
 * credentials, SSL protocol, subscribed event types and arbitrary extra
 * settings. Instances are (de)serialized by Gson/Jackson via the annotated
 * fields and expose a fluent builder-style API.
 */
public class NotificationConfigurationDetails {
    @SerializedName("settings")
    private Map<String, String> settings = new HashMap<>();

    @SerializedName("apiVersion")
    private Integer apiVersion = null;

    @SerializedName("eventConfigs")
    private List<NotificationEventConfiguration> eventConfigs = null;

    @SerializedName("notifyUsername")
    private String notifyUsername = null;

    /**
     * SSL protocol
     */
    public enum SslProtocolEnum {
        @SerializedName("SSL")
        SSL("SSL"),
        @SerializedName("SSLInsecureCiphers")
        SSLINSECURECIPHERS("SSLInsecureCiphers"),
        @SerializedName("SSLv3")
        SSLV3("SSLv3"),
        @SerializedName("TLS")
        TLS("TLS"),
        @SerializedName("TLSv10")
        TLSV10("TLSv10"),
        @SerializedName("TLSv10InsecureCiphers")
        TLSV10INSECURECIPHERS("TLSv10InsecureCiphers"),
        @SerializedName("TLSv11")
        TLSV11("TLSv11"),
        @SerializedName("TLSv12")
        TLSV12("TLSv12");

        // wire value of the constant; immutable once constructed
        @JsonValue
        private final String value;

        SslProtocolEnum(String value) {
            this.value = value;
        }

        @Override
        public String toString() {
            return String.valueOf(value);
        }
    }

    @SerializedName("sslProtocol")
    private SslProtocolEnum sslProtocol = null;

    @SerializedName("notifyURL")
    private String notifyURL = null;

    @SerializedName("description")
    private String description = null;

    @SerializedName("active")
    private Boolean active = null;

    @SerializedName("notificationId")
    private Long notificationId = null;

    @SerializedName("notifyPassword")
    private String notifyPassword = null;

    @SerializedName("hmacSignatureKey")
    private String hmacSignatureKey = null;

    public NotificationConfigurationDetails settings(Map<String, String> settings) {
        this.settings = settings;
        return this;
    }

    public NotificationConfigurationDetails putSettingsItem(String key, String settingsItem) {
        // setSettings(null)/settings(null) may have cleared the map; recreate it
        // lazily here, mirroring addEventConfigsItem (previously this threw NPE)
        if (this.settings == null) {
            this.settings = new HashMap<>();
        }
        this.settings.put(key, settingsItem);
        return this;
    }

    /**
     * additional notification settings
     *
     * @return settings
     **/
    public Map<String, String> getSettings() {
        return settings;
    }

    public void setSettings(Map<String, String> settings) {
        this.settings = settings;
    }

    public NotificationConfigurationDetails apiVersion(Integer apiVersion) {
        this.apiVersion = apiVersion;
        return this;
    }

    /**
     * api version to use
     *
     * @return apiVersion
     **/
    public Integer getApiVersion() {
        return apiVersion;
    }

    public void setApiVersion(Integer apiVersion) {
        this.apiVersion = apiVersion;
    }

    public NotificationConfigurationDetails eventConfigs(List<NotificationEventConfiguration> eventConfigs) {
        this.eventConfigs = eventConfigs;
        return this;
    }

    public NotificationConfigurationDetails addEventConfigsItem(NotificationEventConfiguration eventConfigsItem) {
        if (eventConfigs == null) {
            eventConfigs = new ArrayList<>();
        }
        this.eventConfigs.add(eventConfigsItem);
        return this;
    }

    /**
     * types of the events triggering the notification
     *
     * @return eventConfigs
     **/
    public List<NotificationEventConfiguration> getEventConfigs() {
        return eventConfigs;
    }

    public void setEventConfigs(List<NotificationEventConfiguration> eventConfigs) {
        this.eventConfigs = eventConfigs;
    }

    public NotificationConfigurationDetails notifyUsername(String notifyUsername) {
        this.notifyUsername = notifyUsername;
        return this;
    }

    /**
     * username to access notification URL
     *
     * @return notifyUsername
     **/
    public String getNotifyUsername() {
        return notifyUsername;
    }

    public void setNotifyUsername(String notifyUsername) {
        this.notifyUsername = notifyUsername;
    }

    public NotificationConfigurationDetails sslProtocol(SslProtocolEnum sslProtocol) {
        this.sslProtocol = sslProtocol;
        return this;
    }

    /**
     * SSL protocol
     *
     * @return sslProtocol
     **/
    public SslProtocolEnum getSslProtocol() {
        return sslProtocol;
    }

    public void setSslProtocol(SslProtocolEnum sslProtocol) {
        this.sslProtocol = sslProtocol;
    }

    public NotificationConfigurationDetails notifyURL(String notifyURL) {
        this.notifyURL = notifyURL;
        return this;
    }

    /**
     * notification URL
     *
     * @return notifyURL
     **/
    public String getNotifyURL() {
        return notifyURL;
    }

    public void setNotifyURL(String notifyURL) {
        this.notifyURL = notifyURL;
    }

    public NotificationConfigurationDetails description(String description) {
        this.description = description;
        return this;
    }

    /**
     * notification configuration description
     *
     * @return description
     **/
    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public NotificationConfigurationDetails active(Boolean active) {
        this.active = active;
        return this;
    }

    /**
     * indicates if the notification configuration is active
     *
     * @return active
     **/
    public Boolean getActive() {
        return active;
    }

    public void setActive(Boolean active) {
        this.active = active;
    }

    public NotificationConfigurationDetails notificationId(Long notificationId) {
        this.notificationId = notificationId;
        return this;
    }

    /**
     * Id of the notification configuration. Is ignored when the notification configuration has to be created
     *
     * @return notificationId
     **/
    public Long getNotificationId() {
        return notificationId;
    }

    public void setNotificationId(Long notificationId) {
        this.notificationId = notificationId;
    }

    public NotificationConfigurationDetails notifyPassword(String notifyPassword) {
        this.notifyPassword = notifyPassword;
        return this;
    }

    /**
     * password to access notification URL
     *
     * @return notifyPassword
     **/
    public String getNotifyPassword() {
        return notifyPassword;
    }

    public void setNotifyPassword(String notifyPassword) {
        this.notifyPassword = notifyPassword;
    }

    public NotificationConfigurationDetails hmacSignatureKey(String hmacSignatureKey) {
        this.hmacSignatureKey = hmacSignatureKey;
        return this;
    }

    /**
     * key for the HMAC signature of notifications
     *
     * @return hmacSignatureKey
     **/
    public String getHmacSignatureKey() {
        return hmacSignatureKey;
    }

    public void setHmacSignatureKey(String hmacSignatureKey) {
        this.hmacSignatureKey = hmacSignatureKey;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        NotificationConfigurationDetails notificationConfigurationDetails = (NotificationConfigurationDetails) o;
        return Objects.equals(this.settings, notificationConfigurationDetails.settings)
                && Objects.equals(this.apiVersion, notificationConfigurationDetails.apiVersion)
                && Objects.equals(this.eventConfigs, notificationConfigurationDetails.eventConfigs)
                && Objects.equals(this.notifyUsername, notificationConfigurationDetails.notifyUsername)
                && Objects.equals(this.sslProtocol, notificationConfigurationDetails.sslProtocol)
                && Objects.equals(this.notifyURL, notificationConfigurationDetails.notifyURL)
                && Objects.equals(this.description, notificationConfigurationDetails.description)
                && Objects.equals(this.active, notificationConfigurationDetails.active)
                && Objects.equals(this.notificationId, notificationConfigurationDetails.notificationId)
                && Objects.equals(this.notifyPassword, notificationConfigurationDetails.notifyPassword)
                && Objects.equals(this.hmacSignatureKey, notificationConfigurationDetails.hmacSignatureKey);
    }

    @Override
    public int hashCode() {
        return Objects.hash(settings, apiVersion, eventConfigs, notifyUsername, sslProtocol, notifyURL, description, active, notificationId, notifyPassword, hmacSignatureKey);
    }

    @Override
    public String toString() {
        // Kept from the original implementation: the call is a no-op for this
        // class's plain getter, but subclass overrides of getEventConfigs() may
        // lazily populate the list before it is printed — confirm before removing
        this.getEventConfigs();
        StringBuilder sb = new StringBuilder();
        sb.append("class NotificationConfigurationDetails {\n");
        sb.append("    active: ").append(toIndentedString(active)).append("\n");
        sb.append("    apiVersion: ").append(toIndentedString(apiVersion)).append("\n");
        sb.append("    description: ").append(toIndentedString(description)).append("\n");
        sb.append("    eventConfigs: ").append(toIndentedString(eventConfigs)).append("\n");
        sb.append("    hmacSignatureKey: ").append(toIndentedString(hmacSignatureKey)).append("\n");
        sb.append("    notificationId: ").append(toIndentedString(notificationId)).append("\n");
        sb.append("    notifyPassword: ").append(toIndentedString(notifyPassword)).append("\n");
        sb.append("    notifyURL: ").append(toIndentedString(notifyURL)).append("\n");
        sb.append("    notifyUsername: ").append(toIndentedString(notifyUsername)).append("\n");
        sb.append("    sslProtocol: ").append(toIndentedString(sslProtocol)).append("\n");
        sb.append("    settings: ").append(toIndentedString(settings)).append("\n");
        sb.append("}");
        return sb.toString();
    }
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.search.aggregations.bucket.sampler;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.DoubleDocValuesField;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.SortedDocValuesField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.lucene.search.function.FieldValueFactorFunction;
import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.fielddata.ScriptDocValues.Doubles;
import org.elasticsearch.index.fielddata.ScriptDocValues.DoublesSupplier;
import org.elasticsearch.index.fielddata.plain.SortedDoublesIndexFieldData;
import org.elasticsearch.index.mapper.KeywordFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.script.field.DelegateDocValuesField;
import org.elasticsearch.search.aggregations.AggregatorTestCase;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Consumer;
import static org.hamcrest.Matchers.greaterThan;
public class DiversifiedSamplerTests extends AggregatorTestCase {
private void writeBooks(RandomIndexWriter iw) throws IOException {
String data[] = {
// "id,cat,name,price,inStock,author_t,series_t,sequence_i,genre_s,genre_id",
"0553573403,book,A Game of Thrones,7.99,true,George R.R. Martin,A Song of Ice and Fire,1,fantasy,0",
"0553579908,book,A Clash of Kings,7.99,true,George R.R. Martin,A Song of Ice and Fire,2,fantasy,0",
"055357342X,book,A Storm of Swords,7.99,true,George R.R. Martin,A Song of Ice and Fire,3,fantasy,0",
"0553293354,book,Foundation,17.99,true,Isaac Asimov,Foundation Novels,1,scifi,1",
"0812521390,book,The Black Company,6.99,false,Glen Cook,The Chronicles of The Black Company,1,fantasy,0",
"0812550706,book,Ender's Game,6.99,true,Orson Scott Card,Ender,1,scifi,1",
"0441385532,book,Jhereg,7.95,false,Steven Brust,Vlad Taltos,1,fantasy,0",
"0380014300,book,Nine Princes In Amber,6.99,true,Roger Zelazny,the Chronicles of Amber,1,fantasy,0",
"0805080481,book,The Book of Three,5.99,true,Lloyd Alexander,The Chronicles of Prydain,1,fantasy,0",
"080508049X,book,The Black Cauldron,5.99,true,Lloyd Alexander,The Chronicles of Prydain,2,fantasy,0" };
List<Document> docs = new ArrayList<>();
for (String entry : data) {
String[] parts = entry.split(",");
Document document = new Document();
document.add(new SortedDocValuesField("id", new BytesRef(parts[0])));
document.add(new StringField("cat", parts[1], Field.Store.NO));
document.add(new TextField("name", parts[2], Field.Store.NO));
document.add(new DoubleDocValuesField("price", Double.valueOf(parts[3])));
document.add(new StringField("inStock", parts[4], Field.Store.NO));
document.add(new StringField("author", parts[5], Field.Store.NO));
document.add(new StringField("series", parts[6], Field.Store.NO));
document.add(new StringField("sequence", parts[7], Field.Store.NO));
document.add(new SortedDocValuesField("genre", new BytesRef(parts[8])));
document.add(new NumericDocValuesField("genre_id", Long.valueOf(parts[9])));
docs.add(document);
}
/*
* Add all documents at once to force the test to aggregate all
* values together at the same time. *That* is required because
* the tests assume that all books are on a shard together. And
* they aren't always if they end up in separate leaves.
*/
iw.addDocuments(docs);
}
public void testDiversifiedSampler() throws Exception {
Directory directory = newDirectory();
RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
MappedFieldType genreFieldType = new KeywordFieldMapper.KeywordFieldType("genre");
writeBooks(indexWriter);
indexWriter.close();
IndexReader indexReader = DirectoryReader.open(directory);
IndexSearcher indexSearcher = new IndexSearcher(indexReader);
Consumer<InternalSampler> verify = result -> {
Terms terms = result.getAggregations().get("terms");
assertEquals(2, terms.getBuckets().size());
assertEquals("0805080481", terms.getBuckets().get(0).getKeyAsString());
assertEquals("0812550706", terms.getBuckets().get(1).getKeyAsString());
};
testCase(indexSearcher, genreFieldType, "map", verify);
testCase(indexSearcher, genreFieldType, "global_ordinals", verify);
testCase(indexSearcher, genreFieldType, "bytes_hash", verify);
genreFieldType = new NumberFieldMapper.NumberFieldType("genre_id", NumberFieldMapper.NumberType.LONG);
testCase(indexSearcher, genreFieldType, null, verify);
// wrong field:
genreFieldType = new KeywordFieldMapper.KeywordFieldType("wrong_field");
testCase(indexSearcher, genreFieldType, null, result -> {
Terms terms = result.getAggregations().get("terms");
assertEquals(1, terms.getBuckets().size());
assertEquals("0805080481", terms.getBuckets().get(0).getKeyAsString());
});
indexReader.close();
directory.close();
}
public void testRidiculousSize() throws Exception {
Directory directory = newDirectory();
RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
writeBooks(indexWriter);
indexWriter.close();
IndexReader indexReader = DirectoryReader.open(directory);
IndexSearcher indexSearcher = new IndexSearcher(indexReader);
MappedFieldType genreFieldType = new KeywordFieldMapper.KeywordFieldType("genre");
Consumer<InternalSampler> verify = result -> {
Terms terms = result.getAggregations().get("terms");
assertThat(terms.getBuckets().size(), greaterThan(0));
};
try {
// huge shard_size
testCase(indexSearcher, genreFieldType, "map", verify, Integer.MAX_VALUE, 1);
testCase(indexSearcher, genreFieldType, "global_ordinals", verify, Integer.MAX_VALUE, 1);
testCase(indexSearcher, genreFieldType, "bytes_hash", verify, Integer.MAX_VALUE, 1);
// huge maxDocsPerValue
testCase(indexSearcher, genreFieldType, "map", verify, 100, Integer.MAX_VALUE);
testCase(indexSearcher, genreFieldType, "global_ordinals", verify, 100, Integer.MAX_VALUE);
testCase(indexSearcher, genreFieldType, "bytes_hash", verify, 100, Integer.MAX_VALUE);
} finally {
indexReader.close();
directory.close();
}
}
private void testCase(
IndexSearcher indexSearcher,
MappedFieldType genreFieldType,
String executionHint,
Consumer<InternalSampler> verify
) throws IOException {
testCase(indexSearcher, genreFieldType, executionHint, verify, 100, 1);
}
private void testCase(
IndexSearcher indexSearcher,
MappedFieldType genreFieldType,
String executionHint,
Consumer<InternalSampler> verify,
int shardSize,
int maxDocsPerValue
) throws IOException {
MappedFieldType idFieldType = new KeywordFieldMapper.KeywordFieldType("id");
SortedDoublesIndexFieldData fieldData = new SortedDoublesIndexFieldData(
"price",
IndexNumericFieldData.NumericType.DOUBLE,
(dv, n) -> new DelegateDocValuesField(new Doubles(new DoublesSupplier(dv)), n)
);
FunctionScoreQuery query = new FunctionScoreQuery(
new MatchAllDocsQuery(),
new FieldValueFactorFunction("price", 1, FieldValueFactorFunction.Modifier.RECIPROCAL, null, fieldData)
);
DiversifiedAggregationBuilder builder = new DiversifiedAggregationBuilder("_name").field(genreFieldType.name())
.executionHint(executionHint)
.maxDocsPerValue(maxDocsPerValue)
.shardSize(shardSize)
.subAggregation(new TermsAggregationBuilder("terms").field("id"));
InternalSampler result = searchAndReduce(indexSearcher, query, builder, genreFieldType, idFieldType);
verify.accept(result);
}
public void testDiversifiedSampler_noDocs() throws Exception {
Directory directory = newDirectory();
RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
indexWriter.close();
IndexReader indexReader = DirectoryReader.open(directory);
IndexSearcher indexSearcher = new IndexSearcher(indexReader);
MappedFieldType idFieldType = new KeywordFieldMapper.KeywordFieldType("id");
MappedFieldType genreFieldType = new KeywordFieldMapper.KeywordFieldType("genre");
DiversifiedAggregationBuilder builder = new DiversifiedAggregationBuilder("_name").field(genreFieldType.name())
.subAggregation(new TermsAggregationBuilder("terms").field("id"));
InternalSampler result = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), builder, genreFieldType, idFieldType);
Terms terms = result.getAggregations().get("terms");
assertEquals(0, terms.getBuckets().size());
indexReader.close();
directory.close();
}
}
| |
package com.mgaetan89.showsrage.fragment;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.support.annotation.LayoutRes;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.design.widget.FloatingActionButton;
import android.support.v4.app.Fragment;
import android.support.v4.content.LocalBroadcastManager;
import android.support.v7.widget.GridLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import com.mgaetan89.showsrage.Constants;
import com.mgaetan89.showsrage.R;
import com.mgaetan89.showsrage.activity.AddShowActivity;
import com.mgaetan89.showsrage.adapter.ShowsAdapter;
import com.mgaetan89.showsrage.model.Show;
import com.mgaetan89.showsrage.model.ShowStat;
import com.mgaetan89.showsrage.model.ShowStatWrapper;
import com.mgaetan89.showsrage.model.ShowStats;
import com.mgaetan89.showsrage.model.ShowStatsWrapper;
import com.mgaetan89.showsrage.network.SickRageApi;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import retrofit.Callback;
import retrofit.RetrofitError;
import retrofit.client.Response;
public class ShowsSectionFragment extends Fragment implements View.OnClickListener {
    @Nullable
    private ShowsAdapter adapter = null;

    @Nullable
    private TextView emptyView = null;

    // Subset of {@link #shows} matching the currently active filter; this is what the adapter displays.
    @NonNull
    private final List<Show> filteredShows = new ArrayList<>();

    @NonNull
    private final FilterReceiver receiver = new FilterReceiver(this);

    @Nullable
    private RecyclerView recyclerView = null;

    // Full, unfiltered list of shows passed in via the fragment arguments.
    @NonNull
    private final Collection<Show> shows = new ArrayList<>();

    public ShowsSectionFragment() {
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        Bundle arguments = this.getArguments();
        if (arguments != null) {
            @SuppressWarnings("unchecked")
            Collection<Show> shows = (Collection<Show>) arguments.getSerializable(Constants.Bundle.SHOWS);
            if (shows != null) {
                this.filteredShows.addAll(shows);
                this.shows.addAll(shows);
            }
        }
        if (!this.shows.isEmpty()) {
            // Fetch per-show episode statistics in a single batched API call.
            String command = getCommand(this.shows);
            Map<String, Integer> parameters = getCommandParameters(this.shows);
            SickRageApi.getInstance().getServices().getShowStats(command, parameters, new ShowStatsCallback(this));
        }
        this.updateLayout();
    }

    @Override
    public void onClick(View view) {
        if (view == null) {
            return;
        }
        if (view.getId() == R.id.add_show) {
            Intent intent = new Intent(this.getActivity(), AddShowActivity.class);
            this.startActivity(intent);
        }
    }

    @Override
    public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
        View view = inflater.inflate(R.layout.fragment_shows_section, container, false);
        if (view != null) {
            this.emptyView = (TextView) view.findViewById(android.R.id.empty);
            this.recyclerView = (RecyclerView) view.findViewById(android.R.id.list);
            FloatingActionButton addShow = (FloatingActionButton) view.findViewById(R.id.add_show);
            if (addShow != null) {
                addShow.setOnClickListener(this);
            }
            if (this.recyclerView != null) {
                // The list layout (banner vs. poster) is a user preference.
                String showsListLayout = PreferenceManager.getDefaultSharedPreferences(this.getContext()).getString("display_shows_list_layout", "poster");
                int columnCount = this.getResources().getInteger(R.integer.shows_column_count);
                this.adapter = new ShowsAdapter(this.filteredShows, getAdapterLayoutResource(showsListLayout));
                this.recyclerView.setAdapter(this.adapter);
                this.recyclerView.setLayoutManager(new GridLayoutManager(this.getActivity(), columnCount));
            }
        }
        return view;
    }

    @Override
    public void onDestroy() {
        this.filteredShows.clear();
        this.shows.clear();
        super.onDestroy();
    }

    @Override
    public void onDestroyView() {
        // Drop view references so the destroyed view hierarchy can be collected.
        this.emptyView = null;
        this.recyclerView = null;
        super.onDestroyView();
    }

    @Override
    public void onPause() {
        LocalBroadcastManager.getInstance(this.getContext()).unregisterReceiver(this.receiver);
        super.onPause();
    }

    @Override
    public void onResume() {
        super.onResume();
        // Listen for filter changes only while the fragment is in the foreground.
        IntentFilter intentFilter = new IntentFilter(Constants.Intents.ACTION_FILTER_SHOWS);
        LocalBroadcastManager.getInstance(this.getContext()).registerReceiver(this.receiver, intentFilter);
    }

    /** Maps the layout preference value to the corresponding item layout; defaults to poster. */
    @LayoutRes
    /* package */ static int getAdapterLayoutResource(String preferedLayout) {
        if ("banner".equals(preferedLayout)) {
            return R.layout.adapter_shows_list_content_banner;
        }
        return R.layout.adapter_shows_list_content_poster;
    }

    /**
     * Builds the pipe-separated batch command string ("show.stats_<id>|...")
     * covering every valid show; empty when there is none.
     */
    @NonNull
    /* package */ static String getCommand(@Nullable Iterable<Show> shows) {
        StringBuilder command = new StringBuilder();
        if (shows != null) {
            for (Show show : shows) {
                if (!isShowValid(show)) {
                    continue;
                }
                if (command.length() > 0) {
                    command.append("|");
                }
                command.append("show.stats_").append(show.getIndexerId());
            }
        }
        return command.toString();
    }

    /** Builds the per-command indexer-id parameters matching {@link #getCommand(Iterable)}. */
    @NonNull
    /* package */ static Map<String, Integer> getCommandParameters(@Nullable Iterable<Show> shows) {
        Map<String, Integer> parameters = new HashMap<>();
        if (shows != null) {
            for (Show show : shows) {
                if (!isShowValid(show)) {
                    continue;
                }
                int indexerId = show.getIndexerId();
                parameters.put("show.stats_" + indexerId + ".indexerid", indexerId);
            }
        }
        return parameters;
    }

    /** A show is usable for stats queries only when it has a positive indexer id. */
    /* package */
    static boolean isShowValid(@Nullable Show show) {
        return show != null && show.getIndexerId() > 0;
    }

    /** Toggles the empty-state/list visibility and refreshes the adapter. */
    private void updateLayout() {
        if (this.filteredShows.isEmpty()) {
            if (this.emptyView != null) {
                this.emptyView.setVisibility(View.VISIBLE);
            }
            if (this.recyclerView != null) {
                this.recyclerView.setVisibility(View.GONE);
            }
        } else {
            if (this.emptyView != null) {
                this.emptyView.setVisibility(View.GONE);
            }
            if (this.recyclerView != null) {
                this.recyclerView.setVisibility(View.VISIBLE);
            }
        }
        if (this.adapter != null) {
            this.adapter.notifyDataSetChanged();
        }
    }

    /**
     * Applies filter broadcasts to the fragment's show list. Holds only a weak
     * reference so a pending broadcast cannot leak a destroyed fragment.
     */
    /* package */ static final class FilterReceiver extends BroadcastReceiver {
        private final WeakReference<ShowsSectionFragment> fragmentReference;

        FilterReceiver(ShowsSectionFragment fragment) {
            this.fragmentReference = new WeakReference<>(fragment);
        }

        @Override
        public void onReceive(Context context, Intent intent) {
            ShowsSectionFragment fragment = this.fragmentReference.get();
            if (fragment == null) {
                return;
            }
            int filterMode = intent.getIntExtra(Constants.Bundle.FILTER_MODE, ShowsFragment.FILTER_PAUSED_ACTIVE_BOTH);
            int filterStatus = intent.getIntExtra(Constants.Bundle.FILTER_STATUS, ShowsFragment.FILTER_STATUS_ALL);
            String searchQuery = intent.getStringExtra(Constants.Bundle.SEARCH_QUERY);
            Collection<Show> filteredShows = new ArrayList<>();
            Collection<Show> shows = fragment.shows;
            for (Show show : shows) {
                if (match(show, filterMode, filterStatus, searchQuery)) {
                    filteredShows.add(show);
                }
            }
            fragment.filteredShows.clear();
            fragment.filteredShows.addAll(filteredShows);
            fragment.updateLayout();
        }

        /** A show is displayed only when it passes all three filters. */
        /* package */
        static boolean match(@Nullable Show show, int filterMode, int filterStatus, String searchQuery) {
            return show != null &&
                    matchFilterMode(show, filterMode) &&
                    matchFilterStatus(show, filterStatus) &&
                    matchSearchQuery(show, searchQuery);
        }

        /* package */
        static boolean matchFilterMode(@NonNull Show show, int filterMode) {
            switch (filterMode) {
                case ShowsFragment.FILTER_PAUSED_ACTIVE_ACTIVE:
                    return show.getPaused() == 0;
                case ShowsFragment.FILTER_PAUSED_ACTIVE_BOTH:
                    return true;
                case ShowsFragment.FILTER_PAUSED_ACTIVE_PAUSED:
                    return show.getPaused() == 1;
            }
            return false;
        }

        /* package */
        static boolean matchFilterStatus(@NonNull Show show, int filterStatus) {
            switch (filterStatus) {
                case ShowsFragment.FILTER_STATUS_ALL:
                    return true;
                case ShowsFragment.FILTER_STATUS_CONTINUING:
                    return "continuing".equalsIgnoreCase(show.getStatus());
                case ShowsFragment.FILTER_STATUS_ENDED:
                    return "ended".equalsIgnoreCase(show.getStatus());
                case ShowsFragment.FILTER_STATUS_UNKNOWN:
                    return "unknown".equalsIgnoreCase(show.getStatus());
            }
            return false;
        }

        /** Case-insensitive substring match of the query against the show name; blank queries match everything. */
        /* package */
        static boolean matchSearchQuery(@NonNull Show show, @Nullable String searchQuery) {
            if (searchQuery == null || searchQuery.isEmpty()) {
                return true;
            }
            searchQuery = searchQuery.trim();
            if (searchQuery.isEmpty()) {
                return true;
            }
            // NOTE(review): toLowerCase() uses the default locale; confirm this is intended
            // for locale-sensitive alphabets (e.g. Turkish dotless i).
            String showName = show.getShowName().toLowerCase();
            searchQuery = searchQuery.toLowerCase();
            return showName.contains(searchQuery);
        }
    }

    /**
     * Receives batched show statistics and copies them onto the matching shows.
     * Holds only a weak reference to the fragment to avoid leaking it across the
     * asynchronous call.
     */
    private static final class ShowStatsCallback implements Callback<ShowStatsWrapper> {
        private final WeakReference<ShowsSectionFragment> fragmentReference;

        ShowStatsCallback(ShowsSectionFragment fragment) {
            this.fragmentReference = new WeakReference<>(fragment);
        }

        @Override
        public void failure(RetrofitError error) {
            error.printStackTrace();
        }

        @Override
        public void success(ShowStatsWrapper showStatsWrapper, Response response) {
            ShowsSectionFragment fragment = this.fragmentReference.get();
            if (fragment == null) {
                return;
            }
            ShowStatWrapper data = showStatsWrapper.getData();
            Map<Integer, ShowStats> showStats = data.getShowStats();
            if (showStats != null) {
                for (Map.Entry<Integer, ShowStats> entry : showStats.entrySet()) {
                    ShowStat showStatsData = entry.getValue().getData();
                    int indexerId = entry.getKey();
                    // The same Show instance may appear in both lists; update each copy.
                    applyShowStats(fragment.filteredShows, indexerId, showStatsData);
                    applyShowStats(fragment.shows, indexerId, showStatsData);
                }
            }
            fragment.updateLayout();
        }

        /** Copies the statistics onto the first show in {@code shows} with the given indexer id. */
        private static void applyShowStats(Iterable<Show> shows, int indexerId, ShowStat stats) {
            for (Show show : shows) {
                if (show.getIndexerId() == indexerId) {
                    show.setEpisodesCount(stats.getTotal());
                    show.setDownloaded(stats.getTotalDone());
                    show.setSnatched(stats.getTotalPending());
                    break;
                }
            }
        }
    }
}
| |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
/*
* Class ExceptionBreakpoint
* @author Jeka
*/
package com.intellij.debugger.ui.breakpoints;
import com.intellij.debugger.JavaDebuggerBundle;
import com.intellij.debugger.SourcePosition;
import com.intellij.debugger.engine.*;
import com.intellij.debugger.engine.evaluation.EvaluateException;
import com.intellij.debugger.engine.evaluation.EvaluationContextImpl;
import com.intellij.debugger.engine.requests.RequestManagerImpl;
import com.intellij.debugger.impl.DebuggerUtilsEx;
import com.intellij.icons.AllIcons;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.InvalidDataException;
import com.intellij.openapi.util.JDOMExternalizerUtil;
import com.intellij.openapi.util.Key;
import com.intellij.psi.CommonClassNames;
import com.intellij.psi.JavaPsiFacade;
import com.intellij.psi.PsiClass;
import com.intellij.psi.PsiElement;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.ui.classFilter.ClassFilter;
import com.intellij.xdebugger.breakpoints.XBreakpoint;
import com.sun.jdi.AbsentInformationException;
import com.sun.jdi.Location;
import com.sun.jdi.ObjectReference;
import com.sun.jdi.ReferenceType;
import com.sun.jdi.event.ExceptionEvent;
import com.sun.jdi.event.LocatableEvent;
import org.jdom.Element;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.java.debugger.breakpoints.properties.JavaExceptionBreakpointProperties;
import javax.swing.*;
public class ExceptionBreakpoint extends Breakpoint<JavaExceptionBreakpointProperties> {
  private static final Logger LOG = Logger.getInstance(ExceptionBreakpoint.class);

  public static final @NonNls Key<ExceptionBreakpoint> CATEGORY = BreakpointCategory.lookup("exception_breakpoints");

  public ExceptionBreakpoint(Project project, XBreakpoint<JavaExceptionBreakpointProperties> xBreakpoint) {
    super(project, xBreakpoint);
  }

  @Override
  public Key<? extends ExceptionBreakpoint> getCategory() {
    return CATEGORY;
  }

  protected ExceptionBreakpoint(Project project, String qualifiedName, String packageName, XBreakpoint<JavaExceptionBreakpointProperties> xBreakpoint) {
    super(project, xBreakpoint);
    setQualifiedName(qualifiedName);
    // When no explicit package is supplied, derive it from the qualified name.
    if (packageName == null) {
      setPackageName(calcPackageName(qualifiedName));
    }
    else {
      setPackageName(packageName);
    }
  }

  /**
   * Returns everything before the last '.' of {@code qualifiedName},
   * "" for a default-package class name, or null for null input.
   */
  private static String calcPackageName(String qualifiedName) {
    if (qualifiedName == null) {
      return null;
    }
    int dotIndex = qualifiedName.lastIndexOf('.');
    return dotIndex >= 0 ? qualifiedName.substring(0, dotIndex) : "";
  }

  @Override
  public String getClassName() {
    return getQualifiedName();
  }

  @Override
  public String getPackageName() {
    return getProperties().myPackageName;
  }

  @Override
  public PsiClass getPsiClass() {
    return ReadAction.compute(() -> getQualifiedName() != null ? DebuggerUtils.findClass(getQualifiedName(), myProject, GlobalSearchScope.allScope(myProject)) : null);
  }

  @Override
  public String getDisplayName() {
    return JavaDebuggerBundle.message("breakpoint.exception.breakpoint.display.name", getQualifiedName());
  }

  @Override
  public Icon getIcon() {
    return AllIcons.Debugger.Db_exception_breakpoint;
  }

  @Override
  public void reload() {
    // Nothing to recompute: the breakpoint is defined by its class name, not a source position.
  }

  @Override
  public void createRequest(final DebugProcessImpl debugProcess) {
    DebuggerManagerThreadImpl.assertIsManagerThread();
    if (!shouldCreateRequest(debugProcess)) {
      return;
    }
    // Resolve the exception class to a source position; if it cannot be found,
    // register by name and wait for the class to be prepared.
    SourcePosition classPosition = ReadAction.compute(() -> {
      PsiClass psiClass = DebuggerUtils.findClass(getQualifiedName(), myProject, debugProcess.getSearchScope());
      return psiClass != null ? SourcePosition.createFromElement(psiClass) : null;
    });
    if (classPosition == null) {
      createOrWaitPrepare(debugProcess, getQualifiedName());
    }
    else {
      createOrWaitPrepare(debugProcess, classPosition);
    }
  }

  @Override
  public void processClassPrepare(DebugProcess process, ReferenceType refType) {
    DebugProcessImpl debugProcess = (DebugProcessImpl)process;
    if (shouldCreateRequest(debugProcess, true) && !debugProcess.getRequestsManager().checkReadOnly(this)) {
      // trying to create a request
      RequestManagerImpl manager = debugProcess.getRequestsManager();
      manager.enableRequest(manager.createExceptionRequest(this, refType, isNotifyCaught(), isNotifyUncaught()));
      if (LOG.isDebugEnabled()) {
        if (refType != null) {
          LOG.debug("Created exception request for reference type " + refType.name());
        }
        else {
          LOG.debug("Created exception request for reference type null");
        }
      }
    }
  }

  @Override
  protected String calculateEventClass(EvaluationContextImpl context, LocatableEvent event) throws EvaluateException {
    return event.location().declaringType().name();
  }

  @Override
  protected ObjectReference getThisObject(SuspendContextImpl context, LocatableEvent event) throws EvaluateException {
    // For exception events "this" is the thrown exception instance.
    if (event instanceof ExceptionEvent) {
      return ((ExceptionEvent)event).exception();
    }
    return super.getThisObject(context, event);
  }

  /** Builds the console message describing the caught exception, its thread and its location. */
  @Override
  public String getEventMessage(LocatableEvent event) {
    String exceptionName = (getQualifiedName() != null) ? getQualifiedName() : CommonClassNames.JAVA_LANG_THROWABLE;
    String threadName = null;
    if (event instanceof ExceptionEvent) {
      ExceptionEvent exceptionEvent = (ExceptionEvent)event;
      try {
        exceptionName = exceptionEvent.exception().type().name();
        threadName = exceptionEvent.thread().name();
      }
      catch (Exception ignored) {
        // best-effort: fall back to the configured name when the VM refuses the queries
      }
    }
    Location location = event.location();
    String locationQName = DebuggerUtilsEx.getLocationMethodQName(location);
    String locationInfo;
    try {
      String file = location.sourceName();
      int line = DebuggerUtilsEx.getLineNumber(location, false);
      locationInfo = JavaDebuggerBundle.message("exception.breakpoint.console.message.location.info", file, line);
    }
    catch (AbsentInformationException e) {
      locationInfo = JavaDebuggerBundle.message("exception.breakpoint.console.message.location.info.absent");
    }
    if (threadName != null) {
      return JavaDebuggerBundle.message("exception.breakpoint.console.message.with.thread.info",
                                        exceptionName, threadName, locationQName, locationInfo
      );
    }
    else {
      return JavaDebuggerBundle.message("exception.breakpoint.console.message", exceptionName, locationQName, locationInfo);
    }
  }

  @Override
  public boolean evaluateCondition(EvaluationContextImpl context, LocatableEvent event) throws EvaluateException {
    // Apply catch-class filters first: skip the hit when the catch site is excluded.
    if (getProperties().isCatchFiltersEnabled() && event instanceof ExceptionEvent) {
      Location location = ((ExceptionEvent)event).catchLocation();
      if (location != null && !typeMatchesClassFilters(location.declaringType().name(),
                                                      getProperties().getCatchClassFilters(),
                                                      getProperties().getCatchClassExclusionFilters())) {
        return false;
      }
    }
    return super.evaluateCondition(context, event);
  }

  @Override
  public boolean isValid() {
    return true;
  }

  @Override
  public PsiElement getEvaluationElement() {
    if (getClassName() == null) {
      return null;
    }
    return JavaPsiFacade.getInstance(myProject).findClass(getClassName(), GlobalSearchScope.allScope(myProject));
  }

  @Override
  public void readExternal(Element parentNode) throws InvalidDataException {
    super.readExternal(parentNode);
    //noinspection HardCodedStringLiteral
    String className = parentNode.getAttributeValue("class_name");
    //noinspection HardCodedStringLiteral
    String packageName = parentNode.getAttributeValue("package_name");
    // When no package is stored, derive it from the class name — mirroring the
    // constructor. (The previous fallback was calcPackageName(packageName),
    // which is always null on this branch and therefore a no-op.)
    setPackageName(packageName != null ? packageName : calcPackageName(className));
    try {
      getProperties().NOTIFY_CAUGHT = Boolean.parseBoolean(JDOMExternalizerUtil.readField(parentNode, "NOTIFY_CAUGHT"));
    }
    catch (Exception ignored) {
      // best-effort: keep the default when the field is absent or malformed
    }
    try {
      getProperties().NOTIFY_UNCAUGHT = Boolean.parseBoolean(JDOMExternalizerUtil.readField(parentNode, "NOTIFY_UNCAUGHT"));
    }
    catch (Exception ignored) {
      // best-effort: keep the default when the field is absent or malformed
    }
    setQualifiedName(className);
    if (className == null) {
      throw new InvalidDataException(getReadNoClassName());
    }
  }

  private boolean isNotifyCaught() {
    return getProperties().NOTIFY_CAUGHT;
  }

  private boolean isNotifyUncaught() {
    return getProperties().NOTIFY_UNCAUGHT;
  }

  private String getQualifiedName() {
    return getProperties().myQualifiedName;
  }

  void setQualifiedName(String qualifiedName) {
    getProperties().myQualifiedName = qualifiedName;
  }

  void setPackageName(String packageName) {
    getProperties().myPackageName = packageName;
  }

  public void setCatchFiltersEnabled(boolean enabled) {
    getProperties().setCatchFiltersEnabled(enabled);
  }

  public void setCatchClassFilters(ClassFilter[] filters) {
    getProperties().setCatchClassFilters(filters);
  }

  public void setCatchClassExclusionFilters(ClassFilter[] filters) {
    getProperties().setCatchClassExclusionFilters(filters);
  }

  protected static String getReadNoClassName() {
    return JavaDebuggerBundle.message("error.absent.exception.breakpoint.class.name");
  }
}
| |
// Copyright (c) 2014 Intel Corporation. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.xwalk.embedding.base;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.FutureTask;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import junit.framework.Assert;
import org.chromium.content.browser.test.util.CallbackHelper;
import org.chromium.content.browser.test.util.Criteria;
import org.chromium.content.browser.test.util.CriteriaHelper;
import org.chromium.net.test.util.TestWebServer;
import org.xwalk.core.JavascriptInterface;
import org.xwalk.core.XWalkNavigationHistory;
import org.xwalk.core.XWalkNavigationItem;
import org.xwalk.core.XWalkView;
import org.xwalk.embedding.MainActivity;
import com.test.server.ActivityInstrumentationTestCase2;
import android.content.Context;
import android.content.res.AssetManager;
import android.os.Bundle;
import android.util.Log;
import android.util.Pair;
import android.webkit.WebResourceResponse;
public class XWalkViewTestBase extends ActivityInstrumentationTestCase2<MainActivity> {
/** Creates the test base for the given activity class; delegates to the instrumentation superclass. */
public XWalkViewTestBase(Class<MainActivity> activityClass) {
super(activityClass);
}
// --- Page fixtures and expected values shared by the user-agent tests ---
protected static final String EMPTY_PAGE =
"<!doctype html>" +
"<title>Set User Agent String Test</title><p>Set User Agent String Test.</p>";
protected static final String USER_AGENT =
"Set User Agent String Test Mozilla/5.0 Apple Webkit Cosswalk Mobile Safari";
// JSON-quoted form of USER_AGENT, as returned by JavaScript evaluation.
protected static final String EXPECTED_USER_AGENT =
"\"Set User Agent String Test Mozilla/5.0 Apple Webkit Cosswalk Mobile Safari\"";
protected static final int NUM_OF_CONSOLE_CALL = 10;
protected static final String REDIRECT_TARGET_PATH = "/redirect_target.html";
protected static final String TITLE = "TITLE";
protected final String mExpectedStr = "xwalk";
protected static final String DATA_URL = "data:text/html,<div/>";
// --- Polling/timeout tuning for asynchronous waits ---
protected final static int WAIT_TIMEOUT_SECONDS = 15;
protected final static long WAIT_TIMEOUT_MS = 2000;
private final static int CHECK_INTERVAL = 100;
// --- Objects under test, created in setUp() on the UI thread ---
protected XWalkView mXWalkView;
protected XWalkView mRestoreXWalkView;
protected MainActivity mainActivity;
protected TestWebServer mWebServer;
protected final TestHelperBridge mTestHelperBridge = new TestHelperBridge();
// Holds the server URLs of the three navigation-history fixture pages.
private String mUrls[]=new String[3];
protected static final int NUM_NAVIGATIONS = 3;
// Titles/paths of the three pages used by navigation-history tests (index-aligned).
public static final String TITLES[] = {
"page 1 title foo",
"page 2 title bar",
"page 3 title baz"
};
private static final String PATHS[] = {
"/p1foo.html",
"/p2bar.html",
"/p3baz.html",
};
// --- Fixed strings for the JavaScript dialog (alert/prompt/confirm) tests ---
protected final String ALERT_TEXT = "Hello World!";
protected final String PROMPT_TEXT = "How do you like your eggs in the morning?";
protected final String PROMPT_DEFAULT = "Scrambled";
protected final String PROMPT_RESULT = "I like mine with a kiss";
final String CONFIRM_TEXT = "Would you like a cookie?";
protected final AtomicBoolean callbackCalled = new AtomicBoolean(false);
final CallbackHelper jsBeforeUnloadHelper = new CallbackHelper();
boolean flagForConfirmCancelled = false;
/** Default constructor used by the test runner; always drives {@link MainActivity}. */
public XWalkViewTestBase() {
super(MainActivity.class);
}
@Override
protected void setUp() throws Exception {
super.setUp();
// Launch the activity and start a local web server before each test.
// NOTE(review): no tearDown is visible in this chunk — verify mWebServer is
// shut down elsewhere, otherwise each test leaks a server.
mainActivity = (MainActivity) getActivity();
mWebServer = TestWebServer.start();
getInstrumentation().runOnMainSync(new Runnable() {
@Override
public void run() {
// XWalkView must be created and configured on the UI thread.
mRestoreXWalkView = new XWalkView(getActivity(), getActivity());
mXWalkView = mainActivity.getXWalkView();
mXWalkView.setUIClient(new TestXWalkUIClient());
mXWalkView.setResourceClient(new TestXWalkResourceClient());
}
});
}
/** Posts a load of {@code url} to the UI thread and returns without waiting for completion. */
protected void loadUrlAsync(final String url) throws Exception {
    Runnable loadTask = new Runnable() {
        @Override
        public void run() {
            mXWalkView.load(url, null);
        }
    };
    getInstrumentation().runOnMainSync(loadTask);
}
/** Posts a load of {@code url} with inline {@code content} to the UI thread; returns immediately. */
protected void loadUrlAsync(final String url, final String content) throws Exception {
    Runnable loadTask = new Runnable() {
        @Override
        public void run() {
            mXWalkView.load(url, content);
        }
    };
    getInstrumentation().runOnMainSync(loadTask);
}
/*
 * Loads the given data into the view asynchronously.
 * NOTE(review): mimeType and isBase64Encoded are accepted but never used —
 * XWalkView.load(url, data) takes no such arguments here; confirm the
 * parameters exist only for signature compatibility with callers.
 */
protected void loadDataAsync(final String url, final String data, final String mimeType,
final boolean isBase64Encoded) throws Exception {
getInstrumentation().runOnMainSync(new Runnable() {
@Override
public void run() {
mXWalkView.load(url, data);
}
});
}
/** Reads the current page title on the UI thread. */
protected String getTitleOnUiThread() throws Exception {
    Callable<String> titleQuery = new Callable<String>() {
        @Override
        public String call() throws Exception {
            return mXWalkView.getTitle();
        }
    };
    return runTestOnUiThreadAndGetResult(titleQuery);
}
/**
 * Runs {@code callable} synchronously on the UI thread and returns its result.
 * Waits for the instrumentation to go idle first so pending UI work settles;
 * any exception thrown by the callable is rethrown (wrapped) by get().
 */
protected <R> R runTestOnUiThreadAndGetResult(Callable<R> callable)
        throws Exception {
    FutureTask<R> uiTask = new FutureTask<R>(callable);
    getInstrumentation().waitForIdleSync();
    getInstrumentation().runOnMainSync(uiTask);
    return uiTask.get();
}
/**
 * Reads the named asset file fully and returns its content as a String
 * (platform default charset, unchanged from the original behavior).
 * Fixes two defects in the previous version: a single read() call is not
 * guaranteed to fill the buffer (InputStream.read may return fewer bytes),
 * and the stream leaked when read()/available() threw.
 */
protected String getFileContent(String fileName) {
    try {
        Context context = getInstrumentation().getContext();
        InputStream inputStream = context.getAssets().open(fileName);
        try {
            // NOTE: for asset streams available() reports the full length — TODO confirm.
            int size = inputStream.available();
            byte[] buffer = new byte[size];
            int offset = 0;
            // Loop until the buffer is full or the stream ends; a single read()
            // may legally return fewer bytes than requested.
            while (offset < size) {
                int read = inputStream.read(buffer, offset, size - offset);
                if (read < 0) {
                    break;
                }
                offset += read;
            }
            return new String(buffer, 0, offset);
        } finally {
            inputStream.close();
        }
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
/** Returns the XWalkView under test (populated in setUp() on the UI thread). */
protected XWalkView getXWalkView() {
return mXWalkView;
}
/** Queries the navigation history's canGoBack flag on the UI thread. */
protected boolean canGoBackOnUiThread() throws Throwable {
    Callable<Boolean> backQuery = new Callable<Boolean>() {
        @Override
        public Boolean call() {
            return mXWalkView.getNavigationHistory().canGoBack();
        }
    };
    return runTestOnUiThreadAndGetResult(backQuery);
}
/** Checks on the UI thread whether the view is currently fullscreen. */
protected boolean hasEnteredFullScreenOnUiThread() throws Throwable {
    Callable<Boolean> fullscreenQuery = new Callable<Boolean>() {
        @Override
        public Boolean call() {
            return mXWalkView.hasEnteredFullscreen();
        }
    };
    return runTestOnUiThreadAndGetResult(fullscreenQuery);
}
/** Exits fullscreen mode, dispatching the call to the UI thread. */
protected void leaveFullscreenOnUiThread() throws Throwable {
    Runnable exitTask = new Runnable() {
        @Override
        public void run() {
            mXWalkView.leaveFullscreen();
        }
    };
    getInstrumentation().runOnMainSync(exitTask);
}
/** Queries the navigation history's canGoForward flag on the UI thread. */
protected boolean canGoForwardOnUiThread() throws Throwable {
    Callable<Boolean> forwardQuery = new Callable<Boolean>() {
        @Override
        public Boolean call() {
            return mXWalkView.getNavigationHistory().canGoForward();
        }
    };
    return runTestOnUiThreadAndGetResult(forwardQuery);
}
protected XWalkNavigationItem getCurrentItemOnUiThread() throws Throwable {
return runTestOnUiThreadAndGetResult(new Callable<XWalkNavigationItem>() {
@Override
public XWalkNavigationItem call() {
return mXWalkView.getNavigationHistory().getCurrentItem();
}
});
}
protected String executeJavaScriptAndWaitForResult(final String code) throws Exception {
final OnEvaluateJavaScriptResultHelper helper = mTestHelperBridge.getOnEvaluateJavaScriptResultHelper();
getInstrumentation().runOnMainSync(new Runnable() {
@Override
public void run() {
helper.evaluateJavascript(mXWalkView, code);
}
});
helper.waitUntilHasValue();
Assert.assertTrue("Failed to retrieve JavaScript evaluation results.", helper.hasValue());
return helper.getJsonResultAndClear();
}
    // --- UI-thread getters: each wraps a single XWalkView query so tests can
    // --- read view state safely from the instrumentation thread. ---
    /** Returns the current page URL. */
    protected String getUrlOnUiThread() throws Exception {
        return runTestOnUiThreadAndGetResult(new Callable<String>() {
            @Override
            public String call() throws Exception {
                return mXWalkView.getUrl();
            }
        });
    }
    /** Returns the remote-debugging URL path, or "" when remote debugging is off. */
    protected String getRemoteDebuggingUrlOnUiThread() throws Exception {
        return runTestOnUiThreadAndGetResult(new Callable<String>() {
            @Override
            public String call() throws Exception {
                if(mXWalkView.getRemoteDebuggingUrl() == null)
                {
                    return "";
                }
                return mXWalkView.getRemoteDebuggingUrl().getPath();
            }
        });
    }
    /** Returns the URL of the current navigation-history item. */
    protected String getCurrentItemUrlOnUiThread() throws Exception {
        return runTestOnUiThreadAndGetResult(new Callable<String>() {
            @Override
            public String call() throws Exception {
                return mXWalkView.getNavigationHistory().getCurrentItem().getUrl();
            }
        });
    }
    // NOTE(review): identical to getCurrentItemUrlOnUiThread() above — candidate
    // for consolidation once callers are checked.
    protected String getNavigationUrlOnUiThread() throws Exception {
        return runTestOnUiThreadAndGetResult(new Callable<String>() {
            @Override
            public String call() throws Exception {
                return mXWalkView.getNavigationHistory().getCurrentItem().getUrl();
            }
        });
    }
    /** Returns the original (pre-redirect) URL of the current history item. */
    protected String getNavigationOriginalUrlOnUiThread() throws Exception {
        return runTestOnUiThreadAndGetResult(new Callable<String>() {
            @Override
            public String call() throws Exception {
                return mXWalkView.getNavigationHistory().getCurrentItem().getOriginalUrl();
            }
        });
    }
    /** Returns the title of the current history item. */
    protected String getNavigationTitleOnUiThread() throws Exception {
        return runTestOnUiThreadAndGetResult(new Callable<String>() {
            @Override
            public String call() throws Exception {
                return mXWalkView.getNavigationHistory().getCurrentItem().getTitle();
            }
        });
    }
    /** Returns the navigation-history size as a decimal string. */
    protected String getSizeOnUiThread() throws Exception {
        return runTestOnUiThreadAndGetResult(new Callable<String>() {
            @Override
            public String call() throws Exception {
                return String.valueOf(mXWalkView.getNavigationHistory().size());
            }
        });
    }
    /** Returns "true"/"false" for whether a history item exists at index 1. */
    protected String hasItemAtOnUiThread() throws Exception {
        return runTestOnUiThreadAndGetResult(new Callable<String>() {
            @Override
            public String call() throws Exception {
                return String.valueOf(mXWalkView.getNavigationHistory().hasItemAt(1));
            }
        });
    }
    /** Returns the view's original URL. */
    protected String getOriginalUrlOnUiThread() throws Exception {
        return runTestOnUiThreadAndGetResult(new Callable<String>() {
            @Override
            public String call() throws Exception {
                return mXWalkView.getOriginalUrl();
            }
        });
    }
    /** Clears the view's cache on the UI thread. */
    protected void clearCacheOnUiThread(final boolean includeDiskFiles) throws Exception {
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                mXWalkView.clearCache(includeDiskFiles);
            }
        });
    }
    /** Returns the embedding-API version string. */
    protected String getAPIVersionOnUiThread() throws Exception {
        return runTestOnUiThreadAndGetResult(new Callable<String>() {
            @Override
            public String call() throws Exception {
                return mXWalkView.getAPIVersion();
            }
        });
    }
    /** Returns the Crosswalk runtime version string. */
    protected String getXWalkVersionOnUiThread() throws Exception {
        return runTestOnUiThreadAndGetResult(new Callable<String>() {
            @Override
            public String call() throws Exception {
                return mXWalkView.getXWalkVersion();
            }
        });
    }
private String getAssetsFileContent(AssetManager assetManager, String fileName)
throws IOException {
String result = "";
InputStream inputStream = null;
try {
inputStream = assetManager.open(fileName);
int size = inputStream.available();
byte[] buffer = new byte[size];
inputStream.read(buffer);
result = new String(buffer);
} finally {
if (inputStream != null) {
inputStream.close();
}
}
return result;
}
public class PerformExecute implements Runnable
{
protected StringBuffer urlBuf;
public PerformExecute(StringBuffer url) {
urlBuf = url;
}
@Override
public void run() {
}
}
public static class EmptyInputStream extends InputStream {
@Override
public int available() {
return 0;
}
@Override
public int read() throws IOException {
return -1;
}
@Override
public int read(byte b[]) throws IOException {
return -1;
}
@Override
public int read(byte b[], int off, int len) throws IOException {
return -1;
}
@Override
public long skip(long n) throws IOException {
if (n < 0)
throw new IOException("skipping negative number of bytes");
return 0;
}
}
    /** Navigates {@code n} steps back in history and waits for page-finished. */
    protected void goBackSync(final int n) throws Throwable {
        runTestWaitPageFinished(new Runnable(){
            @Override
            public void run() {
                getInstrumentation().runOnMainSync(new Runnable() {
                    @Override
                    public void run() {
                        mXWalkView.getNavigationHistory().navigate(
                            XWalkNavigationHistory.Direction.BACKWARD, n);
                    }
                });
            }
        });
    }
    /** Navigates {@code n} steps forward in history and waits for page-finished. */
    protected void goForwardSync(final int n) throws Throwable {
        runTestWaitPageFinished(new Runnable(){
            @Override
            public void run() {
                getInstrumentation().runOnMainSync(new Runnable() {
                    @Override
                    public void run() {
                        mXWalkView.getNavigationHistory().navigate(
                            XWalkNavigationHistory.Direction.FORWARD, n);
                    }
                });
            }
        });
    }
    /**
     * Registers the first {@code upto} pages (from the PATHS/TITLES fixtures) with
     * the web server, recording their URLs in mUrls, and loads each synchronously.
     */
    protected void setServerResponseAndLoad(int upto) throws Throwable {
        for (int i = 0; i < upto; ++i) {
            String html = "<html><head><title>" + TITLES[i] + "</title></head></html>";
            mUrls[i] = mWebServer.setResponse(PATHS[i], html, null);
            loadUrlSync(mUrls[i]);
        }
    }
    /** Saves mXWalkView's state into a Bundle and restores it into mRestoreXWalkView. */
    protected void saveAndRestoreStateOnUiThread() throws Throwable {
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                Bundle bundle = new Bundle();
                mXWalkView.saveState(bundle);
                mRestoreXWalkView.restoreState(bundle);
            }
        });
    }
    /**
     * Polls {@code callable} on the UI thread until it returns true or the
     * CriteriaHelper timeout elapses; exceptions from the callable count as false.
     */
    protected boolean pollOnUiThread(final Callable<Boolean> callable) throws Exception {
        return CriteriaHelper.pollForCriteria(new Criteria() {
            @Override
            public boolean isSatisfied() {
                try {
                    return runTestOnUiThreadAndGetResult(callable);
                } catch (Throwable e) {
                    return false;
                }
            }
        });
    }
    /**
     * Asserts that the restored view's history has NUM_NAVIGATIONS entries, is
     * positioned on the last one, and that each entry matches the mUrls/TITLES fixtures.
     */
    protected void checkHistoryItemList(XWalkView restoreXWalkView) throws Throwable {
        XWalkNavigationHistory history = getNavigationHistoryOnUiThread(restoreXWalkView);
        assertEquals(NUM_NAVIGATIONS, history.size());
        assertEquals(NUM_NAVIGATIONS - 1, history.getCurrentIndex());
        for (int i = 0; i < NUM_NAVIGATIONS; ++i) {
            assertEquals(mUrls[i], history.getItemAt(i).getUrl());
            assertEquals(TITLES[i], history.getItemAt(i).getTitle());
        }
    }
    /** Fetches the given view's navigation history on the UI thread. */
    private XWalkNavigationHistory getNavigationHistoryOnUiThread(
            final XWalkView content) throws Throwable{
        return runTestOnUiThreadAndGetResult(new Callable<XWalkNavigationHistory>() {
            @Override
            public XWalkNavigationHistory call() throws Exception {
                return content.getNavigationHistory();
            }
        });
    }
@Override
protected void tearDown() throws Exception {
if (mWebServer != null) {
mWebServer.shutdown();
}
if(mainActivity != null)
{
mainActivity.finish();
}
super.tearDown();
}
    /**
     * Object injected into page JavaScript for bridge tests. Methods annotated
     * with @JavascriptInterface are callable from the page; getTextWithoutAnnotation
     * is annotated here too despite its name — presumably the name refers to a
     * variant used by a specific test; confirm against callers.
     */
    public class TestJavascriptInterface {
        @JavascriptInterface
        public String getTextWithoutAnnotation() {
            return mExpectedStr;
        }
        @JavascriptInterface
        public String getText() {
            return mExpectedStr;
        }
        @JavascriptInterface
        public String getDateText() {
            return new Date().toString();
        }
    }
    /** Injects a TestJavascriptInterface into the page as "testInterface", on the UI thread. */
    protected void addJavascriptInterface() {
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                getXWalkView().addJavascriptInterface(new TestJavascriptInterface(),
                        "testInterface");
            }
        });
    }
    /**
     * Evaluates {@code script} in the page; on success the result becomes the
     * document title, on exception the title is set to "xwalk" so tests can
     * detect that the script threw.
     */
    protected void raisesExceptionAndSetTitle(String script) throws Throwable {
        executeJavaScriptAndWaitForResult("try { var title = " +
                                          script + ";" +
                                          "  document.title = title;" +
                                          "} catch (exception) {" +
                                          "  document.title = \"xwalk\";" +
                                          "}");
    }
    /** UI client wired to this test's helper bridge, view and callback flag. */
    public class TestXWalkUIClient extends TestXWalkUIClientBase {
        public TestXWalkUIClient() {
            super(mTestHelperBridge, mXWalkView, callbackCalled);
        }
    }
    /** Resource client wired to this test's helper bridge and view. */
    class TestXWalkResourceClient extends TestXWalkResourceClientBase {
        public TestXWalkResourceClient() {
            super(mTestHelperBridge,mXWalkView);
        }
    }
    // --- Synchronous loaders: each kicks off an async load and blocks until the
    // --- next onPageFinished callback (bounded by WAIT_TIMEOUT_SECONDS). ---
    /** Loads {@code url} and waits for page-finished. */
    protected void loadUrlSync(final String url) throws Exception {
        CallbackHelper pageFinishedHelper = mTestHelperBridge.getOnPageFinishedHelper();
        int currentCallCount = pageFinishedHelper.getCallCount();
        loadUrlAsync(url);
        pageFinishedHelper.waitForCallback(currentCallCount, 1, WAIT_TIMEOUT_SECONDS,
                TimeUnit.SECONDS);
    }
    /** Loads {@code content} under {@code url} and waits for page-finished. */
    protected void loadUrlSync(final String url, final String content) throws Exception {
        CallbackHelper pageFinishedHelper = mTestHelperBridge.getOnPageFinishedHelper();
        int currentCallCount = pageFinishedHelper.getCallCount();
        loadUrlAsync(url, content);
        pageFinishedHelper.waitForCallback(currentCallCount, 1, WAIT_TIMEOUT_SECONDS,
                TimeUnit.SECONDS);
    }
    /**
     * Loads {@code url} then {@code code} asynchronously and waits for a single
     * page-finished callback.
     * NOTE(review): two async loads but only one callback awaited — presumably the
     * javascript: load does not fire page-finished; confirm before reuse.
     */
    protected void loadJavaScriptSync(final String url, final String code) throws Exception {
        CallbackHelper pageFinishedHelper = mTestHelperBridge.getOnPageFinishedHelper();
        int currentCallCount = pageFinishedHelper.getCallCount();
        loadUrlAsync(url);
        loadUrlAsync(code);
        pageFinishedHelper.waitForCallback(currentCallCount, 1, WAIT_TIMEOUT_SECONDS,
                TimeUnit.SECONDS);
    }
    /** Loads an app from its manifest and waits for page-finished. */
    protected void loadFromManifestSync(final String path, final String name) throws Exception {
        CallbackHelper pageFinishedHelper = mTestHelperBridge.getOnPageFinishedHelper();
        int currentCallCount = pageFinishedHelper.getCallCount();
        loadFromManifestAsync(path, name);
        pageFinishedHelper.waitForCallback(currentCallCount, 1, WAIT_TIMEOUT_SECONDS,
                TimeUnit.SECONDS);
    }
    /**
     * Reads the manifest asset {@code name} and starts loading the app from it on
     * the UI thread. An IOException while reading is only logged, so the load
     * proceeds with empty manifest content.
     */
    protected void loadFromManifestAsync(final String path, final String name) throws Exception {
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                String manifestContent = "";
                try {
                    manifestContent = getAssetsFileContent(mainActivity.getAssets(), name);
                } catch (IOException e) {
                    e.printStackTrace();
                }
                mXWalkView.loadAppFromManifest(path, manifestContent);
            }
        });
    }
    /** Loads an asset file's contents as a page and waits for page-finished. */
    protected void loadAssetFile(String fileName) throws Exception {
        CallbackHelper pageFinishedHelper = mTestHelperBridge.getOnPageFinishedHelper();
        int currentCallCount = pageFinishedHelper.getCallCount();
        String fileContent = getFileContent(fileName);
        loadDataAsync(fileName, fileContent, "text/html", false);
        pageFinishedHelper.waitForCallback(currentCallCount, 1, WAIT_TIMEOUT_SECONDS,
                TimeUnit.SECONDS);
    }
    /** Runs {@code runnable} and waits for the next page-finished callback. */
    protected void runTestWaitPageFinished(Runnable runnable) throws Exception{
        CallbackHelper pageFinishedHelper = mTestHelperBridge.getOnPageFinishedHelper();
        int currentCallCount = pageFinishedHelper.getCallCount();
        runnable.run();
        pageFinishedHelper.waitForCallback(currentCallCount, 1, WAIT_TIMEOUT_SECONDS,
                TimeUnit.SECONDS);
    }
    /** Reloads the page with the given mode and waits for page-finished. */
    protected void reloadSync(final int mode) throws Exception {
        runTestWaitPageFinished(new Runnable(){
            @Override
            public void run() {
                getInstrumentation().runOnMainSync(new Runnable() {
                    @Override
                    public void run() {
                        mXWalkView.reload(mode);
                    }
                });
            }
        });
    }
    /** Loads {@code data} (see loadDataAsync's caveats) and waits for page-finished. */
    protected void loadDataSync(final String url, final String data, final String mimeType,
            final boolean isBase64Encoded) throws Exception {
        CallbackHelper pageFinishedHelper = mTestHelperBridge.getOnPageFinishedHelper();
        int currentCallCount = pageFinishedHelper.getCallCount();
        loadDataAsync(url, data, mimeType, isBase64Encoded);
        pageFinishedHelper.waitForCallback(currentCallCount, 1, WAIT_TIMEOUT_SECONDS,
                TimeUnit.SECONDS);
    }
    /** Loads an asset file and waits for the title-updated callback (not page-finished). */
    public void loadAssetFileAndWaitForTitle(String fileName) throws Exception {
        CallbackHelper getTitleHelper = mTestHelperBridge.getOnTitleUpdatedHelper();
        int currentCallCount = getTitleHelper.getCallCount();
        String fileContent = getFileContent(fileName);
        loadDataSync(fileName, fileContent, "text/html", false);
        getTitleHelper.waitForCallback(currentCallCount, 1, WAIT_TIMEOUT_SECONDS,
                TimeUnit.SECONDS);
    }
public boolean checkMethodInClass(Class<?> clazz, String methodName){
Method[] methods = clazz.getMethods();
for(Method method : methods)
{
if(method.getName().equals(methodName)){
return true;
}
}
Method[] methods2 = clazz.getDeclaredMethods();
for(Method method : methods2)
{
if(method.getName().equals(methodName)){
return true;
}
}
return false;
}
    /**
     * Waits (polling) until the DOM element with {@code id} exists, then
     * dispatches a synthetic click event to it via evaluated JavaScript.
     * Dispatch failures are only logged, not asserted.
     */
    public void clickOnElementId_evaluateJavascript(final String id) throws Exception {
        Assert.assertTrue(CriteriaHelper.pollForCriteria(new Criteria() {
            @Override
            public boolean isSatisfied() {
                try {
                    String idIsNotNull = executeJavaScriptAndWaitForResult(
                        "document.getElementById('" + id + "') != null");
                    return idIsNotNull.equals("true");
                } catch (Throwable t) {
                    t.printStackTrace();
                    Assert.fail("Failed to check if DOM is loaded: " + t.toString());
                    return false;
                }
            }
        }, WAIT_TIMEOUT_MS, CHECK_INTERVAL));
        try {
            executeJavaScriptAndWaitForResult(
                "var evObj = document.createEvent('Events'); " +
                "evObj.initEvent('click', true, false); " +
                "document.getElementById('" + id + "').dispatchEvent(evObj);" +
                "console.log('element with id [" + id + "] clicked');");
        } catch (Throwable t) {
            t.printStackTrace();
        }
    }
    /**
     * Like clickOnElementId_evaluateJavascript, but can target an element inside a
     * named frame ({@code frameName} may be null for the top document), and
     * dispatches the click via a javascript: URL load instead of evaluation.
     */
    public void clickOnElementId(final String id, String frameName) throws Exception {
        String str;
        if (frameName != null) {
            str = "top.window." + frameName + ".document.getElementById('" + id + "')";
        } else {
            str = "document.getElementById('" + id + "')";
        }
        final String script1 = str + " != null";
        final String script2 = str + ".dispatchEvent(evObj);";
        Assert.assertTrue(CriteriaHelper.pollForCriteria(new Criteria() {
            @Override
            public boolean isSatisfied() {
                try {
                    String idIsNotNull = executeJavaScriptAndWaitForResult(script1);
                    return idIsNotNull.equals("true");
                } catch (Throwable t) {
                    t.printStackTrace();
                    Assert.fail("Failed to check if DOM is loaded: " + t.toString());
                    return false;
                }
            }
        }, WAIT_TIMEOUT_MS, CHECK_INTERVAL));
        try {
            loadJavaScriptUrl("javascript:var evObj = document.createEvent('Events'); " +
                    "evObj.initEvent('click', true, false); " +
                    script2 +
                    "console.log('element with id [" + id + "] clicked');");
        } catch (Throwable t) {
            t.printStackTrace();
        }
    }
protected String addPageToTestServer(TestWebServer webServer, String httpPath, String html) {
List<Pair<String, String>> headers = new ArrayList<Pair<String, String>>();
headers.add(Pair.create("Content-Type", "text/html"));
headers.add(Pair.create("Cache-Control", "no-store"));
return webServer.setResponse(httpPath, html, headers);
}
    // --- Lifecycle wrappers: each posts a single XWalkView lifecycle call to the
    // --- UI thread and blocks until it has run. ---
    /** Stops the current page load. */
    protected void stopLoading() throws Exception {
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                mXWalkView.stopLoading();
            }
        });
    }
    /** Pauses JavaScript timers. */
    protected void pauseTimers() throws Exception {
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                mXWalkView.pauseTimers();
            }
        });
    }
    /** Resumes JavaScript timers. */
    protected void resumeTimers() throws Exception {
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                mXWalkView.resumeTimers();
            }
        });
    }
    /** Notifies the view it is hidden. */
    protected void onHide() throws Exception {
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                mXWalkView.onHide();
            }
        });
    }
    /** Notifies the view it is shown. */
    protected void onShow() throws Exception {
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                mXWalkView.onShow();
            }
        });
    }
    /** Destroys the view. */
    protected void onDestroy() throws Exception {
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                mXWalkView.onDestroy();
            }
        });
    }
    // Title used by the canned /about.html test page below.
    public static final String ABOUT_TITLE = "About the Google";
    /** Registers the canned about.html page on the server and returns its URL. */
    protected String addAboutPageToTestServer(TestWebServer webServer) {
        return addPageToTestServer(webServer, "/" + "about.html", "<html><head><title>" + ABOUT_TITLE + "</title></head></html>");
    }
    /** Wraps {@code input} as a UTF-8 text/html WebResourceResponse. */
    protected WebResourceResponse stringToWebResourceResponse(String input) throws Throwable {
        final String mimeType = "text/html";
        final String encoding = "UTF-8";
        return new WebResourceResponse(
                mimeType, encoding, new ByteArrayInputStream(input.getBytes(encoding)));
    }
    /**
     * Loads a javascript: URL asynchronously; anything else is rejected with a
     * warning log and ignored.
     */
    protected void loadJavaScriptUrl(final String url) throws Exception {
        if (!url.startsWith("javascript:")) {
            Log.w("Test", "loadJavascriptUrl only accepts javascript: url");
            return;
        }
        loadUrlAsync(url);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.network.shuffle;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.codahale.metrics.Gauge;
import com.codahale.metrics.Meter;
import com.codahale.metrics.Metric;
import com.codahale.metrics.MetricSet;
import com.codahale.metrics.Timer;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Lists;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.spark.network.buffer.ManagedBuffer;
import org.apache.spark.network.client.RpcResponseCallback;
import org.apache.spark.network.client.TransportClient;
import org.apache.spark.network.server.OneForOneStreamManager;
import org.apache.spark.network.server.RpcHandler;
import org.apache.spark.network.server.StreamManager;
import org.apache.spark.network.shuffle.ExternalShuffleBlockResolver.AppExecId;
import org.apache.spark.network.shuffle.protocol.*;
import static org.apache.spark.network.util.NettyUtils.getRemoteAddress;
import org.apache.spark.network.util.TransportConf;
/**
* RPC Handler for a server which can serve shuffle blocks from outside of an Executor process.
*
* Handles registering executors and opening shuffle blocks from them. Shuffle blocks are registered
* with the "one-for-one" strategy, meaning each Transport-layer Chunk is equivalent to one Spark-
* level shuffle block.
*/
public class ExternalShuffleBlockHandler extends RpcHandler {
  private static final Logger logger = LoggerFactory.getLogger(ExternalShuffleBlockHandler.class);
  // Resolves shuffle block ids to on-disk data; package-visible for tests.
  @VisibleForTesting
  final ExternalShuffleBlockResolver blockManager;
  private final OneForOneStreamManager streamManager;
  private final ShuffleMetrics metrics;
  public ExternalShuffleBlockHandler(TransportConf conf, File registeredExecutorFile)
    throws IOException {
    this(new OneForOneStreamManager(),
      new ExternalShuffleBlockResolver(conf, registeredExecutorFile));
  }
  /** Enables mocking out the StreamManager and BlockManager. */
  @VisibleForTesting
  public ExternalShuffleBlockHandler(
      OneForOneStreamManager streamManager,
      ExternalShuffleBlockResolver blockManager) {
    this.metrics = new ShuffleMetrics();
    this.streamManager = streamManager;
    this.blockManager = blockManager;
  }
  /** Decodes the RPC payload into a BlockTransferMessage and dispatches it. */
  @Override
  public void receive(TransportClient client, ByteBuffer message, RpcResponseCallback callback) {
    BlockTransferMessage msgObj = BlockTransferMessage.Decoder.fromByteBuffer(message);
    handleMessage(msgObj, client, callback);
  }
  /**
   * Handles the two supported messages: OpenBlocks (registers the requested
   * blocks as a stream and replies with a StreamHandle) and RegisterExecutor
   * (records the executor's shuffle info). Request latency and transfer volume
   * are recorded in {@link ShuffleMetrics}. Any other message type is rejected.
   */
  protected void handleMessage(
      BlockTransferMessage msgObj,
      TransportClient client,
      RpcResponseCallback callback) {
    if (msgObj instanceof OpenBlocks) {
      final Timer.Context responseDelayContext = metrics.openBlockRequestLatencyMillis.time();
      try {
        OpenBlocks msg = (OpenBlocks) msgObj;
        // The client may only open blocks for the application it authenticated as.
        checkAuth(client, msg.appId);
        List<ManagedBuffer> blocks = Lists.newArrayList();
        long totalBlockSize = 0;
        for (String blockId : msg.blockIds) {
          final ManagedBuffer block = blockManager.getBlockData(msg.appId, msg.execId, blockId);
          totalBlockSize += block != null ? block.size() : 0;
          blocks.add(block);
        }
        long streamId = streamManager.registerStream(client.getClientId(), blocks.iterator());
        if (logger.isTraceEnabled()) {
          logger.trace("Registered streamId {} with {} buffers for client {} from host {}",
              streamId,
              msg.blockIds.length,
              client.getClientId(),
              getRemoteAddress(client.getChannel()));
        }
        callback.onSuccess(new StreamHandle(streamId, msg.blockIds.length).toByteBuffer());
        metrics.blockTransferRateBytes.mark(totalBlockSize);
      } finally {
        responseDelayContext.stop();
      }
    } else if (msgObj instanceof RegisterExecutor) {
      final Timer.Context responseDelayContext =
        metrics.registerExecutorRequestLatencyMillis.time();
      try {
        RegisterExecutor msg = (RegisterExecutor) msgObj;
        checkAuth(client, msg.appId);
        blockManager.registerExecutor(msg.appId, msg.execId, msg.executorInfo);
        // Empty body signals success; there is no payload for this reply.
        callback.onSuccess(ByteBuffer.wrap(new byte[0]));
      } finally {
        responseDelayContext.stop();
      }
    } else {
      throw new UnsupportedOperationException("Unexpected message: " + msgObj);
    }
  }
  /** Exposes this handler's metrics (latencies, transfer rate, executor count). */
  public MetricSet getAllMetrics() {
    return metrics;
  }
  @Override
  public StreamManager getStreamManager() {
    return streamManager;
  }
  /**
   * Removes an application (once it has been terminated), and optionally will clean up any
   * local directories associated with the executors of that application in a separate thread.
   */
  public void applicationRemoved(String appId, boolean cleanupLocalDirs) {
    blockManager.applicationRemoved(appId, cleanupLocalDirs);
  }
  /**
   * Register an (application, executor) with the given shuffle info.
   *
   * The "re-" is meant to highlight the intended use of this method -- when this service is
   * restarted, this is used to restore the state of executors from before the restart. Normal
   * registration will happen via a message handled in receive()
   *
   * @param appExecId
   * @param executorInfo
   */
  public void reregisterExecutor(AppExecId appExecId, ExecutorShuffleInfo executorInfo) {
    blockManager.registerExecutor(appExecId.appId, appExecId.execId, executorInfo);
  }
  /** Releases the block resolver's resources. */
  public void close() {
    blockManager.close();
  }
  // Rejects requests whose authenticated client id does not match the target appId.
  // A null client id (unauthenticated transport) is allowed through.
  private void checkAuth(TransportClient client, String appId) {
    if (client.getClientId() != null && !client.getClientId().equals(appId)) {
      throw new SecurityException(String.format(
        "Client for %s not authorized for application %s.", client.getClientId(), appId));
    }
  }
  /**
   * A simple class to wrap all shuffle service wrapper metrics
   */
  private class ShuffleMetrics implements MetricSet {
    private final Map<String, Metric> allMetrics;
    // Time latency for open block request in ms
    private final Timer openBlockRequestLatencyMillis = new Timer();
    // Time latency for executor registration latency in ms
    private final Timer registerExecutorRequestLatencyMillis = new Timer();
    // Block transfer rate in byte per second
    private final Meter blockTransferRateBytes = new Meter();
    private ShuffleMetrics() {
      allMetrics = new HashMap<>();
      allMetrics.put("openBlockRequestLatencyMillis", openBlockRequestLatencyMillis);
      allMetrics.put("registerExecutorRequestLatencyMillis", registerExecutorRequestLatencyMillis);
      allMetrics.put("blockTransferRateBytes", blockTransferRateBytes);
      // Gauge reads live executor count from the resolver on each poll.
      allMetrics.put("registeredExecutorsSize",
                     (Gauge<Integer>) () -> blockManager.getRegisteredExecutorsSize());
    }
    @Override
    public Map<String, Metric> getMetrics() {
      return allMetrics;
    }
  }
}
| |
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.segment.realtime.plumber;
import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.google.common.base.Predicate;
import com.google.common.base.Stopwatch;
import com.google.common.base.Supplier;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.primitives.Ints;
import com.google.common.util.concurrent.MoreExecutors;
import com.metamx.common.Granularity;
import com.metamx.common.ISE;
import com.metamx.common.Pair;
import com.metamx.common.concurrent.ScheduledExecutors;
import com.metamx.common.guava.FunctionalIterable;
import com.metamx.emitter.EmittingLogger;
import com.metamx.emitter.service.ServiceEmitter;
import com.metamx.emitter.service.ServiceMetricEvent;
import io.druid.client.FilteredServerView;
import io.druid.client.ServerView;
import io.druid.common.guava.ThreadRenamingCallable;
import io.druid.common.guava.ThreadRenamingRunnable;
import io.druid.concurrent.Execs;
import io.druid.data.input.Committer;
import io.druid.data.input.InputRow;
import io.druid.query.MetricsEmittingQueryRunner;
import io.druid.query.Query;
import io.druid.query.QueryRunner;
import io.druid.query.QueryRunnerFactory;
import io.druid.query.QueryRunnerFactoryConglomerate;
import io.druid.query.QueryToolChest;
import io.druid.query.ReportTimelineMissingSegmentQueryRunner;
import io.druid.query.SegmentDescriptor;
import io.druid.query.spec.SpecificSegmentQueryRunner;
import io.druid.query.spec.SpecificSegmentSpec;
import io.druid.segment.IndexIO;
import io.druid.segment.IndexMaker;
import io.druid.segment.IndexMerger;
import io.druid.segment.IndexSpec;
import io.druid.segment.QueryableIndex;
import io.druid.segment.QueryableIndexSegment;
import io.druid.segment.Segment;
import io.druid.segment.incremental.IndexSizeExceededException;
import io.druid.segment.indexing.DataSchema;
import io.druid.segment.indexing.RealtimeTuningConfig;
import io.druid.segment.loading.DataSegmentPusher;
import io.druid.segment.realtime.FireDepartmentMetrics;
import io.druid.segment.realtime.FireHydrant;
import io.druid.segment.realtime.SegmentPublisher;
import io.druid.server.coordination.DataSegmentAnnouncer;
import io.druid.server.coordination.DruidServerMetadata;
import io.druid.timeline.DataSegment;
import io.druid.timeline.TimelineObjectHolder;
import io.druid.timeline.VersionedIntervalTimeline;
import io.druid.timeline.partition.SingleElementPartitionChunk;
import org.apache.commons.io.FileUtils;
import org.joda.time.DateTime;
import org.joda.time.Duration;
import org.joda.time.Interval;
import org.joda.time.Period;
import javax.annotation.Nullable;
import java.io.Closeable;
import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
/**
*/
public class RealtimePlumber implements Plumber
{
  private static final EmittingLogger log = new EmittingLogger(RealtimePlumber.class);
  // Milliseconds after which a slow executor startup is warned about.
  private static final int WARN_DELAY = 1000;
  private final DataSchema schema;
  private final RealtimeTuningConfig config;
  // Decides which event timestamps are accepted into sinks.
  private final RejectionPolicy rejectionPolicy;
  private final FireDepartmentMetrics metrics;
  private final ServiceEmitter emitter;
  private final QueryRunnerFactoryConglomerate conglomerate;
  private final DataSegmentAnnouncer segmentAnnouncer;
  private final ExecutorService queryExecutorService;
  private final DataSegmentPusher dataSegmentPusher;
  private final SegmentPublisher segmentPublisher;
  private final FilteredServerView serverView;
  // Signaled when segment handoff completes; guarded manually via synchronized blocks.
  private final Object handoffCondition = new Object();
  // Active sinks keyed by truncated (segment-granular) timestamp millis.
  private final Map<Long, Sink> sinks = Maps.newConcurrentMap();
  // Timeline of sinks for query routing, keyed by version string.
  private final VersionedIntervalTimeline<String, Sink> sinkTimeline = new VersionedIntervalTimeline<String, Sink>(
      String.CASE_INSENSITIVE_ORDER
  );
  // Wall-clock millis at which the next intermediate persist is due.
  private volatile long nextFlush = 0;
  private volatile boolean shuttingDown = false;
  private volatile boolean stopped = false;
  private volatile boolean cleanShutdown = true;
  // Lazily created in initializeExecutors(); null until startJob() runs.
  private volatile ExecutorService persistExecutor = null;
  private volatile ExecutorService mergeExecutor = null;
  private volatile ScheduledExecutorService scheduledExecutor = null;
  private static final String COMMIT_METADATA_KEY = "%commitMetadata%";
  private static final String COMMIT_METADATA_TIMESTAMP_KEY = "%commitMetadataTimestamp%";
  /**
   * Wires the plumber to its collaborators; the rejection policy is derived from
   * the tuning config's policy factory and window period.
   */
  public RealtimePlumber(
      DataSchema schema,
      RealtimeTuningConfig config,
      FireDepartmentMetrics metrics,
      ServiceEmitter emitter,
      QueryRunnerFactoryConglomerate conglomerate,
      DataSegmentAnnouncer segmentAnnouncer,
      ExecutorService queryExecutorService,
      DataSegmentPusher dataSegmentPusher,
      SegmentPublisher segmentPublisher,
      FilteredServerView serverView
  )
  {
    this.schema = schema;
    this.config = config;
    this.rejectionPolicy = config.getRejectionPolicyFactory().create(config.getWindowPeriod());
    this.metrics = metrics;
    this.emitter = emitter;
    this.conglomerate = conglomerate;
    this.segmentAnnouncer = segmentAnnouncer;
    this.queryExecutorService = queryExecutorService;
    this.dataSegmentPusher = dataSegmentPusher;
    this.segmentPublisher = segmentPublisher;
    this.serverView = serverView;
    log.info("Creating plumber using rejectionPolicy[%s]", getRejectionPolicy());
  }
  /** Returns the ingestion schema this plumber was built with. */
  public DataSchema getSchema()
  {
    return schema;
  }
  /** Returns the tuning config this plumber was built with. */
  public RealtimeTuningConfig getConfig()
  {
    return config;
  }
  /** Returns the rejection policy derived from the config's window period. */
  public RejectionPolicy getRejectionPolicy()
  {
    return rejectionPolicy;
  }
  /** Returns the live map of sinks keyed by truncated timestamp millis. */
  public Map<Long, Sink> getSinks()
  {
    return sinks;
  }
  /**
   * Starts the plumber: creates the base directory and executors, restores sinks
   * persisted by a previous run, registers the server-view callback and the
   * periodic persist thread, pushes any restored pending sinks, and schedules the
   * first intermediate flush. Returns the commit metadata recovered from disk.
   */
  @Override
  public Object startJob()
  {
    computeBaseDir(schema).mkdirs();
    initializeExecutors();
    Object retVal = bootstrapSinksFromDisk();
    registerServerViewCallback();
    startPersistThread();
    // Push pending sinks bootstrapped from previous run
    mergeAndPush();
    resetNextFlush();
    return retVal;
  }
  /**
   * Adds a row to the sink owning its timestamp. Returns -1 when the row is
   * rejected (no sink), otherwise the sink's row count. Triggers an intermediate
   * persist when the sink is full or the flush deadline has passed.
   */
  @Override
  public int add(InputRow row, Supplier<Committer> committerSupplier) throws IndexSizeExceededException
  {
    final Sink sink = getSink(row.getTimestampFromEpoch());
    if (sink == null) {
      return -1;
    }
    final int numRows = sink.add(row);
    if (!sink.canAppendRow() || System.currentTimeMillis() > nextFlush) {
      persist(committerSupplier.get());
    }
    return numRows;
  }
  /**
   * Returns the sink covering {@code timestamp}, creating, announcing and
   * registering a new one (keyed by the segment-granularity-truncated time) if
   * needed. Returns null when the rejection policy refuses the timestamp.
   * An announce failure is alerted but the sink is still usable locally —
   * note the sink is only put into {@code sinks}/{@code sinkTimeline} on the
   * success path.
   */
  private Sink getSink(long timestamp)
  {
    if (!rejectionPolicy.accept(timestamp)) {
      return null;
    }
    final Granularity segmentGranularity = schema.getGranularitySpec().getSegmentGranularity();
    final VersioningPolicy versioningPolicy = config.getVersioningPolicy();
    final long truncatedTime = segmentGranularity.truncate(new DateTime(timestamp)).getMillis();
    Sink retVal = sinks.get(truncatedTime);
    if (retVal == null) {
      final Interval sinkInterval = new Interval(
          new DateTime(truncatedTime),
          segmentGranularity.increment(new DateTime(truncatedTime))
      );
      retVal = new Sink(sinkInterval, schema, config, versioningPolicy.getVersion(sinkInterval));
      try {
        segmentAnnouncer.announceSegment(retVal.getSegment());
        sinks.put(truncatedTime, retVal);
        sinkTimeline.add(retVal.getInterval(), retVal.getVersion(), new SingleElementPartitionChunk<Sink>(retVal));
      }
      catch (IOException e) {
        log.makeAlert(e, "Failed to announce new segment[%s]", schema.getDataSource())
           .addData("interval", retVal.getInterval())
           .emit();
      }
    }
    return retVal;
  }
  /**
   * Builds a QueryRunner over all sinks whose intervals intersect the query.
   * Each matching sink becomes a per-segment runner (wrapped for metrics and
   * missing-segment reporting) and the per-sink results are merged by the
   * query's toolchest.
   */
  @Override
  public <T> QueryRunner<T> getQueryRunner(final Query<T> query)
  {
    final QueryRunnerFactory<T, Query<T>> factory = conglomerate.findFactory(query);
    final QueryToolChest<T, Query<T>> toolchest = factory.getToolchest();
    final Function<Query<T>, ServiceMetricEvent.Builder> builderFn =
        new Function<Query<T>, ServiceMetricEvent.Builder>()
        {
          @Override
          public ServiceMetricEvent.Builder apply(@Nullable Query<T> input)
          {
            return toolchest.makeMetricBuilder(query);
          }
        };
    List<TimelineObjectHolder<String, Sink>> querySinks = Lists.newArrayList();
    for (Interval interval : query.getIntervals()) {
      querySinks.addAll(sinkTimeline.lookup(interval));
    }
    return toolchest.mergeResults(
        factory.mergeRunners(
            queryExecutorService,
            FunctionalIterable
                .create(querySinks)
                .transform(
                    new Function<TimelineObjectHolder<String, Sink>, QueryRunner<T>>()
                    {
                      @Override
                      public QueryRunner<T> apply(TimelineObjectHolder<String, Sink> holder)
                      {
                        if (holder == null) {
                          throw new ISE("No timeline entry at all!");
                        }
                        // The realtime plumber always uses SingleElementPartitionChunk
                        final Sink theSink = holder.getObject().getChunk(0).getObject();
                        if (theSink == null) {
                          throw new ISE("Missing sink for timeline entry[%s]!", holder);
                        }
                        final SegmentDescriptor descriptor = new SegmentDescriptor(
                            holder.getInterval(),
                            theSink.getSegment().getVersion(),
                            theSink.getSegment().getShardSpec().getPartitionNum()
                        );
                        return new SpecificSegmentQueryRunner<T>(
                            new MetricsEmittingQueryRunner<T>(
                                emitter,
                                builderFn,
                                factory.mergeRunners(
                                    MoreExecutors.sameThreadExecutor(),
                                    Iterables.transform(
                                        theSink,
                                        new Function<FireHydrant, QueryRunner<T>>()
                                        {
                                          @Override
                                          public QueryRunner<T> apply(FireHydrant input)
                                          {
                                            // It is possible that we got a query for a segment, and while that query
                                            // is in the jetty queue, the segment is abandoned. Here, we need to retry
                                            // the query for the segment.
                                            if (input == null || input.getSegment() == null) {
                                              return new ReportTimelineMissingSegmentQueryRunner<T>(descriptor);
                                            }
                                            // Prevent the underlying segment from closing when its being iterated
                                            final Closeable closeable = input.getSegment().increment();
                                            try {
                                              return factory.createRunner(input.getSegment());
                                            }
                                            finally {
                                              try {
                                                if (closeable != null) {
                                                  closeable.close();
                                                }
                                              }
                                              catch (IOException e) {
                                                throw Throwables.propagate(e);
                                              }
                                            }
                                          }
                                        }
                                    )
                                )
                            ).withWaitMeasuredFromNow(),
                            new SpecificSegmentSpec(
                                descriptor
                            )
                        );
                      }
                    }
                )
        )
    );
  }
/**
 * Swaps out every swappable hydrant and submits a single runnable to the (blocking,
 * single-threaded) persistExecutor that persists them to disk along with the committer's
 * metadata, then runs the committer. Back-pressure time spent waiting for the executor to
 * accept the task is recorded as persist back-pressure.
 */
@Override
public void persist(final Committer committer)
{
  final List<Pair<FireHydrant, Interval>> indexesToPersist = Lists.newArrayList();
  for (Sink sink : sinks.values()) {
    if (sink.swappable()) {
      indexesToPersist.add(Pair.of(sink.swap(), sink.getInterval()));
    }
  }
  log.info("Submitting persist runnable for dataSource[%s]", schema.getDataSource());
  // runExecStopwatch measures how long execute() blocks (back-pressure);
  // persistStopwatch measures total persist time including queue wait.
  final Stopwatch runExecStopwatch = Stopwatch.createStarted();
  final Stopwatch persistStopwatch = Stopwatch.createStarted();
  // Metadata (plus a wall-clock timestamp) is stored inside each persisted hydrant so the
  // latest commit point can be recovered on restart; see bootstrapSinksFromDisk().
  final Map<String, Object> metadata = committer.getMetadata() == null ? null :
                                       ImmutableMap.of(
                                           COMMIT_METADATA_KEY,
                                           committer.getMetadata(),
                                           COMMIT_METADATA_TIMESTAMP_KEY,
                                           System.currentTimeMillis()
                                       );
  persistExecutor.execute(
      new ThreadRenamingRunnable(String.format("%s-incremental-persist", schema.getDataSource()))
      {
        @Override
        public void doRun()
        {
          /* Note:
             If plumber crashes after storing a subset of all the hydrants then we will lose data and next
             time we will start with the commitMetadata stored in those hydrants.
             option#1:
             maybe it makes sense to store the metadata outside the segments in a separate file. This is because the
             commit metadata isn't really associated with an individual segment-- it's associated with a set of segments
             that are persisted at the same time or maybe whole datasource. So storing it in the segments is asking for problems.
             Sort of like this:
             {
                "metadata" : {"foo": "bar"},
                "segments": [
                  {"id": "datasource_2000_2001_2000_1", "hydrant": 10},
                  {"id": "datasource_2001_2002_2001_1", "hydrant": 12},
                ]
             }
             When a realtime node crashes and starts back up, it would delete any hydrants numbered higher than the
             ones in the commit file.
             option#2
             We could also just include the set of segments for the same chunk of metadata in more metadata on each
             of the segments. we might also have to think about the hand-off in terms of the full set of segments being
             handed off instead of individual segments being handed off (that is, if one of the set succeeds in handing
             off and the others fail, the real-time would believe that it needs to re-ingest the data).
           */
          try {
            // Persist each swapped hydrant; the committer only runs after all succeed.
            for (Pair<FireHydrant, Interval> pair : indexesToPersist) {
              metrics.incrementRowOutputCount(
                  persistHydrant(
                      pair.lhs, schema, pair.rhs, metadata
                  )
              );
            }
            committer.run();
          }
          catch (Exception e) {
            metrics.incrementFailedPersists();
            throw e;
          }
          finally {
            metrics.incrementNumPersists();
            metrics.incrementPersistTimeMillis(persistStopwatch.elapsed(TimeUnit.MILLISECONDS));
            persistStopwatch.stop();
          }
        }
      }
  );
  // execute() blocks when the executor's queue is full, throttling ingestion.
  final long startDelay = runExecStopwatch.elapsed(TimeUnit.MILLISECONDS);
  metrics.incrementPersistBackPressureMillis(startDelay);
  if (startDelay > WARN_DELAY) {
    log.warn("Ingestion was throttled for [%,d] millis because persists were pending.", startDelay);
  }
  runExecStopwatch.stop();
  resetNextFlush();
}
/**
 * Submits a persist-n-merge task for a sink to the (single-threaded) mergeExecutor: persists any
 * unswapped hydrants, merges all hydrant indexes into one segment, pushes it to deep storage and
 * publishes it to the metadata store. A marker file ("isPushedMarker") in the persist directory
 * makes the push idempotent across retries/restarts.
 *
 * @param truncatedTime sink key (bucket start millis)
 * @param sink          the sink to persist and merge
 */
private void persistAndMerge(final long truncatedTime, final Sink sink)
{
  final String threadName = String.format(
      "%s-%s-persist-n-merge", schema.getDataSource(), new DateTime(truncatedTime)
  );
  mergeExecutor.execute(
      new ThreadRenamingRunnable(threadName)
      {
        @Override
        public void doRun()
        {
          final Interval interval = sink.getInterval();
          // Bail out if this sink has been abandoned by a previously-executed task.
          if (sinks.get(truncatedTime) != sink) {
            log.info("Sink[%s] was abandoned, bailing out of persist-n-merge.", sink);
            return;
          }
          // Use a file to indicate that pushing has completed.
          final File persistDir = computePersistDir(schema, interval);
          final File mergedTarget = new File(persistDir, "merged");
          final File isPushedMarker = new File(persistDir, "isPushedMarker");
          if (!isPushedMarker.exists()) {
            // Not yet pushed: clear any partial merged output from a prior attempt.
            removeSegment(sink, mergedTarget);
            if (mergedTarget.exists()) {
              log.wtf("Merged target[%s] exists?!", mergedTarget);
              return;
            }
          } else {
            log.info("Already pushed sink[%s]", sink);
            return;
          }
          /*
            Note: it the plumber crashes after persisting a subset of hydrants then might duplicate data as these
            hydrants will be read but older commitMetadata will be used. fixing this possibly needs structural
            changes to plumber.
          */
          // Persist any hydrants that have not yet been swapped to disk.
          for (FireHydrant hydrant : sink) {
            synchronized (hydrant) {
              if (!hydrant.hasSwapped()) {
                log.info("Hydrant[%s] hasn't swapped yet, swapping. Sink[%s]", hydrant, sink);
                final int rowCount = persistHydrant(hydrant, schema, interval, null);
                metrics.incrementRowOutputCount(rowCount);
              }
            }
          }
          try {
            // Merge all persisted hydrant indexes into a single queryable index.
            List<QueryableIndex> indexes = Lists.newArrayList();
            for (FireHydrant fireHydrant : sink) {
              Segment segment = fireHydrant.getSegment();
              final QueryableIndex queryableIndex = segment.asQueryableIndex();
              log.info("Adding hydrant[%s]", fireHydrant);
              indexes.add(queryableIndex);
            }
            final File mergedFile;
            if (config.isPersistInHeap()) {
              mergedFile = IndexMaker.mergeQueryableIndex(
                  indexes,
                  schema.getAggregators(),
                  mergedTarget,
                  config.getIndexSpec()
              );
            } else {
              mergedFile = IndexMerger.mergeQueryableIndex(
                  indexes,
                  schema.getAggregators(),
                  mergedTarget,
                  config.getIndexSpec()
              );
            }
            QueryableIndex index = IndexIO.loadIndex(mergedFile);
            log.info("Pushing [%s] to deep storage", sink.getSegment().getIdentifier());
            DataSegment segment = dataSegmentPusher.push(
                mergedFile,
                sink.getSegment().withDimensions(Lists.newArrayList(index.getAvailableDimensions()))
            );
            log.info("Inserting [%s] to the metadata store", sink.getSegment().getIdentifier());
            segmentPublisher.publishSegment(segment);
            // Marker creation is best-effort; failure only means the push may be retried.
            if (!isPushedMarker.createNewFile()) {
              log.makeAlert("Failed to create marker file for [%s]", schema.getDataSource())
                 .addData("interval", sink.getInterval())
                 .addData("partitionNum", segment.getShardSpec().getPartitionNum())
                 .addData("marker", isPushedMarker)
                 .emit();
            }
          }
          catch (Exception e) {
            metrics.incrementFailedHandoffs();
            log.makeAlert(e, "Failed to persist merged index[%s]", schema.getDataSource())
               .addData("interval", interval)
               .emit();
            if (shuttingDown) {
              // We're trying to shut down, and this segment failed to push. Let's just get rid of it.
              // This call will also delete possibly-partially-written files, so we don't need to do it explicitly.
              cleanShutdown = false;
              abandonSegment(truncatedTime, sink);
            }
          }
        }
      }
  );
}
/**
 * Shuts the plumber down: submits persist-n-merge for every remaining sink, then blocks until
 * all sinks have been handed off (abandonSegment() removes them and signals handoffCondition).
 * Throws if any persist/merge failed during shutdown (cleanShutdown == false).
 */
@Override
public void finishJob()
{
  log.info("Shutting down...");
  shuttingDown = true;
  for (final Map.Entry<Long, Sink> entry : sinks.entrySet()) {
    persistAndMerge(entry.getKey(), entry.getValue());
  }
  // Wait for handoff of every sink; each pass logs the sinks still outstanding.
  while (!sinks.isEmpty()) {
    try {
      log.info(
          "Cannot shut down yet! Sinks remaining: %s",
          Joiner.on(", ").join(
              Iterables.transform(
                  sinks.values(),
                  new Function<Sink, String>()
                  {
                    @Override
                    public String apply(Sink input)
                    {
                      return input.getSegment().getIdentifier();
                    }
                  }
              )
          )
      );
      // Standard wait-loop: re-check the condition after every wakeup.
      synchronized (handoffCondition) {
        while (!sinks.isEmpty()) {
          handoffCondition.wait();
        }
      }
    }
    catch (InterruptedException e) {
      throw Throwables.propagate(e);
    }
  }
  shutdownExecutors();
  stopped = true;
  if (!cleanShutdown) {
    throw new ISE("Exception occurred during persist and merge.");
  }
}
/** Schedules the next incremental-persist deadline one intermediate-persist period from now. */
private void resetNextFlush()
{
  nextFlush = new DateTime().plus(config.getIntermediatePersistPeriod()).getMillis();
}
/**
 * Lazily creates the persist, merge and scheduled executors. The persist and merge executors
 * are blocking single-threaded executors so that a slow disk throttles the firehose instead of
 * queueing unbounded work.
 */
protected void initializeExecutors()
{
  final int maxPendingPersists = config.getMaxPendingPersists();
  if (persistExecutor == null) {
    // use a blocking single threaded executor to throttle the firehose when write to disk is slow
    persistExecutor = Execs.newBlockingSingleThreaded(
        "plumber_persist_%d", maxPendingPersists
    );
  }
  if (mergeExecutor == null) {
    // use a blocking single threaded executor to throttle the firehose when write to disk is slow
    mergeExecutor = Execs.newBlockingSingleThreaded(
        "plumber_merge_%d", 1
    );
  }
  if (scheduledExecutor == null) {
    scheduledExecutor = Execs.scheduledSingleThreaded("plumber_scheduled_%d");
  }
}
/**
 * Shuts down the scheduled and persist executors. mergeExecutor is intentionally NOT shut down
 * here: it is stopped when the ServerView sends its segment callback (see
 * registerServerViewCallback), since it may still be pushing outstanding sinks.
 */
protected void shutdownExecutors()
{
  if (scheduledExecutor != null) {
    scheduledExecutor.shutdown();
  }
  // Shut down persistExecutor independently of scheduledExecutor. Previously this call was
  // nested inside the scheduledExecutor null-check, leaking the persist thread whenever the
  // scheduled executor had never been created; it also lacked its own null guard even though
  // both fields are initialized lazily in initializeExecutors().
  if (persistExecutor != null) {
    persistExecutor.shutdown();
  }
}
/**
 * Rebuilds in-memory sinks from previously persisted hydrant directories on local disk, used on
 * restart. Corrupted hydrant directories are moved aside (see computeCorruptedFileDumpDir) and
 * skipped. Each sink is re-announced and re-registered in the timeline.
 *
 * @return the commit metadata object with the latest commit timestamp found across all
 *         persisted hydrants, or null when nothing usable is on disk
 */
protected Object bootstrapSinksFromDisk()
{
  final VersioningPolicy versioningPolicy = config.getVersioningPolicy();
  File baseDir = computeBaseDir(schema);
  if (baseDir == null || !baseDir.exists()) {
    return null;
  }
  File[] files = baseDir.listFiles();
  if (files == null) {
    return null;
  }
  Object metadata = null;
  long latestCommitTime = 0;
  for (File sinkDir : files) {
    // Persist dirs are named by interval with '/' replaced by '_'; reverse that mapping.
    Interval sinkInterval = new Interval(sinkDir.getName().replace("_", "/"));
    //final File[] sinkFiles = sinkDir.listFiles();
    // To avoid reading and listing of "merged" dir
    // Only numeric directory names are hydrants; this filters out "merged"/"isPushedMarker".
    final File[] sinkFiles = sinkDir.listFiles(
        new FilenameFilter()
        {
          @Override
          public boolean accept(File dir, String fileName)
          {
            return !(Ints.tryParse(fileName) == null);
          }
        }
    );
    // Sort hydrant dirs by their numeric index so hydrants are restored in order.
    Arrays.sort(
        sinkFiles,
        new Comparator<File>()
        {
          @Override
          public int compare(File o1, File o2)
          {
            try {
              return Ints.compare(Integer.parseInt(o1.getName()), Integer.parseInt(o2.getName()));
            }
            catch (NumberFormatException e) {
              log.error(e, "Couldn't compare as numbers? [%s][%s]", o1, o2);
              return o1.compareTo(o2);
            }
          }
        }
    );
    boolean isCorrupted = false;
    try {
      List<FireHydrant> hydrants = Lists.newArrayList();
      for (File segmentDir : sinkFiles) {
        log.info("Loading previously persisted segment at [%s]", segmentDir);
        // Although this has been tackled at start of this method.
        // Just a doubly-check added to skip "merged" dir. from being added to hydrants
        // If 100% sure that this is not needed, this check can be removed.
        if (Ints.tryParse(segmentDir.getName()) == null) {
          continue;
        }
        QueryableIndex queryableIndex = null;
        try {
          queryableIndex = IndexIO.loadIndex(segmentDir);
        }
        catch (IOException e) {
          log.error(e, "Problem loading segmentDir from disk.");
          isCorrupted = true;
        }
        if (isCorrupted) {
          // Move the corrupted hydrant dir aside (copy + delete) so it doesn't poison restarts.
          try {
            File corruptSegmentDir = computeCorruptedFileDumpDir(segmentDir, schema);
            log.info("Renaming %s to %s", segmentDir.getAbsolutePath(), corruptSegmentDir.getAbsolutePath());
            FileUtils.copyDirectory(segmentDir, corruptSegmentDir);
            FileUtils.deleteDirectory(segmentDir);
          }
          catch (Exception e1) {
            log.error(e1, "Failed to rename %s", segmentDir.getAbsolutePath());
          }
          //Note: skipping corrupted segment might lead to dropping some data. This strategy should be changed
          //at some point.
          continue;
        }
        // Track the commit metadata with the newest embedded commit timestamp.
        Map<String, Object> segmentMetadata = queryableIndex.getMetaData();
        if (segmentMetadata != null) {
          Object timestampObj = segmentMetadata.get(COMMIT_METADATA_TIMESTAMP_KEY);
          if (timestampObj != null) {
            long timestamp = ((Long) timestampObj).longValue();
            if (timestamp > latestCommitTime) {
              log.info(
                  "Found metaData [%s] with latestCommitTime [%s] greater than previous recorded [%s]",
                  queryableIndex.getMetaData(), timestamp, latestCommitTime
              );
              latestCommitTime = timestamp;
              metadata = queryableIndex.getMetaData().get(COMMIT_METADATA_KEY);
            }
          }
        }
        hydrants.add(
            new FireHydrant(
                new QueryableIndexSegment(
                    DataSegment.makeDataSegmentIdentifier(
                        schema.getDataSource(),
                        sinkInterval.getStart(),
                        sinkInterval.getEnd(),
                        versioningPolicy.getVersion(sinkInterval),
                        config.getShardSpec()
                    ),
                    queryableIndex
                ),
                Integer.parseInt(segmentDir.getName())
            )
        );
      }
      if (hydrants.isEmpty()) {
        // Probably encountered a corrupt sink directory
        log.warn(
            "Found persisted segment directory with no intermediate segments present at %s, skipping sink creation.",
            sinkDir.getAbsolutePath()
        );
        continue;
      }
      // Re-register and re-announce the reconstructed sink.
      Sink currSink = new Sink(sinkInterval, schema, config, versioningPolicy.getVersion(sinkInterval), hydrants);
      sinks.put(sinkInterval.getStartMillis(), currSink);
      sinkTimeline.add(
          currSink.getInterval(),
          currSink.getVersion(),
          new SingleElementPartitionChunk<Sink>(currSink)
      );
      segmentAnnouncer.announceSegment(currSink.getSegment());
    }
    catch (IOException e) {
      log.makeAlert(e, "Problem loading sink[%s] from disk.", schema.getDataSource())
         .addData("interval", sinkInterval)
         .emit();
    }
  }
  return metadata;
}
/**
 * Starts the periodic merge-and-push "overseer" on the scheduled executor. The first run is
 * delayed until one segment granularity past now plus the window period (so the current bucket
 * has a chance to close), and subsequent runs repeat every segment-granularity period.
 */
protected void startPersistThread()
{
  final Granularity segmentGranularity = schema.getGranularitySpec().getSegmentGranularity();
  final Period windowPeriod = config.getWindowPeriod();
  final DateTime truncatedNow = segmentGranularity.truncate(new DateTime());
  final long windowMillis = windowPeriod.toStandardDuration().getMillis();
  log.info(
      "Expect to run at [%s]",
      new DateTime().plus(
          new Duration(
              System.currentTimeMillis(),
              segmentGranularity.increment(truncatedNow).getMillis() + windowMillis
          )
      )
  );
  ScheduledExecutors
      .scheduleAtFixedRate(
          scheduledExecutor,
          // Initial delay: until the end of the current bucket plus the window period.
          new Duration(
              System.currentTimeMillis(),
              segmentGranularity.increment(truncatedNow).getMillis() + windowMillis
          ),
          // Repeat period: one segment-granularity bucket.
          new Duration(truncatedNow, segmentGranularity.increment(truncatedNow)),
          new ThreadRenamingCallable<ScheduledExecutors.Signal>(
              String.format(
                  "%s-overseer-%d",
                  schema.getDataSource(),
                  config.getShardSpec().getPartitionNum()
              )
          )
          {
            @Override
            public ScheduledExecutors.Signal doCall()
            {
              // Check the stop flag both before and after the work so shutdown is prompt.
              if (stopped) {
                log.info("Stopping merge-n-push overseer thread");
                return ScheduledExecutors.Signal.STOP;
              }
              mergeAndPush();
              if (stopped) {
                log.info("Stopping merge-n-push overseer thread");
                return ScheduledExecutors.Signal.STOP;
              } else {
                return ScheduledExecutors.Signal.REPEAT;
              }
            }
          }
      );
}
/**
 * Finds every sink whose bucket started before the hand-off cutoff (current max allowed time
 * minus the window period, truncated to segment granularity) and submits it for persist-n-merge.
 * Sinks starting at or after the cutoff are left for a future run.
 */
private void mergeAndPush()
{
  final Granularity segmentGranularity = schema.getGranularitySpec().getSegmentGranularity();
  final Period windowPeriod = config.getWindowPeriod();
  final long windowMillis = windowPeriod.toStandardDuration().getMillis();
  log.info("Starting merge and push.");
  // Cutoff = truncate(max(windowMillis, currMaxTime) - windowMillis); the Math.max keeps the
  // subtraction from going negative when currMaxTime is very small.
  DateTime minTimestampAsDate = segmentGranularity.truncate(
      new DateTime(
          Math.max(
              windowMillis,
              rejectionPolicy.getCurrMaxTime()
                             .getMillis()
          )
          - windowMillis
      )
  );
  long minTimestamp = minTimestampAsDate.getMillis();
  log.info(
      "Found [%,d] segments. Attempting to hand off segments that start before [%s].",
      sinks.size(),
      minTimestampAsDate
  );
  List<Map.Entry<Long, Sink>> sinksToPush = Lists.newArrayList();
  for (Map.Entry<Long, Sink> entry : sinks.entrySet()) {
    final Long intervalStart = entry.getKey();
    if (intervalStart < minTimestamp) {
      log.info("Adding entry [%s] for merge and push.", entry);
      sinksToPush.add(entry);
    } else {
      log.info(
          "Skipping persist and merge for entry [%s] : Start time [%s] >= [%s] min timestamp required in this run. Segment will be picked up in a future run.",
          entry,
          new DateTime(intervalStart),
          minTimestampAsDate
      );
    }
  }
  log.info("Found [%,d] sinks to persist and merge", sinksToPush.size());
  for (final Map.Entry<Long, Sink> entry : sinksToPush) {
    persistAndMerge(entry.getKey(), entry.getValue());
  }
}
/**
 * Unannounces a given sink and removes all local references to it. It is important that this is only called
 * from the single-threaded mergeExecutor, since otherwise chaos may ensue if merged segments are deleted while
 * being created.
 *
 * @param truncatedTime sink key
 * @param sink          sink to unannounce
 */
protected void abandonSegment(final long truncatedTime, final Sink sink)
{
  try {
    segmentAnnouncer.unannounceSegment(sink.getSegment());
    removeSegment(sink, computePersistDir(schema, sink.getInterval()));
    log.info("Removing sinkKey %d for segment %s", truncatedTime, sink.getSegment().getIdentifier());
    sinks.remove(truncatedTime);
    sinkTimeline.remove(
        sink.getInterval(),
        sink.getVersion(),
        new SingleElementPartitionChunk<>(sink)
    );
    // Wake up finishJob(), which waits on handoffCondition for sinks to drain.
    synchronized (handoffCondition) {
      handoffCondition.notifyAll();
    }
  }
  catch (Exception e) {
    log.makeAlert(e, "Unable to abandon old segment for dataSource[%s]", schema.getDataSource())
       .addData("interval", sink.getInterval())
       .emit();
  }
}
/** Returns the local base persist directory for the given schema's datasource. */
protected File computeBaseDir(DataSchema schema)
{
  return new File(config.getBasePersistDirectory(), schema.getDataSource());
}
/**
 * Returns the dump location for a corrupted persist directory: the same absolute path with the
 * datasource path element prefixed by a "corrupted" parent directory
 * (e.g. {@code .../dataSource/...} becomes {@code .../corrupted/dataSource/...}).
 *
 * @param persistDir the corrupted hydrant/persist directory
 * @param schema     schema whose datasource name appears in the path
 */
protected File computeCorruptedFileDumpDir(File persistDir, DataSchema schema)
{
  // Bug fix: File.separator ("/" or "\") is the directory separator. The previous code used
  // File.pathSeparator (":" or ";"), the PATH-list delimiter, which produced a single mangled
  // directory name like "corrupted:dataSource" instead of a "corrupted" subdirectory.
  return new File(
      persistDir.getAbsolutePath()
                .replace(schema.getDataSource(), "corrupted" + File.separator + schema.getDataSource())
  );
}
/**
 * Returns the persist directory for one sink: base dir plus the interval string with '/'
 * replaced by '_' (reversed by bootstrapSinksFromDisk()).
 */
protected File computePersistDir(DataSchema schema, Interval interval)
{
  return new File(computeBaseDir(schema), interval.toString().replace("/", "_"));
}
/**
 * Persists the given hydrant and returns the number of rows persisted
 *
 * @param indexToPersist hydrant to persist
 * @param schema         datasource schema
 * @param interval       interval to persist
 * @param metaData       commit metadata to embed in the persisted segment, may be null
 *
 * @return the number of rows persisted, or 0 if the hydrant had already been swapped
 */
protected int persistHydrant(
    FireHydrant indexToPersist,
    DataSchema schema,
    Interval interval,
    Map<String, Object> metaData
)
{
  // Synchronize on the hydrant so concurrent persist attempts (incremental persist vs.
  // persist-n-merge) don't swap the same hydrant twice.
  synchronized (indexToPersist) {
    if (indexToPersist.hasSwapped()) {
      log.info(
          "DataSource[%s], Interval[%s], Hydrant[%s] already swapped. Ignoring request to persist.",
          schema.getDataSource(), interval, indexToPersist
      );
      return 0;
    }
    log.info(
        "DataSource[%s], Interval[%s], Metadata [%s] persisting Hydrant[%s]",
        schema.getDataSource(),
        interval,
        metaData,
        indexToPersist
    );
    try {
      int numRows = indexToPersist.getIndex().size();
      final File persistedFile;
      final IndexSpec indexSpec = config.getIndexSpec();
      if (config.isPersistInHeap()) {
        persistedFile = IndexMaker.persist(
            indexToPersist.getIndex(),
            new File(computePersistDir(schema, interval), String.valueOf(indexToPersist.getCount())),
            metaData,
            indexSpec
        );
      } else {
        persistedFile = IndexMerger.persist(
            indexToPersist.getIndex(),
            new File(computePersistDir(schema, interval), String.valueOf(indexToPersist.getCount())),
            metaData,
            indexSpec
        );
      }
      // Replace the in-memory index with the on-disk segment just written.
      indexToPersist.swapSegment(
          new QueryableIndexSegment(
              indexToPersist.getSegment().getIdentifier(),
              IndexIO.loadIndex(persistedFile)
          )
      );
      return numRows;
    }
    catch (IOException e) {
      // Pass the exception to the alert (was previously dropped), matching every other
      // makeAlert call in this class so the stack trace reaches the alert stream.
      log.makeAlert(e, "dataSource[%s] -- incremental persist failed", schema.getDataSource())
         .addData("interval", interval)
         .addData("count", indexToPersist.getCount())
         .emit();
      throw Throwables.propagate(e);
    }
  }
}
/**
 * Registers a segment callback on the server view so that when a historical node loads a
 * segment matching one of our sinks with an equal-or-newer version, the local sink is abandoned
 * (hand-off complete). Once the plumber is stopped, the callback unregisters itself and shuts
 * down the mergeExecutor.
 */
private void registerServerViewCallback()
{
  serverView.registerSegmentCallback(
      mergeExecutor,
      new ServerView.BaseSegmentCallback()
      {
        @Override
        public ServerView.CallbackAction segmentAdded(DruidServerMetadata server, DataSegment segment)
        {
          if (stopped) {
            log.info("Unregistering ServerViewCallback");
            // mergeExecutor is shut down here rather than in shutdownExecutors(); see that method.
            mergeExecutor.shutdown();
            return ServerView.CallbackAction.UNREGISTER;
          }
          if (!server.isAssignable()) {
            return ServerView.CallbackAction.CONTINUE;
          }
          log.debug("Checking segment[%s] on server[%s]", segment, server);
          // NOTE(review): this uses equals() while the filter predicate below uses
          // equalsIgnoreCase() -- confirm whether case-insensitive matching is intended.
          if (schema.getDataSource().equals(segment.getDataSource())
              && config.getShardSpec().getPartitionNum() == segment.getShardSpec().getPartitionNum()
              ) {
            final Interval interval = segment.getInterval();
            for (Map.Entry<Long, Sink> entry : sinks.entrySet()) {
              final Long sinkKey = entry.getKey();
              if (interval.contains(sinkKey)) {
                final Sink sink = entry.getValue();
                log.info("Segment[%s] matches sink[%s] on server[%s]", segment, sink, server);
                // Hand-off is complete only when the loaded segment's version is >= ours.
                final String segmentVersion = segment.getVersion();
                final String sinkVersion = sink.getSegment().getVersion();
                if (segmentVersion.compareTo(sinkVersion) >= 0) {
                  log.info("Segment version[%s] >= sink version[%s]", segmentVersion, sinkVersion);
                  abandonSegment(sinkKey, sink);
                }
              }
            }
          }
          return ServerView.CallbackAction.CONTINUE;
        }
      },
      // Pre-filter: only deliver callbacks for segments that could match one of our sinks.
      new Predicate<DataSegment>()
      {
        @Override
        public boolean apply(final DataSegment segment)
        {
          return
              schema.getDataSource().equalsIgnoreCase(segment.getDataSource())
              && config.getShardSpec().getPartitionNum() == segment.getShardSpec().getPartitionNum()
              && Iterables.any(
                  sinks.keySet(), new Predicate<Long>()
                  {
                    @Override
                    public boolean apply(Long sinkKey)
                    {
                      return segment.getInterval().contains(sinkKey);
                    }
                  }
              );
        }
      }
  );
}
/**
 * Deletes the given directory belonging to a sink, emitting an alert on failure.
 * A non-existent target is a no-op.
 */
private void removeSegment(final Sink sink, final File target)
{
  // Nothing to clean up if the directory was never written.
  if (!target.exists()) {
    return;
  }
  try {
    log.info("Deleting Index File[%s]", target);
    FileUtils.deleteDirectory(target);
  }
  catch (Exception e) {
    log.makeAlert(e, "Unable to remove file for dataSource[%s]", schema.getDataSource())
       .addData("file", target)
       .addData("interval", sink.getInterval())
       .emit();
  }
}
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.editor.impl;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.*;
import com.intellij.openapi.editor.actionSystem.DocCommandGroupId;
import com.intellij.openapi.editor.actionSystem.ReadonlyFragmentModificationHandler;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.editor.event.DocumentListener;
import com.intellij.openapi.editor.ex.*;
import com.intellij.openapi.editor.impl.event.DocumentEventImpl;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.*;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.reference.SoftReference;
import com.intellij.util.*;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.IntArrayList;
import com.intellij.util.text.CharArrayUtil;
import com.intellij.util.text.ImmutableText;
import gnu.trove.TIntObjectHashMap;
import gnu.trove.TObjectProcedure;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import java.beans.PropertyChangeListener;
import java.beans.PropertyChangeSupport;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
public class DocumentImpl extends UserDataHolderBase implements DocumentEx {
  private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.editor.impl.DocumentImpl");
  // Cached snapshot of myDocumentListeners as an array, invalidated on add/remove.
  private final Ref<DocumentListener[]> myCachedDocumentListeners = Ref.create(null);
  private final List<DocumentListener> myDocumentListeners = ContainerUtil.createLockFreeCopyOnWriteList();
  // Ordinary and externally-surviving range markers are kept in separate interval trees.
  private final RangeMarkerTree<RangeMarkerEx> myRangeMarkers = new RangeMarkerTree<RangeMarkerEx>(this);
  private final RangeMarkerTree<RangeMarkerEx> myPersistentRangeMarkers = new RangeMarkerTree<RangeMarkerEx>(this);
  private final List<RangeMarker> myGuardedBlocks = new ArrayList<RangeMarker>();
  private ReadonlyFragmentModificationHandler myReadonlyFragmentModificationHandler;
  // new String(...) gives this lock a unique identity (never interned/shared with other locks).
  private final Object myLineSetLock = new String("line set lock");
  // myLineSet is computed lazily from myText; see getLineSet() for the double-checked locking.
  private volatile LineSet myLineSet;
  private volatile ImmutableText myText;
  // Softly-cached String form of myText, recomputed on demand.
  private volatile SoftReference<String> myTextString;
  private volatile FrozenDocument myFrozen;
  private boolean myIsReadOnly = false;
  private volatile boolean isStripTrailingSpacesEnabled = true;
  private volatile long myModificationStamp;
  private final PropertyChangeSupport myPropertyChangeSupport = new PropertyChangeSupport(this);
  private final List<EditReadOnlyListener> myReadOnlyListeners = ContainerUtil.createLockFreeCopyOnWriteList();
  private volatile boolean myMightContainTabs = true; // optimisation flag: when document contains no tabs it is dramatically easier to calculate positions in editor
  private int myTabTrackingRequestors = 0;
  private int myCheckGuardedBlocks = 0;
  private boolean myGuardsSuppressed = false;
  private boolean myEventsHandling = false;
  // When true, mutations assert they run on the correct thread; disabled for non-AWT usage.
  private final boolean myAssertThreading;
  private volatile boolean myDoingBulkUpdate = false;
  private boolean myUpdatingBulkModeStatus;
  private volatile boolean myAcceptSlashR = false;
  private boolean myChangeInProgress;
  private volatile int myBufferSize;
  // Live CharSequence view over the current text; each call reads the current myText.
  private final CharSequence myMutableCharSequence = new CharSequence() {
    @Override
    public int length() {
      return myText.length();
    }
    @Override
    public char charAt(int index) {
      return myText.charAt(index);
    }
    @Override
    public CharSequence subSequence(int start, int end) {
      return myText.subSequence(start, end);
    }
    @NotNull
    @Override
    public String toString() {
      return doGetText();
    }
  };
  /** Creates a document over the given text, asserting AWT threading on modifications. */
  public DocumentImpl(@NotNull String text) {
    this(text, false);
  }
  /** Creates a document over the given characters, asserting AWT threading on modifications. */
  public DocumentImpl(@NotNull CharSequence chars) {
    this(chars, false);
  }
  /** Creates a document; pass true to allow modification from non-AWT threads. */
  public DocumentImpl(@NotNull CharSequence chars, boolean forUseInNonAWTThread) {
    this(chars, false, forUseInNonAWTThread);
  }
  /**
   * Full constructor.
   *
   * @param chars                initial content; separators are validated unless acceptSlashR
   * @param acceptSlashR         whether '\r' separators are tolerated in the content
   * @param forUseInNonAWTThread when true, thread assertions on modification are disabled
   */
  public DocumentImpl(@NotNull CharSequence chars, boolean acceptSlashR, boolean forUseInNonAWTThread) {
    setAcceptSlashR(acceptSlashR);
    assertValidSeparators(chars);
    myText = ImmutableText.valueOf(chars);
    setCyclicBufferSize(0);
    setModificationStamp(LocalTimeCounter.currentTime());
    myAssertThreading = !forUseInNonAWTThread;
  }
public boolean setAcceptSlashR(boolean accept) {
try {
return myAcceptSlashR;
}
finally {
myAcceptSlashR = accept;
}
}
  /** @return true if '\r' separators are accepted in document content */
  public boolean acceptsSlashR() {
    return myAcceptSlashR;
  }
  /**
   * Returns the lazily-computed line index for the current text. Uses double-checked locking;
   * this is safe here because myLineSet is volatile.
   */
  private LineSet getLineSet() {
    LineSet lineSet = myLineSet;
    if (lineSet == null) {
      synchronized (myLineSetLock) {
        // Re-check under the lock: another thread may have built it in the meantime.
        lineSet = myLineSet;
        if (lineSet == null) {
          lineSet = LineSet.createLineSet(myText);
          myLineSet = lineSet;
        }
      }
    }
    return lineSet;
  }
  /** @return a fresh char array copy of the current document text */
  @Override
  @NotNull
  public char[] getChars() {
    return CharArrayUtil.fromSequence(myText);
  }
  /** Enables or disables stripping of trailing spaces for this document. */
  @Override
  public void setStripTrailingSpacesEnabled(boolean isEnabled) {
    isStripTrailingSpacesEnabled = isEnabled;
  }
  // Test-only convenience overload: strip all lines, virtual space off, no carets.
  @TestOnly
  public boolean stripTrailingSpaces(Project project) {
    return stripTrailingSpaces(project, false);
  }
  // Test-only convenience overload: virtual space off, no carets.
  @TestOnly
  public boolean stripTrailingSpaces(Project project, boolean inChangedLinesOnly) {
    return stripTrailingSpaces(project, inChangedLinesOnly, false, new int[0]);
  }
  /**
   * Strips trailing spaces/tabs from lines of the document.
   *
   * @param project            project context for the write action, may be null
   * @param inChangedLinesOnly when true, only lines marked modified in the LineSet are stripped
   * @param virtualSpaceEnabled when true, carets are ignored (virtual space makes it safe)
   * @param caretOffsets       current caret offsets; lines holding a caret are deferred
   * @return true if stripping was completed successfully, false if the document prevented stripping by e.g. caret(s) being in the way
   */
  boolean stripTrailingSpaces(@Nullable final Project project,
                              boolean inChangedLinesOnly,
                              boolean virtualSpaceEnabled,
                              @NotNull int[] caretOffsets) {
    if (!isStripTrailingSpacesEnabled) {
      return true;
    }
    boolean markAsNeedsStrippingLater = false;
    CharSequence text = myText;
    // Per-line range markers tracking caret positions, so deletions don't invalidate offsets.
    TIntObjectHashMap<List<RangeMarker>> caretMarkers = new TIntObjectHashMap<List<RangeMarker>>(caretOffsets.length);
    try {
      if (!virtualSpaceEnabled) {
        for (int caretOffset : caretOffsets) {
          if (caretOffset < 0 || caretOffset > getTextLength()) {
            continue;
          }
          int line = getLineNumber(caretOffset);
          List<RangeMarker> markers = caretMarkers.get(line);
          if (markers == null) {
            markers = new ArrayList<RangeMarker>();
            caretMarkers.put(line, markers);
          }
          RangeMarker marker = createRangeMarker(caretOffset, caretOffset);
          markers.add(marker);
        }
      }
      lineLoop:
      for (int line = 0; line < getLineCount(); line++) {
        // Re-fetch each iteration: deletions below invalidate the previous LineSet.
        LineSet lineSet = getLineSet();
        if (inChangedLinesOnly && !lineSet.isModified(line)) continue;
        // Scan backwards from line end to find where the trailing whitespace run starts.
        int whiteSpaceStart = -1;
        final int lineEnd = lineSet.getLineEnd(line) - lineSet.getSeparatorLength(line);
        int lineStart = lineSet.getLineStart(line);
        for (int offset = lineEnd - 1; offset >= lineStart; offset--) {
          char c = text.charAt(offset);
          if (c != ' ' && c != '\t') {
            break;
          }
          whiteSpaceStart = offset;
        }
        if (whiteSpaceStart == -1) continue;
        if (!virtualSpaceEnabled) {
          List<RangeMarker> markers = caretMarkers.get(line);
          if (markers != null) {
            for (RangeMarker marker : markers) {
              if (marker.getStartOffset() >= 0 && whiteSpaceStart < marker.getStartOffset()) {
                // mark this as a document that needs stripping later
                // otherwise the caret would jump madly
                markAsNeedsStrippingLater = true;
                continue lineLoop;
              }
            }
          }
        }
        final int finalStart = whiteSpaceStart;
        // document must be unblocked by now. If not, some Save handler attempted to modify PSI
        // which should have been caught by assertion in com.intellij.pom.core.impl.PomModelImpl.runTransaction
        DocumentUtil.writeInRunUndoTransparentAction(new DocumentRunnable(DocumentImpl.this, project) {
          @Override
          public void run() {
            deleteString(finalStart, lineEnd);
          }
        });
        // The deletion replaced myText; refresh our local snapshot before the next line.
        text = myText;
      }
    }
    finally {
      // Always dispose the temporary caret markers, even if stripping failed part-way.
      caretMarkers.forEachValue(new TObjectProcedure<List<RangeMarker>>() {
        @Override
        public boolean execute(List<RangeMarker> markerList) {
          if (markerList != null) {
            for (RangeMarker marker : markerList) {
              try {
                marker.dispose();
              }
              catch (Exception e) {
                LOG.error(e);
              }
            }
          }
          return true;
        }
      });
    }
    return markAsNeedsStrippingLater;
  }
@Override
public void setReadOnly(boolean isReadOnly) {
if (myIsReadOnly != isReadOnly) {
myIsReadOnly = isReadOnly;
myPropertyChangeSupport.firePropertyChange(Document.PROP_WRITABLE, !isReadOnly, isReadOnly);
}
}
  /** @return the handler invoked when a read-only fragment is modified, or null if unset */
  ReadonlyFragmentModificationHandler getReadonlyFragmentModificationHandler() {
    return myReadonlyFragmentModificationHandler;
  }
  /** Sets the handler invoked when a read-only fragment is modified. */
  void setReadonlyFragmentModificationHandler(final ReadonlyFragmentModificationHandler readonlyFragmentModificationHandler) {
    myReadonlyFragmentModificationHandler = readonlyFragmentModificationHandler;
  }
  /** @return true unless the document has been marked read-only */
  @Override
  public boolean isWritable() {
    return !myIsReadOnly;
  }
private RangeMarkerTree<RangeMarkerEx> treeFor(@NotNull RangeMarkerEx rangeMarker) {
return rangeMarker instanceof PersistentRangeMarker ? myPersistentRangeMarkers : myRangeMarkers;
}
  /** Removes a range marker from its interval tree; returns true if it was present. */
  @Override
  public boolean removeRangeMarker(@NotNull RangeMarkerEx rangeMarker) {
    return treeFor(rangeMarker).removeInterval(rangeMarker);
  }
  /** Registers a range marker with the interval tree matching its type (see treeFor). */
  @Override
  public void registerRangeMarker(@NotNull RangeMarkerEx rangeMarker,
                                  int start,
                                  int end,
                                  boolean greedyToLeft,
                                  boolean greedyToRight,
                                  int layer) {
    treeFor(rangeMarker).addInterval(rangeMarker, start, end, greedyToLeft, greedyToRight, layer);
  }
  // Test-only: total marker count across both trees.
  @TestOnly
  int getRangeMarkersSize() {
    return myRangeMarkers.size() + myPersistentRangeMarkers.size();
  }
  // Test-only: total tree node count across both trees.
  @TestOnly
  int getRangeMarkersNodeSize() {
    return myRangeMarkers.nodeSize()+myPersistentRangeMarkers.nodeSize();
  }
  /** Creates a persistent range marker over [startOffset, endOffset] and records it as a guarded (non-editable) block. */
  @Override
  @NotNull
  public RangeMarker createGuardedBlock(int startOffset, int endOffset) {
    LOG.assertTrue(startOffset <= endOffset, "Should be startOffset <= endOffset");
    RangeMarker block = createRangeMarker(startOffset, endOffset, true);
    myGuardedBlocks.add(block);
    return block;
  }
  /** Unregisters a guarded block previously created by createGuardedBlock. */
  @Override
  public void removeGuardedBlock(@NotNull RangeMarker block) {
    myGuardedBlocks.remove(block);
  }
  // NOTE(review): returns the live internal list, not a copy -- callers can mutate it.
  @Override
  @NotNull
  public List<RangeMarker> getGuardedBlocks() {
    return myGuardedBlocks;
  }
  /** @return the first guarded block containing the offset (end-exclusive), or null */
  @Override
  @SuppressWarnings({"ForLoopReplaceableByForEach"}) // Way too many garbage is produced otherwise in AbstractList.iterator()
  public RangeMarker getOffsetGuard(int offset) {
    for (int i = 0; i < myGuardedBlocks.size(); i++) {
      RangeMarker block = myGuardedBlocks.get(i);
      if (offsetInRange(offset, block.getStartOffset(), block.getEndOffset())) return block;
    }
    return null;
  }
/**
 * Returns the first guarded block intersecting {@code [start, end]}, or {@code null}.
 * The query range is treated as inclusive on both ends; a block's end inclusiveness
 * follows its greedy-to-left/right flags.
 */
@Override
public RangeMarker getRangeGuard(int start, int end) {
for (RangeMarker block : myGuardedBlocks) {
if (rangesIntersect(start, true, block.getStartOffset(), block.isGreedyToLeft(), end, true, block.getEndOffset(),
block.isGreedyToRight())) {
return block;
}
}
return null;
}
/** Enables guarded-block checking; calls nest via a counter (see {@link #stopGuardedBlockChecking}). */
@Override
public void startGuardedBlockChecking() {
myCheckGuardedBlocks++;
}
/** Decrements the guarded-block checking counter; must be paired with {@link #startGuardedBlockChecking}. */
@Override
public void stopGuardedBlockChecking() {
LOG.assertTrue(myCheckGuardedBlocks > 0, "Unpaired start/stopGuardedBlockChecking");
myCheckGuardedBlocks--;
}
/** Tests whether {@code offset} lies in the half-open range {@code [start, end)}. */
private static boolean offsetInRange(int offset, int start, int end) {
  return offset >= start && offset < end;
}
/**
 * Tests whether ranges {@code [start0, end0]} and {@code [start1, end1]} intersect,
 * honoring per-endpoint inclusiveness flags (note the interleaved parameter order).
 */
private static boolean rangesIntersect(int start0, boolean leftInclusive0,
int start1, boolean leftInclusive1,
int end0, boolean rightInclusive0,
int end1, boolean rightInclusive1) {
// Normalize so range 0 starts first; on a tie, the range with an inclusive left end goes first.
if (start0 > start1 || start0 == start1 && !leftInclusive0) {
return rangesIntersect(start1, leftInclusive1, start0, leftInclusive0, end1, rightInclusive1, end0, rightInclusive0);
}
// Exactly touching ranges intersect only if both touching endpoints are inclusive.
if (end0 == start1) return leftInclusive1 && rightInclusive0;
return end0 > start1;
}
/** Creates a non-persistent range marker for {@code [startOffset, endOffset]}. */
@Override
@NotNull
public RangeMarker createRangeMarker(int startOffset, int endOffset) {
return createRangeMarker(startOffset, endOffset, false);
}
/**
 * Creates a range marker; a persistent one if {@code surviveOnExternalChange} is set.
 * Invalid offsets are logged as an error rather than thrown, and the marker is created anyway.
 */
@Override
@NotNull
public RangeMarker createRangeMarker(int startOffset, int endOffset, boolean surviveOnExternalChange) {
if (!(0 <= startOffset && startOffset <= endOffset && endOffset <= getTextLength())) {
LOG.error("Incorrect offsets: startOffset=" + startOffset + ", endOffset=" + endOffset + ", text length=" + getTextLength());
}
return surviveOnExternalChange
? new PersistentRangeMarker(this, startOffset, endOffset, true)
: new RangeMarkerImpl(this, startOffset, endOffset, true);
}
/** Returns the stamp recorded at the last document modification. */
@Override
public long getModificationStamp() {
return myModificationStamp;
}
/** Overrides the modification stamp without changing the text. */
@Override
public void setModificationStamp(long modificationStamp) {
myModificationStamp = modificationStamp;
}
/** Replaces the whole document text and applies the given modification stamp. */
@Override
public void replaceText(@NotNull CharSequence chars, long newModificationStamp) {
replaceString(0, getTextLength(), chars, newModificationStamp, true); //TODO: optimization!!!
clearLineModificationFlags();
}
/** Number of currently registered document listeners. */
@Override
public int getListenersCount() {
return myDocumentListeners.size();
}
/**
 * Inserts {@code s} at {@code offset}.
 * Validates offset, write access and line separators, honors guarded blocks,
 * then applies the change and trims to the cyclic buffer size if one is set.
 *
 * @throws IndexOutOfBoundsException if the offset is outside the document
 * @throws ReadOnlyModificationException if the document is read-only
 */
@Override
public void insertString(int offset, @NotNull CharSequence s) {
if (offset < 0) throw new IndexOutOfBoundsException("Wrong offset: " + offset);
if (offset > getTextLength()) {
throw new IndexOutOfBoundsException(
"Wrong offset: " + offset + "; documentLength: " + getTextLength() + "; " + s.subSequence(Math.max(0, s.length() - 20), s.length())
);
}
assertWriteAccess();
assertValidSeparators(s);
if (!isWritable()) throw new ReadOnlyModificationException(this);
if (s.length() == 0) return;
// A zero-length query still detects guards whose greedy ends touch the offset.
RangeMarker marker = getRangeGuard(offset, offset);
if (marker != null) {
throwGuardedFragment(marker, offset, null, s.toString());
}
myText = myText.ensureChunked();
ImmutableText newText = myText.insert(offset, ImmutableText.valueOf(s));
// The inserted fragment is re-read from the new text so it shares its backing storage.
updateText(newText, offset, null, newText.subtext(offset, offset + s.length()), false, LocalTimeCounter.currentTime());
trimToSize();
}
/** When a cyclic buffer size is set, drops text from the beginning to stay within it. */
private void trimToSize() {
if (myBufferSize != 0 && getTextLength() > myBufferSize) {
deleteString(0, getTextLength() - myBufferSize);
}
}
/**
 * Deletes {@code [startOffset, endOffset)}. Validates bounds and write access,
 * honors guarded blocks, then applies the change.
 */
@Override
public void deleteString(int startOffset, int endOffset) {
assertBounds(startOffset, endOffset);
assertWriteAccess();
if (!isWritable()) throw new ReadOnlyModificationException(this);
if (startOffset == endOffset) return;
RangeMarker marker = getRangeGuard(startOffset, endOffset);
if (marker != null) {
throwGuardedFragment(marker, startOffset, myText.subSequence(startOffset, endOffset).toString(), null);
}
myText = myText.ensureChunked();
// Arguments are evaluated left-to-right, so both subtexts are taken from the pre-change myText.
updateText(myText.delete(startOffset, endOffset), startOffset, myText.subtext(startOffset, endOffset), null, false, LocalTimeCounter.currentTime());
}
/**
 * Moves the text in {@code [srcStart, srcEnd)} to {@code dstOffset},
 * implemented as an insert followed by a delete of the original range.
 * The destination offset must lie outside the source range.
 */
@Override
public void moveText(int srcStart, int srcEnd, int dstOffset) {
assertBounds(srcStart, srcEnd);
if (dstOffset == srcEnd) return;
ProperTextRange srcRange = new ProperTextRange(srcStart, srcEnd);
assert !srcRange.containsOffset(dstOffset) : "Can't perform text move from range [" +srcStart+ "; " + srcEnd+ ") to offset "+dstOffset;
String replacement = getCharsSequence().subSequence(srcStart, srcEnd).toString();
insertString(dstOffset, replacement);
// If we inserted before the source range, the source shifted right by the inserted length.
int shift = 0;
if (dstOffset < srcStart) {
shift = srcEnd - srcStart;
}
fireMoveText(srcStart + shift, srcEnd + shift, dstOffset);
deleteString(srcStart + shift, srcEnd + shift);
}
/**
 * Notifies listeners that implement {@code PrioritizedInternalDocumentListener}
 * that the text range {@code [start, end)} was moved to {@code newBase}.
 */
private void fireMoveText(int start, int end, int newBase) {
  for (DocumentListener candidate : getCachedListeners()) {
    if (!(candidate instanceof PrioritizedInternalDocumentListener)) {
      continue;
    }
    ((PrioritizedInternalDocumentListener)candidate).moveTextHappened(start, end, newBase);
  }
}
/** Replaces {@code [startOffset, endOffset)} with {@code s}, stamping with the current time. */
@Override
public void replaceString(int startOffset, int endOffset, @NotNull CharSequence s) {
replaceString(startOffset, endOffset, s, LocalTimeCounter.currentTime(), false);
}
/**
 * Core replace implementation. Shrinks the edit by trimming the common prefix and
 * suffix between the replaced region and the replacement, honors guarded blocks,
 * then applies the minimal change.
 *
 * @param wholeTextReplaced hint that the whole document is being replaced; also
 *        derived below when the trimmed edit still spans the entire text
 */
private void replaceString(int startOffset, int endOffset, @NotNull CharSequence s, final long newModificationStamp, boolean wholeTextReplaced) {
assertBounds(startOffset, endOffset);
assertWriteAccess();
assertValidSeparators(s);
if (!isWritable()) {
throw new ReadOnlyModificationException(this);
}
final int newStringLength = s.length();
final CharSequence chars = getCharsSequence();
int newStartInString = 0;
int newEndInString = newStringLength;
// Skip the common prefix of the old and new text.
while (newStartInString < newStringLength &&
startOffset < endOffset &&
s.charAt(newStartInString) == chars.charAt(startOffset)) {
startOffset++;
newStartInString++;
}
// Skip the common suffix.
while (endOffset > startOffset &&
newEndInString > newStartInString &&
s.charAt(newEndInString - 1) == chars.charAt(endOffset - 1)) {
newEndInString--;
endOffset--;
}
if (startOffset == 0 && endOffset == getTextLength()) {
wholeTextReplaced = true;
}
CharSequence changedPart = s.subSequence(newStartInString, newEndInString);
CharSequence sToDelete = myText.subtext(startOffset, endOffset);
RangeMarker guard = getRangeGuard(startOffset, endOffset);
if (guard != null) {
throwGuardedFragment(guard, startOffset, sToDelete.toString(), changedPart.toString());
}
ImmutableText newText;
if (wholeTextReplaced && s instanceof ImmutableText) {
// Whole-text replacement with an ImmutableText can reuse it directly.
newText = (ImmutableText)s;
}
else {
myText = myText.ensureChunked();
newText = myText.delete(startOffset, endOffset).insert(startOffset, changedPart);
// Re-read the changed part from the new text so it shares its backing storage.
changedPart = newText.subtext(startOffset, startOffset + changedPart.length());
}
updateText(newText, startOffset, sToDelete, changedPart, wholeTextReplaced, newModificationStamp);
trimToSize();
}
/**
 * Validates that {@code [startOffset, endOffset]} is a well-formed range within the document.
 *
 * @throws IndexOutOfBoundsException if either offset is outside the document
 * @throws IllegalArgumentException  if the end precedes the start
 */
private void assertBounds(final int startOffset, final int endOffset) {
  final int length = getTextLength();
  if (startOffset < 0 || startOffset > length) {
    throw new IndexOutOfBoundsException("Wrong startOffset: " + startOffset + "; documentLength: " + length);
  }
  if (endOffset < 0 || endOffset > length) {
    throw new IndexOutOfBoundsException("Wrong endOffset: " + endOffset + "; documentLength: " + length);
  }
  if (endOffset < startOffset) {
    throw new IllegalArgumentException(
      "endOffset < startOffset: " + endOffset + " < " + startOffset + "; documentLength: " + length);
  }
}
/** When threading assertions are enabled, verifies the caller holds the write lock. */
private void assertWriteAccess() {
if (myAssertThreading) {
final Application application = ApplicationManager.getApplication();
if (application != null) {
application.assertWriteAccessAllowed();
}
}
}
/** Rejects '\r' separators in incoming text unless this document accepts them. */
private void assertValidSeparators(@NotNull CharSequence s) {
if (myAcceptSlashR) return;
StringUtil.assertValidSeparators(s);
}
/**
 * Every document change follows this sequence:
 * <ol>
 * <li>all {@link #addDocumentListener(DocumentListener) registered listeners} are notified
 * {@link DocumentListener#beforeDocumentChange(DocumentEvent) before the change};</li>
 * <li>the change is performed;</li>
 * <li>all registered listeners are notified
 * {@link DocumentListener#documentChanged(DocumentEvent) after the change}.</li>
 * </ol>
 * <p/>
 * A 'before change' notification may itself attempt a new change. That is unsafe: imagine the
 * initial change replaces a range at the document end and the nested change removes text at
 * the end — by the time the initial change is actually applied, the document may no longer be
 * long enough to contain the target range.
 * <p/>
 * This method detects such a nested modification.
 *
 * @throws IllegalStateException if called during a nested document modification
 */
private void assertNotNestedModification() throws IllegalStateException {
if (myChangeInProgress) {
throw new IllegalStateException("Detected document modification from DocumentListener");
}
}
/** Raises a guarded-fragment violation when checking is active and not suppressed. */
private void throwGuardedFragment(@NotNull RangeMarker guard, int offset, String oldString, String newString) {
if (myCheckGuardedBlocks > 0 && !myGuardsSuppressed) {
DocumentEvent event = new DocumentEventImpl(this, offset, oldString, newString, myModificationStamp, false);
throw new ReadOnlyFragmentModificationException(event, guard);
}
}
/** Temporarily disables guarded-fragment exceptions (see {@link #unSuppressGuardedExceptions}). */
@Override
public void suppressGuardedExceptions() {
myGuardsSuppressed = true;
}
/** Re-enables guarded-fragment exceptions after {@link #suppressGuardedExceptions}. */
@Override
public void unSuppressGuardedExceptions() {
myGuardsSuppressed = false;
}
/** True while change events are being dispatched to listeners. */
@Override
public boolean isInEventsHandling() {
return myEventsHandling;
}
/** Clears the per-line "modified" flags and invalidates the frozen snapshot. */
@Override
public void clearLineModificationFlags() {
myLineSet = getLineSet().clearModificationFlags();
myFrozen = null;
}
/** Clears the "modified" flags for lines in {@code [startLine, endLine)} and invalidates the frozen snapshot. */
public void clearLineModificationFlags(int startLine, int endLine) {
myLineSet = getLineSet().clearModificationFlags(startLine, endLine);
myFrozen = null;
}
/**
 * Clears all line modification flags except on the given caret lines,
 * preserving their modified state if they were already modified.
 */
void clearLineModificationFlagsExcept(@NotNull int[] caretLines) {
// Remember which caret lines are currently flagged as modified.
IntArrayList modifiedLines = new IntArrayList(caretLines.length);
LineSet lineSet = getLineSet();
for (int line : caretLines) {
if (line >= 0 && line < lineSet.getLineCount() && lineSet.isModified(line)) {
modifiedLines.add(line);
}
}
// Clear everything, then restore the flags on those lines.
lineSet = lineSet.clearModificationFlags();
for (int i = 0; i < modifiedLines.size(); i++) {
lineSet = lineSet.setModified(modifiedLines.get(i));
}
myLineSet = lineSet;
myFrozen = null;
}
/**
 * Applies a text change: fires 'before' events, swaps in the new text,
 * then fires 'after' events. Guards against nested modification; the exact
 * point where the nesting flag is cleared depends on a temporary registry key.
 */
private void updateText(@NotNull ImmutableText newText,
int offset,
@Nullable CharSequence oldString,
@Nullable CharSequence newString,
boolean wholeTextReplaced,
long newModificationStamp) {
assertNotNestedModification();
boolean enableRecursiveModifications = Registry.is("enable.recursive.document.changes"); // temporary property, to remove in IDEA 16
myChangeInProgress = true;
try {
final DocumentEvent event;
try {
event = doBeforeChangedUpdate(offset, oldString, newString, wholeTextReplaced);
}
finally {
// With recursive changes allowed, unlock immediately after the 'before' notifications.
if (enableRecursiveModifications) {
myChangeInProgress = false;
}
}
myTextString = null;
ImmutableText prevText = myText;
myText = newText;
changedUpdate(event, newModificationStamp, prevText);
}
finally {
// Otherwise hold the lock until the 'after' notifications are done too.
if (!enableRecursiveModifications) {
myChangeInProgress = false;
}
}
}
/**
 * Pre-change phase: sanity-checks the backing file and command context, builds the
 * event and notifies listeners (in reverse registration/priority order) before the change.
 * Listener exceptions are logged and swallowed so one bad listener cannot block the edit.
 */
@NotNull
private DocumentEvent doBeforeChangedUpdate(int offset, CharSequence oldString, CharSequence newString, boolean wholeTextReplaced) {
Application app = ApplicationManager.getApplication();
if (app != null) {
FileDocumentManager manager = FileDocumentManager.getInstance();
VirtualFile file = manager.getFile(this);
if (file != null && !file.isValid()) {
LOG.error("File of this document has been deleted.");
}
}
assertInsideCommand();
getLineSet(); // initialize line set to track changed lines
DocumentEvent event = new DocumentEventImpl(this, offset, oldString, newString, myModificationStamp, wholeTextReplaced);
if (!ShutDownTracker.isShutdownHookRunning()) {
DocumentListener[] listeners = getCachedListeners();
for (int i = listeners.length - 1; i >= 0; i--) {
try {
listeners[i].beforeDocumentChange(event);
}
catch (Throwable e) {
LOG.error(e);
}
}
}
myEventsHandling = true;
return event;
}
/** When threading assertions are enabled, requires a command or undo-transparent action in progress. */
private void assertInsideCommand() {
if (!myAssertThreading) return;
CommandProcessor commandProcessor = CommandProcessor.getInstance();
if (!commandProcessor.isUndoTransparentActionInProgress() &&
commandProcessor.getCurrentCommand() == null) {
throw new IncorrectOperationException("Must not change document outside command or undo-transparent action. See com.intellij.openapi.command.WriteCommandAction or com.intellij.openapi.command.CommandProcessor");
}
}
/**
 * Post-change phase: updates the line set, tab tracking and modification stamp,
 * then notifies listeners after the change. Listener exceptions are logged and
 * swallowed; the events-handling flag is always cleared.
 */
private void changedUpdate(@NotNull DocumentEvent event, long newModificationStamp, ImmutableText prevText) {
try {
if (LOG.isDebugEnabled()) LOG.debug(event.toString());
myLineSet = getLineSet().update(prevText, event.getOffset(), event.getOffset() + event.getOldLength(), event.getNewFragment(), event.isWholeTextReplaced());
myFrozen = null;
if (myTabTrackingRequestors > 0) {
updateMightContainTabs(event.getNewFragment());
}
setModificationStamp(newModificationStamp);
if (!ShutDownTracker.isShutdownHookRunning()) {
DocumentListener[] listeners = getCachedListeners();
for (DocumentListener listener : listeners) {
try {
listener.documentChanged(event);
}
catch (Throwable e) {
LOG.error(e);
}
}
}
}
finally {
myEventsHandling = false;
}
}
/** Returns the full document text, computed under a read action. */
@NotNull
@Override
public String getText() {
return ApplicationManager.getApplication().runReadAction(new Computable<String>() {
@Override
public String compute() {
return doGetText();
}
});
}
/** Returns the document text, caching the String form via a SoftReference. */
@NotNull
private String doGetText() {
String s = SoftReference.dereference(myTextString);
if (s == null) {
myTextString = new SoftReference<String>(s = myText.toString());
}
return s;
}
/** Returns the text of the given range, computed under a read action. */
@NotNull
@Override
public String getText(@NotNull final TextRange range) {
return ApplicationManager.getApplication().runReadAction(new Computable<String>() {
@Override
public String compute() {
return myText.subSequence(range.getStartOffset(), range.getEndOffset()).toString();
}
});
}
/** Current document length in characters. */
@Override
public int getTextLength() {
return myText.length();
}
/** Returns a live view of the document text that reflects subsequent edits. */
@Override
@NotNull
public CharSequence getCharsSequence() {
return myMutableCharSequence;
}
/** Returns the current immutable text snapshot. */
@NotNull
@Override
public CharSequence getImmutableCharSequence() {
return myText;
}
// Breaks encapsulation, yet required by the current zero-latency typing implementation.
// TODO: remove once typing no longer needs to start write actions.
/** Returns the backing {@link ImmutableText} directly. */
@NotNull
public ImmutableText getImmutableText() {
return myText;
}
/** Registers a listener; duplicate registration is reported as an error. Invalidates the cached listener array. */
@Override
public void addDocumentListener(@NotNull DocumentListener listener) {
myCachedDocumentListeners.set(null);
if (myDocumentListeners.contains(listener)) {
LOG.error("Already registered: " + listener);
}
boolean added = myDocumentListeners.add(listener);
LOG.assertTrue(added, listener);
}
/** Registers a listener that is automatically removed when {@code parentDisposable} is disposed. */
@Override
public void addDocumentListener(@NotNull final DocumentListener listener, @NotNull Disposable parentDisposable) {
addDocumentListener(listener);
Disposer.register(parentDisposable, new DocumentListenerDisposable(listener, myCachedDocumentListeners, myDocumentListeners));
}
/** Removes a document listener when disposed; static so it holds no reference to the document itself. */
private static class DocumentListenerDisposable implements Disposable {
private final DocumentListener myListener;
private final Ref<DocumentListener[]> myCachedDocumentListenersRef;
private final List<DocumentListener> myDocumentListeners;
private DocumentListenerDisposable(@NotNull DocumentListener listener,
@NotNull Ref<DocumentListener[]> cachedDocumentListenersRef,
@NotNull List<DocumentListener> documentListeners) {
myListener = listener;
myCachedDocumentListenersRef = cachedDocumentListenersRef;
myDocumentListeners = documentListeners;
}
@Override
public void dispose() {
doRemoveDocumentListener(myListener, myCachedDocumentListenersRef, myDocumentListeners);
}
}
/** Unregisters a listener; removal of an unknown listener is reported as an error. */
@Override
public void removeDocumentListener(@NotNull DocumentListener listener) {
doRemoveDocumentListener(listener, myCachedDocumentListeners, myDocumentListeners);
}
/** Shared removal logic: invalidates the cached array, then removes from the live list. */
private static void doRemoveDocumentListener(@NotNull DocumentListener listener,
@NotNull Ref<DocumentListener[]> cachedDocumentListenersRef,
@NotNull List<DocumentListener> documentListeners) {
cachedDocumentListenersRef.set(null);
boolean success = documentListeners.remove(listener);
if (!success) {
LOG.error("Can't remove document listener (" + listener + "). Registered listeners: " + documentListeners);
}
}
/** Returns the 0-based line index containing {@code offset}. */
@Override
public int getLineNumber(final int offset) {
return getLineSet().findLineIndex(offset);
}
/** Returns an iterator over the document's lines. */
@Override
@NotNull
public LineIterator createLineIterator() {
return getLineSet().createIterator();
}
/** Start offset of the given 0-based line. */
@Override
public final int getLineStartOffset(final int line) {
if (line == 0) return 0; // otherwise it crashed for zero-length document
return getLineSet().getLineStart(line);
}
/** End offset of the given line, excluding its line separator. */
@Override
public final int getLineEndOffset(int line) {
if (getTextLength() == 0 && line == 0) return 0;
int result = getLineSet().getLineEnd(line) - getLineSeparatorLength(line);
assert result >= 0;
return result;
}
/** Length of the line separator terminating the given line (0 for the last line). */
@Override
public final int getLineSeparatorLength(int line) {
int separatorLength = getLineSet().getSeparatorLength(line);
assert separatorLength >= 0;
return separatorLength;
}
/** Number of lines in the document. */
@Override
public final int getLineCount() {
int lineCount = getLineSet().getLineCount();
assert lineCount >= 0;
return lineCount;
}
/**
 * Returns the listeners sorted by priority, rebuilding the cached array if it was
 * invalidated by a registration change.
 */
@NotNull
private DocumentListener[] getCachedListeners() {
DocumentListener[] cachedListeners = myCachedDocumentListeners.get();
if (cachedListeners == null) {
DocumentListener[] listeners = ArrayUtil.stripTrailingNulls(myDocumentListeners.toArray(new DocumentListener[myDocumentListeners.size()]));
Arrays.sort(listeners, PrioritizedDocumentListener.COMPARATOR);
cachedListeners = listeners;
myCachedDocumentListeners.set(cachedListeners);
}
return cachedListeners;
}
/** Notifies read-only listeners that a modification of this read-only document was attempted. */
@Override
public void fireReadOnlyModificationAttempt() {
for (EditReadOnlyListener listener : myReadOnlyListeners) {
listener.readOnlyModificationAttempt(this);
}
}
/** Registers a listener for read-only modification attempts. */
@Override
public void addEditReadOnlyListener(@NotNull EditReadOnlyListener listener) {
myReadOnlyListeners.add(listener);
}
/** Unregisters a read-only modification listener. */
@Override
public void removeEditReadOnlyListener(@NotNull EditReadOnlyListener listener) {
myReadOnlyListeners.remove(listener);
}
/** Registers a property change listener (delegates to the support object). */
@Override
public void addPropertyChangeListener(@NotNull PropertyChangeListener listener) {
myPropertyChangeSupport.addPropertyChangeListener(listener);
}
/** Unregisters a property change listener. */
@Override
public void removePropertyChangeListener(@NotNull PropertyChangeListener listener) {
myPropertyChangeSupport.removePropertyChangeListener(listener);
}
/** Sets the cyclic buffer limit; 0 disables trimming (see {@code trimToSize}). */
@Override
public void setCyclicBufferSize(int bufferSize) {
assert bufferSize >= 0 : bufferSize;
myBufferSize = bufferSize;
}
/**
 * Replaces the whole text, wrapping the change in a command unless an
 * undo-transparent action is already in progress; then clears line modification flags.
 */
@Override
public void setText(@NotNull final CharSequence text) {
Runnable runnable = new Runnable() {
@Override
public void run() {
replaceString(0, getTextLength(), text, LocalTimeCounter.currentTime(), true);
}
};
if (CommandProcessor.getInstance().isUndoTransparentActionInProgress()) {
runnable.run();
}
else {
CommandProcessor.getInstance().executeCommand(null, runnable, "", DocCommandGroupId.noneGroupId(this));
}
clearLineModificationFlags();
}
/** Convenience overload: creates a non-persistent marker covering {@code textRange}. */
@Override
@NotNull
public RangeMarker createRangeMarker(@NotNull final TextRange textRange) {
return createRangeMarker(textRange.getStartOffset(), textRange.getEndOffset());
}
/** True while a bulk update is in progress (see {@link #setInBulkUpdate}). */
@Override
public final boolean isInBulkUpdate() {
return myDoingBulkUpdate;
}
/**
 * Enters or leaves bulk update mode, publishing started/finished events on the message bus.
 * Note the ordering: the flag is raised after the 'started' event but lowered before the
 * 'finished' event. Re-entrant status updates from listeners are rejected.
 */
@Override
public final void setInBulkUpdate(boolean value) {
if (myAssertThreading) {
ApplicationManager.getApplication().assertIsDispatchThread();
}
if (myUpdatingBulkModeStatus) {
throw new IllegalStateException("Detected bulk mode status update from DocumentBulkUpdateListener");
}
if (myDoingBulkUpdate == value) {
return;
}
myUpdatingBulkModeStatus = true;
try {
if (value) {
getPublisher().updateStarted(this);
myDoingBulkUpdate = true;
}
else {
myDoingBulkUpdate = false;
getPublisher().updateFinished(this);
}
}
finally {
myUpdatingBulkModeStatus = false;
}
}
/** Lazy-initialization holder for the bulk-update publisher (created on first use, thread-safe by class loading). */
private static class DocumentBulkUpdateListenerHolder {
private static final DocumentBulkUpdateListener ourBulkChangePublisher =
ApplicationManager.getApplication().getMessageBus().syncPublisher(DocumentBulkUpdateListener.TOPIC);
}
/** Returns the lazily-created bulk-update publisher. */
@NotNull
private static DocumentBulkUpdateListener getPublisher() {
return DocumentBulkUpdateListenerHolder.ourBulkChangePublisher;
}
/** Processes every registered range marker (over the whole document). */
@Override
public boolean processRangeMarkers(@NotNull Processor<RangeMarker> processor) {
return processRangeMarkersOverlappingWith(0, getTextLength(), processor);
}
/**
 * Processes range markers from both trees that overlap {@code [start, end]},
 * merged in start-offset order; the merging iterator is always disposed.
 */
@Override
public boolean processRangeMarkersOverlappingWith(int start, int end, @NotNull Processor<RangeMarker> processor) {
TextRangeInterval interval = new TextRangeInterval(start, end);
IntervalTreeImpl.PeekableIterator<RangeMarkerEx> iterator = IntervalTreeImpl
.mergingOverlappingIterator(myRangeMarkers, interval, myPersistentRangeMarkers, interval, RangeMarker.BY_START_OFFSET);
try {
return ContainerUtil.process(iterator, processor);
}
finally {
iterator.dispose();
}
}
/**
 * Produces a debug dump of the per-line offset intervals,
 * formatted as "lineIndex: start-end, " entries with the final space trimmed.
 */
@NotNull
public String dumpState() {
  @NonNls StringBuilder dump = new StringBuilder(", intervals:\n");
  final int lineCount = getLineCount();
  for (int i = 0; i < lineCount; i++) {
    dump.append(i).append(": ").append(getLineStartOffset(i)).append("-").append(getLineEndOffset(i)).append(", ");
  }
  // Trim the trailing character (the space after the last comma).
  if (dump.length() > 0) {
    dump.deleteCharAt(dump.length() - 1);
  }
  return dump.toString();
}
/** Identifies the document by its backing virtual file (or {@code null} if none). */
@Override
public String toString() {
return "DocumentImpl[" + FileDocumentManager.getInstance().getFile(this) + "]";
}
/** Starts tab tracking (ref-counted); on the first request, scans the current text for tabs. */
void requestTabTracking() {
if (myAssertThreading) {
ApplicationManager.getApplication().assertIsDispatchThread();
}
if (myTabTrackingRequestors++ == 0) {
myMightContainTabs = false;
updateMightContainTabs(myText);
}
}
/** Stops tab tracking; once no requestors remain, conservatively assumes tabs may be present. */
void giveUpTabTracking() {
if (myAssertThreading) {
ApplicationManager.getApplication().assertIsDispatchThread();
}
if (--myTabTrackingRequestors == 0) {
myMightContainTabs = true;
}
}
/** Conservative flag: may report {@code true} even when no tab is present. */
boolean mightContainTabs() {
return myMightContainTabs;
}
/** Once the flag is set it stays set; otherwise scans the new fragment for a tab character. */
private void updateMightContainTabs(CharSequence text) {
if (!myMightContainTabs) {
myMightContainTabs = StringUtil.contains(text, 0, text.length(), '\t');
}
}
/**
 * Returns an immutable snapshot of the document (text, line set, stamp, cached string),
 * using double-checked locking on {@code myLineSetLock}. The snapshot is cached in
 * {@code myFrozen}, which modification paths reset to {@code null}.
 */
@NotNull
public FrozenDocument freeze() {
  FrozenDocument frozen = myFrozen;
  if (frozen == null) {
    synchronized (myLineSetLock) {
      frozen = myFrozen;
      if (frozen == null) {
        frozen = new FrozenDocument(myText, getLineSet(), myModificationStamp, SoftReference.dereference(myTextString));
        // FIX: cache the snapshot. Previously the new FrozenDocument was never stored
        // into myFrozen, so the double-checked locking was ineffective and a fresh
        // snapshot was rebuilt on every call.
        myFrozen = frozen;
      }
    }
  }
  return frozen;
}
}
| |
/**
* Copyright (c) 2013-2014 Oculus Info Inc.
* http://www.oculusinfo.com/
*
* Released under the MIT License.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
* of the Software, and to permit persons to whom the Software is furnished to do
* so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package oculus.aperture.graph.aggregation.util;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import oculus.aperture.common.data.BoundedLinearQuantizedRange;
import oculus.aperture.spi.common.Link;
import oculus.aperture.spi.common.Node;
import oculus.aperture.spi.common.data.QuantizedRange;
import org.apache.commons.lang3.time.StopWatch;
import org.jgrapht.DirectedGraph;
import org.jgrapht.alg.ConnectivityInspector;
import org.jgrapht.graph.DefaultDirectedGraph;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Static graph-aggregation helpers: connected-subgraph discovery, neighbour
 * lookup, degree computation and node-weight range quantization.
 */
public class AggregationUtilities {

	private final static Logger logger = LoggerFactory.getLogger(AggregationUtilities.class);

	/** Default number of bins used when quantizing node weights. */
	public static final int DEFAULT_NUM_WEIGHT_BINS = 5;

	/** Utility class; not instantiable. */
	private AggregationUtilities() {
	}

	/**
	 * Splits the given graph into its connected components.
	 *
	 * @param nodeMap all nodes of the graph, keyed by node id
	 * @param linkMap all links of the graph, keyed by link id
	 * @return one (nodes, links) pair per connected component; if the graph is
	 *         already fully connected, the original maps are returned as the
	 *         single element
	 */
	public static Collection<Pair<Map<String, Node>, Map<String, Link>>> findSubgraphs(
		Map<String, Node> nodeMap,
		Map<String, Link> linkMap
	) {
		logger.debug("Running subgraph finding algorithm...");

		StopWatch overallStopWatch = new StopWatch();
		overallStopWatch.start();

		StopWatch stopWatch = new StopWatch();
		stopWatch.start();

		// Mirror our nodes and links into a JGraphT graph.
		DirectedGraph<Node, Link> graph = new DefaultDirectedGraph<Node, Link>(Link.class);
		for (Node node : nodeMap.values()) {
			graph.addVertex(node);
		}
		for (Link link : linkMap.values()) {
			graph.addEdge(
				nodeMap.get(link.getSourceId()),
				nodeMap.get(link.getTargetId()),
				link
			);
		}

		stopWatch.stop();
		logger.debug("Graph creation time: " + stopWatch.toString());
		stopWatch.reset();
		stopWatch.start();

		// Let JGraphT compute the connected vertex sets.
		ConnectivityInspector<Node, Link> inspector = new ConnectivityInspector<Node, Link>(graph);
		List<Set<Node>> connectedSets = inspector.connectedSets();

		stopWatch.stop();
		logger.debug("Connectivity calculation time: " + stopWatch.toString());
		stopWatch.reset();
		stopWatch.start();

		// If the original graph is fully connected, simply return it unchanged.
		if (connectedSets.size() == 1) {
			Pair<Map<String, Node>, Map<String, Link>> fullGraph =
				new Pair<Map<String, Node>, Map<String, Link>>(nodeMap, linkMap);
			List<Pair<Map<String, Node>, Map<String, Link>>> returnList =
				new ArrayList<Pair<Map<String, Node>, Map<String, Link>>>(1);
			returnList.add(fullGraph);
			return returnList;
		}

		// Build a (nodes, links) pair for every connected set.
		List<Pair<Map<String, Node>, Map<String, Link>>> returnList =
			new ArrayList<Pair<Map<String, Node>, Map<String, Link>>>(connectedSets.size());
		for (Set<Node> subgraph : connectedSets) {
			Map<String, Node> subgraphNodeMap = new HashMap<String, Node>(subgraph.size());
			Map<String, Link> subgraphLinkMap = new HashMap<String, Link>(subgraph.size());

			for (Node source : subgraph) {
				subgraphNodeMap.put(source.getId(), source);
				for (Node target : subgraph) {
					Set<Link> links = graph.getAllEdges(source, target);
					if (links == null || links.isEmpty()) {
						continue;
					}
					for (Link link : links) {
						subgraphLinkMap.put(link.getId(), link);
					}
				}
			}
			returnList.add(new Pair<Map<String, Node>, Map<String, Link>>(
				subgraphNodeMap,
				subgraphLinkMap
			));
		}

		stopWatch.stop();
		logger.debug("Subgraph partitioning time: " + stopWatch.toString());
		// FIX: removed a dangling stopWatch.reset()/start() here — the timer was
		// restarted after the last measured interval and never stopped or read.

		overallStopWatch.stop();
		logger.debug("Finished subgraph finding algorithm.");
		logger.debug("Algorithm took " + overallStopWatch.toString());
		overallStopWatch.reset();

		return returnList;
	}

	/**
	 * Collects the distinct neighbours of {@code node} that are present in
	 * {@code nodeMap}, following the node's incident links in either direction.
	 */
	public static Collection<Node> getNeighbors(
		Node node,
		Map<String, Node> nodeMap
	) {
		Map<String, Node> neighbors = new HashMap<String, Node>();

		for (Link link : node.getIncidentLinks()) {
			// Take whichever endpoint is not this node.
			String id;
			if (!link.getSourceId().equalsIgnoreCase(node.getId())) {
				id = link.getSourceId();
			} else {
				id = link.getTargetId();
			}
			Node neighbour = nodeMap.get(id);
			if (neighbour != null) {
				neighbors.put(neighbour.getId(), neighbour);
			}
		}
		return neighbors.values();
	}

	/**
	 * Counts the links in {@code linkMap} incident to {@code node}, skipping
	 * self-loops; parallel links are each counted.
	 *
	 * @param nodeMap unused; retained for API compatibility
	 */
	public static double getTotalDegree(
		Node node,
		Map<String, Node> nodeMap,
		Map<String, Link> linkMap
	) {
		double degree = 0;
		for (Link link : linkMap.values()) {
			if (link.getSourceId().equalsIgnoreCase(node.getId()) ||
				link.getTargetId().equalsIgnoreCase(node.getId())
			) {
				if (link.getSourceId().equalsIgnoreCase(link.getTargetId())) continue;
				degree++;
			}
		}
		return degree;
	}

	/**
	 * Expands per-node-type quantized weight ranges with the weights of the given
	 * nodes, creating missing ranges (and the map itself) on demand.
	 */
	public static Map<String, QuantizedRange> updateNodeWeightRanges(Map<String, QuantizedRange> weightRanges, Collection<Node> nodes) {
		// If this hasn't been supplied, create it.
		if (weightRanges == null) {
			weightRanges = new HashMap<String, QuantizedRange>();
		}
		for (Node node : nodes) {
			QuantizedRange weightRange = weightRanges.get(node.getType());
			if (weightRange == null) {
				weightRanges.put(node.getType(),
					weightRange = new BoundedLinearQuantizedRange(DEFAULT_NUM_WEIGHT_BINS));
			}
			weightRange.expand(node.getWeight());
		}
		return weightRanges;
	}
}
| |
/*
* Copyright 2012 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.util;
import io.netty.util.internal.PlatformDependent;
import io.netty.util.internal.logging.InternalLogger;
import io.netty.util.internal.logging.InternalLoggerFactory;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.net.Inet4Address;
import java.net.Inet6Address;
import java.net.InetAddress;
import java.net.NetworkInterface;
import java.net.SocketException;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.List;
import java.util.StringTokenizer;
/**
* A class that holds a number of network-related constants.
* <p/>
* This class borrowed some of its methods from a modified fork of the
* <a href="http://svn.apache.org/repos/asf/harmony/enhanced/java/branches/java6/classlib/modules/luni/
* src/main/java/org/apache/harmony/luni/util/Inet6Util.java">Inet6Util class</a> which was part of Apache Harmony.
*/
public final class NetUtil {
/**
* The {@link Inet4Address} that represents the IPv4 loopback address '127.0.0.1'
*/
public static final Inet4Address LOCALHOST4;
/**
* The {@link Inet6Address} that represents the IPv6 loopback address '::1'
*/
public static final Inet6Address LOCALHOST6;
/**
* The {@link InetAddress} that represents the loopback address. If IPv6 stack is available, it will refer to
* {@link #LOCALHOST6}. Otherwise, {@link #LOCALHOST4}.
*/
public static final InetAddress LOCALHOST;
/**
* The loopback {@link NetworkInterface} of the current machine
*/
public static final NetworkInterface LOOPBACK_IF;
/**
* The SOMAXCONN value of the current machine. If failed to get the value, {@code 200} is used as a
* default value for Windows or {@code 128} for others.
*/
public static final int SOMAXCONN;
/**
* This defines how many words (represented as ints) are needed to represent an IPv6 address
*/
private static final int IPV6_WORD_COUNT = 8;
/**
* The maximum number of characters for an IPV6 string with no scope
*/
private static final int IPV6_MAX_CHAR_COUNT = 39;
/**
* Number of bytes needed to represent an IPv6 value
*/
private static final int IPV6_BYTE_COUNT = 16;
/**
* Maximum amount of value adding characters in between IPV6 separators
*/
private static final int IPV6_MAX_CHAR_BETWEEN_SEPARATOR = 4;
/**
* Minimum number of separators that must be present in an IPv6 string
*/
private static final int IPV6_MIN_SEPARATORS = 2;
/**
* Maximum number of separators that must be present in an IPv6 string
*/
private static final int IPV6_MAX_SEPARATORS = 8;
/**
* Number of bytes needed to represent an IPv4 value
*/
private static final int IPV4_BYTE_COUNT = 4;
/**
* Maximum amount of value adding characters in between IPV4 separators
*/
private static final int IPV4_MAX_CHAR_BETWEEN_SEPARATOR = 3;
/**
* Number of separators that must be present in an IPv4 string
*/
private static final int IPV4_SEPARATORS = 3;
/**
* The logger being used by this class
*/
private static final InternalLogger logger = InternalLoggerFactory.getInstance(NetUtil.class);
// Eagerly resolves the loopback address/interface and the platform SOMAXCONN value
// at class-load time so the public constants above are always populated.
static {
    byte[] LOCALHOST4_BYTES = {127, 0, 0, 1};
    byte[] LOCALHOST6_BYTES = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1};
    // Create IPv4 loopback address.
    Inet4Address localhost4 = null;
    try {
        localhost4 = (Inet4Address) InetAddress.getByAddress(LOCALHOST4_BYTES);
    } catch (Exception e) {
        // We should not get here as long as the length of the address is correct.
        PlatformDependent.throwException(e);
    }
    LOCALHOST4 = localhost4;
    // Create IPv6 loopback address.
    Inet6Address localhost6 = null;
    try {
        localhost6 = (Inet6Address) InetAddress.getByAddress(LOCALHOST6_BYTES);
    } catch (Exception e) {
        // We should not get here as long as the length of the address is correct.
        PlatformDependent.throwException(e);
    }
    LOCALHOST6 = localhost6;
    // Retrieve the list of available network interfaces.
    List<NetworkInterface> ifaces = new ArrayList<NetworkInterface>();
    try {
        for (Enumeration<NetworkInterface> i = NetworkInterface.getNetworkInterfaces(); i.hasMoreElements();) {
            NetworkInterface iface = i.nextElement();
            // Use the interface with proper INET addresses only.
            if (iface.getInetAddresses().hasMoreElements()) {
                ifaces.add(iface);
            }
        }
    } catch (SocketException e) {
        logger.warn("Failed to retrieve the list of available network interfaces", e);
    }
    // Find the first loopback interface available from its INET address (127.0.0.1 or ::1)
    // Note that we do not use NetworkInterface.isLoopback() in the first place because it takes long time
    // on a certain environment. (e.g. Windows with -Djava.net.preferIPv4Stack=true)
    NetworkInterface loopbackIface = null;
    InetAddress loopbackAddr = null;
    loop: for (NetworkInterface iface: ifaces) {
        for (Enumeration<InetAddress> i = iface.getInetAddresses(); i.hasMoreElements();) {
            InetAddress addr = i.nextElement();
            if (addr.isLoopbackAddress()) {
                // Found
                loopbackIface = iface;
                loopbackAddr = addr;
                break loop;
            }
        }
    }
    // If failed to find the loopback interface from its INET address, fall back to isLoopback().
    if (loopbackIface == null) {
        try {
            for (NetworkInterface iface: ifaces) {
                if (iface.isLoopback()) {
                    Enumeration<InetAddress> i = iface.getInetAddresses();
                    if (i.hasMoreElements()) {
                        // Found the one with INET address.
                        loopbackIface = iface;
                        loopbackAddr = i.nextElement();
                        break;
                    }
                }
            }
            if (loopbackIface == null) {
                logger.warn("Failed to find the loopback interface");
            }
        } catch (SocketException e) {
            logger.warn("Failed to find the loopback interface", e);
        }
    }
    if (loopbackIface != null) {
        // Found the loopback interface with an INET address.
        logger.debug(
                "Loopback interface: {} ({}, {})",
                loopbackIface.getName(), loopbackIface.getDisplayName(), loopbackAddr.getHostAddress());
    } else {
        // Could not find the loopback interface, but we can't leave LOCALHOST as null.
        // Use LOCALHOST6 or LOCALHOST4, preferably the IPv6 one.
        if (loopbackAddr == null) {
            try {
                // Prefer IPv6 only when an interface is actually bound to ::1.
                if (NetworkInterface.getByInetAddress(LOCALHOST6) != null) {
                    logger.debug("Using hard-coded IPv6 localhost address: {}", localhost6);
                    loopbackAddr = localhost6;
                }
            } catch (Exception e) {
                // Ignore
            } finally {
                if (loopbackAddr == null) {
                    logger.debug("Using hard-coded IPv4 localhost address: {}", localhost4);
                    loopbackAddr = localhost4;
                }
            }
        }
    }
    // Note: LOOPBACK_IF may still be null here if no loopback interface was found.
    LOOPBACK_IF = loopbackIface;
    LOCALHOST = loopbackAddr;
    // Determine the default somaxconn (server socket backlog) value of the platform.
    // The known defaults:
    // - Windows NT Server 4.0+: 200
    // - Linux and Mac OS X: 128
    int somaxconn = PlatformDependent.isWindows() ? 200 : 128;
    File file = new File("/proc/sys/net/core/somaxconn");
    if (file.exists()) {
        BufferedReader in = null;
        try {
            // On Linux, the kernel-configured value takes precedence over the default.
            in = new BufferedReader(new FileReader(file));
            somaxconn = Integer.parseInt(in.readLine());
            if (logger.isDebugEnabled()) {
                logger.debug("{}: {}", file, somaxconn);
            }
        } catch (Exception e) {
            logger.debug("Failed to get SOMAXCONN from: {}", file, e);
        } finally {
            if (in != null) {
                try {
                    in.close();
                } catch (Exception e) {
                    // Ignored.
                }
            }
        }
    } else {
        if (logger.isDebugEnabled()) {
            logger.debug("{}: {} (non-existent)", file, somaxconn);
        }
    }
    SOMAXCONN = somaxconn;
}
/**
 * Creates an byte[] based on an ipAddressString. No error handling is
 * performed here.
 *
 * <p>Returns 4 bytes for a valid IPv4 dotted-quad string, 16 bytes for a valid IPv6
 * string (surrounding brackets and a trailing {@code %scope} suffix are stripped
 * first), or {@code null} when the string is neither.
 */
public static byte[] createByteArrayFromIpAddressString(String ipAddressString) {
    if (isValidIpV4Address(ipAddressString)) {
        StringTokenizer tokenizer = new StringTokenizer(ipAddressString, ".");
        String token;
        int tempInt;
        byte[] byteAddress = new byte[IPV4_BYTE_COUNT];
        // Each dotted-quad component becomes one byte of the result.
        for (int i = 0; i < IPV4_BYTE_COUNT; i ++) {
            token = tokenizer.nextToken();
            tempInt = Integer.parseInt(token);
            byteAddress[i] = (byte) tempInt;
        }
        return byteAddress;
    }
    if (isValidIpV6Address(ipAddressString)) {
        // Strip surrounding brackets, e.g. "[::1]" -> "::1".
        if (ipAddressString.charAt(0) == '[') {
            ipAddressString = ipAddressString.substring(1, ipAddressString.length() - 1);
        }
        // Strip the scope suffix, e.g. "fe80::1%1" -> "fe80::1".
        int percentPos = ipAddressString.indexOf('%');
        if (percentPos >= 0) {
            ipAddressString = ipAddressString.substring(0, percentPos);
        }
        StringTokenizer tokenizer = new StringTokenizer(ipAddressString, ":.", true);
        ArrayList<String> hexStrings = new ArrayList<String>();
        ArrayList<String> decStrings = new ArrayList<String>();
        String token = "";
        String prevToken = "";
        int doubleColonIndex = -1; // If a double colon exists, we need to
                                   // insert 0s.
        // Go through the tokens, including the seperators ':' and '.'
        // When we hit a : or . the previous token will be added to either
        // the hex list or decimal list. In the case where we hit a ::
        // we will save the index of the hexStrings so we can add zeros
        // in to fill out the string
        while (tokenizer.hasMoreTokens()) {
            prevToken = token;
            token = tokenizer.nextToken();
            if (":".equals(token)) {
                if (":".equals(prevToken)) {
                    doubleColonIndex = hexStrings.size();
                } else if (!prevToken.isEmpty()) {
                    hexStrings.add(prevToken);
                }
            } else if (".".equals(token)) {
                decStrings.add(prevToken);
            }
        }
        // Flush the final token, which was not followed by a separator inside the loop.
        if (":".equals(prevToken)) {
            if (":".equals(token)) {
                doubleColonIndex = hexStrings.size();
            } else {
                hexStrings.add(token);
            }
        } else if (".".equals(prevToken)) {
            decStrings.add(token);
        }
        // figure out how many hexStrings we should have
        // also check if it is a IPv4 address
        int hexStringsLength = 8;
        // If we have an IPv4 address tagged on at the end, subtract
        // 4 bytes, or 2 hex words from the total
        if (!decStrings.isEmpty()) {
            hexStringsLength -= 2;
        }
        // if we hit a double Colon add the appropriate hex strings
        if (doubleColonIndex != -1) {
            int numberToInsert = hexStringsLength - hexStrings.size();
            for (int i = 0; i < numberToInsert; i ++) {
                hexStrings.add(doubleColonIndex, "0");
            }
        }
        byte[] ipByteArray = new byte[IPV6_BYTE_COUNT];
        // Finally convert these strings to bytes...
        for (int i = 0; i < hexStrings.size(); i ++) {
            convertToBytes(hexStrings.get(i), ipByteArray, i << 1);
        }
        // Now if there are any decimal values, we know where they go...
        // (an embedded IPv4 tail always occupies the last 4 bytes, offset 12..15)
        for (int i = 0; i < decStrings.size(); i ++) {
            ipByteArray[i + 12] = (byte) (Integer.parseInt(decStrings.get(i)) & 255);
        }
        return ipByteArray;
    }
    return null;
}
/**
 * Converts a 4 character hex word into a 2 byte word equivalent.
 * Words shorter than 4 characters are treated as left-padded with zeros;
 * the two target bytes are zeroed before being filled.
 */
private static void convertToBytes(String hexWord, byte[] ipByteArray, int byteIndex) {
    int hexWordLength = hexWord.length();
    int hexWordIndex = 0;
    ipByteArray[byteIndex] = 0;
    ipByteArray[byteIndex + 1] = 0;
    int charValue;
    // high order 4 bits of first byte
    if (hexWordLength > 3) {
        charValue = getIntValue(hexWord.charAt(hexWordIndex ++));
        ipByteArray[byteIndex] |= charValue << 4;
    }
    // low order 4 bits of the first byte
    if (hexWordLength > 2) {
        charValue = getIntValue(hexWord.charAt(hexWordIndex ++));
        ipByteArray[byteIndex] |= charValue;
    }
    // high order 4 bits of second byte
    if (hexWordLength > 1) {
        charValue = getIntValue(hexWord.charAt(hexWordIndex ++));
        ipByteArray[byteIndex + 1] |= charValue << 4;
    }
    // low order 4 bits of the second byte (the last hex character is always present)
    charValue = getIntValue(hexWord.charAt(hexWordIndex));
    ipByteArray[byteIndex + 1] |= charValue & 15;
}
/**
 * Returns the numeric value of a hexadecimal digit character (case-insensitive),
 * or {@code 0} when the character is not a hex digit. Callers are expected to
 * pre-validate their input; the zero fallback mirrors the original contract.
 */
static int getIntValue(char c) {
    // ASCII decimal digits map directly.
    if (c >= '0' && c <= '9') {
        return c - '0';
    }
    // Fold case exactly as the original did, then map 'a'..'f' to 10..15.
    final char lower = Character.toLowerCase(c);
    if (lower >= 'a' && lower <= 'f') {
        return lower - 'a' + 10;
    }
    // Anything else is not a hex digit.
    return 0;
}
/**
 * Takes a string and parses it to see if it is a valid IPv6 address.
 *
 * <p>Accepts an optional surrounding bracket pair ({@code [...]}), an embedded IPv4
 * tail ({@code x:x:x:x:x:x:d.d.d.d}) and a numeric {@code %} scope suffix.
 *
 * @return true if the string represents a valid IPv6 address, false otherwise
 */
public static boolean isValidIpV6Address(String ipAddress) {
    int length = ipAddress.length();
    boolean doubleColon = false;
    int numberOfColons = 0;
    int numberOfPeriods = 0;
    int numberOfPercent = 0;
    StringBuilder word = new StringBuilder();
    char c = 0;
    char prevChar;
    int offset = 0; // offset for [] ip addresses
    if (length < 2) {
        return false;
    }
    for (int i = 0; i < length; i ++) {
        prevChar = c;
        c = ipAddress.charAt(i);
        switch (c) {
            // case for an open bracket [x:x:x:...x]
            case '[':
                if (i != 0) {
                    return false; // must be first character
                }
                if (ipAddress.charAt(length - 1) != ']') {
                    return false; // must have a close ]
                }
                offset = 1;
                if (length < 4) {
                    return false;
                }
                break;
            // case for a closed bracket at end of IP [x:x:x:...x]
            case ']':
                if (i != length - 1) {
                    return false; // must be last charcter
                }
                if (ipAddress.charAt(0) != '[') {
                    return false; // must have a open [
                }
                break;
            // case for the last 32-bits represented as IPv4 x:x:x:x:x:x:d.d.d.d
            case '.':
                numberOfPeriods ++;
                if (numberOfPeriods > 3) {
                    return false;
                }
                if (!isValidIp4Word(word.toString())) {
                    return false;
                }
                if (numberOfColons != 6 && !doubleColon) {
                    return false;
                }
                // a special case ::1:2:3:4:5:d.d.d.d allows 7 colons with an
                // IPv4 ending, otherwise 7 :'s is bad
                if (numberOfColons == 7 && ipAddress.charAt(offset) != ':' &&
                    ipAddress.charAt(1 + offset) != ':') {
                    return false;
                }
                word.delete(0, word.length());
                break;
            case ':':
                // FIX "IP6 mechanism syntax #ip6-bad1"
                // An IPV6 address cannot start with a single ":".
                // Either it can starti with "::" or with a number.
                // NOTE(review): "ipAddress.length() <= i" can never be true inside this
                // loop (i < length always holds); it looks like an intended bounds guard
                // for charAt(i + 1) — confirm whether "length <= i + 1" was meant.
                if (i == offset && (ipAddress.length() <= i || ipAddress.charAt(i + 1) != ':')) {
                    return false;
                }
                // END FIX "IP6 mechanism syntax #ip6-bad1"
                numberOfColons ++;
                if (numberOfColons > 7) {
                    return false;
                }
                if (numberOfPeriods > 0) {
                    return false;
                }
                if (prevChar == ':') {
                    // Only one "::" compression is allowed per address.
                    if (doubleColon) {
                        return false;
                    }
                    doubleColon = true;
                }
                word.delete(0, word.length());
                break;
            case '%':
                if (numberOfColons == 0) {
                    return false;
                }
                numberOfPercent ++;
                // validate that the stuff after the % is valid
                if (i + 1 >= length) {
                    // in this case the percent is there but no number is
                    // available
                    return false;
                }
                try {
                    if (Integer.parseInt(ipAddress.substring(i + 1)) < 0) {
                        return false;
                    }
                } catch (NumberFormatException e) {
                    // right now we just support an integer after the % so if
                    // this is not
                    // what is there then return
                    return false;
                }
                break;
            default:
                // Characters after a valid '%' scope were already validated above,
                // so only pre-scope characters are checked here.
                if (numberOfPercent == 0) {
                    // NOTE(review): "word != null" is always true (word is assigned once above).
                    if (word != null && word.length() > 3) {
                        return false;
                    }
                    if (!isValidHexChar(c)) {
                        return false;
                    }
                }
                word.append(c);
        }
    }
    // Check if we have an IPv4 ending
    if (numberOfPeriods > 0) {
        // There is a test case with 7 colons and valid ipv4 this should resolve it
        if (numberOfPeriods != 3 || !(isValidIp4Word(word.toString()) && numberOfColons < 7)) {
            return false;
        }
    } else {
        // If we're at then end and we haven't had 7 colons then there is a
        // problem unless we encountered a doubleColon
        if (numberOfColons != 7 && !doubleColon) {
            return false;
        }
        // If we have an empty word at the end, it means we ended in either
        // a : or a .
        // If we did not end in :: then this is invalid
        if (numberOfPercent == 0) {
            if (word.length() == 0 && ipAddress.charAt(length - 1 - offset) == ':' &&
                ipAddress.charAt(length - 2 - offset) != ':') {
                return false;
            }
        }
    }
    return true;
}
/**
 * Checks whether {@code word} is a valid IPv4 dotted-quad component: one to three
 * ASCII decimal digits whose numeric value does not exceed 255.
 */
public static boolean isValidIp4Word(String word) {
    final int len = word.length();
    // A quad component is 1..3 characters long.
    if (len < 1 || len > 3) {
        return false;
    }
    // Every character must be an ASCII digit.
    for (int idx = 0; idx < len; idx++) {
        final char ch = word.charAt(idx);
        if (ch < '0' || ch > '9') {
            return false;
        }
    }
    // At most 3 digits, so parseInt cannot overflow here.
    return Integer.parseInt(word) <= 255;
}
/**
 * Returns {@code true} when {@code c} is an ASCII hexadecimal digit
 * ({@code 0-9}, {@code a-f} or {@code A-F}).
 */
private static boolean isValidHexChar(char c) {
    if (c >= '0' && c <= '9') {
        return true;
    }
    // Only 'A'..'F' lower-case into 'a'..'f', so this matches both cases.
    final char lower = Character.toLowerCase(c);
    return lower >= 'a' && lower <= 'f';
}
/**
 * Returns {@code true} when {@code c} is an ASCII decimal digit ({@code '0'}..{@code '9'}).
 * Deliberately ASCII-only, unlike {@link Character#isDigit(char)}.
 */
private static boolean isValidNumericChar(char c) {
    return '0' <= c && c <= '9';
}
/**
 * Takes a string and parses it to see if it is a valid IPV4 address.
 *
 * @return true, if the string represents an IPV4 address in dotted
 *         notation, false otherwise
 */
public static boolean isValidIpV4Address(String value) {
    final int len = value.length();
    // "255.255.255.255" is the longest possible dotted-quad.
    if (len > 15) {
        return false;
    }
    int dots = 0;       // number of '.' separators seen so far
    int digits = 0;     // digits accumulated in the current octet
    int octet = 0;      // numeric value of the current octet
    for (int idx = 0; idx < len; idx++) {
        final char ch = value.charAt(idx);
        if (ch == '.') {
            // A separator ends the current octet: it must be non-empty,
            // at most the 3rd dot, and its value must fit in a byte.
            if (++dots > 3 || digits == 0 || octet > 255) {
                return false;
            }
            digits = 0;
            octet = 0;
        } else if (!Character.isDigit(ch)) {
            return false;
        } else {
            // Reject a 4th digit in one octet.
            if (digits == 3) {
                return false;
            }
            digits++;
            // Character.digit mirrors Integer.parseInt's handling of any
            // character Character.isDigit accepts.
            octet = octet * 10 + Character.digit(ch, 10);
        }
    }
    // The final octet must be non-empty and in range, with exactly 3 dots total.
    return dots == 3 && digits > 0 && octet <= 255;
}
/**
 * Returns the {@link Inet6Address} representation of a {@link CharSequence} IP address.
 * <p>
 * This method will treat all IPv4 type addresses as "IPv4 mapped" (see {@link #getByName(CharSequence, boolean)}).
 * Equivalent to {@code getByName(ip, true)}.
 * @param ip {@link CharSequence} IP address to be converted to a {@link Inet6Address}
 * @return {@link Inet6Address} representation of the {@code ip} or {@code null} if not a valid IP address.
 */
public static Inet6Address getByName(CharSequence ip) {
    return getByName(ip, true);
}
/**
 * Returns the {@link Inet6Address} representation of a {@link CharSequence} IP address.
 * <p>
 * The {@code ipv4Mapped} parameter specifies how IPv4 addresses should be treated.
 * "IPv4 mapped" format as
 * defined in <a href="http://tools.ietf.org/html/rfc4291#section-2.5.5">rfc 4291 section 2</a> is supported.
 * @param ip {@link CharSequence} IP address to be converted to a {@link Inet6Address}
 * @param ipv4Mapped
 * <ul>
 * <li>{@code true} To allow IPv4 mapped inputs to be translated into {@link Inet6Address}</li>
 * <li>{@code false} Don't turn IPv4 addressed to mapped addresses</li>
 * </ul>
 * @return {@link Inet6Address} representation of the {@code ip} or {@code null} if not a valid IP address.
 */
public static Inet6Address getByName(CharSequence ip, boolean ipv4Mapped) {
    final byte[] bytes = new byte[IPV6_BYTE_COUNT];
    final int ipLength = ip.length();
    int compressBegin = 0;     // byte index where a "::" compression starts (0 = none seen yet)
    int compressLength = 0;    // number of zero bytes the "::" stands for
    int currentIndex = 0;      // next write position in bytes[]
    int value = 0;             // hex digits of the current group, accumulated nibble-wise
    int begin = -1;            // index of the first character of the current group (-1 = none)
    int i = 0;
    int ipv6Seperators = 0;
    int ipv4Seperators = 0;
    int tmp = 0;
    boolean needsShift = false;
    for (; i < ipLength; ++i) {
        final char c = ip.charAt(i);
        switch (c) {
        case ':':
            ++ipv6Seperators;
            if (i - begin > IPV6_MAX_CHAR_BETWEEN_SEPARATOR ||
                    ipv4Seperators > 0 || ipv6Seperators > IPV6_MAX_SEPARATORS ||
                    currentIndex + 1 >= bytes.length) {
                return null;
            }
            // Left-pad short groups: shift by 4 bits per missing hex character.
            value <<= (IPV6_MAX_CHAR_BETWEEN_SEPARATOR - (i - begin)) << 2;
            if (compressLength > 0) {
                compressLength -= 2;
            }
            // The value integer holds at most 4 bytes from right (most significant) to left (least significant).
            // The following bit shifting is used to extract and re-order the individual bytes to achieve a
            // left (most significant) to right (least significant) ordering.
            bytes[currentIndex++] = (byte) (((value & 0xf) << 4) | ((value >> 4) & 0xf));
            bytes[currentIndex++] = (byte) ((((value >> 8) & 0xf) << 4) | ((value >> 12) & 0xf));
            tmp = i + 1;
            if (tmp < ipLength && ip.charAt(tmp) == ':') {
                ++tmp;
                // A second "::" (or ":::") is invalid.
                if (compressBegin != 0 || (tmp < ipLength && ip.charAt(tmp) == ':')) {
                    return null;
                }
                ++ipv6Seperators;
                // A leading "::" with a zero first group requires the result to be
                // shifted right after parsing (handled below).
                needsShift = ipv6Seperators == 2 && value == 0;
                compressBegin = currentIndex;
                compressLength = bytes.length - compressBegin - 2;
                ++i;
            }
            value = 0;
            begin = -1;
            break;
        case '.':
            ++ipv4Seperators;
            if (i - begin > IPV4_MAX_CHAR_BETWEEN_SEPARATOR
                    || ipv4Seperators > IPV4_SEPARATORS
                    || (ipv6Seperators > 0 && (currentIndex + compressLength < 12))
                    || i + 1 >= ipLength
                    || currentIndex >= bytes.length
                    || begin < 0
                    || (begin == 0 && (i == 3 && (!isValidNumericChar(ip.charAt(2)) ||
                                                  !isValidNumericChar(ip.charAt(1)) ||
                                                  !isValidNumericChar(ip.charAt(0))) ||
                                       i == 2 && (!isValidNumericChar(ip.charAt(1)) ||
                                                  !isValidNumericChar(ip.charAt(0))) ||
                                       i == 1 && !isValidNumericChar(ip.charAt(0))))) {
                return null;
            }
            value <<= (IPV4_MAX_CHAR_BETWEEN_SEPARATOR - (i - begin)) << 2;
            // The value integer holds at most 3 bytes from right (most significant) to left (least significant).
            // The following bit shifting is to restructure the bytes to be left (most significant) to
            // right (least significant) while also accounting for each IPv4 digit is base 10.
            // Note: "begin" is reused here as a scratch variable for the octet value.
            begin = (value & 0xf) * 100 + ((value >> 4) & 0xf) * 10 + ((value >> 8) & 0xf);
            if (begin < 0 || begin > 255) {
                return null;
            }
            bytes[currentIndex++] = (byte) begin;
            value = 0;
            begin = -1;
            break;
        default:
            if (!isValidHexChar(c) || (ipv4Seperators > 0 && !isValidNumericChar(c))) {
                return null;
            }
            if (begin < 0) {
                begin = i;
            } else if (i - begin > IPV6_MAX_CHAR_BETWEEN_SEPARATOR) {
                return null;
            }
            // The value is treated as a sort of array of numbers because we are dealing with
            // at most 4 consecutive bytes we can use bit shifting to accomplish this.
            // The most significant byte will be encountered first, and reside in the right most
            // position of the following integer
            value += getIntValue(c) << ((i - begin) << 2);
            break;
        }
    }
    final boolean isCompressed = compressBegin > 0;
    // Finish up last set of data that was accumulated in the loop (or before the loop)
    if (ipv4Seperators > 0) {
        if (begin > 0 && i - begin > IPV4_MAX_CHAR_BETWEEN_SEPARATOR ||
                ipv4Seperators != IPV4_SEPARATORS ||
                currentIndex >= bytes.length) {
            return null;
        }
        if (ipv6Seperators == 0) {
            // Plain IPv4: the first 12 bytes stay zero (or become the mapped prefix below).
            compressLength = 12;
        } else if (ipv6Seperators >= IPV6_MIN_SEPARATORS &&
                   ip.charAt(ipLength - 1) != ':' &&
                   (!isCompressed && (ipv6Seperators == 6 && ip.charAt(0) != ':') ||
                    isCompressed && (ipv6Seperators + 1 < IPV6_MAX_SEPARATORS &&
                                     (ip.charAt(0) != ':' || compressBegin <= 2)))) {
            compressLength -= 2;
        } else {
            return null;
        }
        value <<= (IPV4_MAX_CHAR_BETWEEN_SEPARATOR - (i - begin)) << 2;
        // The value integer holds at most 3 bytes from right (most significant) to left (least significant).
        // The following bit shifting is to restructure the bytes to be left (most significant) to
        // right (least significant) while also accounting for each IPv4 digit is base 10.
        begin = (value & 0xf) * 100 + ((value >> 4) & 0xf) * 10 + ((value >> 8) & 0xf);
        if (begin < 0 || begin > 255) {
            return null;
        }
        bytes[currentIndex++] = (byte) begin;
    } else {
        tmp = ipLength - 1;
        if (begin > 0 && i - begin > IPV6_MAX_CHAR_BETWEEN_SEPARATOR ||
                ipv6Seperators < IPV6_MIN_SEPARATORS ||
                !isCompressed && (ipv6Seperators + 1 != IPV6_MAX_SEPARATORS ||
                                  ip.charAt(0) == ':' || ip.charAt(tmp) == ':') ||
                isCompressed && (ipv6Seperators > IPV6_MAX_SEPARATORS ||
                                 (ipv6Seperators == IPV6_MAX_SEPARATORS &&
                                  (compressBegin <= 2 && ip.charAt(0) != ':' ||
                                   compressBegin >= 14 && ip.charAt(tmp) != ':'))) ||
                currentIndex + 1 >= bytes.length) {
            return null;
        }
        if (begin >= 0 && i - begin <= IPV6_MAX_CHAR_BETWEEN_SEPARATOR) {
            value <<= (IPV6_MAX_CHAR_BETWEEN_SEPARATOR - (i - begin)) << 2;
        }
        // The value integer holds at most 4 bytes from right (most significant) to left (least significant).
        // The following bit shifting is used to extract and re-order the individual bytes to achieve a
        // left (most significant) to right (least significant) ordering.
        bytes[currentIndex++] = (byte) (((value & 0xf) << 4) | ((value >> 4) & 0xf));
        bytes[currentIndex++] = (byte) ((((value >> 8) & 0xf) << 4) | ((value >> 12) & 0xf));
    }
    // Expand the "::" compression by moving the written bytes into place.
    i = currentIndex + compressLength;
    if (needsShift || i >= bytes.length) {
        // Right shift array
        if (i >= bytes.length) {
            ++compressBegin;
        }
        for (i = currentIndex; i < bytes.length; ++i) {
            for (begin = bytes.length - 1; begin >= compressBegin; --begin) {
                bytes[begin] = bytes[begin - 1];
            }
            bytes[begin] = 0;
            ++compressBegin;
        }
    } else {
        // Selectively move elements
        for (i = 0; i < compressLength; ++i) {
            begin = i + compressBegin;
            currentIndex = begin + compressLength;
            if (currentIndex < bytes.length) {
                bytes[currentIndex] = bytes[begin];
                bytes[begin] = 0;
            } else {
                break;
            }
        }
    }
    // For a plain IPv4 input, optionally produce the "IPv4 mapped" form ::ffff:a.b.c.d.
    if (ipv4Mapped && ipv4Seperators > 0 &&
            bytes[0] == 0 && bytes[1] == 0 && bytes[2] == 0 && bytes[3] == 0 && bytes[4] == 0 &&
            bytes[5] == 0 && bytes[6] == 0 && bytes[7] == 0 && bytes[8] == 0 && bytes[9] == 0) {
        bytes[10] = bytes[11] = (byte) 0xff;
    }
    try {
        // scope_id of -1 means "no scope".
        return Inet6Address.getByAddress(null, bytes, -1);
    } catch (UnknownHostException e) {
        throw new RuntimeException(e); // Should never happen
    }
}
/**
 * Returns the {@link String} representation of an {@link InetAddress}.
 * <ul>
 * <li>Inet4Address results are identical to {@link InetAddress#getHostAddress()}</li>
 * <li>Inet6Address results adhere to
 * <a href="http://tools.ietf.org/html/rfc5952#section-4">rfc 5952 section 4</a></li>
 * </ul>
 * <p>
 * The output does not include Scope ID. Equivalent to {@code toAddressString(ip, false)}.
 * @param ip {@link InetAddress} to be converted to an address string
 * @return {@code String} containing the text-formatted IP address
 */
public static String toAddressString(InetAddress ip) {
    return toAddressString(ip, false);
}
/**
 * Returns the {@link String} representation of an {@link InetAddress}.
 * <ul>
 * <li>Inet4Address results are identical to {@link InetAddress#getHostAddress()}</li>
 * <li>Inet6Address results adhere to
 * <a href="http://tools.ietf.org/html/rfc5952#section-4">rfc 5952 section 4</a> if
 * {@code ipv4Mapped} is false. If {@code ipv4Mapped} is true then "IPv4 mapped" format
 * from <a href="http://tools.ietf.org/html/rfc4291#section-2.5.5">rfc 4291 section 2</a> will be supported.
 * The compressed result will always obey the compression rules defined in
 * <a href="http://tools.ietf.org/html/rfc5952#section-4">rfc 5952 section 4</a></li>
 * </ul>
 * <p>
 * The output does not include Scope ID.
 * @param ip {@link InetAddress} to be converted to an address string
 * @param ipv4Mapped
 * <ul>
 * <li>{@code true} to stray from strict rfc 5952 and support the "IPv4 mapped" format
 * defined in <a href="http://tools.ietf.org/html/rfc4291#section-2.5.5">rfc 4291 section 2</a> while still
 * following the updated guidelines in
 * <a href="http://tools.ietf.org/html/rfc5952#section-4">rfc 5952 section 4</a></li>
 * <li>{@code false} to strictly follow rfc 5952</li>
 * </ul>
 * @return {@code String} containing the text-formatted IP address
 * @throws IllegalArgumentException if {@code ip} is neither Inet4Address nor Inet6Address
 */
public static String toAddressString(InetAddress ip, boolean ipv4Mapped) {
    if (ip instanceof Inet4Address) {
        return ip.getHostAddress();
    }
    if (!(ip instanceof Inet6Address)) {
        throw new IllegalArgumentException("Unhandled type: " + ip.getClass());
    }
    // Pack the 16 address bytes into 8 unsigned 16-bit words.
    final byte[] octets = ip.getAddress();
    final int[] words = new int[IPV6_WORD_COUNT];
    int idx;
    for (idx = 0; idx < words.length; ++idx) {
        words[idx] = ((octets[idx << 1] & 0xff) << 8) | (octets[(idx << 1) + 1] & 0xff);
    }
    // Locate the longest run of zero words; ties go to the first run found.
    int runStart = -1;
    int runLength = 0;
    int longestStart = -1;
    int longestLength = 0;
    for (idx = 0; idx < words.length; ++idx) {
        if (words[idx] == 0) {
            if (runStart < 0) {
                runStart = idx;
            }
        } else if (runStart >= 0) {
            runLength = idx - runStart;
            if (runLength > longestLength) {
                longestStart = runStart;
                longestLength = runLength;
            }
            runStart = -1;
        }
    }
    // A zero run that reaches the end of the array is closed here.
    if (runStart >= 0) {
        runLength = idx - runStart;
        if (runLength > longestLength) {
            longestStart = runStart;
            longestLength = runLength;
        }
    }
    // A single zero word must not be compressed (rfc 5952 section 4.2.2).
    if (longestLength == 1) {
        longestLength = 0;
        longestStart = -1;
    }
    // Render the words, compressing the chosen zero run to "::".
    final int longestEnd = longestStart + longestLength;
    final StringBuilder out = new StringBuilder(IPV6_MAX_CHAR_COUNT);
    if (longestEnd < 0) { // Nothing to compress: plain colon-joined hex words.
        out.append(Integer.toHexString(words[0]));
        for (idx = 1; idx < words.length; ++idx) {
            out.append(':');
            out.append(Integer.toHexString(words[idx]));
        }
    } else { // General case handling compression (word 0 unrolled to avoid i==0 checks).
        final boolean isIpv4Mapped;
        if (inRangeEndExclusive(0, longestStart, longestEnd)) {
            out.append("::");
            // ::ffff:0:0/96 prefix means the last two words are an IPv4 address.
            isIpv4Mapped = ipv4Mapped && (longestEnd == 5 && words[5] == 0xffff);
        } else {
            out.append(Integer.toHexString(words[0]));
            isIpv4Mapped = false;
        }
        for (idx = 1; idx < words.length; ++idx) {
            if (!inRangeEndExclusive(idx, longestStart, longestEnd)) {
                if (!inRangeEndExclusive(idx - 1, longestStart, longestEnd)) {
                    // Previous word was printed, so a separator is needed:
                    // '.' only between the dotted-quad halves of a mapped address.
                    if (!isIpv4Mapped || idx == 6) {
                        out.append(':');
                    } else {
                        out.append('.');
                    }
                }
                if (isIpv4Mapped && idx > 5) {
                    // Emit this word as two decimal octets of the IPv4 tail.
                    out.append(words[idx] >> 8);
                    out.append('.');
                    out.append(words[idx] & 0xff);
                } else {
                    out.append(Integer.toHexString(words[idx]));
                }
            } else if (!inRangeEndExclusive(idx - 1, longestStart, longestEnd)) {
                // First word inside the compressed run.
                out.append("::");
            }
        }
    }
    return out.toString();
}
/**
 * Tells whether {@code value} lies in the half-open interval
 * [{@code start}, {@code end}) — start inclusive, end exclusive.
 *
 * @param value the value to test
 * @param start the start of the range (inclusive)
 * @param end the end of the range (exclusive)
 * @return {@code true} iff {@code start <= value < end}
 */
private static boolean inRangeEndExclusive(int value, int start, int end) {
    return start <= value && value < end;
}
/**
 * A constructor to stop this class being constructed; all members are static.
 */
private NetUtil() {
    // Unused
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysds.runtime.compress.utils;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
 * Open-addressing hash map from {@link DblArray} keys to {@link IntArrayList} values.
 * Collisions are resolved by probing: the hash is incremented and re-mapped until an
 * empty slot or a matching key is found. Since {@code Integer.hashCode(x) == x}, the
 * probe sequence is effectively linear probing over {@code indexFor(hash + k)}.
 * <p>
 * Not thread-safe: {@code hashMissCount} is a plain static counter and all state is
 * mutated without synchronization.
 */
public class DblArrayIntListHashMap {
    protected static final Log LOG = LogFactory.getLog(DblArrayIntListHashMap.class.getName());
    protected static final int INIT_CAPACITY = 8;
    protected static final int RESIZE_FACTOR = 2;
    // Table is grown once it is half full.
    protected static final float LOAD_FACTOR = 0.5f;
    // Global diagnostic counter of probe collisions (shared across instances).
    public static int hashMissCount = 0;
    // Number of occupied slots; -1 only until a constructor runs.
    protected int _size = -1;
    // Backing table; length is always a power of two so indexFor can use masking.
    protected DArrayIListEntry[] _data = null;

    /** Creates a map with the default initial capacity ({@value #INIT_CAPACITY}). */
    public DblArrayIntListHashMap() {
        _data = new DArrayIListEntry[INIT_CAPACITY];
        _size = 0;
    }

    /** Creates a map whose capacity is {@code init_capacity} rounded up to a power of two. */
    public DblArrayIntListHashMap(int init_capacity) {
        _data = new DArrayIListEntry[Util.getPow2(init_capacity)];
        _size = 0;
    }

    /** Returns the number of key/value entries currently stored. */
    public int size() {
        return _size;
    }

    /**
     * Returns the value list associated with {@code key}, or {@code null} if absent.
     * Probing must follow exactly the same sequence as in appendValue/resize.
     */
    public IntArrayList get(DblArray key) {
        // probe for early abort
        if(_size == 0)
            return null;
        // compute entry index position
        int hash = key.hashCode();
        int ix = indexFor(hash, _data.length);
        // find entry: probe until an empty slot or the matching key
        // (Integer.hashCode(hash + 1) == hash + 1, i.e. linear probing)
        while(_data[ix] != null && !_data[ix].keyEquals(key)) {
            hash = Integer.hashCode(hash + 1); // hash of hash
            ix = indexFor(hash, _data.length);
            hashMissCount++;
        }
        DArrayIListEntry e = _data[ix];
        if(e != null)
            return e.value;
        return null;
    }

    /**
     * Inserts an entry assumed not to be present yet; used only by resize() for
     * rehashing, hence no duplicate check and no resize trigger here.
     */
    private void appendValue(DblArray key, IntArrayList value) {
        // compute entry index position
        int hash = key.hashCode();
        int ix = indexFor(hash, _data.length);
        // add new table entry (constant time)
        while(_data[ix] != null && !_data[ix].keyEquals(key)) {
            hash = Integer.hashCode(hash + 1); // hash of hash
            ix = indexFor(hash, _data.length);
            hashMissCount++;
        }
        _data[ix] = new DArrayIListEntry(key, value);
        _size++;
    }

    /**
     * Appends {@code value} to the list mapped to {@code key}, creating the entry
     * (with a defensive copy of the key) on first use. Triggers a resize once the
     * load factor is exceeded.
     */
    public void appendValue(DblArray key, int value) {
        int hash = key.hashCode();
        int ix = indexFor(hash, _data.length);
        while(_data[ix] != null && !_data[ix].keyEquals(key)) {
            hash = Integer.hashCode(hash + 1); // hash of hash
            ix = indexFor(hash, _data.length);
            hashMissCount++;
        }
        DArrayIListEntry e = _data[ix];
        if(e == null) {
            final IntArrayList lstPtr = new IntArrayList();
            lstPtr.appendValue(value);
            // Copy the key: the caller may reuse/mutate its DblArray instance.
            _data[ix] = new DArrayIListEntry(new DblArray(key), lstPtr);
            _size++;
        }
        else {
            final IntArrayList lstPtr = e.value;
            lstPtr.appendValue(value);
        }
        // resize if necessary
        if(_size >= LOAD_FACTOR * _data.length)
            resize();
    }

    /** Returns all non-null entries in table order (no defined key ordering). */
    public List<DArrayIListEntry> extractValues() {
        List<DArrayIListEntry> ret = new ArrayList<>();
        for(DArrayIListEntry e : _data)
            if(e != null)
                ret.add(e);
        // Collections.sort(ret);
        return ret;
    }

    /** Doubles the table size and rehashes all entries (no-op near Integer.MAX_VALUE). */
    private void resize() {
        // check for integer overflow on resize
        if(_data.length > Integer.MAX_VALUE / RESIZE_FACTOR)
            return;
        // resize data array and copy existing contents
        DArrayIListEntry[] olddata = _data;
        _data = new DArrayIListEntry[_data.length * RESIZE_FACTOR];
        _size = 0;
        // rehash all entries
        for(DArrayIListEntry e : olddata)
            if(e != null)
                appendValue(e.key, e.value);
    }

    /** Clears all entries while keeping the current table capacity. */
    public void reset() {
        Arrays.fill(_data, null);
        _size = 0;
    }

    /**
     * Clears the map and resizes the table to hold {@code size} entries:
     * grows when needed, shrinks only if the requested size is well below
    * the current capacity, otherwise just clears in place.
     */
    public void reset(int size) {
        int newSize = Util.getPow2(size);
        if(newSize > _data.length) {
            _data = new DArrayIListEntry[newSize];
        }
        else {
            Arrays.fill(_data, null);
            // only allocate a smaller table if the requested size is less
            // than half the current capacity
            if(size < _data.length / 2)
                _data = new DArrayIListEntry[newSize];
        }
        _size = 0;
    }

    /** Maps a hash to a slot index; valid because {@code length} is a power of two. */
    protected static int indexFor(int h, int length) {
        return h & (length - 1);
    }

    /** A single open-addressing slot: a key and its list of values. */
    public static class DArrayIListEntry {
        public DblArray key;
        public IntArrayList value;

        public DArrayIListEntry(DblArray ekey, IntArrayList evalue) {
            key = ekey;
            value = evalue;
        }

        @Override
        public String toString() {
            return key + ":" + value;
        }

        /** Key comparison used by the probe loops. */
        public boolean keyEquals(DblArray keyThat) {
            return key.equals(keyThat);
        }
    }

    /** Debug representation listing occupied slots by table index. */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append(this.getClass().getSimpleName() + this.hashCode());
        sb.append(" " + _size);
        for(int i = 0; i < _data.length; i++) {
            DArrayIListEntry ent = _data[i];
            if(ent != null) {
                sb.append("\n");
                sb.append("id:" + i);
                sb.append("[");
                sb.append(ent);
                sb.append("]");
            }
        }
        return sb.toString();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.api.common;
import java.io.IOException;
import java.io.PrintWriter;
import java.rmi.RemoteException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import org.apache.asterix.algebra.base.ILangExpressionToPlanTranslator;
import org.apache.asterix.algebra.base.ILangExpressionToPlanTranslatorFactory;
import org.apache.asterix.api.http.server.ResultUtil;
import org.apache.asterix.common.config.CompilerProperties;
import org.apache.asterix.common.config.OptimizationConfUtil;
import org.apache.asterix.common.exceptions.ACIDException;
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.common.exceptions.CompilationException;
import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.asterix.common.utils.Job;
import org.apache.asterix.common.utils.Job.SubmissionMode;
import org.apache.asterix.compiler.provider.ILangCompilationProvider;
import org.apache.asterix.compiler.provider.IRuleSetFactory;
import org.apache.asterix.dataflow.data.common.ConflictingTypeResolver;
import org.apache.asterix.dataflow.data.common.ExpressionTypeComputer;
import org.apache.asterix.dataflow.data.common.MergeAggregationExpressionFactory;
import org.apache.asterix.dataflow.data.common.MissableTypeComputer;
import org.apache.asterix.dataflow.data.common.PartialAggregationTypeComputer;
import org.apache.asterix.external.feed.watch.FeedActivityDetails;
import org.apache.asterix.formats.base.IDataFormat;
import org.apache.asterix.jobgen.QueryLogicalExpressionJobGen;
import org.apache.asterix.lang.aql.statement.SubscribeFeedStatement;
import org.apache.asterix.lang.common.base.IAstPrintVisitorFactory;
import org.apache.asterix.lang.common.base.IQueryRewriter;
import org.apache.asterix.lang.common.base.IReturningStatement;
import org.apache.asterix.lang.common.base.IRewriterFactory;
import org.apache.asterix.lang.common.base.Statement;
import org.apache.asterix.lang.common.rewrites.LangRewritingContext;
import org.apache.asterix.lang.common.statement.FunctionDecl;
import org.apache.asterix.lang.common.statement.Query;
import org.apache.asterix.lang.common.util.FunctionUtil;
import org.apache.asterix.metadata.declared.MetadataProvider;
import org.apache.asterix.optimizer.base.FuzzyUtils;
import org.apache.asterix.runtime.formats.FormatUtils;
import org.apache.asterix.runtime.job.listener.JobEventListenerFactory;
import org.apache.asterix.transaction.management.service.transaction.JobIdFactory;
import org.apache.asterix.translator.CompiledStatements.ICompiledDmlStatement;
import org.apache.asterix.translator.IStatementExecutor.Stats;
import org.apache.asterix.translator.SessionConfig;
import org.apache.asterix.translator.SessionOutput;
import org.apache.asterix.translator.util.FunctionCollection;
import org.apache.asterix.utils.ResourceUtils;
import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.common.utils.Pair;
import org.apache.hyracks.algebricks.compiler.api.HeuristicCompilerFactoryBuilder;
import org.apache.hyracks.algebricks.compiler.api.ICompiler;
import org.apache.hyracks.algebricks.compiler.api.ICompilerFactory;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalPlan;
import org.apache.hyracks.algebricks.core.algebra.base.IOptimizationContext;
import org.apache.hyracks.algebricks.core.algebra.expressions.ExpressionRuntimeProvider;
import org.apache.hyracks.algebricks.core.algebra.expressions.IConflictingTypeResolver;
import org.apache.hyracks.algebricks.core.algebra.expressions.IExpressionEvalSizeComputer;
import org.apache.hyracks.algebricks.core.algebra.expressions.IExpressionTypeComputer;
import org.apache.hyracks.algebricks.core.algebra.expressions.IMergeAggregationExpressionFactory;
import org.apache.hyracks.algebricks.core.algebra.expressions.IMissableTypeComputer;
import org.apache.hyracks.algebricks.core.algebra.prettyprint.AlgebricksAppendable;
import org.apache.hyracks.algebricks.core.algebra.prettyprint.LogicalOperatorPrettyPrintVisitor;
import org.apache.hyracks.algebricks.core.algebra.prettyprint.PlanPrettyPrinter;
import org.apache.hyracks.algebricks.core.rewriter.base.AlgebricksOptimizationContext;
import org.apache.hyracks.algebricks.core.rewriter.base.IOptimizationContextFactory;
import org.apache.hyracks.algebricks.core.rewriter.base.PhysicalOptimizationConfig;
import org.apache.hyracks.api.client.IClusterInfoCollector;
import org.apache.hyracks.api.client.IHyracksClientConnection;
import org.apache.hyracks.api.client.NodeControllerInfo;
import org.apache.hyracks.api.config.IOptionType;
import org.apache.hyracks.api.exceptions.HyracksException;
import org.apache.hyracks.api.job.JobId;
import org.apache.hyracks.api.job.JobSpecification;
import org.apache.hyracks.control.common.config.OptionTypes;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableSet;
/**
* Provides helper methods for compilation of a query into a JobSpec and submission
* to Hyracks through the Hyracks client interface.
*/
public class APIFramework {

    // Minimum per-operator memory budgets, expressed in frames. getFrameLimit()
    // rejects any user-supplied budget that falls below (minFrameLimit * frameSize).
    private static final int MIN_FRAME_LIMIT_FOR_SORT = 3;
    private static final int MIN_FRAME_LIMIT_FOR_GROUP_BY = 4;
    private static final int MIN_FRAME_LIMIT_FOR_JOIN = 5;

    // A white list of supported configurable parameters; any other key found in the
    // query-specific config is rejected by validateConfig().
    private static final Set<String> CONFIGURABLE_PARAMETER_NAMES =
            ImmutableSet.of(CompilerProperties.COMPILER_JOINMEMORY_KEY, CompilerProperties.COMPILER_GROUPMEMORY_KEY,
                    CompilerProperties.COMPILER_SORTMEMORY_KEY, CompilerProperties.COMPILER_PARALLELISM_KEY,
                    FunctionUtil.IMPORT_PRIVATE_FUNCTIONS, FuzzyUtils.SIM_FUNCTION_PROP_NAME,
                    FuzzyUtils.SIM_THRESHOLD_PROP_NAME, SubscribeFeedStatement.WAIT_FOR_COMPLETION,
                    FeedActivityDetails.FEED_POLICY_NAME, FeedActivityDetails.COLLECT_LOCATIONS, "inline_with",
                    "hash_merge", "output-record-type");

    // Language-specific factories obtained from the compilation provider passed to
    // the constructor (e.g. AQL vs. SQL++ front ends supply different factories).
    private final IRewriterFactory rewriterFactory;
    private final IAstPrintVisitorFactory astPrintVisitorFactory;
    private final ILangExpressionToPlanTranslatorFactory translatorFactory;
    private final IRuleSetFactory ruleSetFactory;

    /**
     * Creates an APIFramework wired to the given language compilation provider,
     * from which all language-specific factories (rewriter, AST printer,
     * expression-to-plan translator, rule sets) are drawn.
     *
     * @param compilationProvider supplies the language-specific factories
     */
    public APIFramework(ILangCompilationProvider compilationProvider) {
        this.rewriterFactory = compilationProvider.getRewriterFactory();
        this.astPrintVisitorFactory = compilationProvider.getAstPrintVisitorFactory();
        this.translatorFactory = compilationProvider.getExpressionToPlanTranslatorFactory();
        this.ruleSetFactory = compilationProvider.getRuleSetFactory();
    }

    static {
        // One-time, JVM-wide registration of the runtime function descriptors with
        // the default data format; runs when this class is first loaded.
        FormatUtils.getDefaultFormat().registerRuntimeFunctions(FunctionCollection.getFunctionDescriptorFactories());
    }

    // Singleton factory that produces the Algebricks optimization context used by
    // the heuristic compiler built in compileQuery().
    private static class OptimizationContextFactory implements IOptimizationContextFactory {

        public static final OptimizationContextFactory INSTANCE = new OptimizationContextFactory();

        private OptimizationContextFactory() {
        }

        @Override
        public IOptimizationContext createOptimizationContext(int varCounter,
                IExpressionEvalSizeComputer expressionEvalSizeComputer,
                IMergeAggregationExpressionFactory mergeAggregationExpressionFactory,
                IExpressionTypeComputer expressionTypeComputer, IMissableTypeComputer missableTypeComputer,
                IConflictingTypeResolver conflictingTypeResolver, PhysicalOptimizationConfig physicalOptimizationConfig,
                AlgebricksPartitionConstraint clusterLocations) {
            return new AlgebricksOptimizationContext(varCounter, expressionEvalSizeComputer,
                    mergeAggregationExpressionFactory, expressionTypeComputer, missableTypeComputer,
                    conflictingTypeResolver, physicalOptimizationConfig, clusterLocations);
        }
    }

    // Prints the header line that precedes a plan dump: an <h4> title plus an
    // opening <pre> when the session output format is HTML, otherwise a plain
    // "----------" divider.
    private void printPlanPrefix(SessionOutput output, String planName) {
        if (output.config().is(SessionConfig.FORMAT_HTML)) {
            output.out().println("<h4>" + planName + ":</h4>");
            output.out().println("<pre>");
        } else {
            output.out().println("----------" + planName + ":");
        }
    }

    // Closes the <pre> element opened by printPlanPrefix; a no-op for non-HTML output.
    private void printPlanPostfix(SessionOutput output) {
        if (output.config().is(SessionConfig.FORMAT_HTML)) {
            output.out().println("</pre>");
        }
    }

    /**
     * Rewrites a query/statement using the language-specific rewriter, optionally
     * dumping the pre-rewrite expression tree to the session output first.
     *
     * @param declaredFunctions functions declared in the current scope, made
     *            available to the rewriter
     * @param metadataProvider provides metadata access during rewriting
     * @param q the statement to rewrite; may be null
     * @param output session output used for the optional expression-tree dump
     * @return a pair of the rewritten statement and its updated variable counter,
     *         or null when {@code q} is null
     * @throws CompilationException if rewriting fails
     */
    public Pair<IReturningStatement, Integer> reWriteQuery(List<FunctionDecl> declaredFunctions,
            MetadataProvider metadataProvider, IReturningStatement q, SessionOutput output)
            throws CompilationException {
        if (q == null) {
            return null;
        }
        SessionConfig conf = output.config();
        if (!conf.is(SessionConfig.FORMAT_ONLY_PHYSICAL_OPS) && conf.is(SessionConfig.OOB_EXPR_TREE)) {
            output.out().println();
            printPlanPrefix(output, "Expression tree");
            q.accept(astPrintVisitorFactory.createLangVisitor(output.out()), 0);
            printPlanPostfix(output);
        }
        IQueryRewriter rw = rewriterFactory.createQueryRewriter();
        // The rewriter mutates q in place; the rewriting context is seeded with the
        // statement's current variable counter so new variables get fresh ids.
        rw.rewrite(declaredFunctions, q, metadataProvider, new LangRewritingContext(q.getVarCounter()));
        return new Pair<>(q, q.getVarCounter());
    }

    /**
     * Compiles a (rewritten) query or DML statement into a Hyracks
     * {@link JobSpecification}.
     * <p>
     * The pipeline is: translate to a logical plan, configure and run the
     * Algebricks heuristic optimizer (honoring user-overridden memory/parallelism
     * parameters), then generate the job. Along the way the rewritten expression
     * tree, logical plan, optimized plan, and Hyracks job may be dumped to the
     * session output depending on the session configuration.
     *
     * @param clusterInfoCollector used to discover node controllers and core counts
     * @param metadataProvider metadata access; also carries the query-specific config
     * @param rwQ the rewritten query; null for some DML statements
     * @param varCounter the variable counter carried over from rewriting
     * @param outputDatasetName target dataset for INSERT-style statements; may be null
     * @param output session output for plan/job dumps and EXPLAIN results
     * @param statement the compiled DML statement; null when the top-level
     *            statement is a plain query
     * @return the generated job specification, or null when the session is
     *         configured not to generate one (e.g. EXPLAIN, optimize-only)
     * @throws AlgebricksException on compilation/optimization failure
     * @throws RemoteException on metadata communication failure
     * @throws ACIDException on transaction-related failure
     */
    public JobSpecification compileQuery(IClusterInfoCollector clusterInfoCollector, MetadataProvider metadataProvider,
            Query rwQ, int varCounter, String outputDatasetName, SessionOutput output, ICompiledDmlStatement statement)
            throws AlgebricksException, RemoteException, ACIDException {
        SessionConfig conf = output.config();
        if (!conf.is(SessionConfig.FORMAT_ONLY_PHYSICAL_OPS) && conf.is(SessionConfig.OOB_REWRITTEN_EXPR_TREE)) {
            output.out().println();
            printPlanPrefix(output, "Rewritten expression tree");
            if (rwQ != null) {
                rwQ.accept(astPrintVisitorFactory.createLangVisitor(output.out()), 0);
            }
            printPlanPostfix(output);
        }
        // A fresh Asterix transaction job id is allocated per compilation and stashed
        // in the metadata provider for use by the generated job.
        org.apache.asterix.common.transactions.JobId asterixJobId = JobIdFactory.generateJobId();
        metadataProvider.setJobId(asterixJobId);
        ILangExpressionToPlanTranslator t =
                translatorFactory.createExpressionToPlanTranslator(metadataProvider, varCounter);
        ILogicalPlan plan;
        // statement = null when it's a query
        if (statement == null || statement.getKind() != Statement.Kind.LOAD) {
            plan = t.translate(rwQ, outputDatasetName, statement);
        } else {
            // LOAD statements have a dedicated translation path.
            plan = t.translateLoad(statement);
        }
        if (!conf.is(SessionConfig.FORMAT_ONLY_PHYSICAL_OPS) && conf.is(SessionConfig.OOB_LOGICAL_PLAN)) {
            output.out().println();
            printPlanPrefix(output, "Logical plan");
            if (rwQ != null || (statement != null && statement.getKind() == Statement.Kind.LOAD)) {
                LogicalOperatorPrettyPrintVisitor pvisitor = new LogicalOperatorPrettyPrintVisitor(output.out());
                PlanPrettyPrinter.printPlan(plan, pvisitor, 0);
            }
            printPlanPostfix(output);
        }
        CompilerProperties compilerProperties = metadataProvider.getApplicationContext().getCompilerProperties();
        int frameSize = compilerProperties.getFrameSize();
        Map<String, String> querySpecificConfig = metadataProvider.getConfig();
        validateConfig(querySpecificConfig); // Validates the user-overridden query parameters.
        // Resolve per-operator frame budgets: a user override (if present) wins over
        // the configured default; each is validated against its operator minimum.
        int sortFrameLimit = getFrameLimit(CompilerProperties.COMPILER_SORTMEMORY_KEY,
                querySpecificConfig.get(CompilerProperties.COMPILER_SORTMEMORY_KEY),
                compilerProperties.getSortMemorySize(), frameSize, MIN_FRAME_LIMIT_FOR_SORT);
        int groupFrameLimit = getFrameLimit(CompilerProperties.COMPILER_GROUPMEMORY_KEY,
                querySpecificConfig.get(CompilerProperties.COMPILER_GROUPMEMORY_KEY),
                compilerProperties.getGroupMemorySize(), frameSize, MIN_FRAME_LIMIT_FOR_GROUP_BY);
        int joinFrameLimit = getFrameLimit(CompilerProperties.COMPILER_JOINMEMORY_KEY,
                querySpecificConfig.get(CompilerProperties.COMPILER_JOINMEMORY_KEY),
                compilerProperties.getJoinMemorySize(), frameSize, MIN_FRAME_LIMIT_FOR_JOIN);
        // NOTE(review): these setters mutate a shared physical-optimization config
        // object rather than a per-query copy — concurrent compilations would race;
        // confirm OptimizationConfUtil's intended usage.
        OptimizationConfUtil.getPhysicalOptimizationConfig().setFrameSize(frameSize);
        OptimizationConfUtil.getPhysicalOptimizationConfig().setMaxFramesExternalSort(sortFrameLimit);
        OptimizationConfUtil.getPhysicalOptimizationConfig().setMaxFramesExternalGroupBy(groupFrameLimit);
        OptimizationConfUtil.getPhysicalOptimizationConfig().setMaxFramesForJoin(joinFrameLimit);
        HeuristicCompilerFactoryBuilder builder =
                new HeuristicCompilerFactoryBuilder(OptimizationContextFactory.INSTANCE);
        builder.setPhysicalOptimizationConfig(OptimizationConfUtil.getPhysicalOptimizationConfig());
        builder.setLogicalRewrites(ruleSetFactory.getLogicalRewrites(metadataProvider.getApplicationContext()));
        builder.setPhysicalRewrites(ruleSetFactory.getPhysicalRewrites(metadataProvider.getApplicationContext()));
        IDataFormat format = metadataProvider.getFormat();
        // NOTE(review): create() is invoked before the remaining setters; this only
        // works if the returned factory reads the builder's state lazily at
        // createCompiler()/createJob() time — confirm against
        // HeuristicCompilerFactoryBuilder before reordering.
        ICompilerFactory compilerFactory = builder.create();
        builder.setExpressionEvalSizeComputer(format.getExpressionEvalSizeComputer());
        builder.setIMergeAggregationExpressionFactory(new MergeAggregationExpressionFactory());
        builder.setPartialAggregationTypeComputer(new PartialAggregationTypeComputer());
        builder.setExpressionTypeComputer(ExpressionTypeComputer.INSTANCE);
        builder.setMissableTypeComputer(MissableTypeComputer.INSTANCE);
        builder.setConflictingTypeResolver(ConflictingTypeResolver.INSTANCE);
        int parallelism = getParallelism(querySpecificConfig.get(CompilerProperties.COMPILER_PARALLELISM_KEY),
                compilerProperties.getParallelism());
        AlgebricksAbsolutePartitionConstraint computationLocations =
                chooseLocations(clusterInfoCollector, parallelism, metadataProvider.getClusterLocations());
        builder.setClusterLocations(computationLocations);
        ICompiler compiler = compilerFactory.createCompiler(plan, metadataProvider, t.getVarCounter());
        if (conf.isOptimize()) {
            compiler.optimize();
            if (conf.is(SessionConfig.OOB_OPTIMIZED_LOGICAL_PLAN)) {
                if (conf.is(SessionConfig.FORMAT_ONLY_PHYSICAL_OPS)) {
                    // For Optimizer tests.
                    AlgebricksAppendable buffer = new AlgebricksAppendable(output.out());
                    PlanPrettyPrinter.printPhysicalOps(plan, buffer, 0);
                } else {
                    printPlanPrefix(output, "Optimized logical plan");
                    if (rwQ != null || (statement != null && statement.getKind() == Statement.Kind.LOAD)) {
                        LogicalOperatorPrettyPrintVisitor pvisitor =
                                new LogicalOperatorPrettyPrintVisitor(output.out());
                        PlanPrettyPrinter.printPlan(plan, pvisitor, 0);
                    }
                    printPlanPostfix(output);
                }
            }
        }
        // EXPLAIN: print the optimized plan as the query result and stop here.
        if (rwQ != null && rwQ.isExplain()) {
            try {
                LogicalOperatorPrettyPrintVisitor pvisitor = new LogicalOperatorPrettyPrintVisitor();
                PlanPrettyPrinter.printPlan(plan, pvisitor, 0);
                ResultUtil.printResults(metadataProvider.getApplicationContext(), pvisitor.get().toString(), output,
                        new Stats(), null);
                return null;
            } catch (IOException e) {
                throw new AlgebricksException(e);
            }
        }
        if (!conf.isGenerateJobSpec()) {
            return null;
        }
        // Wire the format-specific runtime providers needed for job generation.
        builder.setBinaryBooleanInspectorFactory(format.getBinaryBooleanInspectorFactory());
        builder.setBinaryIntegerInspectorFactory(format.getBinaryIntegerInspectorFactory());
        builder.setComparatorFactoryProvider(format.getBinaryComparatorFactoryProvider());
        builder.setExpressionRuntimeProvider(new ExpressionRuntimeProvider(QueryLogicalExpressionJobGen.INSTANCE));
        builder.setHashFunctionFactoryProvider(format.getBinaryHashFunctionFactoryProvider());
        builder.setHashFunctionFamilyProvider(format.getBinaryHashFunctionFamilyProvider());
        builder.setMissingWriterFactory(format.getMissingWriterFactory());
        builder.setPredicateEvaluatorFactoryProvider(format.getPredicateEvaluatorFactoryProvider());
        // Pick the result printer matching the session's requested output format.
        final SessionConfig.OutputFormat outputFormat = conf.fmt();
        switch (outputFormat) {
            case LOSSLESS_JSON:
                builder.setPrinterProvider(format.getLosslessJSONPrinterFactoryProvider());
                break;
            case CSV:
                builder.setPrinterProvider(format.getCSVPrinterFactoryProvider());
                break;
            case ADM:
                builder.setPrinterProvider(format.getADMPrinterFactoryProvider());
                break;
            case CLEAN_JSON:
                builder.setPrinterProvider(format.getCleanJSONPrinterFactoryProvider());
                break;
            default:
                throw new AlgebricksException("Unexpected OutputFormat: " + outputFormat);
        }
        builder.setSerializerDeserializerProvider(format.getSerdeProvider());
        builder.setTypeTraitProvider(format.getTypeTraitProvider());
        builder.setNormalizedKeyComputerFactoryProvider(format.getNormalizedKeyComputerFactoryProvider());
        JobEventListenerFactory jobEventListenerFactory =
                new JobEventListenerFactory(asterixJobId, metadataProvider.isWriteTransaction());
        JobSpecification spec = compiler.createJob(metadataProvider.getApplicationContext(), jobEventListenerFactory);
        // When the top-level statement is a query, the statement parameter is null.
        if (statement == null) {
            // Sets a required capacity, only for read-only queries.
            // DDLs and DMLs are considered not that frequent.
            spec.setRequiredClusterCapacity(ResourceUtils.getRequiredCompacity(plan, computationLocations,
                    sortFrameLimit, groupFrameLimit, joinFrameLimit, frameSize));
        }
        if (conf.is(SessionConfig.OOB_HYRACKS_JOB)) {
            printPlanPrefix(output, "Hyracks job");
            if (rwQ != null) {
                try {
                    output.out().println(
                            new ObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(spec.toJSON()));
                } catch (IOException e) {
                    throw new AlgebricksException(e);
                }
                output.out().println(spec.getUserConstraints());
            }
            printPlanPostfix(output);
        }
        return spec;
    }

    /**
     * Submits each job specification to Hyracks in order, waiting for each to
     * complete, and prints its wall-clock duration (in seconds) to {@code out}.
     *
     * @throws Exception if job start or completion-wait fails
     */
    public void executeJobArray(IHyracksClientConnection hcc, JobSpecification[] specs, PrintWriter out)
            throws Exception {
        for (JobSpecification spec : specs) {
            spec.setMaxReattempts(0);
            JobId jobId = hcc.startJob(spec);
            long startTime = System.currentTimeMillis();
            hcc.waitForCompletion(jobId);
            long endTime = System.currentTimeMillis();
            double duration = (endTime - startTime) / 1000.00;
            out.println("<pre>Duration: " + duration + " sec</pre>");
        }
    }

    /**
     * Submits each {@link Job} to Hyracks in order. Asynchronous jobs are started
     * and not awaited; synchronous jobs are awaited and their duration printed.
     * Failures of individual jobs are logged (stack trace) and do not stop the
     * remaining jobs from running.
     */
    public void executeJobArray(IHyracksClientConnection hcc, Job[] jobs, PrintWriter out) throws Exception {
        for (Job job : jobs) {
            job.getJobSpec().setMaxReattempts(0);
            long startTime = System.currentTimeMillis();
            try {
                JobId jobId = hcc.startJob(job.getJobSpec());
                if (job.getSubmissionMode() == SubmissionMode.ASYNCHRONOUS) {
                    // Fire-and-forget: do not wait or report a duration.
                    continue;
                }
                hcc.waitForCompletion(jobId);
            } catch (Exception e) {
                // NOTE(review): failures are deliberately swallowed so remaining jobs
                // still run, but printStackTrace() bypasses the logging framework —
                // consider routing through a logger.
                e.printStackTrace();
                continue;
            }
            long endTime = System.currentTimeMillis();
            double duration = (endTime - startTime) / 1000.00;
            out.println("<pre>Duration: " + duration + " sec</pre>");
        }
    }

    // Chooses the location constraints, i.e., whether to use storage parallelism or use a user-specified number
    // of cores.
    private static AlgebricksAbsolutePartitionConstraint chooseLocations(IClusterInfoCollector clusterInfoCollector,
            int parallelismHint, AlgebricksAbsolutePartitionConstraint storageLocations) throws AlgebricksException {
        try {
            Map<String, NodeControllerInfo> ncMap = clusterInfoCollector.getNodeControllerInfos();
            // Gets total number of cores in the cluster.
            int totalNumCores = getTotalNumCores(ncMap);
            // If storage parallelism is not larger than the total number of cores, we use the storage parallelism.
            // Otherwise, we will use all available cores.
            if (parallelismHint == CompilerProperties.COMPILER_PARALLELISM_AS_STORAGE
                    && storageLocations.getLocations().length <= totalNumCores) {
                return storageLocations;
            }
            return getComputationLocations(ncMap, parallelismHint);
        } catch (HyracksException e) {
            throw new AlgebricksException(e);
        }
    }

    // Computes the location constraints based on user-configured parallelism parameter.
    // Note that the parallelism parameter is only a hint -- it will not be respected if it is too small or too large.
    private static AlgebricksAbsolutePartitionConstraint getComputationLocations(Map<String, NodeControllerInfo> ncMap,
            int parallelismHint) {
        // Unifies the handling of non-positive parallelism: a negative sentinel makes
        // the per-node parallelism negative, which the coresToUse computation below
        // maps to "use all available cores"; -2 * size also keeps the modulo zero.
        int parallelism = parallelismHint <= 0 ? -2 * ncMap.size() : parallelismHint;
        // Calculates per node parallelism, with load balance, i.e., randomly selecting nodes with larger
        // parallelism.
        int numNodes = ncMap.size();
        int numNodesWithOneMorePartition = parallelism % numNodes;
        int perNodeParallelismMin = parallelism / numNodes;
        int perNodeParallelismMax = parallelism / numNodes + 1;
        List<String> allNodes = new ArrayList<>();
        Set<String> selectedNodesWithOneMorePartition = new HashSet<>();
        for (Map.Entry<String, NodeControllerInfo> entry : ncMap.entrySet()) {
            allNodes.add(entry.getKey());
        }
        // Partial Fisher-Yates selection: randomly pick the nodes that receive one
        // extra partition, swapping each pick out of the candidate prefix.
        Random random = new Random();
        for (int index = numNodesWithOneMorePartition; index >= 1; --index) {
            int pick = random.nextInt(index);
            selectedNodesWithOneMorePartition.add(allNodes.get(pick));
            Collections.swap(allNodes, pick, index - 1);
        }
        // Generates cluster locations, with duplicates for a node that hosts more than one partition.
        List<String> locations = new ArrayList<>();
        for (Map.Entry<String, NodeControllerInfo> entry : ncMap.entrySet()) {
            String nodeId = entry.getKey();
            int availableCores = entry.getValue().getNumAvailableCores();
            int nodeParallelism =
                    selectedNodesWithOneMorePartition.contains(nodeId) ? perNodeParallelismMax : perNodeParallelismMin;
            // Negative nodeParallelism (the non-positive-hint sentinel) falls through
            // to availableCores, i.e., every core on the node is used.
            int coresToUse =
                    nodeParallelism >= 0 && nodeParallelism < availableCores ? nodeParallelism : availableCores;
            for (int count = 0; count < coresToUse; ++count) {
                locations.add(nodeId);
            }
        }
        return new AlgebricksAbsolutePartitionConstraint(locations.toArray(new String[0]));
    }

    // Gets the total number of available cores in the cluster.
    private static int getTotalNumCores(Map<String, NodeControllerInfo> ncMap) {
        int sum = 0;
        for (Map.Entry<String, NodeControllerInfo> entry : ncMap.entrySet()) {
            sum += entry.getValue().getNumAvailableCores();
        }
        return sum;
    }

    // Gets the frame limit for an operator: the user-supplied memory budget string
    // (byte-unit syntax, e.g. "32MB") if present, otherwise the configured default,
    // divided by the frame size. Rejects budgets below minFrameLimit frames.
    private static int getFrameLimit(String parameterName, String parameter, long memBudgetInConfiguration,
            int frameSize, int minFrameLimit) throws AlgebricksException {
        IOptionType<Long> longBytePropertyInterpreter = OptionTypes.LONG_BYTE_UNIT;
        long memBudget = parameter == null ? memBudgetInConfiguration : longBytePropertyInterpreter.parse(parameter);
        int frameLimit = (int) (memBudget / frameSize);
        if (frameLimit < minFrameLimit) {
            throw AsterixException.create(ErrorCode.COMPILATION_BAD_QUERY_PARAMETER_VALUE, parameterName,
                    frameSize * minFrameLimit);
        }
        // Sets the frame limit to the minimum frame limit if the calculated frame limit is too small.
        // (Defensive: after the check above, frameLimit >= minFrameLimit always holds here.)
        return Math.max(frameLimit, minFrameLimit);
    }

    // Gets the parallelism parameter: the user-supplied integer string if present,
    // otherwise the configured default.
    private static int getParallelism(String parameter, int parallelismInConfiguration) {
        IOptionType<Integer> integerIPropertyInterpreter = OptionTypes.INTEGER;
        return parameter == null ? parallelismInConfiguration : integerIPropertyInterpreter.parse(parameter);
    }

    // Validates if the query contains unsupported query parameters; every key must
    // appear in CONFIGURABLE_PARAMETER_NAMES.
    private static void validateConfig(Map<String, String> config) throws AlgebricksException {
        for (String parameterName : config.keySet()) {
            if (!CONFIGURABLE_PARAMETER_NAMES.contains(parameterName)) {
                throw AsterixException.create(ErrorCode.COMPILATION_UNSUPPORTED_QUERY_PARAMETER, parameterName);
            }
        }
    }
}
| |
package org.wordpress.android.analytics;
import android.content.Context;
import android.content.SharedPreferences;
import android.preference.PreferenceManager;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public final class AnalyticsTracker {
private static boolean mHasUserOptedOut;
public static final String READER_DETAIL_TYPE_KEY = "post_detail_type";
public static final String READER_DETAIL_TYPE_NORMAL = "normal";
public static final String READER_DETAIL_TYPE_BLOG_PREVIEW = "preview-blog";
public static final String READER_DETAIL_TYPE_TAG_PREVIEW = "preview-tag";
public static final String ACTIVITY_LOG_ACTIVITY_ID_KEY = "activity_id";
public static final String NOTIFICATIONS_SELECTED_FILTER = "selected_filter";
public enum Stat {
// This stat is part of a funnel that provides critical information. Before
// making ANY modification to this stat please refer to: p4qSXL-35X-p2
APPLICATION_OPENED,
APPLICATION_CLOSED,
APPLICATION_INSTALLED,
APPLICATION_UPGRADED,
READER_ACCESSED,
READER_ARTICLE_COMMENTED_ON,
READER_ARTICLE_COMMENTS_OPENED,
READER_ARTICLE_COMMENT_LIKED,
READER_ARTICLE_COMMENT_UNLIKED,
READER_ARTICLE_LIKED,
READER_ARTICLE_DETAIL_LIKED,
READER_ARTICLE_REBLOGGED,
READER_ARTICLE_DETAIL_REBLOGGED,
READER_ARTICLE_OPENED,
READER_ARTICLE_UNLIKED,
READER_ARTICLE_DETAIL_UNLIKED,
READER_ARTICLE_RENDERED,
READER_ARTICLE_VISITED,
READER_BLOG_BLOCKED,
READER_BLOG_FOLLOWED,
READER_BLOG_PREVIEWED,
READER_BLOG_UNFOLLOWED,
READER_DISCOVER_VIEWED,
READER_INFINITE_SCROLL,
READER_LIST_FOLLOWED,
READER_LIST_LOADED,
READER_LIST_PREVIEWED,
READER_LIST_UNFOLLOWED,
READER_TAG_FOLLOWED,
READER_TAG_LOADED,
READER_TAG_PREVIEWED,
READER_TAG_UNFOLLOWED,
READER_SEARCH_LOADED,
READER_SEARCH_PERFORMED,
READER_SEARCH_RESULT_TAPPED,
READER_GLOBAL_RELATED_POST_CLICKED,
READER_LOCAL_RELATED_POST_CLICKED,
READER_VIEWPOST_INTERCEPTED,
READER_BLOG_POST_INTERCEPTED,
READER_FEED_POST_INTERCEPTED,
READER_WPCOM_BLOG_POST_INTERCEPTED,
READER_SIGN_IN_INITIATED,
READER_WPCOM_SIGN_IN_NEEDED,
READER_USER_UNAUTHORIZED,
READER_POST_SAVED_FROM_OTHER_POST_LIST,
READER_POST_SAVED_FROM_SAVED_POST_LIST,
READER_POST_SAVED_FROM_DETAILS,
READER_POST_UNSAVED_FROM_OTHER_POST_LIST,
READER_POST_UNSAVED_FROM_SAVED_POST_LIST,
READER_POST_UNSAVED_FROM_DETAILS,
READER_SAVED_POST_OPENED_FROM_SAVED_POST_LIST,
READER_SAVED_POST_OPENED_FROM_OTHER_POST_LIST,
READER_SITE_SHARED,
STATS_ACCESSED,
STATS_ACCESS_ERROR,
STATS_INSIGHTS_ACCESSED,
STATS_INSIGHTS_MANAGEMENT_HINT_DISMISSED,
STATS_INSIGHTS_MANAGEMENT_HINT_CLICKED,
STATS_INSIGHTS_MANAGEMENT_ACCESSED,
STATS_INSIGHTS_TYPE_MOVED_UP,
STATS_INSIGHTS_TYPE_MOVED_DOWN,
STATS_INSIGHTS_TYPE_REMOVED,
STATS_INSIGHTS_MANAGEMENT_SAVED,
STATS_INSIGHTS_MANAGEMENT_TYPE_ADDED,
STATS_INSIGHTS_MANAGEMENT_TYPE_REMOVED,
STATS_INSIGHTS_MANAGEMENT_TYPE_REORDERED,
STATS_PERIOD_DAYS_ACCESSED,
STATS_PERIOD_WEEKS_ACCESSED,
STATS_PERIOD_MONTHS_ACCESSED,
STATS_PERIOD_YEARS_ACCESSED,
STATS_VIEW_ALL_ACCESSED,
STATS_PREVIOUS_DATE_TAPPED,
STATS_NEXT_DATE_TAPPED,
STATS_FOLLOWERS_VIEW_MORE_TAPPED,
STATS_TAGS_AND_CATEGORIES_VIEW_MORE_TAPPED,
STATS_PUBLICIZE_VIEW_MORE_TAPPED,
STATS_POSTS_AND_PAGES_VIEW_MORE_TAPPED,
STATS_POSTS_AND_PAGES_ITEM_TAPPED,
STATS_REFERRERS_VIEW_MORE_TAPPED,
STATS_REFERRERS_ITEM_TAPPED,
STATS_CLICKS_VIEW_MORE_TAPPED,
STATS_COUNTRIES_VIEW_MORE_TAPPED,
STATS_OVERVIEW_BAR_CHART_TAPPED,
STATS_VIDEO_PLAYS_VIEW_MORE_TAPPED,
STATS_SEARCH_TERMS_VIEW_MORE_TAPPED,
STATS_AUTHORS_VIEW_MORE_TAPPED,
STATS_FILE_DOWNLOADS_VIEW_MORE_TAPPED,
STATS_SINGLE_POST_ACCESSED,
STATS_TAPPED_BAR_CHART,
STATS_OVERVIEW_TYPE_TAPPED,
STATS_SCROLLED_TO_BOTTOM,
STATS_WIDGET_ADDED,
STATS_WIDGET_REMOVED,
STATS_WIDGET_TAPPED,
STATS_LATEST_POST_SUMMARY_ADD_NEW_POST_TAPPED,
STATS_LATEST_POST_SUMMARY_SHARE_POST_TAPPED,
STATS_LATEST_POST_SUMMARY_VIEW_POST_DETAILS_TAPPED,
STATS_LATEST_POST_SUMMARY_POST_ITEM_TAPPED,
STATS_TAGS_AND_CATEGORIES_VIEW_TAG_TAPPED,
STATS_AUTHORS_VIEW_POST_TAPPED,
STATS_CLICKS_ITEM_TAPPED,
STATS_VIDEO_PLAYS_VIDEO_TAPPED,
STATS_DETAIL_POST_TAPPED,
EDITOR_CREATED_POST,
EDITOR_ADDED_PHOTO_VIA_DEVICE_LIBRARY,
EDITOR_ADDED_VIDEO_VIA_DEVICE_LIBRARY,
EDITOR_ADDED_PHOTO_VIA_MEDIA_EDITOR,
EDITOR_ADDED_PHOTO_NEW,
EDITOR_ADDED_VIDEO_NEW,
EDITOR_ADDED_PHOTO_VIA_WP_MEDIA_LIBRARY,
EDITOR_ADDED_VIDEO_VIA_WP_MEDIA_LIBRARY,
EDITOR_ADDED_PHOTO_VIA_STOCK_MEDIA_LIBRARY,
MEDIA_PHOTO_OPTIMIZED,
MEDIA_PHOTO_OPTIMIZE_ERROR,
MEDIA_VIDEO_OPTIMIZED,
MEDIA_VIDEO_CANT_OPTIMIZE,
MEDIA_VIDEO_OPTIMIZE_ERROR,
MEDIA_PICKER_OPEN_CAPTURE_MEDIA,
MEDIA_PICKER_OPEN_DEVICE_LIBRARY,
MEDIA_PICKER_OPEN_WP_MEDIA,
MEDIA_PICKER_OPEN_WP_STORIES_CAPTURE,
MEDIA_PICKER_OPEN_FOR_STORIES,
MEDIA_PICKER_RECENT_MEDIA_SELECTED,
MEDIA_PICKER_PREVIEW_OPENED,
EDITOR_UPDATED_POST,
EDITOR_SCHEDULED_POST,
EDITOR_OPENED,
POST_LIST_ACCESS_ERROR,
POST_LIST_BUTTON_PRESSED,
POST_LIST_ITEM_SELECTED,
POST_LIST_AUTHOR_FILTER_CHANGED,
POST_LIST_TAB_CHANGED,
POST_LIST_VIEW_LAYOUT_TOGGLED,
POST_LIST_SEARCH_ACCESSED,
EDITOR_CLOSED,
EDITOR_SESSION_START,
EDITOR_SESSION_SWITCH_EDITOR,
EDITOR_SESSION_TEMPLATE_APPLY,
EDITOR_SESSION_TEMPLATE_PREVIEW,
EDITOR_SESSION_END,
EDITOR_PUBLISHED_POST,
EDITOR_POST_PUBLISH_TAPPED,
EDITOR_POST_SCHEDULE_CHANGED,
EDITOR_POST_VISIBILITY_CHANGED,
EDITOR_POST_TAGS_CHANGED,
EDITOR_POST_PUBLISH_NOW_TAPPED,
EDITOR_POST_PASSWORD_CHANGED,
EDITOR_POST_CATEGORIES_ADDED,
EDITOR_POST_FORMAT_CHANGED,
EDITOR_POST_SLUG_CHANGED,
EDITOR_POST_EXCERPT_CHANGED,
EDITOR_POST_LOCATION_CHANGED,
EDITOR_SAVED_DRAFT,
EDITOR_EDITED_IMAGE, // Visual editor only
EDITOR_UPLOAD_MEDIA_FAILED, // Visual editor only
EDITOR_UPLOAD_MEDIA_RETRIED, // Visual editor only
EDITOR_TAPPED_BLOCKQUOTE,
EDITOR_TAPPED_BOLD,
EDITOR_TAPPED_ELLIPSIS_COLLAPSE,
EDITOR_TAPPED_ELLIPSIS_EXPAND,
EDITOR_TAPPED_HEADING,
EDITOR_TAPPED_HEADING_1,
EDITOR_TAPPED_HEADING_2,
EDITOR_TAPPED_HEADING_3,
EDITOR_TAPPED_HEADING_4,
EDITOR_TAPPED_HEADING_5,
EDITOR_TAPPED_HEADING_6,
EDITOR_TAPPED_HTML, // Visual editor only
EDITOR_TAPPED_HORIZONTAL_RULE,
EDITOR_TAPPED_IMAGE,
EDITOR_TAPPED_ITALIC,
EDITOR_TAPPED_LINK_ADDED,
EDITOR_TAPPED_LIST,
EDITOR_TAPPED_LIST_ORDERED, // Visual editor only
EDITOR_TAPPED_LIST_UNORDERED, // Visual editor only
EDITOR_TAPPED_NEXT_PAGE,
EDITOR_TAPPED_PARAGRAPH,
EDITOR_TAPPED_PREFORMAT,
EDITOR_TAPPED_READ_MORE,
EDITOR_TAPPED_STRIKETHROUGH,
EDITOR_TAPPED_UNDERLINE,
EDITOR_TAPPED_ALIGN_LEFT,
EDITOR_TAPPED_ALIGN_CENTER,
EDITOR_TAPPED_ALIGN_RIGHT,
EDITOR_TAPPED_REDO,
EDITOR_TAPPED_UNDO,
EDITOR_AZTEC_TOGGLED_OFF, // Aztec editor only
EDITOR_AZTEC_TOGGLED_ON, // Aztec editor only
EDITOR_AZTEC_ENABLED, // Aztec editor only
EDITOR_GUTENBERG_ENABLED, // Gutenberg editor only
EDITOR_GUTENBERG_DISABLED, // Gutenberg editor only
REVISIONS_LIST_VIEWED,
REVISIONS_DETAIL_VIEWED_FROM_LIST,
REVISIONS_DETAIL_VIEWED_FROM_SWIPE,
REVISIONS_DETAIL_VIEWED_FROM_CHEVRON,
REVISIONS_DETAIL_CANCELLED,
REVISIONS_REVISION_LOADED,
REVISIONS_LOAD_UNDONE,
FOLLOWED_BLOG_NOTIFICATIONS_READER_ENABLED,
FOLLOWED_BLOG_NOTIFICATIONS_READER_MENU_OFF,
FOLLOWED_BLOG_NOTIFICATIONS_READER_MENU_ON,
FOLLOWED_BLOG_NOTIFICATIONS_SETTINGS_OFF,
FOLLOWED_BLOG_NOTIFICATIONS_SETTINGS_ON,
FOLLOWED_BLOG_NOTIFICATIONS_SETTINGS_EMAIL_OFF,
FOLLOWED_BLOG_NOTIFICATIONS_SETTINGS_EMAIL_ON,
FOLLOWED_BLOG_NOTIFICATIONS_SETTINGS_EMAIL_INSTANTLY,
FOLLOWED_BLOG_NOTIFICATIONS_SETTINGS_EMAIL_DAILY,
FOLLOWED_BLOG_NOTIFICATIONS_SETTINGS_EMAIL_WEEKLY,
FOLLOWED_BLOG_NOTIFICATIONS_SETTINGS_COMMENTS_OFF,
FOLLOWED_BLOG_NOTIFICATIONS_SETTINGS_COMMENTS_ON,
ME_ACCESSED,
ME_GRAVATAR_TAPPED,
ME_GRAVATAR_SHOT_NEW,
ME_GRAVATAR_GALLERY_PICKED,
ME_GRAVATAR_CROPPED,
ME_GRAVATAR_UPLOADED,
ME_GRAVATAR_UPLOAD_UNSUCCESSFUL,
ME_GRAVATAR_UPLOAD_EXCEPTION,
MY_SITE_ACCESSED,
MY_SITE_ICON_TAPPED,
MY_SITE_ICON_REMOVED,
MY_SITE_ICON_SHOT_NEW,
MY_SITE_ICON_GALLERY_PICKED,
MY_SITE_ICON_CROPPED,
MY_SITE_ICON_UPLOADED,
MY_SITE_ICON_UPLOAD_UNSUCCESSFUL,
NOTIFICATIONS_DISABLED,
NOTIFICATIONS_ENABLED,
NOTIFICATIONS_ACCESSED,
NOTIFICATIONS_OPENED_NOTIFICATION_DETAILS,
NOTIFICATIONS_MISSING_SYNC_WARNING,
NOTIFICATION_REPLIED_TO,
NOTIFICATION_QUICK_ACTIONS_REPLIED_TO,
NOTIFICATION_APPROVED,
NOTIFICATION_QUICK_ACTIONS_APPROVED,
NOTIFICATION_UNAPPROVED,
NOTIFICATION_LIKED,
NOTIFICATION_QUICK_ACTIONS_LIKED,
NOTIFICATION_QUICK_ACTIONS_QUICKACTION_TOUCHED,
NOTIFICATION_UNLIKED,
NOTIFICATION_TRASHED,
NOTIFICATION_FLAGGED_AS_SPAM,
NOTIFICATION_SWIPE_PAGE_CHANGED,
NOTIFICATION_PENDING_DRAFTS_TAPPED,
NOTIFICATION_PENDING_DRAFTS_IGNORED,
NOTIFICATION_PENDING_DRAFTS_DISMISSED,
NOTIFICATION_PENDING_DRAFTS_SETTINGS_ENABLED,
NOTIFICATION_PENDING_DRAFTS_SETTINGS_DISABLED,
NOTIFICATION_UPLOAD_MEDIA_SUCCESS_WRITE_POST,
NOTIFICATION_UPLOAD_POST_ERROR_RETRY,
NOTIFICATION_UPLOAD_MEDIA_ERROR_RETRY,
NOTIFICATION_RECEIVED_PROCESSING_START,
NOTIFICATION_RECEIVED_PROCESSING_END,
NOTIFICATION_SHOWN,
NOTIFICATION_TAPPED,
NOTIFICATION_DISMISSED,
OPENED_POSTS,
OPENED_PAGES,
OPENED_PAGE_PARENT,
OPENED_COMMENTS,
OPENED_VIEW_SITE,
OPENED_VIEW_SITE_FROM_HEADER,
OPENED_VIEW_ADMIN,
OPENED_MEDIA_LIBRARY,
OPENED_BLOG_SETTINGS,
OPENED_ACCOUNT_SETTINGS,
ACCOUNT_SETTINGS_CHANGE_USERNAME_SUCCEEDED,
ACCOUNT_SETTINGS_CHANGE_USERNAME_FAILED,
ACCOUNT_SETTINGS_CHANGE_USERNAME_SUGGESTIONS_FAILED,
OPENED_APP_SETTINGS,
OPENED_MY_PROFILE,
OPENED_PEOPLE_MANAGEMENT,
OPENED_PERSON,
OPENED_PLUGIN_DIRECTORY,
OPENED_PLANS,
OPENED_PLANS_COMPARISON,
OPENED_SHARING_MANAGEMENT,
OPENED_SHARING_BUTTON_MANAGEMENT,
ACTIVITY_LOG_LIST_OPENED,
ACTIVITY_LOG_DETAIL_OPENED,
ACTIVITY_LOG_REWIND_STARTED,
OPENED_PLUGIN_LIST,
OPENED_PLUGIN_DETAIL,
CREATE_ACCOUNT_INITIATED,
CREATE_ACCOUNT_EMAIL_EXISTS,
CREATE_ACCOUNT_USERNAME_EXISTS,
CREATE_ACCOUNT_FAILED,
// This stat is part of a funnel that provides critical information. Before
// making ANY modification to this stat please refer to: p4qSXL-35X-p2
CREATED_ACCOUNT,
ACCOUNT_LOGOUT,
SHARED_ITEM,
SHARED_ITEM_READER,
ADDED_SELF_HOSTED_SITE,
SIGNED_IN,
SIGNED_INTO_JETPACK,
INSTALL_JETPACK_SELECTED,
INSTALL_JETPACK_CANCELLED,
INSTALL_JETPACK_COMPLETED,
INSTALL_JETPACK_REMOTE_START,
INSTALL_JETPACK_REMOTE_COMPLETED,
INSTALL_JETPACK_REMOTE_FAILED,
INSTALL_JETPACK_REMOTE_CONNECT,
INSTALL_JETPACK_REMOTE_LOGIN,
INSTALL_JETPACK_REMOTE_RESTART,
INSTALL_JETPACK_REMOTE_START_MANUAL_FLOW,
INSTALL_JETPACK_REMOTE_ALREADY_INSTALLED,
CONNECT_JETPACK_SELECTED,
CONNECT_JETPACK_FAILED,
PUSH_NOTIFICATION_RECEIVED,
PUSH_NOTIFICATION_TAPPED, // Same of opened
UNIFIED_LOGIN_STEP,
UNIFIED_LOGIN_INTERACTION,
UNIFIED_LOGIN_FAILURE,
LOGIN_ACCESSED,
LOGIN_MAGIC_LINK_EXITED,
LOGIN_MAGIC_LINK_FAILED,
LOGIN_MAGIC_LINK_OPENED,
LOGIN_MAGIC_LINK_REQUESTED,
LOGIN_MAGIC_LINK_SUCCEEDED,
LOGIN_FAILED,
LOGIN_FAILED_TO_GUESS_XMLRPC,
LOGIN_INSERTED_INVALID_URL,
LOGIN_AUTOFILL_CREDENTIALS_FILLED,
LOGIN_AUTOFILL_CREDENTIALS_UPDATED,
LOGIN_PROLOGUE_PAGED,
LOGIN_PROLOGUE_PAGED_JETPACK,
LOGIN_PROLOGUE_PAGED_NOTIFICATIONS,
LOGIN_PROLOGUE_PAGED_POST,
LOGIN_PROLOGUE_PAGED_READER,
LOGIN_PROLOGUE_PAGED_STATS,
LOGIN_PROLOGUE_VIEWED,
LOGIN_EMAIL_FORM_VIEWED,
LOGIN_MAGIC_LINK_OPEN_EMAIL_CLIENT_VIEWED,
LOGIN_MAGIC_LINK_OPEN_EMAIL_CLIENT_CLICKED,
LOGIN_MAGIC_LINK_REQUEST_FORM_VIEWED,
LOGIN_PASSWORD_FORM_VIEWED,
LOGIN_URL_FORM_VIEWED,
LOGIN_URL_HELP_SCREEN_VIEWED,
LOGIN_CONNECTED_SITE_INFO_REQUESTED,
LOGIN_CONNECTED_SITE_INFO_FAILED,
LOGIN_CONNECTED_SITE_INFO_SUCCEEDED,
LOGIN_USERNAME_PASSWORD_FORM_VIEWED,
LOGIN_TWO_FACTOR_FORM_VIEWED,
LOGIN_EPILOGUE_VIEWED,
LOGIN_FORGOT_PASSWORD_CLICKED,
LOGIN_SOCIAL_BUTTON_CLICK,
LOGIN_SOCIAL_BUTTON_FAILURE,
LOGIN_SOCIAL_CONNECT_SUCCESS,
LOGIN_SOCIAL_CONNECT_FAILURE,
LOGIN_SOCIAL_SUCCESS,
LOGIN_SOCIAL_FAILURE,
LOGIN_SOCIAL_2FA_NEEDED,
LOGIN_SOCIAL_ACCOUNTS_NEED_CONNECTING,
LOGIN_SOCIAL_ERROR_UNKNOWN_USER,
LOGIN_WPCOM_BACKGROUND_SERVICE_UPDATE,
PAGES_SET_PARENT_CHANGES_SAVED,
PAGES_ADD_PAGE,
PAGES_TAB_PRESSED,
PAGES_OPTIONS_PRESSED,
PAGES_SEARCH_ACCESSED,
// This stat is part of a funnel that provides critical information. Before
// making ANY modification to this stat please refer to: p4qSXL-35X-p2
SIGNUP_BUTTON_TAPPED,
SIGNUP_EMAIL_BUTTON_TAPPED,
SIGNUP_EMAIL_EPILOGUE_GRAVATAR_CROPPED,
SIGNUP_EMAIL_EPILOGUE_GRAVATAR_GALLERY_PICKED,
SIGNUP_EMAIL_EPILOGUE_GRAVATAR_SHOT_NEW,
SIGNUP_EMAIL_EPILOGUE_UNCHANGED,
SIGNUP_EMAIL_EPILOGUE_UPDATE_DISPLAY_NAME_FAILED,
SIGNUP_EMAIL_EPILOGUE_UPDATE_DISPLAY_NAME_SUCCEEDED,
SIGNUP_EMAIL_EPILOGUE_UPDATE_USERNAME_FAILED,
SIGNUP_EMAIL_EPILOGUE_UPDATE_USERNAME_SUCCEEDED,
SIGNUP_EMAIL_EPILOGUE_USERNAME_SUGGESTIONS_FAILED,
SIGNUP_EMAIL_EPILOGUE_USERNAME_TAPPED,
SIGNUP_EMAIL_EPILOGUE_VIEWED,
SIGNUP_SOCIAL_BUTTON_TAPPED,
SIGNUP_TERMS_OF_SERVICE_TAPPED,
SIGNUP_CANCELED,
SIGNUP_EMAIL_TO_LOGIN,
SIGNUP_MAGIC_LINK_FAILED,
SIGNUP_MAGIC_LINK_OPENED,
SIGNUP_MAGIC_LINK_OPEN_EMAIL_CLIENT_CLICKED,
SIGNUP_MAGIC_LINK_SENT,
SIGNUP_MAGIC_LINK_SUCCEEDED,
SIGNUP_SOCIAL_ACCOUNTS_NEED_CONNECTING,
SIGNUP_SOCIAL_BUTTON_FAILURE,
SIGNUP_SOCIAL_EPILOGUE_UNCHANGED,
SIGNUP_SOCIAL_EPILOGUE_UPDATE_DISPLAY_NAME_FAILED,
SIGNUP_SOCIAL_EPILOGUE_UPDATE_DISPLAY_NAME_SUCCEEDED,
SIGNUP_SOCIAL_EPILOGUE_UPDATE_USERNAME_FAILED,
SIGNUP_SOCIAL_EPILOGUE_UPDATE_USERNAME_SUCCEEDED,
SIGNUP_SOCIAL_EPILOGUE_USERNAME_SUGGESTIONS_FAILED,
SIGNUP_SOCIAL_EPILOGUE_USERNAME_TAPPED,
SIGNUP_SOCIAL_EPILOGUE_VIEWED,
SIGNUP_SOCIAL_SUCCESS,
SIGNUP_SOCIAL_TO_LOGIN,
ENHANCED_SITE_CREATION_ACCESSED,
ENHANCED_SITE_CREATION_SEGMENTS_VIEWED,
ENHANCED_SITE_CREATION_SEGMENTS_SELECTED,
ENHANCED_SITE_CREATION_DOMAINS_ACCESSED,
ENHANCED_SITE_CREATION_DOMAINS_SELECTED,
ENHANCED_SITE_CREATION_SUCCESS_LOADING,
ENHANCED_SITE_CREATION_SUCCESS_PREVIEW_VIEWED,
ENHANCED_SITE_CREATION_SUCCESS_PREVIEW_LOADED,
ENHANCED_SITE_CREATION_PREVIEW_OK_BUTTON_TAPPED,
ENHANCED_SITE_CREATION_EXITED,
ENHANCED_SITE_CREATION_ERROR_SHOWN,
ENHANCED_SITE_CREATION_BACKGROUND_SERVICE_UPDATED,
// This stat is part of a funnel that provides critical information. Before
// making ANY modification to this stat please refer to: p4qSXL-35X-p2
SITE_CREATED,
MEDIA_LIBRARY_ADDED_PHOTO,
MEDIA_LIBRARY_ADDED_VIDEO,
PERSON_REMOVED,
PERSON_UPDATED,
PUSH_AUTHENTICATION_APPROVED,
PUSH_AUTHENTICATION_EXPIRED,
PUSH_AUTHENTICATION_FAILED,
PUSH_AUTHENTICATION_IGNORED,
NOTIFICATION_SETTINGS_LIST_OPENED,
NOTIFICATION_SETTINGS_STREAMS_OPENED,
NOTIFICATION_SETTINGS_DETAILS_OPENED,
NOTIFICATION_SETTINGS_APP_NOTIFICATIONS_DISABLED,
NOTIFICATION_SETTINGS_APP_NOTIFICATIONS_ENABLED,
NOTIFICATION_TAPPED_SEGMENTED_CONTROL,
THEMES_ACCESSED_THEMES_BROWSER,
THEMES_ACCESSED_SEARCH,
THEMES_CHANGED_THEME,
THEMES_PREVIEWED_SITE,
THEMES_DEMO_ACCESSED,
THEMES_CUSTOMIZE_ACCESSED,
THEMES_SUPPORT_ACCESSED,
THEMES_DETAILS_ACCESSED,
ACCOUNT_SETTINGS_LANGUAGE_CHANGED,
SITE_SETTINGS_ACCESSED,
SITE_SETTINGS_ACCESSED_MORE_SETTINGS,
SITE_SETTINGS_LEARN_MORE_CLICKED,
SITE_SETTINGS_LEARN_MORE_LOADED,
SITE_SETTINGS_ADDED_LIST_ITEM,
SITE_SETTINGS_DELETED_LIST_ITEMS,
SITE_SETTINGS_SAVED_REMOTELY,
SITE_SETTINGS_HINT_TOAST_SHOWN,
SITE_SETTINGS_START_OVER_ACCESSED,
SITE_SETTINGS_START_OVER_CONTACT_SUPPORT_CLICKED,
SITE_SETTINGS_EXPORT_SITE_ACCESSED,
SITE_SETTINGS_EXPORT_SITE_REQUESTED,
SITE_SETTINGS_EXPORT_SITE_RESPONSE_OK,
SITE_SETTINGS_EXPORT_SITE_RESPONSE_ERROR,
SITE_SETTINGS_DELETE_SITE_ACCESSED,
SITE_SETTINGS_DELETE_SITE_PURCHASES_REQUESTED,
SITE_SETTINGS_DELETE_SITE_PURCHASES_SHOWN,
SITE_SETTINGS_DELETE_SITE_PURCHASES_SHOW_CLICKED,
SITE_SETTINGS_DELETE_SITE_REQUESTED,
SITE_SETTINGS_DELETE_SITE_RESPONSE_OK,
SITE_SETTINGS_DELETE_SITE_RESPONSE_ERROR,
SITE_SETTINGS_OPTIMIZE_IMAGES_CHANGED,
ABTEST_START,
FEATURE_FLAG_SET,
EXPERIMENT_VARIANT_SET,
TRAIN_TRACKS_RENDER,
TRAIN_TRACKS_INTERACT,
DEEP_LINKED,
DEEP_LINKED_FALLBACK,
DEEP_LINK_NOT_DEFAULT_HANDLER,
MEDIA_UPLOAD_STARTED,
MEDIA_UPLOAD_ERROR,
MEDIA_UPLOAD_SUCCESS,
MEDIA_UPLOAD_CANCELED,
APP_PERMISSION_GRANTED,
APP_PERMISSION_DENIED,
SHARE_TO_WP_SUCCEEDED,
PLUGIN_ACTIVATED,
PLUGIN_AUTOUPDATE_ENABLED,
PLUGIN_AUTOUPDATE_DISABLED,
PLUGIN_DEACTIVATED,
PLUGIN_INSTALLED,
PLUGIN_REMOVED,
PLUGIN_SEARCH_PERFORMED,
PLUGIN_UPDATED,
STOCK_MEDIA_ACCESSED,
STOCK_MEDIA_SEARCHED,
STOCK_MEDIA_UPLOADED,
GIF_PICKER_SEARCHED,
GIF_PICKER_ACCESSED,
GIF_PICKER_DOWNLOADED,
SHORTCUT_STATS_CLICKED,
SHORTCUT_NOTIFICATIONS_CLICKED,
SHORTCUT_NEW_POST_CLICKED,
AUTOMATED_TRANSFER_CONFIRM_DIALOG_SHOWN,
AUTOMATED_TRANSFER_CONFIRM_DIALOG_CANCELLED,
AUTOMATED_TRANSFER_CHECK_ELIGIBILITY,
AUTOMATED_TRANSFER_NOT_ELIGIBLE,
AUTOMATED_TRANSFER_INITIATE,
AUTOMATED_TRANSFER_INITIATED,
AUTOMATED_TRANSFER_INITIATION_FAILED,
AUTOMATED_TRANSFER_STATUS_COMPLETE,
AUTOMATED_TRANSFER_STATUS_FAILED,
AUTOMATED_TRANSFER_FLOW_COMPLETE,
AUTOMATED_TRANSFER_CUSTOM_DOMAIN_PURCHASED,
AUTOMATED_TRANSFER_CUSTOM_DOMAIN_PURCHASE_FAILED,
PUBLICIZE_SERVICE_CONNECTED,
PUBLICIZE_SERVICE_DISCONNECTED,
SUPPORT_OPENED,
SUPPORT_HELP_CENTER_VIEWED,
SUPPORT_NEW_REQUEST_VIEWED,
SUPPORT_TICKET_LIST_VIEWED,
SUPPORT_IDENTITY_FORM_VIEWED,
SUPPORT_IDENTITY_SET,
QUICK_START_TASK_DIALOG_VIEWED,
QUICK_START_TASK_DIALOG_NEGATIVE_TAPPED,
QUICK_START_TASK_DIALOG_POSITIVE_TAPPED,
QUICK_START_MIGRATION_DIALOG_VIEWED,
QUICK_START_MIGRATION_DIALOG_POSITIVE_TAPPED,
QUICK_START_REMOVE_DIALOG_NEGATIVE_TAPPED,
QUICK_START_REMOVE_DIALOG_POSITIVE_TAPPED,
QUICK_START_TYPE_CUSTOMIZE_VIEWED,
QUICK_START_TYPE_GROW_VIEWED,
QUICK_START_TYPE_CUSTOMIZE_DISMISSED,
QUICK_START_TYPE_GROW_DISMISSED,
QUICK_START_LIST_CUSTOMIZE_COLLAPSED,
QUICK_START_LIST_GROW_COLLAPSED,
QUICK_START_LIST_CUSTOMIZE_EXPANDED,
QUICK_START_LIST_GROW_EXPANDED,
QUICK_START_LIST_CREATE_SITE_SKIPPED,
QUICK_START_LIST_UPDATE_SITE_TITLE_SKIPPED,
QUICK_START_LIST_VIEW_SITE_SKIPPED,
QUICK_START_LIST_BROWSE_THEMES_SKIPPED,
QUICK_START_LIST_CUSTOMIZE_SITE_SKIPPED,
QUICK_START_LIST_ADD_SOCIAL_SKIPPED,
QUICK_START_LIST_PUBLISH_POST_SKIPPED,
QUICK_START_LIST_FOLLOW_SITE_SKIPPED,
QUICK_START_LIST_UPLOAD_ICON_SKIPPED,
QUICK_START_LIST_CREATE_PAGE_SKIPPED,
QUICK_START_LIST_CHECK_STATS_SKIPPED,
QUICK_START_LIST_EXPLORE_PLANS_SKIPPED,
QUICK_START_LIST_CREATE_SITE_TAPPED,
QUICK_START_LIST_UPDATE_SITE_TITLE_TAPPED,
QUICK_START_LIST_VIEW_SITE_TAPPED,
QUICK_START_LIST_BROWSE_THEMES_TAPPED,
QUICK_START_LIST_CUSTOMIZE_SITE_TAPPED,
QUICK_START_LIST_ADD_SOCIAL_TAPPED,
QUICK_START_LIST_PUBLISH_POST_TAPPED,
QUICK_START_LIST_FOLLOW_SITE_TAPPED,
QUICK_START_LIST_UPLOAD_ICON_TAPPED,
QUICK_START_LIST_CREATE_PAGE_TAPPED,
QUICK_START_LIST_CHECK_STATS_TAPPED,
QUICK_START_LIST_EXPLORE_PLANS_TAPPED,
QUICK_START_CREATE_SITE_TASK_COMPLETED,
QUICK_START_UPDATE_SITE_TITLE_COMPLETED,
QUICK_START_VIEW_SITE_TASK_COMPLETED,
QUICK_START_BROWSE_THEMES_TASK_COMPLETED,
QUICK_START_CUSTOMIZE_SITE_TASK_COMPLETED,
QUICK_START_SHARE_SITE_TASK_COMPLETED,
QUICK_START_PUBLISH_POST_TASK_COMPLETED,
QUICK_START_FOLLOW_SITE_TASK_COMPLETED,
QUICK_START_UPLOAD_ICON_COMPLETED,
QUICK_START_CREATE_PAGE_COMPLETED,
QUICK_START_CHECK_STATS_COMPLETED,
QUICK_START_EXPLORE_PLANS_COMPLETED,
QUICK_START_ALL_TASKS_COMPLETED,
QUICK_START_REQUEST_VIEWED,
QUICK_START_REQUEST_DIALOG_NEGATIVE_TAPPED,
QUICK_START_REQUEST_DIALOG_POSITIVE_TAPPED,
QUICK_START_REQUEST_DIALOG_NEUTRAL_TAPPED,
QUICK_START_NOTIFICATION_DISMISSED,
QUICK_START_NOTIFICATION_SENT,
QUICK_START_NOTIFICATION_TAPPED,
INSTALLATION_REFERRER_OBTAINED,
INSTALLATION_REFERRER_FAILED,
GUTENBERG_WARNING_CONFIRM_DIALOG_SHOWN,
GUTENBERG_WARNING_CONFIRM_DIALOG_YES_TAPPED,
GUTENBERG_WARNING_CONFIRM_DIALOG_CANCEL_TAPPED,
GUTENBERG_WARNING_CONFIRM_DIALOG_DONT_SHOW_AGAIN_CHECKED,
GUTENBERG_WARNING_CONFIRM_DIALOG_DONT_SHOW_AGAIN_UNCHECKED,
GUTENBERG_WARNING_CONFIRM_DIALOG_LEARN_MORE_TAPPED,
APP_REVIEWS_SAW_PROMPT,
APP_REVIEWS_CANCELLED_PROMPT,
APP_REVIEWS_RATED_APP,
APP_REVIEWS_DECLINED_TO_RATE_APP,
APP_REVIEWS_DECIDED_TO_RATE_LATER,
APP_REVIEWS_EVENT_INCREMENTED_BY_UPLOADING_MEDIA,
APP_REVIEWS_EVENT_INCREMENTED_BY_CHECKING_NOTIFICATION,
APP_REVIEWS_EVENT_INCREMENTED_BY_PUBLISHING_POST_OR_PAGE,
APP_REVIEWS_EVENT_INCREMENTED_BY_OPENING_READER_POST,
DOMAIN_CREDIT_PROMPT_SHOWN,
DOMAIN_CREDIT_REDEMPTION_TAPPED,
DOMAIN_CREDIT_REDEMPTION_SUCCESS,
DOMAIN_CREDIT_SUGGESTION_QUERIED,
DOMAIN_CREDIT_NAME_SELECTED,
QUICK_ACTION_STATS_TAPPED,
QUICK_ACTION_PAGES_TAPPED,
QUICK_ACTION_POSTS_TAPPED,
QUICK_ACTION_MEDIA_TAPPED,
AUTO_UPLOAD_POST_INVOKED,
AUTO_UPLOAD_PAGE_INVOKED,
UNPUBLISHED_REVISION_DIALOG_SHOWN,
UNPUBLISHED_REVISION_DIALOG_LOAD_LOCAL_VERSION_CLICKED,
UNPUBLISHED_REVISION_DIALOG_LOAD_UNPUBLISHED_VERSION_CLICKED,
WELCOME_NO_SITES_INTERSTITIAL_SHOWN,
WELCOME_NO_SITES_INTERSTITIAL_CREATE_NEW_SITE_TAPPED,
WELCOME_NO_SITES_INTERSTITIAL_ADD_SELF_HOSTED_SITE_TAPPED,
WELCOME_NO_SITES_INTERSTITIAL_DISMISSED,
FEATURED_IMAGE_SET_CLICKED_POST_SETTINGS,
FEATURED_IMAGE_PICKED_POST_SETTINGS,
FEATURED_IMAGE_UPLOAD_CANCELED_POST_SETTINGS,
FEATURED_IMAGE_UPLOAD_RETRY_CLICKED_POST_SETTINGS,
FEATURED_IMAGE_REMOVE_CLICKED_POST_SETTINGS,
MEDIA_EDITOR_SHOWN,
MEDIA_EDITOR_USED,
STORY_SAVE_SUCCESSFUL,
STORY_SAVE_ERROR,
STORY_POST_SAVE_LOCALLY,
STORY_POST_SAVE_REMOTELY,
STORY_SAVE_ERROR_SNACKBAR_MANAGE_TAPPED,
STORY_POST_PUBLISH_TAPPED,
PREPUBLISHING_BOTTOM_SHEET_OPENED,
PREPUBLISHING_BOTTOM_SHEET_DISMISSED,
FEATURE_ANNOUNCEMENT_SHOWN_ON_APP_UPGRADE,
FEATURE_ANNOUNCEMENT_SHOWN_FROM_APP_SETTINGS,
FEATURE_ANNOUNCEMENT_FIND_OUT_MORE_TAPPED,
FEATURE_ANNOUNCEMENT_CLOSE_DIALOG_BUTTON_TAPPED,
PAGES_LIST_AUTHOR_FILTER_CHANGED,
EDITOR_GUTENBERG_UNSUPPORTED_BLOCK_WEBVIEW_SHOWN,
EDITOR_GUTENBERG_UNSUPPORTED_BLOCK_WEBVIEW_CLOSED,
SELECT_INTERESTS_SHOWN,
SELECT_INTERESTS_PICKED,
READER_FOLLOWING_SHOWN,
READER_LIKED_SHOWN,
READER_SAVED_LIST_SHOWN,
READER_CUSTOM_TAB_SHOWN,
READER_DISCOVER_SHOWN,
READER_DISCOVER_PAGINATED,
READER_DISCOVER_TOPIC_TAPPED,
READER_POST_CARD_TAPPED,
READER_PULL_TO_REFRESH,
POST_CARD_MORE_TAPPED,
READER_CHIPS_MORE_TOGGLED,
ENCRYPTED_LOGGING_UPLOAD_SUCCESSFUL,
ENCRYPTED_LOGGING_UPLOAD_FAILED,
READER_POST_REPORTED,
}
private static final List<Tracker> TRACKERS = new ArrayList<>();
private AnalyticsTracker() {
}
public static void init(Context context) {
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
boolean hasUserOptedOut = !prefs.getBoolean("wp_pref_send_usage_stats", true);
if (hasUserOptedOut != mHasUserOptedOut) {
mHasUserOptedOut = hasUserOptedOut;
}
}
public static void setHasUserOptedOut(boolean hasUserOptedOut) {
if (hasUserOptedOut != mHasUserOptedOut) {
mHasUserOptedOut = hasUserOptedOut;
}
}
public static void registerTracker(Tracker tracker) {
if (tracker != null) {
TRACKERS.add(tracker);
}
}
public static void track(Stat stat) {
if (mHasUserOptedOut) {
return;
}
for (Tracker tracker : TRACKERS) {
tracker.track(stat);
}
}
public static void track(Stat stat, Map<String, ?> properties) {
if (mHasUserOptedOut) {
return;
}
for (Tracker tracker : TRACKERS) {
tracker.track(stat, properties);
}
}
/**
* A convenience method for logging an error event with some additional meta data.
* @param stat The stat to track.
* @param errorContext A string providing additional context (if any) about the error.
* @param errorType The type of error.
* @param errorDescription The error text or other description.
*/
public static void track(Stat stat, String errorContext, String errorType, String errorDescription) {
Map<String, String> props = new HashMap<>();
props.put("error_context", errorContext);
props.put("error_type", errorType);
props.put("error_description", errorDescription);
track(stat, props);
}
public static void flush() {
if (mHasUserOptedOut) {
return;
}
for (Tracker tracker : TRACKERS) {
tracker.flush();
}
}
public static void endSession(boolean force) {
if (mHasUserOptedOut && !force) {
return;
}
for (Tracker tracker : TRACKERS) {
tracker.endSession();
}
}
public static void clearAllData() {
for (Tracker tracker : TRACKERS) {
tracker.clearAllData();
}
}
public static void refreshMetadata(AnalyticsMetadata metadata) {
for (Tracker tracker : TRACKERS) {
tracker.refreshMetadata(metadata);
}
}
}
| |
/*
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.compute.v1;
import com.google.api.core.BetaApi;
import com.google.api.gax.httpjson.ApiMessage;
import java.util.LinkedList;
import java.util.List;
import java.util.Objects;
import javax.annotation.Generated;
import javax.annotation.Nullable;
@Generated("by GAPIC")
@BetaApi
public final class TargetPoolInstanceHealth implements ApiMessage {
private final List<HealthStatus> healthStatus;
private final String kind;
private TargetPoolInstanceHealth() {
this.healthStatus = null;
this.kind = null;
}
private TargetPoolInstanceHealth(List<HealthStatus> healthStatus, String kind) {
this.healthStatus = healthStatus;
this.kind = kind;
}
@Override
public Object getFieldValue(String fieldName) {
if ("healthStatus".equals(fieldName)) {
return healthStatus;
}
if ("kind".equals(fieldName)) {
return kind;
}
return null;
}
@Nullable
@Override
public ApiMessage getApiMessageRequestBody() {
return null;
}
@Nullable
@Override
/**
* The fields that should be serialized (even if they have empty values). If the containing
* message object has a non-null fieldmask, then all the fields in the field mask (and only those
* fields in the field mask) will be serialized. If the containing object does not have a
* fieldmask, then only non-empty fields will be serialized.
*/
public List<String> getFieldMask() {
return null;
}
public List<HealthStatus> getHealthStatusList() {
return healthStatus;
}
/**
* [Output Only] Type of resource. Always compute#targetPoolInstanceHealth when checking the
* health of an instance.
*/
public String getKind() {
return kind;
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(TargetPoolInstanceHealth prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
public static TargetPoolInstanceHealth getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final TargetPoolInstanceHealth DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new TargetPoolInstanceHealth();
}
public static class Builder {
private List<HealthStatus> healthStatus;
private String kind;
Builder() {}
public Builder mergeFrom(TargetPoolInstanceHealth other) {
if (other == TargetPoolInstanceHealth.getDefaultInstance()) return this;
if (other.getHealthStatusList() != null) {
this.healthStatus = other.healthStatus;
}
if (other.getKind() != null) {
this.kind = other.kind;
}
return this;
}
Builder(TargetPoolInstanceHealth source) {
this.healthStatus = source.healthStatus;
this.kind = source.kind;
}
public List<HealthStatus> getHealthStatusList() {
return healthStatus;
}
public Builder addAllHealthStatus(List<HealthStatus> healthStatus) {
if (this.healthStatus == null) {
this.healthStatus = new LinkedList<>();
}
this.healthStatus.addAll(healthStatus);
return this;
}
public Builder addHealthStatus(HealthStatus healthStatus) {
if (this.healthStatus == null) {
this.healthStatus = new LinkedList<>();
}
this.healthStatus.add(healthStatus);
return this;
}
/**
* [Output Only] Type of resource. Always compute#targetPoolInstanceHealth when checking the
* health of an instance.
*/
public String getKind() {
return kind;
}
/**
* [Output Only] Type of resource. Always compute#targetPoolInstanceHealth when checking the
* health of an instance.
*/
public Builder setKind(String kind) {
this.kind = kind;
return this;
}
public TargetPoolInstanceHealth build() {
return new TargetPoolInstanceHealth(healthStatus, kind);
}
public Builder clone() {
Builder newBuilder = new Builder();
newBuilder.addAllHealthStatus(this.healthStatus);
newBuilder.setKind(this.kind);
return newBuilder;
}
}
@Override
public String toString() {
return "TargetPoolInstanceHealth{"
+ "healthStatus="
+ healthStatus
+ ", "
+ "kind="
+ kind
+ "}";
}
@Override
public boolean equals(Object o) {
if (o == this) {
return true;
}
if (o instanceof TargetPoolInstanceHealth) {
TargetPoolInstanceHealth that = (TargetPoolInstanceHealth) o;
return Objects.equals(this.healthStatus, that.getHealthStatusList())
&& Objects.equals(this.kind, that.getKind());
}
return false;
}
@Override
public int hashCode() {
return Objects.hash(healthStatus, kind);
}
}
| |
package us.kbase.typedobj.test;
import static org.junit.Assert.*;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLDecoder;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import org.apache.commons.io.FileUtils;
import org.junit.AfterClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import com.fasterxml.jackson.core.JsonParser.Feature;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import us.kbase.common.test.TestCommon;
import us.kbase.common.test.TestException;
import us.kbase.typedobj.core.LocalTypeProvider;
import us.kbase.typedobj.core.TypeDefId;
import us.kbase.typedobj.core.TypeDefName;
import us.kbase.typedobj.core.ValidatedTypedObject;
import us.kbase.typedobj.core.TypedObjectValidator;
import us.kbase.typedobj.db.FileTypeStorage;
import us.kbase.typedobj.db.TypeDefinitionDB;
import us.kbase.typedobj.idref.IdReferenceHandlerSet;
import us.kbase.typedobj.idref.IdReferenceHandlerSetFactory;
import us.kbase.typedobj.idref.IdReferenceHandlerSetFactoryBuilder;
/**
* Detailed validation tests. Tests specify the instance data, the expected result (pass/error),
* the error type (exception/invalid), and the list of error messages or exception name.
*
* @author msneddon
*
*/
@RunWith(value = Parameterized.class)
public class DetailedValidationTest {
private static final int EXPECTED_TESTS = 61;
/**
* location to stash the temporary database for testing
* WARNING: THIS DIRECTORY WILL BE WIPED OUT AFTER TESTS!!!
*/
private final static Path TEST_DB_LOCATION =
Paths.get(TestCommon.getTempDir())
.resolve("DetailedValidationTest");
private final static String TEST_RESOURCE_LOCATION = "files/DetailedValidation/";
private static TypeDefinitionDB db;
private static TypedObjectValidator validator;
private final static boolean VERBOSE = true;
private final static List<String> KB_TYPES =
Arrays.asList("StringField", "IntField", "FloatField", "ListField", "MappingField", "TupleField", "StructureField");
private static List<TestInstanceInfo> resources = new ArrayList <TestInstanceInfo> ();
/**
* structures to store info on each instance we wish to validate
*/
private static class TestInstanceInfo {
public TestInstanceInfo(String resourceName) {
this.resourceName = resourceName;
}
public String resourceName;
}
/**
* As each test instance object is created, this sets which instance to actually test
*/
private int instanceNumber;
public DetailedValidationTest(Integer instanceNumber) {
this.instanceNumber = instanceNumber.intValue();
}
/**
* This is invoked before anything else, so here we invoke the creation of the db
* @return
* @throws Exception
*/
@Parameters
public static Collection<Object[]> assembleTestInstanceList() throws Exception {
prepareDb();
Object [][] instanceInfo = new Object[resources.size()][1];
for(int k=0; k<resources.size(); k++) {
instanceInfo[k][0] = k;
}
return Arrays.asList(instanceInfo);
}
/**
* Setup the typedef database, load and release the types in the simple specs, and
* identify all the files containing instances to validate.
* @throws Exception
*/
public static void prepareDb() throws Exception
{
//ensure test location is available
File dir = TEST_DB_LOCATION.toFile();
if (dir.exists()) {
//fail("database at location: "+TEST_DB_LOCATION+" already exists, remove/rename this directory first");
removeDb();
}
Files.createDirectories(TEST_DB_LOCATION);
if(VERBOSE) System.out.println("setting up the typed obj database");
// point the type definition db to point there
db = new TypeDefinitionDB(
new FileTypeStorage(TEST_DB_LOCATION.toString()));
// create a validator that uses the type def db
validator = new TypedObjectValidator(new LocalTypeProvider(db));
if(VERBOSE) System.out.println("loading db with types");
String username = "wstester1";
String kbSpec = loadResourceFile(TEST_RESOURCE_LOCATION+"KB.spec");
db.requestModuleRegistration("KB", username);
db.approveModuleRegistrationRequest(username, "KB", true);
db.registerModule(kbSpec ,KB_TYPES, username);
db.releaseModule("KB", username, false);
if(VERBOSE) System.out.print("finding test instances: ");
String [] resourcesArray = getResourceListing(TEST_RESOURCE_LOCATION);
Arrays.sort(resourcesArray);
for(int k=0; k<resourcesArray.length; k++) {
String [] tokens = resourcesArray[k].split("\\.");
if(tokens[0].equals("instance")) {
resources.add(new TestInstanceInfo(resourcesArray[k]));
}
}
if (resources.size() != EXPECTED_TESTS) {
System.out.println("Missing test resources - found " +
resources.size() + ", expected " + EXPECTED_TESTS);
throw new TestException("Missing test resources - found " +
resources.size() + ", expected " + EXPECTED_TESTS);
}
if(VERBOSE) System.out.println("found "+resources.size()+" instances.");
}
@AfterClass
public static void removeDb() throws IOException {
File dir = TEST_DB_LOCATION.toFile();
FileUtils.deleteDirectory(dir);
if(VERBOSE) System.out.println("deleting typed obj database");
}
@Test
public void testInstance() {
// load the instance information
TestInstanceInfo resource = resources.get(this.instanceNumber);
String testConditionsString;
try {
testConditionsString = loadResourceFile(TEST_RESOURCE_LOCATION+resource.resourceName);
} catch (Exception e) {
if (VERBOSE) System.out.println(" malformed test case, cannot load '"+resource.resourceName+"'. error was: "+e.getMessage());
throw new RuntimeException("malformed test case, cannot load '"+resource.resourceName+"'. error was: "+e.getMessage());
}
ObjectMapper mapper = new ObjectMapper().configure(Feature.ALLOW_COMMENTS, true);
JsonNode testConditions;
try {
testConditions = mapper.readTree(testConditionsString);
} catch (Exception e) {
if (VERBOSE) System.out.println("malformed test case, cannot parse '"+resource.resourceName+"' as JSON. error was: "+e.getMessage());
throw new RuntimeException("malformed test case, cannot parse '"+resource.resourceName+"' as JSON. error was: "+e.getMessage());
}
// perform some checking on the test condition JSON
String expectedResult = testConditions.get("result").asText();
if(!expectedResult.equals("pass") && !expectedResult.equals("error")) {
if(VERBOSE) System.out.println(" malformed test case, result must be pass/error.");
throw new RuntimeException("malformed test case, result must be pass/error in "+resource.resourceName);
}
String typeName = testConditions.get("type").asText();
String [] typeTokens = typeName.split("\\.");
if(typeTokens.length<2 || typeTokens.length>3) {
if(VERBOSE) System.out.println(" malformed test case, type name could not be parsed from type.");
throw new RuntimeException("malformed test case, type name could not be parsed from type in "+resource.resourceName);
}
JsonNode instance = testConditions.get("instance");
if(instance==null) {
if(VERBOSE) System.out.println(" malformed test case, no instance defined.");
throw new RuntimeException("malformed test case, no instance defined in "+resource.resourceName);
}
if(VERBOSE) System.out.println(" -TEST ("+resource.resourceName+") - instance of '"+typeName+"' expected result: "+expectedResult+".");
// actually perform the test and verify we get what is expected
final IdReferenceHandlerSetFactory fac = IdReferenceHandlerSetFactoryBuilder
.getBuilder(6).build().getFactory(null);
IdReferenceHandlerSet<String> handler = fac.createHandlers(String.class);
handler.associateObject("foo");
try {
ValidatedTypedObject report =
validator.validate(
instance,
new TypeDefId(new TypeDefName(typeTokens[0],typeTokens[1])),
handler
);
// if we expected to pass, we must pass without any error messages
if(expectedResult.equals("pass")) {
StringBuilder errMssgs = new StringBuilder();
List <String> mssgs = report.getErrorMessages();
for(int i=0; i<mssgs.size(); i++) {
errMssgs.append(" ["+i+"]:"+mssgs.get(i));
}
if(VERBOSE) if(!report.isInstanceValid()) System.out.println(" FAILED! Instance should validate, but doesn't. errors were: \n"+errMssgs.toString());
assertTrue(" -("+resource.resourceName+") does not validate, but should. Errors are: \n"+errMssgs.toString(),
report.isInstanceValid());
}
else { //if(expectedResult.equals("error")) {
// if we expected to fail, then we must confirm that we did fail
if(VERBOSE) if(report.isInstanceValid()) System.out.println(" FAILED! Instance passes validation, but should not");
assertFalse(" -("+resource.resourceName+") passes validation, but should not.",
report.isInstanceValid());
// and the error messages must match IN THE CORRECT ORDER!
JsonNode expectedErrorMssgs = testConditions.get("error-mssgs");
if(expectedErrorMssgs == null) {
if(VERBOSE) System.out.println(" FAILED! Instance does not validate as expected, but test case does not specify the expected error messages.");
fail("Instance does not validate as expected, but test case does not specify the expected error messages in "+resource.resourceName);
}
if(!expectedErrorMssgs.isArray()) {
if(VERBOSE) System.out.println(" FAILED! Instance does not validate as expected, but test case does not specify the expected error messages as a list.");
fail("Instance does not validate as expected, but test case does not specify the expected error messages as a list in "+resource.resourceName);
}
List <String> mssgs = report.getErrorMessages();
if(VERBOSE) {
if(mssgs.size()!=expectedErrorMssgs.size()) {
System.out.println(" FAILED! The number of expected error messages does not match! Expected "
+ expectedErrorMssgs.size() + " errors, but got:");
for(int k=0; k<mssgs.size(); k++) {
System.out.println(" ["+k+"]:"+mssgs.get(k));
}
}
}
assertTrue(" -("+resource.resourceName+") The number of expected error messages does not match. Expected "
+ expectedErrorMssgs.size() + " errors, but got "+mssgs.size(),
mssgs.size()==expectedErrorMssgs.size());
for(int i=0; i<mssgs.size(); i++) {
if(VERBOSE) {
if(!mssgs.get(i).equals(expectedErrorMssgs.get(i).asText())) {
System.out.println(" FAILED! Error["+i+"] does not match expected error message.");
System.out.println(" Expected:'"+expectedErrorMssgs.get(i).asText()+"'\n Errors were:");
for(int k=0; k<mssgs.size(); k++) {
System.out.println(" ["+k+"]:"+mssgs.get(k));
}
}
}
assertTrue(" -("+resource.resourceName+") validation provides wrong error message for error "+i+"\n"
+ " Expected: " + expectedErrorMssgs.get(i).asText()
+ " Got: " + mssgs.get(i),
mssgs.get(i).equals(expectedErrorMssgs.get(i).asText()));
}
}
} catch (Exception e) {
// if an exception was thrown, and test should pass, then we fail
if(expectedResult.equals("pass")) {
if(VERBOSE) System.out.println(" FAILED! Instance should validate, but doesn't. Exception was : "+e.getClass().getName() +"\nmssg: "+e.getMessage());
fail(" -("+resource.resourceName+") does not validate, but should. Exception was : "+e.getClass().getName() +"\nmssg: "+e.getMessage());
} else { // if(expectedResult.equals("error"))
// if exception was thrown, we need to confirm that the correct exception class was thrown
if(testConditions.get("exception-class")==null) {
if(VERBOSE) System.out.println(" FAILED! Instance validation throws exception, but test case does not specify that there is an expected exception.\n"
+" Exception was : "+e.getClass().getName() +"\n mssg: "+e.getMessage());
fail("Instance validation throws exception, but test case does not specify that there is an expected exception in "+resource.resourceName);
}
String expectedException = testConditions.get("exception-class").asText();
if(!expectedException.equals(e.getClass().getName())) {
if(VERBOSE) System.out.println(" FAILED! Validation throws wrong exception.\n Expected: '"
+expectedException+"', but was : "+e.getClass().getName() +"\n mssg: "+e.getMessage());
}
assertTrue(" -("+resource.resourceName+") validation throws wrong exception. Expected: '"
+expectedException+"', but was : "+e.getClass().getName() +"\n mssg: "+e.getMessage(),
expectedException.equals(e.getClass().getName()));
}
}
/*if(expectedResult.equals)
String expectedErrorType = testConditions.get("error-type");
"error-type":"invalid",
"error-mssgs":[],*/
if(VERBOSE) System.out.println(" PASS.");
}
/**
* helper method to load test files, mostly copied from TypeRegistering test
*/
private static String loadResourceFile(String resourceName) throws Exception {
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw);
InputStream is = DetailedValidationTest.class.getResourceAsStream(resourceName);
if (is == null)
throw new IllegalStateException("Resource not found: " + resourceName);
BufferedReader br = new BufferedReader(new InputStreamReader(is));
while (true) {
String line = br.readLine();
if (line == null)
break;
pw.println(line);
}
br.close();
pw.close();
return sw.toString();
}
/**
* List directory contents for a resource folder. Not recursive.
* This is basically a brute-force implementation.
* Works for regular files and also JARs.
* adapted from: http://www.uofr.net/~greg/java/get-resource-listing.html
*
* @author Greg Briggs
* @author msneddon
* @param path Should end with "/", but not start with one.
* @return Just the name of each member item, not the full paths.
* @throws URISyntaxException
* @throws IOException
*/
private static String[] getResourceListing(String path) throws URISyntaxException, IOException {
URL dirURL = DetailedValidationTest.class.getResource(path);
if (dirURL != null && dirURL.getProtocol().equals("file")) {
/* A file path: easy enough */
return new File(dirURL.toURI()).list();
}
if (dirURL == null) {
// In case of a jar file, we can't actually find a directory.
// Have to assume the same jar as the class.
String me = DetailedValidationTest.class.getName().replace(".", "/")+".class";
dirURL = DetailedValidationTest.class.getResource(me);
}
if (dirURL.getProtocol().equals("jar")) {
/* A JAR path */
String jarPath = dirURL.getPath().substring(5, dirURL.getPath().indexOf("!")); //strip out only the JAR file
JarFile jar = new JarFile(URLDecoder.decode(jarPath, "UTF-8"));
Enumeration<JarEntry> entries = jar.entries(); //gives ALL entries in jar
Set<String> result = new HashSet<String>(); //avoid duplicates in case it is a subdirectory
while(entries.hasMoreElements()) {
String name = entries.nextElement().getName();
// construct internal jar path relative to the class
String fullPath = DetailedValidationTest.class.getPackage().getName().replace(".","/") + "/" + path;
if (name.startsWith(fullPath)) { //filter according to the path
String entry = name.substring(fullPath.length());
int checkSubdir = entry.indexOf("/");
if (checkSubdir >= 0) {
// if it is a subdirectory, we just return the directory name
entry = entry.substring(0, checkSubdir);
}
result.add(entry);
}
}
jar.close();
return result.toArray(new String[result.size()]);
}
throw new UnsupportedOperationException("Cannot list files for URL "+dirURL);
}
}
| |
// Copyright 2014 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.syntax;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Ordering;
import com.google.devtools.build.lib.collect.nestedset.NestedSet;
import com.google.devtools.build.lib.concurrent.ThreadSafety.Immutable;
import com.google.devtools.build.lib.events.Location;
import com.google.devtools.build.lib.vfs.PathFragment;
import java.util.Collection;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * Utilities used by the evaluator.
 */
public final class EvalUtils {

  // Pure utility class; never instantiated.
  private EvalUtils() {}

  /**
   * The exception that SKYLARK_COMPARATOR might throw. This is an unchecked exception
   * because Comparator doesn't let us declare exceptions. It should normally be caught
   * and wrapped in an EvalException.
   */
  public static class ComparisonException extends RuntimeException {
    public ComparisonException(String msg) {
      super(msg);
    }
  }

  /**
   * Compare two Skylark objects.
   *
   * <p> It may throw an unchecked exception ComparisonException that should be wrapped in
   * an EvalException.
   */
  public static final Comparator<Object> SKYLARK_COMPARATOR = new Comparator<Object>() {
    private int compareLists(SkylarkList o1, SkylarkList o2) {
      for (int i = 0; i < Math.min(o1.size(), o2.size()); i++) {
        int cmp = compare(o1.get(i), o2.get(i));
        if (cmp != 0) {
          return cmp;
        }
      }
      // One list is a prefix of the other: the shorter list compares as smaller.
      return Integer.compare(o1.size(), o2.size());
    }

    @Override
    @SuppressWarnings("unchecked")
    public int compare(Object o1, Object o2) {
      Location loc = null;
      try {
        o1 = SkylarkType.convertToSkylark(o1, loc);
        o2 = SkylarkType.convertToSkylark(o2, loc);
      } catch (EvalException e) {
        throw new ComparisonException(e.getMessage());
      }
      if (o1 instanceof SkylarkList && o2 instanceof SkylarkList) {
        return compareLists((SkylarkList) o1, (SkylarkList) o2);
      }
      try {
        return ((Comparable<Object>) o1).compareTo(o2);
      } catch (ClassCastException e) {
        try {
          // Different types -> let the class names decide
          return o1.getClass().getName().compareTo(o2.getClass().getName());
        } catch (NullPointerException ex) {
          throw new ComparisonException(
              "Cannot compare " + getDataTypeName(o1) + " with " + EvalUtils.getDataTypeName(o2));
        }
      }
    }
  };

  /**
   * @return true if the specified sequence is a tuple; false if it's a modifiable list.
   */
  public static boolean isTuple(List<?> l) {
    return isTuple(l.getClass());
  }

  /**
   * @return true if the specified List class represents a tuple (an immutable list).
   */
  public static boolean isTuple(Class<?> c) {
    Preconditions.checkState(List.class.isAssignableFrom(c));
    return ImmutableList.class.isAssignableFrom(c);
  }

  /**
   * @return true if the specified object is a tuple; false for any other object.
   */
  public static boolean isTuple(Object o) {
    if (o instanceof SkylarkList) {
      return ((SkylarkList) o).isTuple(); // tuples are immutable, lists are not.
    }
    if (o instanceof List<?>) {
      return isTuple(o.getClass());
    }
    return false;
  }

  /**
   * Checks that an Object is a valid key for a Skylark dict.
   * @param o an Object to validate
   * @throws EvalException if o is not a valid key
   */
  static void checkValidDictKey(Object o) throws EvalException {
    // TODO(bazel-team): check that all recursive elements are both Immutable AND Comparable.
    if (isImmutable(o)) {
      return;
    }
    // Same error message as Python (that makes it a TypeError).
    throw new EvalException(null, Printer.format("unhashable type: '%r'", o.getClass()));
  }

  /**
   * Is this object known or assumed to be recursively immutable by Skylark?
   * @param o an Object
   * @return true if the object is known to be an immutable value.
   */
  public static boolean isImmutable(Object o) {
    if (o instanceof SkylarkValue) {
      return ((SkylarkValue) o).isImmutable();
    }
    if (!(o instanceof List<?>)) {
      return isImmutable(o.getClass());
    }
    if (!isTuple((List<?>) o)) {
      return false;
    }
    // A tuple is only immutable if all of its elements are.
    for (Object item : (List<?>) o) {
      if (!isImmutable(item)) {
        return false;
      }
    }
    return true;
  }

  /**
   * Is this class known to be *recursively* immutable by Skylark?
   * For instance, class Tuple is not it, because it can contain mutable values.
   * @param c a Class
   * @return true if the class is known to represent only recursively immutable values.
   */
  // NB: This is used as the basis for accepting objects in SkylarkNestedSet-s,
  // as well as for accepting objects as keys for Skylark dict-s.
  static boolean isImmutable(Class<?> c) {
    return c.isAnnotationPresent(Immutable.class) // TODO(bazel-team): beware of containers!
        || c.equals(String.class)
        || c.equals(Integer.class)
        || c.equals(Boolean.class);
  }

  /**
   * Returns true if the type is acceptable to be returned to the Skylark language.
   */
  public static boolean isSkylarkAcceptable(Class<?> c) {
    return SkylarkValue.class.isAssignableFrom(c) // implements SkylarkValue
        || c.equals(String.class) // basic values
        || c.equals(Integer.class)
        || c.equals(Boolean.class)
        || c.isAnnotationPresent(SkylarkModule.class) // registered Skylark class
        || ImmutableMap.class.isAssignableFrom(c) // will be converted to SkylarkDict
        || NestedSet.class.isAssignableFrom(c) // will be converted to SkylarkNestedSet
        || c.equals(PathFragment.class); // other known class
  }

  /**
   * Returns a transitive superclass or interface implemented by c which is annotated
   * with SkylarkModule. Returns null if no such class or interface exists.
   */
  @VisibleForTesting
  static Class<?> getParentWithSkylarkModule(Class<?> c) {
    if (c == null) {
      return null;
    }
    if (c.isAnnotationPresent(SkylarkModule.class)) {
      return c;
    }
    // Superclass chain takes precedence over interfaces.
    Class<?> parent = getParentWithSkylarkModule(c.getSuperclass());
    if (parent != null) {
      return parent;
    }
    for (Class<?> ifparent : c.getInterfaces()) {
      ifparent = getParentWithSkylarkModule(ifparent);
      if (ifparent != null) {
        return ifparent;
      }
    }
    return null;
  }

  // TODO(bazel-team): move the following few type-related functions to SkylarkType

  /**
   * Return the Skylark-type of {@code c}
   *
   * <p>The result will be a type that Skylark understands and is either equal to {@code c}
   * or is a supertype of it. For example, all instances of (all subclasses of) SkylarkList
   * are considered to be SkylarkLists.
   *
   * <p>Skylark's type validation isn't equipped to deal with inheritance so we must tell it which
   * of the superclasses or interfaces of {@code c} is the one that matters for type compatibility.
   *
   * @param c a class
   * @return a super-class of c to be used in validation-time type inference.
   */
  public static Class<?> getSkylarkType(Class<?> c) {
    if (ImmutableList.class.isAssignableFrom(c)) {
      return ImmutableList.class;
    } else if (List.class.isAssignableFrom(c)) {
      return List.class;
    } else if (SkylarkList.class.isAssignableFrom(c)) {
      return SkylarkList.class;
    } else if (Map.class.isAssignableFrom(c)) {
      return Map.class;
    } else if (NestedSet.class.isAssignableFrom(c)) {
      // This could be removed probably
      return NestedSet.class;
    } else if (Set.class.isAssignableFrom(c)) {
      return Set.class;
    } else {
      // TODO(bazel-team): also unify all implementations of ClassObject,
      // that we used to all print the same as "struct"?
      //
      // Check if one of the superclasses or implemented interfaces has the SkylarkModule
      // annotation. If yes return that class.
      Class<?> parent = getParentWithSkylarkModule(c);
      if (parent != null) {
        return parent;
      }
    }
    return c;
  }

  /**
   * Returns a pretty name for the datatype of object 'o' in the Build language.
   */
  public static String getDataTypeName(Object o) {
    return getDataTypeName(o, false);
  }

  /**
   * Returns a pretty name for the datatype of object {@code object} in Skylark
   * or the BUILD language, with full details if the {@code full} boolean is true.
   */
  public static String getDataTypeName(Object object, boolean full) {
    Preconditions.checkNotNull(object);
    if (object instanceof SkylarkList) {
      SkylarkList list = (SkylarkList) object;
      if (list.isTuple()) {
        return "tuple";
      } else {
        return "list" + (full ? " of " + list.getContentType() + "s" : "");
      }
    } else if (object instanceof SkylarkNestedSet) {
      SkylarkNestedSet set = (SkylarkNestedSet) object;
      return "set" + (full ? " of " + set.getContentType() + "s" : "");
    } else {
      return getDataTypeNameFromClass(object.getClass());
    }
  }

  /**
   * Returns a pretty name for the datatype equivalent of class 'c' in the Build language.
   */
  public static String getDataTypeNameFromClass(Class<?> c) {
    return getDataTypeNameFromClass(c, true);
  }

  /**
   * Returns a pretty name for the datatype equivalent of class 'c' in the Build language.
   * @param highlightNameSpaces Determines whether the result should also contain a special comment
   * when the given class identifies a Skylark name space.
   */
  public static String getDataTypeNameFromClass(Class<?> c, boolean highlightNameSpaces) {
    if (c.equals(Object.class)) {
      return "unknown";
    } else if (c.equals(String.class)) {
      return "string";
    } else if (c.equals(Integer.class)) {
      return "int";
    } else if (c.equals(Boolean.class)) {
      return "bool";
    } else if (c.equals(Void.TYPE) || c.equals(Runtime.NoneType.class)) {
      // TODO(bazel-team): no one should be seeing Void at all.
      return "NoneType";
    } else if (List.class.isAssignableFrom(c)) {
      // NB: the capital here is a subtle way to distinguish java Tuple and java List
      // from native SkylarkList tuple and list.
      // TODO(bazel-team): refactor SkylarkList and use it everywhere.
      return isTuple(c) ? "Tuple" : "List";
    } else if (Map.class.isAssignableFrom(c)) {
      return "dict";
    } else if (BaseFunction.class.isAssignableFrom(c)) {
      return "function";
    } else if (c.equals(SelectorValue.class)) {
      return "select";
    } else if (NestedSet.class.isAssignableFrom(c) || SkylarkNestedSet.class.isAssignableFrom(c)) {
      // TODO(bazel-team): no one should be seeing naked NestedSet at all.
      return "set";
    } else if (ClassObject.SkylarkClassObject.class.isAssignableFrom(c)) {
      return "struct";
    } else if (SkylarkList.class.isAssignableFrom(c)) {
      // TODO(bazel-team): Refactor the class hierarchy so we can distinguish list and tuple types.
      return "list";
    } else if (c.isAnnotationPresent(SkylarkModule.class)) {
      SkylarkModule module = c.getAnnotation(SkylarkModule.class);
      // Reuse the annotation already fetched above instead of looking it up a second time.
      return module.name()
          + ((module.namespace() && highlightNameSpaces) ? " (a language module)" : "");
    } else {
      if (c.getSimpleName().isEmpty()) {
        return c.getName();
      } else {
        return c.getSimpleName();
      }
    }
  }

  /**
   * Returns a sequence of the appropriate list/tuple datatype for 'seq', based on 'isTuple'.
   */
  public static List<?> makeSequence(List<?> seq, boolean isTuple) {
    return isTuple ? ImmutableList.copyOf(seq) : seq;
  }

  /**
   * Checks that {@code obj} is not null, reporting the expression that produced it otherwise.
   * @param expr the expression that generated the value, used for the error location and message
   * @param obj the value to check
   * @return obj, unchanged, when it is non-null
   * @throws EvalException if obj is null
   */
  public static Object checkNotNull(Expression expr, Object obj) throws EvalException {
    if (obj == null) {
      throw new EvalException(expr.getLocation(),
          "Unexpected null value, please send a bug report. "
          + "This was generated by '" + expr + "'");
    }
    return obj;
  }

  /**
   * @return the truth value of an object, according to Python rules.
   * http://docs.python.org/2/library/stdtypes.html#truth-value-testing
   */
  public static boolean toBoolean(Object o) {
    if (o == null || o == Runtime.NONE) {
      return false;
    } else if (o instanceof Boolean) {
      return (Boolean) o;
    } else if (o instanceof String) {
      return !((String) o).isEmpty();
    } else if (o instanceof Integer) {
      return (Integer) o != 0;
    } else if (o instanceof Collection<?>) {
      return !((Collection<?>) o).isEmpty();
    } else if (o instanceof Map<?, ?>) {
      return !((Map<?, ?>) o).isEmpty();
    } else if (o instanceof NestedSet<?>) {
      return !((NestedSet<?>) o).isEmpty();
    } else if (o instanceof SkylarkNestedSet) {
      return !((SkylarkNestedSet) o).isEmpty();
    } else if (o instanceof Iterable<?>) {
      return !(Iterables.isEmpty((Iterable<?>) o));
    } else {
      // Any other object is truthy.
      return true;
    }
  }

  /**
   * Converts a Skylark value to a Java Collection.
   * For a dict, returns its keys, sorted for determinism.
   * @param o the object to convert
   * @param loc the location to report in case of error
   * @throws EvalException if o is not a collection-like value or its dict keys are not comparable
   */
  @SuppressWarnings("unchecked")
  public static Collection<?> toCollection(Object o, Location loc) throws EvalException {
    if (o instanceof Collection) {
      return (Collection<Object>) o;
    } else if (o instanceof SkylarkList) {
      return ((SkylarkList) o).toList();
    } else if (o instanceof Map<?, ?>) {
      Map<Comparable<?>, Object> dict = (Map<Comparable<?>, Object>) o;
      // For dictionaries we iterate through the keys only
      // For determinism, we sort the keys.
      try {
        return Ordering.from(SKYLARK_COMPARATOR).sortedCopy(dict.keySet());
      } catch (ComparisonException e) {
        throw new EvalException(loc, e);
      }
    } else if (o instanceof SkylarkNestedSet) {
      return ((SkylarkNestedSet) o).toCollection();
    } else {
      throw new EvalException(loc,
          "type '" + getDataTypeName(o) + "' is not a collection");
    }
  }

  /**
   * Converts a Skylark value to a Java Iterable; strings iterate over their characters.
   * @param o the object to convert
   * @param loc the location to report in case of error
   * @throws EvalException if o is not an iterable value
   */
  @SuppressWarnings("unchecked")
  public static Iterable<?> toIterable(Object o, Location loc) throws EvalException {
    if (o instanceof String) {
      // This is not as efficient as special casing String in for and dict and list comprehension
      // statements. However this is a more unified way.
      // The regex matches every character in the string until the end of the string,
      // so "abc" will be split into ["a", "b", "c"].
      return ImmutableList.<Object>copyOf(((String) o).split("(?!^)"));
    } else if (o instanceof Iterable) {
      return (Iterable<Object>) o;
    } else if (o instanceof Map<?, ?>) {
      return toCollection(o, loc);
    } else {
      throw new EvalException(loc,
          "type '" + getDataTypeName(o) + "' is not iterable");
    }
  }

  /**
   * @return the size of the Skylark object or -1 in case the object doesn't have a size.
   */
  public static int size(Object arg) {
    if (arg instanceof String) {
      return ((String) arg).length();
    } else if (arg instanceof Map) {
      return ((Map<?, ?>) arg).size();
    } else if (arg instanceof SkylarkList) {
      return ((SkylarkList) arg).size();
    } else if (arg instanceof Iterable) {
      // Iterables.size() checks if arg is a Collection so it's efficient in that sense.
      return Iterables.size((Iterable<?>) arg);
    }
    return -1;
  }

  /** @return true if x is Java null or Skylark None */
  public static boolean isNullOrNone(Object x) {
    return x == null || x == Runtime.NONE;
  }

  /**
   * Build a map of kwarg arguments from a list, removing null-s or None-s.
   *
   * @param init a series of key, value pairs (as consecutive arguments)
   *   as in {@code optionMap(k1, v1, k2, v2, k3, v3, map)}
   *   where each key is a String, each value is an arbitrary Object.
   * @return a {@code Map<String, Object>} that has all the specified entries,
   *   where key, value pairs appearing earlier have precedence,
   *   i.e. {@code k1, v1} may override {@code k3, v3}.
   *
   * Ignore any entry where the value is null or None.
   * Keys cannot be null.
   */
  @SuppressWarnings("unchecked")
  public static ImmutableMap<String, Object> optionMap(Object... init) {
    ImmutableMap.Builder<String, Object> b = new ImmutableMap.Builder<>();
    Preconditions.checkState(init.length % 2 == 0);
    // Iterate backwards so that earlier pairs are added last and take precedence.
    for (int i = init.length - 2; i >= 0; i -= 2) {
      String key = (String) Preconditions.checkNotNull(init[i]);
      Object value = init[i + 1];
      if (!isNullOrNone(value)) {
        b.put(key, value);
      }
    }
    return b.build();
  }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.baremetalsolution.v1alpha1.model;
/**
 * Configuration parameters for a new instance.
 *
 * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
 * transmitted over HTTP when working with the Bare Metal Solution API. For a detailed explanation
 * see:
 * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
 * </p>
 *
 * @author Google, Inc.
 */
@SuppressWarnings("javadoc")
public final class InstanceConfig extends com.google.api.client.json.GenericJson {
  /**
   * Client network address.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private NetworkAddress clientNetwork;
  /**
   * Whether the instance should be provisioned with Hyperthreading enabled.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean hyperthreading;
  /**
   * A transient unique identifier to identify an instance within a ProvisioningConfig request.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String id;
  /**
   * Instance type.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String instanceType;
  /**
   * Location where to deploy the instance.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String location;
  /**
   * OS image to initialize the instance.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String osImage;
  /**
   * Private network address, if any.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private NetworkAddress privateNetwork;
  /**
   * User note field, it can be used by customers to add additional information for the BMS Ops team
   * (b/194021617).
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String userNote;
  /**
   * Client network address.
   * @return value or {@code null} for none
   */
  public NetworkAddress getClientNetwork() {
    return clientNetwork;
  }
  /**
   * Client network address.
   * @param clientNetwork clientNetwork or {@code null} for none
   */
  public InstanceConfig setClientNetwork(NetworkAddress clientNetwork) {
    this.clientNetwork = clientNetwork;
    return this;
  }
  /**
   * Whether the instance should be provisioned with Hyperthreading enabled.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getHyperthreading() {
    return hyperthreading;
  }
  /**
   * Whether the instance should be provisioned with Hyperthreading enabled.
   * @param hyperthreading hyperthreading or {@code null} for none
   */
  public InstanceConfig setHyperthreading(java.lang.Boolean hyperthreading) {
    this.hyperthreading = hyperthreading;
    return this;
  }
  /**
   * A transient unique identifier to identify an instance within a ProvisioningConfig request.
   * @return value or {@code null} for none
   */
  public java.lang.String getId() {
    return id;
  }
  /**
   * A transient unique identifier to identify an instance within a ProvisioningConfig request.
   * @param id id or {@code null} for none
   */
  public InstanceConfig setId(java.lang.String id) {
    this.id = id;
    return this;
  }
  /**
   * Instance type.
   * @return value or {@code null} for none
   */
  public java.lang.String getInstanceType() {
    return instanceType;
  }
  /**
   * Instance type.
   * @param instanceType instanceType or {@code null} for none
   */
  public InstanceConfig setInstanceType(java.lang.String instanceType) {
    this.instanceType = instanceType;
    return this;
  }
  /**
   * Location where to deploy the instance.
   * @return value or {@code null} for none
   */
  public java.lang.String getLocation() {
    return location;
  }
  /**
   * Location where to deploy the instance.
   * @param location location or {@code null} for none
   */
  public InstanceConfig setLocation(java.lang.String location) {
    this.location = location;
    return this;
  }
  /**
   * OS image to initialize the instance.
   * @return value or {@code null} for none
   */
  public java.lang.String getOsImage() {
    return osImage;
  }
  /**
   * OS image to initialize the instance.
   * @param osImage osImage or {@code null} for none
   */
  public InstanceConfig setOsImage(java.lang.String osImage) {
    this.osImage = osImage;
    return this;
  }
  /**
   * Private network address, if any.
   * @return value or {@code null} for none
   */
  public NetworkAddress getPrivateNetwork() {
    return privateNetwork;
  }
  /**
   * Private network address, if any.
   * @param privateNetwork privateNetwork or {@code null} for none
   */
  public InstanceConfig setPrivateNetwork(NetworkAddress privateNetwork) {
    this.privateNetwork = privateNetwork;
    return this;
  }
  /**
   * User note field, it can be used by customers to add additional information for the BMS Ops team
   * (b/194021617).
   * @return value or {@code null} for none
   */
  public java.lang.String getUserNote() {
    return userNote;
  }
  /**
   * User note field, it can be used by customers to add additional information for the BMS Ops team
   * (b/194021617).
   * @param userNote userNote or {@code null} for none
   */
  public InstanceConfig setUserNote(java.lang.String userNote) {
    this.userNote = userNote;
    return this;
  }
  @Override
  public InstanceConfig set(String fieldName, Object value) {
    return (InstanceConfig) super.set(fieldName, value);
  }
  @Override
  public InstanceConfig clone() {
    return (InstanceConfig) super.clone();
  }
}
| |
/*
* Copyright 2014-2015 See AUTHORS file.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.kotcrab.vis.ui.building.utilities;
import com.badlogic.gdx.scenes.scene2d.ui.Cell;
import com.badlogic.gdx.scenes.scene2d.ui.Table;
/**
 * An immutable holder of pad values for each of the four sides of an object. The static utility
 * methods allow quickly applying these values as padding or spacing of a cell or a table.
 * <p>
 * When padding is set for a table, its cells are separated from the table's borders by the given
 * value. When padding is set for a window, additionally to the table's padding effect, the top
 * padding becomes a draggable area, allowing to move the window.
 * <p>
 * When padding is set for a cell, it is separated from other cells and the table's border by the
 * given value. When spacing is set for a cell, it is separated by at least that many pixels from
 * other cells. If spacing or padding with the same values is used on every cell in a table, padding
 * provides twice as big distances between cells, since spacings can overlap.
 * @author MJ
 */
public class Padding {
    /** Common padding sizes. */
    public static final Padding PAD_0 = of(0f), PAD_2 = of(2f), PAD_4 = of(4f), PAD_8 = of(8f);

    private final float top, left, bottom, right;

    /** @param padding will be set as padding for all directions. */
    public Padding (final float padding) {
        this(padding, padding, padding, padding);
    }

    /**
     * @param horizontal will be set as left and right padding.
     * @param vertical will be set as top and bottom padding.
     */
    public Padding (final float horizontal, final float vertical) {
        this(vertical, horizontal, vertical, horizontal);
    }

    /**
     * @param top top padding value.
     * @param left left padding value.
     * @param bottom bottom padding value.
     * @param right right padding value.
     */
    public Padding (final float top, final float left, final float bottom, final float right) {
        this.top = top;
        this.left = left;
        this.bottom = bottom;
        this.right = right;
    }

    /** @param padding will be set as padding for all directions. */
    public static Padding of (final float padding) {
        // Delegates to the single-value constructor.
        return new Padding(padding);
    }

    /**
     * @param horizontal will be set as left and right padding.
     * @param vertical will be set as top and bottom padding.
     */
    public static Padding of (final float horizontal, final float vertical) {
        // Delegates to the (horizontal, vertical) constructor.
        return new Padding(horizontal, vertical);
    }

    /**
     * @param top top padding value.
     * @param left left padding value.
     * @param bottom bottom padding value.
     * @param right right padding value.
     */
    public static Padding of (final float top, final float left, final float bottom, final float right) {
        return new Padding(top, left, bottom, right);
    }

    /** @return top padding value. */
    public float getTop () {
        return top;
    }

    /** @return left padding value. */
    public float getLeft () {
        return left;
    }

    /** @return bottom padding value. */
    public float getBottom () {
        return bottom;
    }

    /** @return right padding value. */
    public float getRight () {
        return right;
    }

    /**
     * @param padding its values will be added to this padding's values.
     * @return new Padding object with summed pad values.
     */
    public Padding add (final Padding padding) {
        return new Padding(top + padding.top, left + padding.left, bottom + padding.bottom, right + padding.right);
    }

    /**
     * @param padding its values will be subtracted from this padding's values.
     * @return new Padding object with subtracted pad values.
     */
    public Padding subtract (final Padding padding) {
        return new Padding(top - padding.top, left - padding.left, bottom - padding.bottom, right - padding.right);
    }

    /** @return new Padding object with negated pad values. */
    public Padding reverse () {
        return new Padding(-top, -left, -bottom, -right);
    }

    /**
     * Allows to set a Table's padding with a Padding object, which has to be done externally, as it
     * is not part of the standard libGDX API.
     * @param table will have its padding set according to this object's data.
     * @return the given table for chaining.
     */
    public Table applyPadding (final Table table) {
        table.pad(top, left, bottom, right);
        return table;
    }

    /**
     * Allows to set a Cell's padding with a Padding object, which has to be done externally, as it
     * is not part of the standard libGDX API.
     * @param cell will have its padding set according to this object's data.
     * @return the given cell for chaining.
     */
    public Cell<?> applyPadding (final Cell<?> cell) {
        cell.pad(top, left, bottom, right);
        return cell;
    }

    /**
     * Allows to set a Cell's spacing with a Padding object, which has to be done externally, as it
     * is not part of the standard libGDX API. Padding holds 4 floats for each direction, so it is
     * compatible with both padding and spacing settings without any additional changes.
     * @param cell will have its spacing set according to this object's data.
     * @return the given cell for chaining.
     */
    public Cell<?> applySpacing (final Cell<?> cell) {
        cell.space(top, left, bottom, right);
        return cell;
    }

    // Padding utilities:

    /**
     * Allows to set a Table's padding with a Padding object, which has to be done externally, as it
     * is not part of the standard libGDX API.
     * @param padding contains data of padding sizes.
     * @param table will have its padding set according to the given data.
     * @return the given table for chaining.
     */
    public static Table setPadding (final Padding padding, final Table table) {
        // Same effect as the instance method - reuse it.
        return padding.applyPadding(table);
    }

    /**
     * Allows to set a Cell's padding with a Padding object, which has to be done externally, as it
     * is not part of the standard libGDX API.
     * @param padding contains data of padding sizes.
     * @param cell will have its padding set according to the given data.
     * @return the given cell for chaining.
     */
    public static Cell<?> setPadding (final Padding padding, final Cell<?> cell) {
        // Cell#pad returns the cell itself, so delegating preserves the return value.
        return padding.applyPadding(cell);
    }

    /**
     * Allows to set a Cell's spacing with a Padding object, which has to be done externally, as it
     * is not part of the standard libGDX API. Padding holds 4 floats for each direction, so it is
     * compatible with both padding and spacing settings without any additional changes.
     * @param spacing contains data of spacing sizes.
     * @param cell will have its spacing set according to the given data.
     * @return the given cell for chaining.
     */
    public static Cell<?> setSpacing (final Padding spacing, final Cell<?> cell) {
        // Cell#space returns the cell itself, so delegating preserves the return value.
        return spacing.applySpacing(cell);
    }
}
| |
// Copyright 2015 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.profiler.statistics;
import com.google.common.base.Preconditions;
import com.google.common.collect.Iterators;
import com.google.devtools.build.lib.profiler.ProfilePhase;
import com.google.devtools.build.lib.profiler.ProfilerTask;
import com.google.devtools.build.lib.profiler.analysis.ProfileInfo;
import com.google.devtools.build.lib.profiler.analysis.ProfileInfo.AggregateAttr;
import com.google.devtools.build.lib.profiler.analysis.ProfileInfo.Task;
import java.util.EnumMap;
import java.util.Iterator;
import java.util.List;
import javax.annotation.Nullable;
/**
* Extracts and keeps statistics for one {@link ProfilePhase} for formatting to various outputs.
*/
public final class PhaseStatistics implements Iterable<ProfilerTask> {
private final ProfilePhase phase;
private long phaseDurationNanos;
private long totalDurationNanos;
private final EnumMap<ProfilerTask, Long> taskDurations;
private final EnumMap<ProfilerTask, Long> taskCounts;
private final PhaseVfsStatistics vfsStatistics;
private boolean wasExecuted;
public PhaseStatistics(ProfilePhase phase, boolean generateVfsStatistics) {
this.phase = phase;
this.taskDurations = new EnumMap<>(ProfilerTask.class);
this.taskCounts = new EnumMap<>(ProfilerTask.class);
if (generateVfsStatistics) {
vfsStatistics = new PhaseVfsStatistics(phase);
} else {
vfsStatistics = null;
}
}
  /**
   * Creates statistics for the given phase, pre-populated with the data from {@code info}.
   */
  public PhaseStatistics(ProfilePhase phase, ProfileInfo info, String workSpaceName, boolean vfs) {
    this(phase, vfs);
    addProfileInfo(workSpaceName, info);
  }
/**
* Add statistics from {@link ProfileInfo} to the ones already accumulated for this phase.
*/
public void addProfileInfo(String workSpaceName, ProfileInfo info) {
Task phaseTask = info.getPhaseTask(phase);
if (phaseTask != null) {
if (vfsStatistics != null) {
vfsStatistics.addProfileInfo(workSpaceName, info);
}
wasExecuted = true;
long infoPhaseDuration = info.getPhaseDuration(phaseTask);
phaseDurationNanos += infoPhaseDuration;
List<Task> taskList = info.getTasksForPhase(phaseTask);
long duration = infoPhaseDuration;
for (Task task : taskList) {
// Tasks on the phaseTask thread already accounted for in the phaseDuration.
if (task.threadId != phaseTask.threadId) {
duration += task.durationNanos;
}
}
totalDurationNanos += duration;
for (ProfilerTask type : ProfilerTask.values()) {
AggregateAttr attr = info.getStatsForType(type, taskList);
long totalTime = Math.max(0, attr.totalTime);
long count = Math.max(0, attr.count);
add(taskCounts, type, count);
add(taskDurations, type, totalTime);
}
}
}
/** Add statistics accumulated in another PhaseStatistics object to this one. */
public void add(PhaseStatistics other) {
Preconditions.checkArgument(
phase == other.phase, "Should not combine statistics from different phases");
if (other.wasExecuted) {
if (vfsStatistics != null && other.vfsStatistics != null) {
vfsStatistics.add(other.vfsStatistics);
}
wasExecuted = true;
phaseDurationNanos += other.phaseDurationNanos;
totalDurationNanos += other.totalDurationNanos;
for (ProfilerTask type : other) {
long otherCount = other.getCount(type);
long otherDuration = other.getTotalDurationNanos(type);
add(taskCounts, type, otherCount);
add(taskDurations, type, otherDuration);
}
}
}
/**
 * Helper method to sum up long values within an {@link EnumMap}.
 *
 * <p>A missing key is treated as an existing total of zero.
 */
private static <T extends Enum<T>> void add(EnumMap<T, Long> map, T key, long value) {
    // Map.merge collapses the original containsKey/get/put sequence into a single lookup.
    map.merge(key, value, Long::sum);
}
/** Returns the {@link ProfilePhase} these statistics were collected for. */
public ProfilePhase getProfilePhase() {
    return phase;
}
/** Returns the VFS statistics for this phase, or null if VFS collection was not requested. */
@Nullable
public PhaseVfsStatistics getVfsStatistics() {
    return vfsStatistics;
}
/**
 * @return true if no {@link ProfilerTask}s have been recorded for this phase, false otherwise
 */
public boolean isEmpty() {
    // Note: independent of wasExecuted — only reflects whether any task counts were added.
    return taskCounts.isEmpty();
}
/** @return true if the phase was executed at all, false otherwise */
public boolean wasExecuted() {
    return wasExecuted;
}
/** @return true if a task of the given {@link ProfilerTask} type was executed in this phase */
public boolean wasExecuted(ProfilerTask taskType) {
    // getCount already maps an absent entry to 0, so a single comparison suffices.
    return getCount(taskType) != 0;
}
/** Returns the accumulated duration of this phase in nanoseconds, summed over added profiles. */
public long getPhaseDurationNanos() {
    return phaseDurationNanos;
}
/** @return the sum of all task durations of the given type, or 0 if none were recorded */
public long getTotalDurationNanos(ProfilerTask taskType) {
    return taskDurations.getOrDefault(taskType, 0L);
}
/**
 * @return the mean duration in nanoseconds of tasks of the given type, or 0 if none ran
 */
public double getMeanDuration(ProfilerTask taskType) {
    if (!wasExecuted(taskType)) {
        return 0;
    }
    // wasExecuted guarantees a non-zero count, so the division is safe.
    return (double) getTotalDurationNanos(taskType) / getCount(taskType);
}
/**
 * @return the duration of all {@link ProfilerTask} executed in the phase relative to the total
 *     duration of all tasks in the phase ({@code totalDurationNanos})
 */
public double getTotalRelativeDuration(ProfilerTask taskType) {
    Long duration = taskDurations.get(taskType);
    if (duration == null || duration == 0) {
        return 0;
    }
    // sanity check for broken profile files
    Preconditions.checkState(
        totalDurationNanos != 0,
        "Profiler tasks of type %s have non-zero duration %s in phase %s but the phase itself has"
            + " zero duration. Most likely the profile file is broken.",
        taskType,
        duration,
        phase);
    return (double) duration / totalDurationNanos;
}
/**
 * @return how many tasks of the given type were executed in this phase (0 if none)
 */
public long getCount(ProfilerTask taskType) {
    return taskCounts.getOrDefault(taskType, 0L);
}
/**
 * Iterator over all {@link ProfilerTask}s that were executed at least once and have a total
 * duration greater than 0.
 */
@Override
public Iterator<ProfilerTask> iterator() {
    // Lazily filters the key set; zero-duration and never-executed types are skipped.
    return Iterators.filter(
        taskCounts.keySet().iterator(),
        taskType -> getTotalDurationNanos(taskType) > 0 && wasExecuted(taskType));
}
}
| |
package org.cutyou.ircbot.plugins;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.utils.URIBuilder;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.cutyou.ircbot.IrcBot;
import org.cutyou.ircbot.plugins.geoIP.payloads.Ipresult;
import org.cutyou.ircbot.utils.CommandParser;
import org.cutyou.ircbot.utils.payloads.Command;
import org.cutyou.ircbot.utils.payloads.Plugin;
import org.pircbotx.hooks.ListenerAdapter;
import org.pircbotx.hooks.events.MessageEvent;
import org.pircbotx.hooks.events.PrivateMessageEvent;
import java.net.URISyntaxException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * IRC bot plugin that answers the {@code .geo} channel command by querying a configured
 * GeoIP HTTP API and replying with the location details of the requested host.
 *
 * <p>The API endpoint comes from the plugin configuration ({@code config.xml}) and is used as a
 * {@link String#format} template, so it is expected to contain a single {@code %s} placeholder
 * for the host (see {@link #lookUp}).
 */
public class GeoIPHelper extends ListenerAdapter {
    private static final String PLUGIN_NAME = "GeoIPHelper";
    // Shared Jackson mapper used to deserialize API responses.
    private static ObjectMapper mapper = new ObjectMapper();
    static {
        // NOTE(review): writerWithDefaultPrettyPrinter() returns a new ObjectWriter and does not
        // modify the mapper, so this statement has no effect — confirm whether it can be removed.
        mapper.writerWithDefaultPrettyPrinter();
        mapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false);
        mapper.setSerializationInclusion(Include.NON_EMPTY);
    }
    private final Logger logger = LogManager.getLogger(GeoIPHelper.class);
    // Plugin configuration; falls back to an empty Plugin when this plugin is not configured.
    private Plugin pi = IrcBot.getConfig().getPlugins().getPlugin(PLUGIN_NAME).orElse(new Plugin());
    private CloseableHttpClient httpClient = HttpClients.createDefault();
    private JsonFactory jFactory = new JsonFactory();
    // URL template for the GeoIP API; empty when unconfigured (checked in performAction).
    private String requestUrl = pi.getApi_Url();
    /**
     * Sends a multi-line response to a channel or private message.
     *
     * <p>NOTE(review): the exact-class comparison means subclasses of the two event types are
     * ignored — presumably intentional; verify whether pircbotx ever delivers subclasses.
     *
     * @param event MessageEvent to respond to.
     * @param response List<String> (multi-line) response to send.
     */
    @SuppressWarnings({"unused"})
    public static void sendResponse(Object event, List<String> response) {
        if (event.getClass() == PrivateMessageEvent.class) {
            PrivateMessageEvent evt = (PrivateMessageEvent) event;
            for (String line : response) {
                evt.respondPrivateMessage(line);
            }
        }
        if (event.getClass() == MessageEvent.class) {
            MessageEvent evt = (MessageEvent) event;
            for (String line : response) {
                evt.respondChannel(line);
            }
        }
    }
    /**
     * Sends a single line response to a channel or private message.
     *
     * @param event MessageEvent to respond to.
     * @param response String response to send.
     */
    private static void sendResponse(Object event, String response) {
        if (event.getClass() == PrivateMessageEvent.class) {
            PrivateMessageEvent evt = (PrivateMessageEvent) event;
            evt.respondPrivateMessage(response);
        }
        if (event.getClass() == MessageEvent.class) {
            MessageEvent evt = (MessageEvent) event;
            evt.respondChannel(response);
        }
    }
    /**
     * Parses every channel message and dispatches it when it matches one of this plugin's commands.
     *
     * @param event onMessage event to parse and see if we need to respond
     * @throws Exception Something went all heckin fucky
     */
    @Override
    public void onMessage(MessageEvent event) throws Exception {
        CommandParser cp = new CommandParser();
        Command com = cp.parseCommand(event);
        for (PublicCommands cmd : PublicCommands.values()) {
            // NONE is the command template; never dispatch on its empty trigger.
            if (com.getCommand().equals(cmd.getCommand())
                && !cmd.getCommand().equals(PublicCommands.NONE.getCommand())) {
                performAction(event, cmd, com);
            }
        }
    }
    /** Executes a recognized command, or reports a missing API configuration to the requester. */
    @SuppressWarnings("incomplete-switch")
    private void performAction(MessageEvent event, PublicCommands command, Command parsedCommand)
        throws Exception {
        if (!requestUrl.equalsIgnoreCase("")) {
            switch (command) {
                case GEO_IP:
                    String request = parsedCommand.getText();
                    if (!request.equals("")) {
                        sendResponse(event, lookUp(request));
                    } else {
                        // No argument supplied: answer with the usage text instead.
                        sendResponse(event, command.getCommandHelp());
                    }
                    break;
            }
        } else {
            sendResponse(event, PLUGIN_NAME + " API URL Not configured in config.xml.");
        }
    }
    // ---- Responses ---- //
    /**
     * Queries the GeoIP API for {@code host} and formats every non-empty field of the
     * deserialized {@link Ipresult} into a single response line.
     *
     * @param host IP address or hostname, substituted into the configured URL template.
     * @return formatted location summary, or "Invalid host" / "Error in lookup" on failure.
     * @throws URISyntaxException if the configured URL template yields an invalid URI.
     */
    private String lookUp(String host) throws URISyntaxException {
        String resp = "";
        URIBuilder builder = new URIBuilder(String.format(requestUrl, host));
        HttpGet httpget = new HttpGet(builder.build());
        try (CloseableHttpResponse response = httpClient.execute(httpget)) {
            JsonParser jParser = jFactory.createParser(EntityUtils.toString(response.getEntity()));
            try {
                Ipresult map = mapper.readValue(jParser, Ipresult.class);
                resp += " GeoIP lookup for " + host;
                if (!map.getIp().equals("")) {
                    resp += " IP Address: " + map.getIp();
                }
                if (!map.getCountryCode().equals("")) {
                    resp += " - Country Code: " + map.getCountryCode();
                }
                if (!map.getCountryName().equals("")) {
                    resp += " - Country: " + map.getCountryName();
                }
                if (!map.getRegionCode().equals("")) {
                    resp += " - Region Code: " + map.getRegionCode();
                }
                if (!map.getRegionName().equals("")) {
                    resp += " - Region: " + map.getRegionName();
                }
                if (!map.getCity().equals("")) {
                    resp += " - City: " + map.getCity();
                }
                if (!map.getZipCode().equals("")) {
                    resp += " - Zip Code: " + map.getZipCode();
                }
                if (!map.getTimeZone().equals("")) {
                    resp += " - Time Zone: " + map.getTimeZone();
                }
                if (!(map.getLatitude() == 0)) {
                    resp += " - Latitude: " + map.getLatitude();
                }
                if (!(map.getLongitude() == 0)) {
                    resp += " - Longitude: " + map.getLongitude();
                }
                if (!(map.getMetroCode() == 0)) {
                    resp += " - Metro Code: " + map.getMetroCode();
                }
            } catch (Exception e) {
                // Deserialization failed: the API did not return a parsable Ipresult.
                logger.debug(this.getClass().getName() + ": " + e.getLocalizedMessage());
                resp = "Invalid host";
            }
        } catch (Exception e) {
            // Transport-level failure (connection, timeout, entity handling).
            logger.debug(this.getClass().getName() + ": " + e.getLocalizedMessage());
            resp = "Error in lookup";
        }
        return resp.trim();
    }
    /** @return map of trigger text to help text for every public command of this plugin. */
    @SuppressWarnings({"unused"})
    public Map<String, String> getPublicCommands() {
        Map<String, String> resp = new HashMap<>();
        for (PublicCommands cmd : PublicCommands.values()) {
            resp.put(cmd.getCommand(), cmd.getCommandHelp());
        }
        return resp;
    }
    /** Commands this plugin reacts to, pairing the trigger text with its help message. */
    public enum PublicCommands {
        NONE( // Template for adding a command
            "", // Trigger Command, " .help "
            "" // Help message for this command
        ),
        GEO_IP(
            ".geo",
            ".geo <IP Address> or <Hostname> to get the geo-location information for the provided host."
        );
        private String command;
        private String help;
        PublicCommands(String command, String help) {
            this.command = command;
            this.help = help;
        }
        /** Resolves a trigger string to its command, falling back to {@link #NONE}. */
        @SuppressWarnings({"unused"})
        public static PublicCommands getEnum(String command) {
            for (PublicCommands ws : values()) {
                if (StringUtils.equalsIgnoreCase(command, ws.getCommand())) {
                    return ws;
                }
            }
            return NONE;
        }
        public String getCommand() {
            return command;
        }
        public String getCommandHelp() {
            return help;
        }
    }
}
| |
package math.functions;
import java.awt.Window;
import java.awt.event.ActionEvent;
import org.trianacode.taskgraph.Unit;
import triana.types.Const;
import triana.types.EmptyingType;
import triana.types.GraphType;
import triana.types.util.FlatArray;
/**
* An ArcSinh unit to compute the inverse hyperbolic sine of the elements of an input data object. The input data can be
* either GraphType or Const. The function is applied to each element of each arithmetic dependent-data array of an
* input GraphType, regardless of dimensionality.
* <p/>
* The input data arrays can be real or complex. If the data are real, then the returned data set will be real. If the
* data are complex, then the output data will be complex.
* <p/>
*
* @author Bernard Schutz
* @version 2.1 13 January 2001
*/
public class ArcSinh extends Unit {
    // Pi/2, used for the imaginary part on the branch cuts |imag| >= 1 of purely imaginary input.
    double Pi2 = Math.PI / 2.0;
    /**
     * This returns a <b>brief!</b> description of what the unit does. The text here is shown in a pop up window when
     * the user puts the mouse over the unit icon for more than a second.
     */
    public String getPopUpDescription() {
        return "Computes the inverse hyperbolic sine of the elements of the input data set";
    }
    /**
     * Applies the inverse hyperbolic sine element-wise to the input, in place, and outputs it.
     *
     * <p>Real data use asinh(x) = log(x + sqrt(x*x + 1)); complex data are handled per element
     * with explicit treatment of the purely-imaginary case and its branch cuts.
     */
    public void process() {
        Object input, output;
        output = null;
        input = getInputAtNode(0);
        if (input instanceof EmptyingType) {
            return;
        }
        // The input object is modified in place and re-emitted as the output.
        output = input;
        // NOTE(review): outputClass is unused now that setOutputType is commented out.
        Class outputClass = output.getClass();
        //setOutputType(outputClass);
        if (input instanceof GraphType) {
            FlatArray tempR, tempI;
            int dv, j;
            double p, q, ppq, tmp1, tmp2;
            double[] inputdataR, inputdataI;
            // Process every arithmetic dependent-data array, regardless of dimensionality.
            for (dv = 0; dv < ((GraphType) input).getDependentVariables(); dv++) {
                if (((GraphType) input).isArithmeticArray(dv)) {
                    tempR = new FlatArray(((GraphType) input).getDataArrayReal(dv));
                    inputdataR = (double[]) tempR.getFlatArray();
                    if (((GraphType) input).isDependentComplex(dv)) {
                        tempI = new FlatArray(((GraphType) input).getDataArrayImag(dv));
                        inputdataI = (double[]) tempI.getFlatArray();
                        for (j = 0; j < inputdataI.length; j++) {
                            if (inputdataR[j] != 0.0) {
                                // General complex case: p and q are the distances to (0, 1) and
                                // (0, -1); their mean determines both result components.
                                tmp1 = 1.0 + inputdataI[j];
                                tmp2 = inputdataR[j] * inputdataR[j];
                                p = Math.sqrt(tmp1 * tmp1 + tmp2);
                                tmp1 -= 2.0;
                                q = Math.sqrt(tmp1 * tmp1 + tmp2);
                                ppq = (p + q) * 0.5;
                                inputdataI[j] = Math.asin(inputdataI[j] / ppq);
                                tmp1 = Math.log(ppq + Math.sqrt(ppq * ppq - 1.0));
                                // The real part keeps the sign of the input's real part.
                                inputdataR[j] = (inputdataR[j] < 0.0) ? -tmp1 : tmp1;
                            } else {
                                // Purely imaginary input: handle the branch cuts at |imag| >= 1.
                                if (inputdataI[j] >= 1.0) {
                                    inputdataR[j] = Math
                                        .log(inputdataI[j] + Math.sqrt(inputdataI[j] * inputdataI[j] - 1.0));
                                    inputdataI[j] = Pi2;
                                } else {
                                    if (inputdataI[j] > -1.0) {
                                        inputdataR[j] = 0.0;
                                        inputdataI[j] = Math.asin(inputdataI[j]);
                                    } else {
                                        inputdataR[j] = Math
                                            .log(-inputdataI[j] + Math.sqrt(inputdataI[j] * inputdataI[j] - 1.0));
                                        inputdataI[j] = -Pi2;
                                    }
                                }
                            }
                        }
                        ((GraphType) output).setDataArrayReal(tempR.restoreArray(false), dv);
                        ((GraphType) output).setDataArrayImag(tempI.restoreArray(false), dv);
                    } else {
                        // Real data: asinh(x) = log(x + sqrt(x*x + 1)).
                        for (j = 0; j < inputdataR.length; j++) {
                            inputdataR[j] = Math.log(inputdataR[j] + Math.sqrt(inputdataR[j] * inputdataR[j] + 1.0));
                        }
                        ((GraphType) output).setDataArrayReal(tempR.restoreArray(false), dv);
                    }
                }
            }
        } else if (input instanceof Const) {
            // Same computation as above, applied to a single (possibly complex) constant.
            double r, i, p, q, ppq, tmp1, tmp2;
            r = ((Const) input).getReal();
            if (((Const) input).isComplex()) {
                i = ((Const) input).getImag();
                if (r != 0.0) {
                    tmp1 = 1.0 + i;
                    tmp2 = r * r;
                    p = Math.sqrt(tmp1 * tmp1 + tmp2);
                    tmp1 -= 2.0;
                    q = Math.sqrt(tmp1 * tmp1 + tmp2);
                    ppq = (p + q) * 0.5;
                    i = Math.asin(i / ppq);
                    tmp1 = Math.log(ppq + Math.sqrt(ppq * ppq - 1.0));
                    r = (r < 0.0) ? -tmp1 : tmp1;
                } else {
                    if (i >= 1.0) {
                        r = Math.log(i + Math.sqrt(i * i - 1.0));
                        i = Pi2;
                    } else {
                        if (i > -1.0) {
                            r = 0.0;
                            i = Math.asin(i);
                        } else {
                            r = Math.log(-i + Math.sqrt(i * i - 1.0));
                            i = -Pi2;
                        }
                    }
                }
                ((Const) output).setImag(i);
            } else {
                r = Math.log(r + Math.sqrt(r * r + 1.0));
            }
            ((Const) output).setReal(r);
        }
        output(output);
    }
    /**
     * Initialises information specific to ArcSinh.
     */
    public void init() {
        super.init();
        setDefaultInputNodes(1);
        setMinimumInputNodes(1);
        setMaximumInputNodes(Integer.MAX_VALUE);
        setDefaultOutputNodes(1);
        setMinimumOutputNodes(1);
        setMaximumOutputNodes(Integer.MAX_VALUE);
        // setResizableInputs(false);
        // setResizableOutputs(true);
        // // This is to ensure that we receive arrays containing double-precision numbers
        // setRequireDoubleInputs(true);
        // setCanProcessDoubleArrays(true);
    }
    /**
     * Resets ArcSinh
     */
    public void reset() {
        super.reset();
    }
    /**
     * Saves ArcSinh's parameters to the parameter file. (ArcSinh has no parameters to save.)
     */
    public void saveParameters() {
    }
    /**
     * Loads ArcSinh's parameters from the parameter file. (ArcSinh has no parameters to load.)
     */
    public void setParameter(String name, String value) {
    }
    /**
     * @return a string containing the names of the types allowed to be input to ArcSinh, each separated by a white
     *         space.
     */
    public String[] getInputTypes() {
        return new String[]{"triana.types.GraphType", "triana.types.Const"};
    }
    /** @return the names of the types this unit can output. */
    public String[] getOutputTypes() {
        return new String[]{"triana.types.GraphType", "triana.types.Const"};
    }
    /**
     * @return the location of the help file for this unit.
     */
    public String getHelpFile() {
        return "ArcSinh.html";
    }
    /**
     * @return ArcSinh's parameter window so that Triana can move and display it (none for this unit).
     */
    public Window getParameterWindow() {
        return null;
    }
    /**
     * Captures the events thrown out by ArcSinh.
     */
    // public void actionPerformed(ActionEvent e) {
    //     super.actionPerformed(e); // we need this
    //
    // }
}
| |
package id.bizdir.model;
import java.util.List;
/**
 * Container for all entity lists returned by a synchronisation response.
 *
 * <p>NOTE(review): accessor names intentionally use snake_case (e.g. {@code getAnggota_category})
 * — presumably to match the field names of the serialized payload; verify against the
 * (de)serialization configuration before renaming anything here.
 *
 * <p>Created by Hendry on 19/09/2015.
 */
public class Result {
    private List<TableSynch> tableSynch;
    private List<Anggota> anggota;
    private List<AnggotaCategory> anggota_category;
    private List<AnggotaGallery> anggota_gallery;
    private List<AnggotaSubCategoryAssignment> anggota_sub_category_assignment;
    private List<AnggotaSubCategory> anggota_sub_category;
    private List<City> city;
    private List<Common> common;
    private List<DownloadRoot> download_root;
    private List<DownloadSub> download_sub;
    private List<EventCategory> event_category;
    private List<Event> event;
    private List<ForumCategory> forum_category;
    private List<ForumPost> forum_post;
    private List<ForumThread> forum_thread;
    private List<NewsBusinessCategory> news_business_category;
    private List<NewsBusiness> news_business;
    private List<NewsKadin> news_kadin;
    private List<OpportunityCategory> opportunity_category;
    private List<Opportunity> opportunity;
    private List<Promotion> promotion;
    private List<Province> province;
    private List<WalkThrough> walkthrough;
    public List<TableSynch> getTableSynch() {
        return tableSynch;
    }
    public void setTableSynch(List<TableSynch> tableSynch) {
        this.tableSynch = tableSynch;
    }
    public List<Anggota> getAnggota() {
        return anggota;
    }
    public void setAnggota(List<Anggota> anggota) {
        this.anggota = anggota;
    }
    public List<AnggotaCategory> getAnggota_category() {
        return anggota_category;
    }
    public void setAnggota_category(List<AnggotaCategory> anggota_category) {
        this.anggota_category = anggota_category;
    }
    public List<AnggotaGallery> getAnggota_gallery() {
        return anggota_gallery;
    }
    public void setAnggota_gallery(List<AnggotaGallery> anggota_gallery) {
        this.anggota_gallery = anggota_gallery;
    }
    public List<AnggotaSubCategoryAssignment> getAnggota_sub_category_assignment() {
        return anggota_sub_category_assignment;
    }
    public void setAnggota_sub_category_assignment(List<AnggotaSubCategoryAssignment> anggota_sub_category_assignment) {
        this.anggota_sub_category_assignment = anggota_sub_category_assignment;
    }
    public List<AnggotaSubCategory> getAnggota_sub_category() {
        return anggota_sub_category;
    }
    public void setAnggota_sub_category(List<AnggotaSubCategory> anggota_sub_category) {
        this.anggota_sub_category = anggota_sub_category;
    }
    public List<City> getCity() {
        return city;
    }
    public void setCity(List<City> city) {
        this.city = city;
    }
    public List<Common> getCommon() {
        return common;
    }
    public void setCommon(List<Common> common) {
        this.common = common;
    }
    public List<DownloadRoot> getDownload_root() {
        return download_root;
    }
    public void setDownload_root(List<DownloadRoot> download_root) {
        this.download_root = download_root;
    }
    public List<DownloadSub> getDownload_sub() {
        return download_sub;
    }
    public void setDownload_sub(List<DownloadSub> download_sub) {
        this.download_sub = download_sub;
    }
    public List<EventCategory> getEvent_category() {
        return event_category;
    }
    public void setEvent_category(List<EventCategory> event_category) {
        this.event_category = event_category;
    }
    public List<Event> getEvent() {
        return event;
    }
    public void setEvent(List<Event> event) {
        this.event = event;
    }
    public List<ForumCategory> getForum_category() {
        return forum_category;
    }
    public void setForum_category(List<ForumCategory> forum_category) {
        this.forum_category = forum_category;
    }
    public List<ForumPost> getForum_post() {
        return forum_post;
    }
    public void setForum_post(List<ForumPost> forum_post) {
        this.forum_post = forum_post;
    }
    public List<ForumThread> getForum_thread() {
        return forum_thread;
    }
    public void setForum_thread(List<ForumThread> forum_thread) {
        this.forum_thread = forum_thread;
    }
    public List<NewsBusinessCategory> getNews_business_category() {
        return news_business_category;
    }
    public void setNews_business_category(List<NewsBusinessCategory> news_business_category) {
        this.news_business_category = news_business_category;
    }
    public List<NewsBusiness> getNews_business() {
        return news_business;
    }
    public void setNews_business(List<NewsBusiness> news_business) {
        this.news_business = news_business;
    }
    public List<NewsKadin> getNews_kadin() {
        return news_kadin;
    }
    public void setNews_kadin(List<NewsKadin> news_kadin) {
        this.news_kadin = news_kadin;
    }
    public List<OpportunityCategory> getOpportunity_category() {
        return opportunity_category;
    }
    public void setOpportunity_category(List<OpportunityCategory> opportunity_category) {
        this.opportunity_category = opportunity_category;
    }
    public List<Opportunity> getOpportunity() {
        return opportunity;
    }
    public void setOpportunity(List<Opportunity> opportunity) {
        this.opportunity = opportunity;
    }
    public List<Promotion> getPromotion() {
        return promotion;
    }
    public void setPromotion(List<Promotion> promotion) {
        this.promotion = promotion;
    }
    public List<Province> getProvince() {
        return province;
    }
    public void setProvince(List<Province> province) {
        this.province = province;
    }
    public List<WalkThrough> getWalkthrough() {
        return walkthrough;
    }
    public void setWalkthrough(List<WalkThrough> walkthrough) {
        this.walkthrough = walkthrough;
    }
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.network.v2020_03_01.implementation;
import retrofit2.Retrofit;
import com.google.common.reflect.TypeToken;
import com.microsoft.azure.AzureServiceFuture;
import com.microsoft.azure.CloudException;
import com.microsoft.azure.ListOperationCallback;
import com.microsoft.azure.Page;
import com.microsoft.azure.PagedList;
import com.microsoft.rest.ServiceCallback;
import com.microsoft.rest.ServiceFuture;
import com.microsoft.rest.ServiceResponse;
import java.io.IOException;
import java.util.List;
import okhttp3.ResponseBody;
import retrofit2.http.GET;
import retrofit2.http.Header;
import retrofit2.http.Headers;
import retrofit2.http.Path;
import retrofit2.http.Query;
import retrofit2.http.Url;
import retrofit2.Response;
import rx.functions.Func1;
import rx.Observable;
/**
* An instance of this class provides access to all the operations defined
* in LoadBalancerBackendAddressPools.
*/
public class LoadBalancerBackendAddressPoolsInner {
// NOTE(review): this file is generated by AutoRest (see file header); manual edits will be
// overwritten on regeneration.
/** The Retrofit service to perform REST calls. */
private LoadBalancerBackendAddressPoolsService service;
/** The service client containing this operation class. */
private NetworkManagementClientImpl client;
/**
 * Initializes an instance of LoadBalancerBackendAddressPoolsInner.
 *
 * @param retrofit the Retrofit instance built from a Retrofit Builder.
 * @param client the instance of the service client containing this operation class.
 */
public LoadBalancerBackendAddressPoolsInner(Retrofit retrofit, NetworkManagementClientImpl client) {
    // Build the Retrofit proxy once; the client supplies credentials, serialization and base URL.
    this.service = retrofit.create(LoadBalancerBackendAddressPoolsService.class);
    this.client = client;
}
/**
 * The interface defining all the services for LoadBalancerBackendAddressPools to be
 * used by Retrofit to actually perform REST calls.
 */
interface LoadBalancerBackendAddressPoolsService {
    @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.v2020_03_01.LoadBalancerBackendAddressPools list" })
    @GET("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/backendAddressPools")
    Observable<Response<ResponseBody>> list(@Path("resourceGroupName") String resourceGroupName, @Path("loadBalancerName") String loadBalancerName, @Path("subscriptionId") String subscriptionId, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
    @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.v2020_03_01.LoadBalancerBackendAddressPools get" })
    @GET("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/backendAddressPools/{backendAddressPoolName}")
    Observable<Response<ResponseBody>> get(@Path("resourceGroupName") String resourceGroupName, @Path("loadBalancerName") String loadBalancerName, @Path("backendAddressPoolName") String backendAddressPoolName, @Path("subscriptionId") String subscriptionId, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
    @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.v2020_03_01.LoadBalancerBackendAddressPools listNext" })
    @GET
    Observable<Response<ResponseBody>> listNext(@Url String nextUrl, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
}
/**
 * Gets all the load balancer backend address pools.
 *
 * @param resourceGroupName The name of the resource group.
 * @param loadBalancerName The name of the load balancer.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws CloudException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 * @return the PagedList&lt;BackendAddressPoolInner&gt; object if successful.
 */
public PagedList<BackendAddressPoolInner> list(final String resourceGroupName, final String loadBalancerName) {
    // Blocks for the first page; subsequent pages are fetched lazily as the PagedList is iterated.
    ServiceResponse<Page<BackendAddressPoolInner>> response = listSinglePageAsync(resourceGroupName, loadBalancerName).toBlocking().single();
    return new PagedList<BackendAddressPoolInner>(response.body()) {
        @Override
        public Page<BackendAddressPoolInner> nextPage(String nextPageLink) {
            return listNextSinglePageAsync(nextPageLink).toBlocking().single().body();
        }
    };
}
/**
 * Gets all the load balancer backend address pools.
 *
 * @param resourceGroupName The name of the resource group.
 * @param loadBalancerName The name of the load balancer.
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<List<BackendAddressPoolInner>> listAsync(final String resourceGroupName, final String loadBalancerName, final ListOperationCallback<BackendAddressPoolInner> serviceCallback) {
    // The Func1 supplies the "next page" fetcher used while draining all pages into the callback.
    return AzureServiceFuture.fromPageResponse(
        listSinglePageAsync(resourceGroupName, loadBalancerName),
        new Func1<String, Observable<ServiceResponse<Page<BackendAddressPoolInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<BackendAddressPoolInner>>> call(String nextPageLink) {
                return listNextSinglePageAsync(nextPageLink);
            }
        },
        serviceCallback);
}
/**
 * Gets all the load balancer backend address pools.
 *
 * @param resourceGroupName The name of the resource group.
 * @param loadBalancerName The name of the load balancer.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the PagedList&lt;BackendAddressPoolInner&gt; object
 */
public Observable<Page<BackendAddressPoolInner>> listAsync(final String resourceGroupName, final String loadBalancerName) {
    // Unwrap each ServiceResponse to expose only the page payload.
    return listWithServiceResponseAsync(resourceGroupName, loadBalancerName)
        .map(new Func1<ServiceResponse<Page<BackendAddressPoolInner>>, Page<BackendAddressPoolInner>>() {
            @Override
            public Page<BackendAddressPoolInner> call(ServiceResponse<Page<BackendAddressPoolInner>> response) {
                return response.body();
            }
        });
}
/**
 * Gets all the load balancer backend address pools.
 *
 * @param resourceGroupName The name of the resource group.
 * @param loadBalancerName The name of the load balancer.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the PagedList&lt;BackendAddressPoolInner&gt; object
 */
public Observable<ServiceResponse<Page<BackendAddressPoolInner>>> listWithServiceResponseAsync(final String resourceGroupName, final String loadBalancerName) {
    // Recursively chains page requests until a page has no nextPageLink.
    return listSinglePageAsync(resourceGroupName, loadBalancerName)
        .concatMap(new Func1<ServiceResponse<Page<BackendAddressPoolInner>>, Observable<ServiceResponse<Page<BackendAddressPoolInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<BackendAddressPoolInner>>> call(ServiceResponse<Page<BackendAddressPoolInner>> page) {
                String nextPageLink = page.body().nextPageLink();
                if (nextPageLink == null) {
                    return Observable.just(page);
                }
                return Observable.just(page).concatWith(listNextWithServiceResponseAsync(nextPageLink));
            }
        });
}
/**
 * Gets all the load balancer backend address pools.
 *
 * @param resourceGroupName The name of the resource group.
 * @param loadBalancerName The name of the load balancer.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the PagedList&lt;BackendAddressPoolInner&gt; object wrapped in {@link ServiceResponse} if successful.
 */
public Observable<ServiceResponse<Page<BackendAddressPoolInner>>> listSinglePageAsync(final String resourceGroupName, final String loadBalancerName) {
    if (resourceGroupName == null) {
        throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
    }
    if (loadBalancerName == null) {
        throw new IllegalArgumentException("Parameter loadBalancerName is required and cannot be null.");
    }
    if (this.client.subscriptionId() == null) {
        throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
    }
    final String apiVersion = "2020-03-01";
    return service.list(resourceGroupName, loadBalancerName, this.client.subscriptionId(), apiVersion, this.client.acceptLanguage(), this.client.userAgent())
        .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<BackendAddressPoolInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<BackendAddressPoolInner>>> call(Response<ResponseBody> response) {
                try {
                    // Deserialization/validation errors surface as an error Observable.
                    ServiceResponse<PageImpl<BackendAddressPoolInner>> result = listDelegate(response);
                    return Observable.just(new ServiceResponse<Page<BackendAddressPoolInner>>(result.body(), result.response()));
                } catch (Throwable t) {
                    return Observable.error(t);
                }
            }
        });
}
/** Deserializes a raw list response; any status other than 200 is mapped to a CloudException. */
private ServiceResponse<PageImpl<BackendAddressPoolInner>> listDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
    return this.client.restClient().responseBuilderFactory().<PageImpl<BackendAddressPoolInner>, CloudException>newInstance(this.client.serializerAdapter())
        .register(200, new TypeToken<PageImpl<BackendAddressPoolInner>>() { }.getType())
        .registerError(CloudException.class)
        .build(response);
}
/**
 * Gets load balancer backend address pool.
 *
 * @param resourceGroupName The name of the resource group.
 * @param loadBalancerName The name of the load balancer.
 * @param backendAddressPoolName The name of the backend address pool.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws CloudException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 * @return the BackendAddressPoolInner object if successful.
 */
public BackendAddressPoolInner get(String resourceGroupName, String loadBalancerName, String backendAddressPoolName) {
    // Synchronous variant: blocks on the async pipeline.
    return getWithServiceResponseAsync(resourceGroupName, loadBalancerName, backendAddressPoolName).toBlocking().single().body();
}
/**
 * Gets load balancer backend address pool.
 *
 * @param resourceGroupName The name of the resource group.
 * @param loadBalancerName The name of the load balancer.
 * @param backendAddressPoolName The name of the backend address pool.
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<BackendAddressPoolInner> getAsync(String resourceGroupName, String loadBalancerName, String backendAddressPoolName, final ServiceCallback<BackendAddressPoolInner> serviceCallback) {
    return ServiceFuture.fromResponse(getWithServiceResponseAsync(resourceGroupName, loadBalancerName, backendAddressPoolName), serviceCallback);
}
/**
 * Gets load balancer backend address pool.
 *
 * @param resourceGroupName The name of the resource group.
 * @param loadBalancerName The name of the load balancer.
 * @param backendAddressPoolName The name of the backend address pool.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the BackendAddressPoolInner object
 */
public Observable<BackendAddressPoolInner> getAsync(String resourceGroupName, String loadBalancerName, String backendAddressPoolName) {
    // Unwrap the ServiceResponse to expose only the payload.
    return getWithServiceResponseAsync(resourceGroupName, loadBalancerName, backendAddressPoolName).map(new Func1<ServiceResponse<BackendAddressPoolInner>, BackendAddressPoolInner>() {
        @Override
        public BackendAddressPoolInner call(ServiceResponse<BackendAddressPoolInner> response) {
            return response.body();
        }
    });
}
/**
 * Gets load balancer backend address pool.
 *
 * @param resourceGroupName The name of the resource group.
 * @param loadBalancerName The name of the load balancer.
 * @param backendAddressPoolName The name of the backend address pool.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the BackendAddressPoolInner object
 */
public Observable<ServiceResponse<BackendAddressPoolInner>> getWithServiceResponseAsync(String resourceGroupName, String loadBalancerName, String backendAddressPoolName) {
    // Validate eagerly so bad arguments fail before any network call is made.
    if (resourceGroupName == null) {
        throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
    }
    if (loadBalancerName == null) {
        throw new IllegalArgumentException("Parameter loadBalancerName is required and cannot be null.");
    }
    if (backendAddressPoolName == null) {
        throw new IllegalArgumentException("Parameter backendAddressPoolName is required and cannot be null.");
    }
    if (this.client.subscriptionId() == null) {
        throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
    }
    final String apiVersion = "2020-03-01";
    return service.get(resourceGroupName, loadBalancerName, backendAddressPoolName, this.client.subscriptionId(), apiVersion, this.client.acceptLanguage(), this.client.userAgent())
        .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<BackendAddressPoolInner>>>() {
            @Override
            public Observable<ServiceResponse<BackendAddressPoolInner>> call(Response<ResponseBody> response) {
                try {
                    ServiceResponse<BackendAddressPoolInner> clientResponse = getDelegate(response);
                    return Observable.just(clientResponse);
                } catch (Throwable t) {
                    return Observable.error(t);
                }
            }
        });
}
    /**
     * Deserializes the raw REST response of the get operation.
     * HTTP 200 is mapped to a {@code BackendAddressPoolInner} body; any other
     * status code is surfaced as a {@code CloudException}.
     */
    private ServiceResponse<BackendAddressPoolInner> getDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<BackendAddressPoolInner, CloudException>newInstance(this.client.serializerAdapter())
            .register(200, new TypeToken<BackendAddressPoolInner>() { }.getType())
            .registerError(CloudException.class)
            .build(response);
    }
    /**
     * Gets all the load balancer backend address pools.
     *
     * <p>Blocks until the first page is retrieved; subsequent pages are fetched
     * (also blockingly) on demand via the overridden {@code nextPage}.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the PagedList<BackendAddressPoolInner> object if successful.
     */
    public PagedList<BackendAddressPoolInner> listNext(final String nextPageLink) {
        ServiceResponse<Page<BackendAddressPoolInner>> response = listNextSinglePageAsync(nextPageLink).toBlocking().single();
        return new PagedList<BackendAddressPoolInner>(response.body()) {
            @Override
            public Page<BackendAddressPoolInner> nextPage(String nextPageLink) {
                return listNextSinglePageAsync(nextPageLink).toBlocking().single().body();
            }
        };
    }
    /**
     * Gets all the load balancer backend address pools.
     *
     * <p>Pages are fetched successively; the callback is invoked as each page arrives.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @param serviceFuture the ServiceFuture object tracking the Retrofit calls
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<List<BackendAddressPoolInner>> listNextAsync(final String nextPageLink, final ServiceFuture<List<BackendAddressPoolInner>> serviceFuture, final ListOperationCallback<BackendAddressPoolInner> serviceCallback) {
        return AzureServiceFuture.fromPageResponse(
            listNextSinglePageAsync(nextPageLink),
            new Func1<String, Observable<ServiceResponse<Page<BackendAddressPoolInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<BackendAddressPoolInner>>> call(String nextPageLink) {
                    return listNextSinglePageAsync(nextPageLink);
                }
            },
            serviceCallback);
    }
/**
* Gets all the load balancer backed address pools.
*
* @param nextPageLink The NextLink from the previous successful call to List operation.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the observable to the PagedList<BackendAddressPoolInner> object
*/
public Observable<Page<BackendAddressPoolInner>> listNextAsync(final String nextPageLink) {
return listNextWithServiceResponseAsync(nextPageLink)
.map(new Func1<ServiceResponse<Page<BackendAddressPoolInner>>, Page<BackendAddressPoolInner>>() {
@Override
public Page<BackendAddressPoolInner> call(ServiceResponse<Page<BackendAddressPoolInner>> response) {
return response.body();
}
});
}
/**
* Gets all the load balancer backed address pools.
*
* @param nextPageLink The NextLink from the previous successful call to List operation.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the observable to the PagedList<BackendAddressPoolInner> object
*/
public Observable<ServiceResponse<Page<BackendAddressPoolInner>>> listNextWithServiceResponseAsync(final String nextPageLink) {
return listNextSinglePageAsync(nextPageLink)
.concatMap(new Func1<ServiceResponse<Page<BackendAddressPoolInner>>, Observable<ServiceResponse<Page<BackendAddressPoolInner>>>>() {
@Override
public Observable<ServiceResponse<Page<BackendAddressPoolInner>>> call(ServiceResponse<Page<BackendAddressPoolInner>> page) {
String nextPageLink = page.body().nextPageLink();
if (nextPageLink == null) {
return Observable.just(page);
}
return Observable.just(page).concatWith(listNextWithServiceResponseAsync(nextPageLink));
}
});
}
/**
* Gets all the load balancer backed address pools.
*
ServiceResponse<PageImpl<BackendAddressPoolInner>> * @param nextPageLink The NextLink from the previous successful call to List operation.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the PagedList<BackendAddressPoolInner> object wrapped in {@link ServiceResponse} if successful.
*/
public Observable<ServiceResponse<Page<BackendAddressPoolInner>>> listNextSinglePageAsync(final String nextPageLink) {
if (nextPageLink == null) {
throw new IllegalArgumentException("Parameter nextPageLink is required and cannot be null.");
}
String nextUrl = String.format("%s", nextPageLink);
return service.listNext(nextUrl, this.client.acceptLanguage(), this.client.userAgent())
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<BackendAddressPoolInner>>>>() {
@Override
public Observable<ServiceResponse<Page<BackendAddressPoolInner>>> call(Response<ResponseBody> response) {
try {
ServiceResponse<PageImpl<BackendAddressPoolInner>> result = listNextDelegate(response);
return Observable.just(new ServiceResponse<Page<BackendAddressPoolInner>>(result.body(), result.response()));
} catch (Throwable t) {
return Observable.error(t);
}
}
});
}
    /**
     * Deserializes the raw REST response of the listNext operation.
     * HTTP 200 is mapped to a page of {@code BackendAddressPoolInner} items;
     * any other status code is surfaced as a {@code CloudException}.
     */
    private ServiceResponse<PageImpl<BackendAddressPoolInner>> listNextDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<PageImpl<BackendAddressPoolInner>, CloudException>newInstance(this.client.serializerAdapter())
            .register(200, new TypeToken<PageImpl<BackendAddressPoolInner>>() { }.getType())
            .registerError(CloudException.class)
            .build(response);
    }
}
| |
/*
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.compute.v1;
import com.google.api.core.BetaApi;
import com.google.api.gax.httpjson.ApiMessage;
import java.util.List;
import java.util.Objects;
import javax.annotation.Generated;
import javax.annotation.Nullable;
/**
 * Request object for method compute.instances.setMetadata. Sets metadata for the specified instance
 * to the data included in the request.
 *
 * <p>Instances are immutable; use {@link #newBuilder()} to construct one.
 */
// NOTE: the Javadoc above must precede the annotations, otherwise the javadoc
// tool does not associate it with the class (the original had it reversed).
@Generated("by GAPIC")
@BetaApi
public final class SetMetadataInstanceHttpRequest implements ApiMessage {
  private final String access_token;
  private final String callback;
  private final String fields;
  private final String instance;
  private final String key;
  private final Metadata metadataResource;
  private final String prettyPrint;
  private final String quotaUser;
  private final String requestId;
  private final String userIp;

  /** Creates the all-null default instance. */
  private SetMetadataInstanceHttpRequest() {
    this.access_token = null;
    this.callback = null;
    this.fields = null;
    this.instance = null;
    this.key = null;
    this.metadataResource = null;
    this.prettyPrint = null;
    this.quotaUser = null;
    this.requestId = null;
    this.userIp = null;
  }

  private SetMetadataInstanceHttpRequest(
      String access_token,
      String callback,
      String fields,
      String instance,
      String key,
      Metadata metadataResource,
      String prettyPrint,
      String quotaUser,
      String requestId,
      String userIp) {
    this.access_token = access_token;
    this.callback = callback;
    this.fields = fields;
    this.instance = instance;
    this.key = key;
    this.metadataResource = metadataResource;
    this.prettyPrint = prettyPrint;
    this.quotaUser = quotaUser;
    this.requestId = requestId;
    this.userIp = userIp;
  }

  /**
   * Returns the value of the named field, or {@code null} if the field name is
   * unknown or {@code null}.
   */
  @Override
  public Object getFieldValue(String fieldName) {
    if (fieldName == null) {
      // Preserve the null-tolerance of the original "literal".equals(fieldName) chain.
      return null;
    }
    switch (fieldName) {
      case "access_token":
        return access_token;
      case "callback":
        return callback;
      case "fields":
        return fields;
      case "instance":
        return instance;
      case "key":
        return key;
      case "metadataResource":
        return metadataResource;
      case "prettyPrint":
        return prettyPrint;
      case "quotaUser":
        return quotaUser;
      case "requestId":
        return requestId;
      case "userIp":
        return userIp;
      default:
        return null;
    }
  }

  /** Returns the request body payload (the metadata resource), or {@code null} if unset. */
  @Nullable
  @Override
  public Metadata getApiMessageRequestBody() {
    return metadataResource;
  }

  /**
   * The fields that should be serialized (even if they have empty values). If the containing
   * message object has a non-null fieldmask, then all the fields in the field mask (and only those
   * fields in the field mask) will be serialized. If the containing object does not have a
   * fieldmask, then only non-empty fields will be serialized.
   */
  @Nullable
  @Override
  public List<String> getFieldMask() {
    return null;
  }

  /** OAuth 2.0 token for the current user. */
  public String getAccessToken() {
    return access_token;
  }

  /** Name of the JavaScript callback function that handles the response. */
  public String getCallback() {
    return callback;
  }

  /** Selector specifying a subset of fields to include in the response. */
  public String getFields() {
    return fields;
  }

  /**
   * Name of the instance scoping this request. It must have the format
   * `{project}/zones/{zone}/instances/{instance}/setMetadata`. \`{instance}\` must start with a
   * letter, and contain only letters (\`[A-Za-z]\`), numbers (\`[0-9]\`), dashes (\`-\`),
   * underscores (\`_\`), periods (\`.\`), tildes (\`~\`), plus (\`+\`) or percent signs
   * (\`%\`). It must be between 3 and 255 characters in length, and it must not start with
   * \`"goog"\`.
   */
  public String getInstance() {
    return instance;
  }

  /** API key. Required unless you provide an OAuth 2.0 token. */
  public String getKey() {
    return key;
  }

  /** A metadata key/value entry. */
  public Metadata getMetadataResource() {
    return metadataResource;
  }

  /** Returns response with indentations and line breaks. */
  public String getPrettyPrint() {
    return prettyPrint;
  }

  /** Alternative to userIp. */
  public String getQuotaUser() {
    return quotaUser;
  }

  /**
   * An optional request ID to identify requests. Specify a unique request ID so that if you must
   * retry your request, the server will know to ignore the request if it has already been
   * completed.
   *
   * <p>For example, consider a situation where you make an initial request and the request times
   * out. If you make the request again with the same request ID, the server can check if original
   * operation with the same request ID was received, and if so, will ignore the second request.
   * This prevents clients from accidentally creating duplicate commitments.
   *
   * <p>The request ID must be a valid UUID with the exception that zero UUID is not supported
   * (00000000-0000-0000-0000-000000000000).
   */
  public String getRequestId() {
    return requestId;
  }

  /** IP address of the end user for whom the API call is being made. */
  public String getUserIp() {
    return userIp;
  }

  /** Returns a builder seeded from the default (all-null) instance. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  /** Returns a builder seeded from {@code prototype}. */
  public static Builder newBuilder(SetMetadataInstanceHttpRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  /** Returns a builder carrying this instance's values. */
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  /** Returns the shared all-null default instance. */
  public static SetMetadataInstanceHttpRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Inline initialization replaces the original static block; semantics are identical.
  private static final SetMetadataInstanceHttpRequest DEFAULT_INSTANCE =
      new SetMetadataInstanceHttpRequest();

  /** Mutable builder for {@link SetMetadataInstanceHttpRequest}. */
  public static class Builder {
    private String access_token;
    private String callback;
    private String fields;
    private String instance;
    private String key;
    private Metadata metadataResource;
    private String prettyPrint;
    private String quotaUser;
    private String requestId;
    private String userIp;

    Builder() {}

    /** Copies every non-null field of {@code other} into this builder. */
    public Builder mergeFrom(SetMetadataInstanceHttpRequest other) {
      if (other == SetMetadataInstanceHttpRequest.getDefaultInstance()) return this;
      if (other.getAccessToken() != null) {
        this.access_token = other.access_token;
      }
      if (other.getCallback() != null) {
        this.callback = other.callback;
      }
      if (other.getFields() != null) {
        this.fields = other.fields;
      }
      if (other.getInstance() != null) {
        this.instance = other.instance;
      }
      if (other.getKey() != null) {
        this.key = other.key;
      }
      if (other.getMetadataResource() != null) {
        this.metadataResource = other.metadataResource;
      }
      if (other.getPrettyPrint() != null) {
        this.prettyPrint = other.prettyPrint;
      }
      if (other.getQuotaUser() != null) {
        this.quotaUser = other.quotaUser;
      }
      if (other.getRequestId() != null) {
        this.requestId = other.requestId;
      }
      if (other.getUserIp() != null) {
        this.userIp = other.userIp;
      }
      return this;
    }

    Builder(SetMetadataInstanceHttpRequest source) {
      this.access_token = source.access_token;
      this.callback = source.callback;
      this.fields = source.fields;
      this.instance = source.instance;
      this.key = source.key;
      this.metadataResource = source.metadataResource;
      this.prettyPrint = source.prettyPrint;
      this.quotaUser = source.quotaUser;
      this.requestId = source.requestId;
      this.userIp = source.userIp;
    }

    /** OAuth 2.0 token for the current user. */
    public String getAccessToken() {
      return access_token;
    }

    /** OAuth 2.0 token for the current user. */
    public Builder setAccessToken(String access_token) {
      this.access_token = access_token;
      return this;
    }

    /** Name of the JavaScript callback function that handles the response. */
    public String getCallback() {
      return callback;
    }

    /** Name of the JavaScript callback function that handles the response. */
    public Builder setCallback(String callback) {
      this.callback = callback;
      return this;
    }

    /** Selector specifying a subset of fields to include in the response. */
    public String getFields() {
      return fields;
    }

    /** Selector specifying a subset of fields to include in the response. */
    public Builder setFields(String fields) {
      this.fields = fields;
      return this;
    }

    /**
     * Name of the instance scoping this request. It must have the format
     * `{project}/zones/{zone}/instances/{instance}/setMetadata`. \`{instance}\` must start with a
     * letter, and contain only letters (\`[A-Za-z]\`), numbers (\`[0-9]\`), dashes (\`-\`),
     * underscores (\`_\`), periods (\`.\`), tildes (\`~\`), plus (\`+\`) or percent signs
     * (\`%\`). It must be between 3 and 255 characters in length, and it must not start with
     * \`"goog"\`.
     */
    public String getInstance() {
      return instance;
    }

    /**
     * Name of the instance scoping this request. Required; see {@link #getInstance()} for the
     * accepted format.
     */
    public Builder setInstance(String instance) {
      this.instance = instance;
      return this;
    }

    /** API key. Required unless you provide an OAuth 2.0 token. */
    public String getKey() {
      return key;
    }

    /** API key. Required unless you provide an OAuth 2.0 token. */
    public Builder setKey(String key) {
      this.key = key;
      return this;
    }

    /** A metadata key/value entry. */
    public Metadata getMetadataResource() {
      return metadataResource;
    }

    /** A metadata key/value entry. */
    public Builder setMetadataResource(Metadata metadataResource) {
      this.metadataResource = metadataResource;
      return this;
    }

    /** Returns response with indentations and line breaks. */
    public String getPrettyPrint() {
      return prettyPrint;
    }

    /** Returns response with indentations and line breaks. */
    public Builder setPrettyPrint(String prettyPrint) {
      this.prettyPrint = prettyPrint;
      return this;
    }

    /** Alternative to userIp. */
    public String getQuotaUser() {
      return quotaUser;
    }

    /** Alternative to userIp. */
    public Builder setQuotaUser(String quotaUser) {
      this.quotaUser = quotaUser;
      return this;
    }

    /**
     * An optional request ID to identify requests. Specify a unique request ID so that if you must
     * retry your request, the server will know to ignore the request if it has already been
     * completed. Must be a valid UUID; the zero UUID
     * (00000000-0000-0000-0000-000000000000) is not supported.
     */
    public String getRequestId() {
      return requestId;
    }

    /**
     * An optional request ID to identify requests. See {@link #getRequestId()} for the
     * idempotency semantics and accepted format.
     */
    public Builder setRequestId(String requestId) {
      this.requestId = requestId;
      return this;
    }

    /** IP address of the end user for whom the API call is being made. */
    public String getUserIp() {
      return userIp;
    }

    /** IP address of the end user for whom the API call is being made. */
    public Builder setUserIp(String userIp) {
      this.userIp = userIp;
      return this;
    }

    /**
     * Builds the immutable request.
     *
     * @throws IllegalStateException if the required {@code instance} field is unset
     */
    public SetMetadataInstanceHttpRequest build() {
      String missing = "";
      if (instance == null) {
        missing += " instance";
      }
      if (!missing.isEmpty()) {
        throw new IllegalStateException("Missing required properties:" + missing);
      }
      return new SetMetadataInstanceHttpRequest(
          access_token,
          callback,
          fields,
          instance,
          key,
          metadataResource,
          prettyPrint,
          quotaUser,
          requestId,
          userIp);
    }

    /** Returns an independent copy of this builder. */
    public Builder clone() {
      Builder newBuilder = new Builder();
      newBuilder.setAccessToken(this.access_token);
      newBuilder.setCallback(this.callback);
      newBuilder.setFields(this.fields);
      newBuilder.setInstance(this.instance);
      newBuilder.setKey(this.key);
      newBuilder.setMetadataResource(this.metadataResource);
      newBuilder.setPrettyPrint(this.prettyPrint);
      newBuilder.setQuotaUser(this.quotaUser);
      newBuilder.setRequestId(this.requestId);
      newBuilder.setUserIp(this.userIp);
      return newBuilder;
    }
  }

  @Override
  public String toString() {
    return "SetMetadataInstanceHttpRequest{"
        + "access_token="
        + access_token
        + ", "
        + "callback="
        + callback
        + ", "
        + "fields="
        + fields
        + ", "
        + "instance="
        + instance
        + ", "
        + "key="
        + key
        + ", "
        + "metadataResource="
        + metadataResource
        + ", "
        + "prettyPrint="
        + prettyPrint
        + ", "
        + "quotaUser="
        + quotaUser
        + ", "
        + "requestId="
        + requestId
        + ", "
        + "userIp="
        + userIp
        + "}";
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    if (o instanceof SetMetadataInstanceHttpRequest) {
      SetMetadataInstanceHttpRequest that = (SetMetadataInstanceHttpRequest) o;
      return Objects.equals(this.access_token, that.getAccessToken())
          && Objects.equals(this.callback, that.getCallback())
          && Objects.equals(this.fields, that.getFields())
          && Objects.equals(this.instance, that.getInstance())
          && Objects.equals(this.key, that.getKey())
          && Objects.equals(this.metadataResource, that.getMetadataResource())
          && Objects.equals(this.prettyPrint, that.getPrettyPrint())
          && Objects.equals(this.quotaUser, that.getQuotaUser())
          && Objects.equals(this.requestId, that.getRequestId())
          && Objects.equals(this.userIp, that.getUserIp());
    }
    return false;
  }

  @Override
  public int hashCode() {
    return Objects.hash(
        access_token,
        callback,
        fields,
        instance,
        key,
        metadataResource,
        prettyPrint,
        quotaUser,
        requestId,
        userIp);
  }
}
| |
package com.nosester.portlet.eventlisting.model;
import com.liferay.portal.kernel.bean.AutoEscapeBeanHandler;
import com.liferay.portal.kernel.exception.SystemException;
import com.liferay.portal.kernel.util.ProxyUtil;
import com.liferay.portal.kernel.util.StringBundler;
import com.liferay.portal.model.BaseModel;
import com.liferay.portal.model.impl.BaseModelImpl;
import com.liferay.portal.util.PortalUtil;
import com.nosester.portlet.eventlisting.service.ClpSerializer;
import com.nosester.portlet.eventlisting.service.LocationLocalServiceUtil;
import java.io.Serializable;
import java.lang.reflect.Method;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
public class LocationClp extends BaseModelImpl<Location> implements Location {
private long _locationId;
private String _name;
private String _description;
private String _streetAddress;
private String _city;
private String _stateOrProvince;
private String _country;
private long _companyId;
private long _groupId;
private long _userId;
private String _userUuid;
private Date _createDate;
private Date _modifiedDate;
private BaseModel<?> _locationRemoteModel;
    /** Creates an empty client-side (CLP) location model. */
    public LocationClp() {
    }
    /** Returns the model interface this CLP class represents. */
    @Override
    public Class<?> getModelClass() {
        return Location.class;
    }
    /** Returns the fully qualified name of the model interface. */
    @Override
    public String getModelClassName() {
        return Location.class.getName();
    }
    /** The location id doubles as the primary key. */
    @Override
    public long getPrimaryKey() {
        return _locationId;
    }
    @Override
    public void setPrimaryKey(long primaryKey) {
        setLocationId(primaryKey);
    }
    /** Returns the primary key boxed as a Serializable, as required by BaseModel. */
    @Override
    public Serializable getPrimaryKeyObj() {
        return _locationId;
    }
    @Override
    public void setPrimaryKeyObj(Serializable primaryKeyObj) {
        setPrimaryKey(((Long) primaryKeyObj).longValue());
    }
    /**
     * Returns a snapshot map of every persisted attribute of this location,
     * keyed by attribute name.
     */
    @Override
    public Map<String, Object> getModelAttributes() {
        Map<String, Object> attributes = new HashMap<String, Object>();
        attributes.put("locationId", getLocationId());
        attributes.put("name", getName());
        attributes.put("description", getDescription());
        attributes.put("streetAddress", getStreetAddress());
        attributes.put("city", getCity());
        attributes.put("stateOrProvince", getStateOrProvince());
        attributes.put("country", getCountry());
        attributes.put("companyId", getCompanyId());
        attributes.put("groupId", getGroupId());
        attributes.put("userId", getUserId());
        attributes.put("createDate", getCreateDate());
        attributes.put("modifiedDate", getModifiedDate());
        return attributes;
    }
    /**
     * Applies the given attribute map to this model. Only keys present with a
     * non-null value are applied; absent or null entries leave the current
     * field value untouched.
     */
    @Override
    public void setModelAttributes(Map<String, Object> attributes) {
        Long locationId = (Long) attributes.get("locationId");
        if (locationId != null) {
            setLocationId(locationId);
        }
        String name = (String) attributes.get("name");
        if (name != null) {
            setName(name);
        }
        String description = (String) attributes.get("description");
        if (description != null) {
            setDescription(description);
        }
        String streetAddress = (String) attributes.get("streetAddress");
        if (streetAddress != null) {
            setStreetAddress(streetAddress);
        }
        String city = (String) attributes.get("city");
        if (city != null) {
            setCity(city);
        }
        String stateOrProvince = (String) attributes.get("stateOrProvince");
        if (stateOrProvince != null) {
            setStateOrProvince(stateOrProvince);
        }
        String country = (String) attributes.get("country");
        if (country != null) {
            setCountry(country);
        }
        Long companyId = (Long) attributes.get("companyId");
        if (companyId != null) {
            setCompanyId(companyId);
        }
        Long groupId = (Long) attributes.get("groupId");
        if (groupId != null) {
            setGroupId(groupId);
        }
        Long userId = (Long) attributes.get("userId");
        if (userId != null) {
            setUserId(userId);
        }
        Date createDate = (Date) attributes.get("createDate");
        if (createDate != null) {
            setCreateDate(createDate);
        }
        Date modifiedDate = (Date) attributes.get("modifiedDate");
        if (modifiedDate != null) {
            setModifiedDate(modifiedDate);
        }
    }
@Override
public long getLocationId() {
return _locationId;
}
@Override
public void setLocationId(long locationId) {
_locationId = locationId;
if (_locationRemoteModel != null) {
try {
Class<?> clazz = _locationRemoteModel.getClass();
Method method = clazz.getMethod("setLocationId", long.class);
method.invoke(_locationRemoteModel, locationId);
} catch (Exception e) {
throw new UnsupportedOperationException(e);
}
}
}
@Override
public String getName() {
return _name;
}
@Override
public void setName(String name) {
_name = name;
if (_locationRemoteModel != null) {
try {
Class<?> clazz = _locationRemoteModel.getClass();
Method method = clazz.getMethod("setName", String.class);
method.invoke(_locationRemoteModel, name);
} catch (Exception e) {
throw new UnsupportedOperationException(e);
}
}
}
@Override
public String getDescription() {
return _description;
}
@Override
public void setDescription(String description) {
_description = description;
if (_locationRemoteModel != null) {
try {
Class<?> clazz = _locationRemoteModel.getClass();
Method method = clazz.getMethod("setDescription", String.class);
method.invoke(_locationRemoteModel, description);
} catch (Exception e) {
throw new UnsupportedOperationException(e);
}
}
}
@Override
public String getStreetAddress() {
return _streetAddress;
}
@Override
public void setStreetAddress(String streetAddress) {
_streetAddress = streetAddress;
if (_locationRemoteModel != null) {
try {
Class<?> clazz = _locationRemoteModel.getClass();
Method method = clazz.getMethod("setStreetAddress", String.class);
method.invoke(_locationRemoteModel, streetAddress);
} catch (Exception e) {
throw new UnsupportedOperationException(e);
}
}
}
@Override
public String getCity() {
return _city;
}
@Override
public void setCity(String city) {
_city = city;
if (_locationRemoteModel != null) {
try {
Class<?> clazz = _locationRemoteModel.getClass();
Method method = clazz.getMethod("setCity", String.class);
method.invoke(_locationRemoteModel, city);
} catch (Exception e) {
throw new UnsupportedOperationException(e);
}
}
}
@Override
public String getStateOrProvince() {
return _stateOrProvince;
}
@Override
public void setStateOrProvince(String stateOrProvince) {
_stateOrProvince = stateOrProvince;
if (_locationRemoteModel != null) {
try {
Class<?> clazz = _locationRemoteModel.getClass();
Method method = clazz.getMethod("setStateOrProvince",
String.class);
method.invoke(_locationRemoteModel, stateOrProvince);
} catch (Exception e) {
throw new UnsupportedOperationException(e);
}
}
}
@Override
public String getCountry() {
return _country;
}
@Override
public void setCountry(String country) {
_country = country;
if (_locationRemoteModel != null) {
try {
Class<?> clazz = _locationRemoteModel.getClass();
Method method = clazz.getMethod("setCountry", String.class);
method.invoke(_locationRemoteModel, country);
} catch (Exception e) {
throw new UnsupportedOperationException(e);
}
}
}
@Override
public long getCompanyId() {
return _companyId;
}
@Override
public void setCompanyId(long companyId) {
_companyId = companyId;
if (_locationRemoteModel != null) {
try {
Class<?> clazz = _locationRemoteModel.getClass();
Method method = clazz.getMethod("setCompanyId", long.class);
method.invoke(_locationRemoteModel, companyId);
} catch (Exception e) {
throw new UnsupportedOperationException(e);
}
}
}
@Override
public long getGroupId() {
return _groupId;
}
@Override
public void setGroupId(long groupId) {
_groupId = groupId;
if (_locationRemoteModel != null) {
try {
Class<?> clazz = _locationRemoteModel.getClass();
Method method = clazz.getMethod("setGroupId", long.class);
method.invoke(_locationRemoteModel, groupId);
} catch (Exception e) {
throw new UnsupportedOperationException(e);
}
}
}
@Override
public long getUserId() {
return _userId;
}
@Override
public void setUserId(long userId) {
_userId = userId;
if (_locationRemoteModel != null) {
try {
Class<?> clazz = _locationRemoteModel.getClass();
Method method = clazz.getMethod("setUserId", long.class);
method.invoke(_locationRemoteModel, userId);
} catch (Exception e) {
throw new UnsupportedOperationException(e);
}
}
}
@Override
public String getUserUuid() throws SystemException {
return PortalUtil.getUserValue(getUserId(), "uuid", _userUuid);
}
@Override
public void setUserUuid(String userUuid) {
_userUuid = userUuid;
}
@Override
public Date getCreateDate() {
return _createDate;
}
@Override
public void setCreateDate(Date createDate) {
_createDate = createDate;
if (_locationRemoteModel != null) {
try {
Class<?> clazz = _locationRemoteModel.getClass();
Method method = clazz.getMethod("setCreateDate", Date.class);
method.invoke(_locationRemoteModel, createDate);
} catch (Exception e) {
throw new UnsupportedOperationException(e);
}
}
}
@Override
public Date getModifiedDate() {
return _modifiedDate;
}
@Override
public void setModifiedDate(Date modifiedDate) {
_modifiedDate = modifiedDate;
if (_locationRemoteModel != null) {
try {
Class<?> clazz = _locationRemoteModel.getClass();
Method method = clazz.getMethod("setModifiedDate", Date.class);
method.invoke(_locationRemoteModel, modifiedDate);
} catch (Exception e) {
throw new UnsupportedOperationException(e);
}
}
}
    /** Returns the wrapped service-side model instance, or null if detached. */
    public BaseModel<?> getLocationRemoteModel() {
        return _locationRemoteModel;
    }
    /** Attaches the service-side model that setters will mirror changes onto. */
    public void setLocationRemoteModel(BaseModel<?> locationRemoteModel) {
        _locationRemoteModel = locationRemoteModel;
    }
    /**
     * Invokes an arbitrary method on the wrapped remote model across the
     * class-loader boundary: parameter values are translated via
     * ClpSerializer, parameter types are re-resolved in the remote model's
     * class loader (primitives are loader-independent and passed through),
     * and the return value is translated back.
     *
     * @param methodName the remote method to invoke
     * @param parameterTypes declared parameter types, in this class loader
     * @param parameterValues arguments to pass (nulls are passed untranslated)
     * @return the translated return value, or null
     * @throws Exception if reflection or serialization fails
     */
    public Object invokeOnRemoteModel(String methodName,
        Class<?>[] parameterTypes, Object[] parameterValues)
        throws Exception {
        Object[] remoteParameterValues = new Object[parameterValues.length];
        for (int i = 0; i < parameterValues.length; i++) {
            if (parameterValues[i] != null) {
                remoteParameterValues[i] = ClpSerializer.translateInput(parameterValues[i]);
            }
        }
        Class<?> remoteModelClass = _locationRemoteModel.getClass();
        ClassLoader remoteModelClassLoader = remoteModelClass.getClassLoader();
        Class<?>[] remoteParameterTypes = new Class[parameterTypes.length];
        for (int i = 0; i < parameterTypes.length; i++) {
            if (parameterTypes[i].isPrimitive()) {
                remoteParameterTypes[i] = parameterTypes[i];
            } else {
                // Non-primitive types must come from the remote class loader,
                // otherwise getMethod() would not match.
                String parameterTypeName = parameterTypes[i].getName();
                remoteParameterTypes[i] = remoteModelClassLoader.loadClass(parameterTypeName);
            }
        }
        Method method = remoteModelClass.getMethod(methodName,
            remoteParameterTypes);
        Object returnValue = method.invoke(_locationRemoteModel,
            remoteParameterValues);
        if (returnValue != null) {
            returnValue = ClpSerializer.translateOutput(returnValue);
        }
        return returnValue;
    }
@Override
public void persist() throws SystemException {
if (this.isNew()) {
LocationLocalServiceUtil.addLocation(this);
} else {
LocationLocalServiceUtil.updateLocation(this);
}
}
    /**
     * Returns a dynamic proxy over this model whose string getters return
     * auto-escaped values (via AutoEscapeBeanHandler).
     */
    @Override
    public Location toEscapedModel() {
        return (Location) ProxyUtil.newProxyInstance(Location.class.getClassLoader(),
            new Class[] { Location.class }, new AutoEscapeBeanHandler(this));
    }
    /**
     * Returns a field-by-field copy of this model. The remote model reference
     * is intentionally not copied, so setters on the clone do not propagate
     * to the original's service-side instance.
     */
    @Override
    public Object clone() {
        LocationClp clone = new LocationClp();
        clone.setLocationId(getLocationId());
        clone.setName(getName());
        clone.setDescription(getDescription());
        clone.setStreetAddress(getStreetAddress());
        clone.setCity(getCity());
        clone.setStateOrProvince(getStateOrProvince());
        clone.setCountry(getCountry());
        clone.setCompanyId(getCompanyId());
        clone.setGroupId(getGroupId());
        clone.setUserId(getUserId());
        clone.setCreateDate(getCreateDate());
        clone.setModifiedDate(getModifiedDate());
        return clone;
    }
/**
 * Orders locations alphabetically by name.
 *
 * @param location the location to compare against
 * @return a negative value, zero, or a positive value per the
 *         {@link Comparable} contract on the names
 */
@Override
public int compareTo(Location location) {
    return getName().compareTo(location.getName());
}
/**
 * Two {@code LocationClp} instances are equal iff their primary keys match.
 */
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }

    if (!(obj instanceof LocationClp)) {
        return false;
    }

    LocationClp other = (LocationClp) obj;

    return getPrimaryKey() == other.getPrimaryKey();
}
/**
 * Returns the primary key narrowed to {@code int}, consistent with
 * {@link #equals(Object)}, which also compares by primary key.
 */
@Override
public int hashCode() {
    return (int) getPrimaryKey();
}
/**
 * Returns a single-line, brace-delimited dump of all twelve model
 * attributes, e.g. {@code {locationId=..., name=..., ...}}.
 */
@Override
public String toString() {
    // 12 labels + 12 values + closing brace = 25 parts.
    StringBundler sb = new StringBundler(25);

    sb.append("{locationId=");
    sb.append(getLocationId());
    sb.append(", name=");
    sb.append(getName());
    sb.append(", description=");
    sb.append(getDescription());
    sb.append(", streetAddress=");
    sb.append(getStreetAddress());
    sb.append(", city=");
    sb.append(getCity());
    sb.append(", stateOrProvince=");
    sb.append(getStateOrProvince());
    sb.append(", country=");
    sb.append(getCountry());
    sb.append(", companyId=");
    sb.append(getCompanyId());
    sb.append(", groupId=");
    sb.append(getGroupId());
    sb.append(", userId=");
    sb.append(getUserId());
    sb.append(", createDate=");
    sb.append(getCreateDate());
    sb.append(", modifiedDate=");
    sb.append(getModifiedDate());
    sb.append("}");

    return sb.toString();
}
/**
 * Serializes all twelve model attributes into the service-builder XML
 * format, wrapping each value in a CDATA section.
 */
@Override
public String toXmlString() {
    // 3 header parts + 3 parts per column * 12 columns + closing tag = 40.
    StringBundler sb = new StringBundler(40);

    sb.append("<model><model-name>");
    sb.append("com.nosester.portlet.eventlisting.model.Location");
    sb.append("</model-name>");

    sb.append(
        "<column><column-name>locationId</column-name><column-value><![CDATA[");
    sb.append(getLocationId());
    sb.append("]]></column-value></column>");
    sb.append(
        "<column><column-name>name</column-name><column-value><![CDATA[");
    sb.append(getName());
    sb.append("]]></column-value></column>");
    sb.append(
        "<column><column-name>description</column-name><column-value><![CDATA[");
    sb.append(getDescription());
    sb.append("]]></column-value></column>");
    sb.append(
        "<column><column-name>streetAddress</column-name><column-value><![CDATA[");
    sb.append(getStreetAddress());
    sb.append("]]></column-value></column>");
    sb.append(
        "<column><column-name>city</column-name><column-value><![CDATA[");
    sb.append(getCity());
    sb.append("]]></column-value></column>");
    sb.append(
        "<column><column-name>stateOrProvince</column-name><column-value><![CDATA[");
    sb.append(getStateOrProvince());
    sb.append("]]></column-value></column>");
    sb.append(
        "<column><column-name>country</column-name><column-value><![CDATA[");
    sb.append(getCountry());
    sb.append("]]></column-value></column>");
    sb.append(
        "<column><column-name>companyId</column-name><column-value><![CDATA[");
    sb.append(getCompanyId());
    sb.append("]]></column-value></column>");
    sb.append(
        "<column><column-name>groupId</column-name><column-value><![CDATA[");
    sb.append(getGroupId());
    sb.append("]]></column-value></column>");
    sb.append(
        "<column><column-name>userId</column-name><column-value><![CDATA[");
    sb.append(getUserId());
    sb.append("]]></column-value></column>");
    sb.append(
        "<column><column-name>createDate</column-name><column-value><![CDATA[");
    sb.append(getCreateDate());
    sb.append("]]></column-value></column>");
    sb.append(
        "<column><column-name>modifiedDate</column-name><column-value><![CDATA[");
    sb.append(getModifiedDate());
    sb.append("]]></column-value></column>");

    sb.append("</model>");

    return sb.toString();
}
}
| |
/*******************************************************************************
*
* Copyright FUJITSU LIMITED 2017
*
* Creation Date: May 27, 2013
*
*******************************************************************************/
package org.oscm.billingservice.business.calculation.revenue;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.util.ArrayList;
import java.util.LinkedList;
import org.junit.Before;
import org.junit.Test;
import org.oscm.billingservice.business.calculation.revenue.model.TimeSlice;
import org.oscm.billingservice.business.calculation.revenue.model.UserAssignment;
import org.oscm.billingservice.dao.model.XParameterData;
import org.oscm.billingservice.dao.model.XParameterIdData;
import org.oscm.billingservice.dao.model.XParameterPeriodPrimitiveType;
import org.oscm.billingservice.dao.model.XParameterPeriodValue;
import org.oscm.test.DateTimeHandling;
import org.oscm.internal.types.enumtypes.ParameterType;
import org.oscm.internal.types.enumtypes.ParameterValueType;
import org.oscm.internal.types.enumtypes.PricingPeriod;
/**
* @author tokoda
*
*/
public class CostCalculatorPerUnitParameterUserFactorTest {

    /** Keys of the two parameter period values used by the fixtures. */
    private static final long PERIOD_VALUE_KEY1 = 10000;
    private static final long PERIOD_VALUE_KEY2 = 10001;

    /** Key of the single user owning all assignments created by the tests. */
    private static final long USER_KEY = 20000;

    private CostCalculatorPerUnit calculator;
    private TimeSlice timeSlice;
    private XParameterIdData idData;
    private LinkedList<XParameterPeriodValue> periodValues;
    private ArrayList<UserAssignment> userAssignments;

    /**
     * Creates the calculator under test and a weekly time slice covering
     * 2013-04-01 00:00:00 up to (but not including) 2013-04-08 00:00:00.
     */
    @Before
    public void setup() {
        calculator = new CostCalculatorPerUnit();
        timeSlice = new TimeSlice(
                DateTimeHandling.calculateMillis("2013-04-01 00:00:00"),
                DateTimeHandling.calculateMillis("2013-04-08 00:00:00") - 1,
                PricingPeriod.WEEK);
        XParameterData pData = new XParameterData();
        idData = pData.getIdDataInstance("ID",
                ParameterType.PLATFORM_PARAMETER, ParameterValueType.BOOLEAN);
        periodValues = new LinkedList<XParameterPeriodValue>();
        userAssignments = new ArrayList<UserAssignment>();
    }

    @Test
    public void computeParameterUserFactorForOneTimeSliceAndOneUser_EmptyInputs() {
        // given empty period values and user assignments

        // when
        calculator.computeParameterUserFactorForOneTimeSliceAndOneUser(
                timeSlice, periodValues, userAssignments);

        // then
        assertTrue(periodValues.isEmpty());
    }

    @Test
    public void computeParameterUserFactorForOneTimeSliceAndOneUser_EmptyUserAssignments() {
        // given
        addParameterPeriodValue(PERIOD_VALUE_KEY1, "A",
                DateTimeHandling.calculateMillis("2013-04-01 00:00:00"),
                DateTimeHandling.calculateMillis("2013-04-08 00:00:00"));

        // when
        calculator.computeParameterUserFactorForOneTimeSliceAndOneUser(
                timeSlice, periodValues, userAssignments);

        // then
        assertEquals(1, periodValues.size());
        assertEquals(PERIOD_VALUE_KEY1, periodValues.get(0).getKey()
                .longValue());
        assertEquals(0, periodValues.get(0).getUserAssignmentFactor(), 0.000001);
        assertNull(periodValues.get(0).getRolePrices());
    }

    @Test
    public void computeParameterUserFactorForOneTimeSliceAndOneUser_UserExt_ParNot() {
        // given a period value that ends at the slice start but a user
        // assignment spanning the whole slice
        addParameterPeriodValue(PERIOD_VALUE_KEY1, "A",
                DateTimeHandling.calculateMillis("2013-03-31 00:00:00"),
                DateTimeHandling.calculateMillis("2013-04-01 00:00:00"));
        addUserAssignment(USER_KEY,
                DateTimeHandling.calculateMillis("2013-04-01 00:00:00"),
                DateTimeHandling.calculateMillis("2013-04-08 00:00:00"));

        // when
        calculator.computeParameterUserFactorForOneTimeSliceAndOneUser(
                timeSlice, periodValues, userAssignments);

        // then
        assertEquals(1, periodValues.size());
        assertEquals(PERIOD_VALUE_KEY1, periodValues.get(0).getKey()
                .longValue());
        assertEquals(1, periodValues.get(0).getUserAssignmentFactor(), 0.000001);
        assertNull(periodValues.get(0).getRolePrices());
    }

    @Test
    public void computeParameterUserFactorForOneTimeSliceAndOneUser_OneUserAssignmentForOnePeriodValue() {
        // given
        addParameterPeriodValue(PERIOD_VALUE_KEY1, "A",
                DateTimeHandling.calculateMillis("2013-04-01 00:00:00"),
                DateTimeHandling.calculateMillis("2013-04-08 00:00:00"));
        addUserAssignment(USER_KEY,
                DateTimeHandling.calculateMillis("2013-04-01 01:00:00"),
                DateTimeHandling.calculateMillis("2013-04-01 02:00:00"));

        // when
        calculator.computeParameterUserFactorForOneTimeSliceAndOneUser(
                timeSlice, periodValues, userAssignments);

        // then
        assertEquals(1, periodValues.size());
        assertEquals(PERIOD_VALUE_KEY1, periodValues.get(0).getKey()
                .longValue());
        assertEquals(1, periodValues.get(0).getUserAssignmentFactor(), 0.000001);
    }

    @Test
    public void computeParameterUserFactorForOneTimeSliceAndOneUser_TwoUserAssignmentsForOnePeriodValue() {
        // given two assignments of the same user inside one period value
        addParameterPeriodValue(PERIOD_VALUE_KEY1, "A",
                DateTimeHandling.calculateMillis("2013-04-01 00:00:00"),
                DateTimeHandling.calculateMillis("2013-04-08 00:00:00"));
        addUserAssignment(USER_KEY,
                DateTimeHandling.calculateMillis("2013-04-02 01:00:00"),
                DateTimeHandling.calculateMillis("2013-04-02 02:00:00"));
        addUserAssignment(USER_KEY,
                DateTimeHandling.calculateMillis("2013-04-01 01:00:00"),
                DateTimeHandling.calculateMillis("2013-04-01 02:00:00"));

        // when
        calculator.computeParameterUserFactorForOneTimeSliceAndOneUser(
                timeSlice, periodValues, userAssignments);

        // then
        assertEquals(1, periodValues.size());
        assertEquals(PERIOD_VALUE_KEY1, periodValues.get(0).getKey()
                .longValue());
        assertEquals(1, periodValues.get(0).getUserAssignmentFactor(), 0.000001);
    }

    @Test
    public void computeParameterUserFactorForOneTimeSliceAndOneUser_OneUserAssignmentForTwoPeriodValues() {
        // given one assignment overlapping both period values
        addParameterPeriodValue(PERIOD_VALUE_KEY2, "B",
                DateTimeHandling.calculateMillis("2013-04-04 00:00:00"),
                DateTimeHandling.calculateMillis("2013-04-08 00:00:00"));
        addParameterPeriodValue(PERIOD_VALUE_KEY1, "A",
                DateTimeHandling.calculateMillis("2013-04-01 00:00:00"),
                DateTimeHandling.calculateMillis("2013-04-04 00:00:00"));
        addUserAssignment(USER_KEY,
                DateTimeHandling.calculateMillis("2013-04-02 00:00:00"),
                DateTimeHandling.calculateMillis("2013-04-07 00:00:00"));

        // when
        calculator.computeParameterUserFactorForOneTimeSliceAndOneUser(
                timeSlice, periodValues, userAssignments);

        // then
        assertEquals(2, periodValues.size());
        assertEquals(PERIOD_VALUE_KEY2, periodValues.get(0).getKey()
                .longValue());
        assertEquals(0.57142857, periodValues.get(0).getUserAssignmentFactor(),
                0.000001);
        assertEquals(PERIOD_VALUE_KEY1, periodValues.get(1).getKey()
                .longValue());
        assertEquals(0.42857142, periodValues.get(1).getUserAssignmentFactor(),
                0.000001);
    }

    @Test
    public void computeParameterUserFactorForOneTimeSliceAndOneUser_FirstPeriodValueHasNoUserAssignment() {
        // given
        addParameterPeriodValue(PERIOD_VALUE_KEY2, "B",
                DateTimeHandling.calculateMillis("2013-04-04 00:00:00"),
                DateTimeHandling.calculateMillis("2013-04-08 00:00:00"));
        addParameterPeriodValue(PERIOD_VALUE_KEY1, "A",
                DateTimeHandling.calculateMillis("2013-04-01 00:00:00"),
                DateTimeHandling.calculateMillis("2013-04-04 00:00:00"));
        addUserAssignment(USER_KEY,
                DateTimeHandling.calculateMillis("2013-04-05 00:00:00"),
                DateTimeHandling.calculateMillis("2013-04-06 00:00:00"));

        // when
        calculator.computeParameterUserFactorForOneTimeSliceAndOneUser(
                timeSlice, periodValues, userAssignments);

        // then
        assertEquals(2, periodValues.size());
        assertEquals(PERIOD_VALUE_KEY2, periodValues.get(0).getKey()
                .longValue());
        assertEquals(1, periodValues.get(0).getUserAssignmentFactor(), 0.000001);
        assertEquals(PERIOD_VALUE_KEY1, periodValues.get(1).getKey()
                .longValue());
        assertEquals(0, periodValues.get(1).getUserAssignmentFactor(), 0.000001);
    }

    @Test
    public void computeParameterUserFactorForOneTimeSliceAndOneUser_SecondPeriodValueHasNoUserAssignment() {
        // given
        addParameterPeriodValue(PERIOD_VALUE_KEY2, "B",
                DateTimeHandling.calculateMillis("2013-04-04 00:00:00"),
                DateTimeHandling.calculateMillis("2013-04-08 00:00:00"));
        addParameterPeriodValue(PERIOD_VALUE_KEY1, "A",
                DateTimeHandling.calculateMillis("2013-04-01 00:00:00"),
                DateTimeHandling.calculateMillis("2013-04-04 00:00:00"));
        addUserAssignment(USER_KEY,
                DateTimeHandling.calculateMillis("2013-04-02 00:00:00"),
                DateTimeHandling.calculateMillis("2013-04-03 00:00:00"));

        // when
        calculator.computeParameterUserFactorForOneTimeSliceAndOneUser(
                timeSlice, periodValues, userAssignments);

        // then
        assertEquals(2, periodValues.size());
        assertEquals(PERIOD_VALUE_KEY2, periodValues.get(0).getKey()
                .longValue());
        assertEquals(0, periodValues.get(0).getUserAssignmentFactor(), 0.000001);
        assertEquals(PERIOD_VALUE_KEY1, periodValues.get(1).getKey()
                .longValue());
        assertEquals(1, periodValues.get(1).getUserAssignmentFactor(), 0.000001);
    }

    @Test
    public void computeParameterUserFactorForOneTimeSliceAndOneUser_TwoUserAssignmentsForTwoPeriodValues() {
        // given
        addParameterPeriodValue(PERIOD_VALUE_KEY2, "B",
                DateTimeHandling.calculateMillis("2013-04-04 00:00:00"),
                DateTimeHandling.calculateMillis("2013-04-08 00:00:00"));
        addParameterPeriodValue(PERIOD_VALUE_KEY1, "A",
                DateTimeHandling.calculateMillis("2013-04-01 00:00:00"),
                DateTimeHandling.calculateMillis("2013-04-04 00:00:00"));
        addUserAssignment(USER_KEY,
                DateTimeHandling.calculateMillis("2013-04-06 00:00:00"),
                DateTimeHandling.calculateMillis("2013-04-07 00:00:00"));
        addUserAssignment(USER_KEY,
                DateTimeHandling.calculateMillis("2013-04-02 00:00:00"),
                DateTimeHandling.calculateMillis("2013-04-03 00:00:00"));

        // when
        calculator.computeParameterUserFactorForOneTimeSliceAndOneUser(
                timeSlice, periodValues, userAssignments);

        // then
        assertEquals(2, periodValues.size());
        // From 2013-04-06 00:00:00 to 2013-04-08 00:00:00
        assertEquals(PERIOD_VALUE_KEY2, periodValues.get(0).getKey()
                .longValue());
        assertEquals(0.28571428, periodValues.get(0).getUserAssignmentFactor(),
                0.000001);
        // From 2013-04-01 00:00:00 to 2013-04-06 00:00:00
        assertEquals(PERIOD_VALUE_KEY1, periodValues.get(1).getKey()
                .longValue());
        assertEquals(0.71428571, periodValues.get(1).getUserAssignmentFactor(),
                0.000001);
    }

    /**
     * Adds a primitive-typed parameter period value for the shared
     * {@code idData} to the fixture list.
     */
    private void addParameterPeriodValue(long key, String value,
            long startTime, long endTime) {
        XParameterPeriodPrimitiveType periodValue = new XParameterPeriodPrimitiveType(
                idData, null, null);
        periodValue.setKey(Long.valueOf(key));
        periodValue.setValue(value);
        periodValue.setStartTime(startTime);
        periodValue.setEndTime(endTime);
        periodValues.add(periodValue);
    }

    /**
     * Adds a user assignment with the given usage interval to the fixture
     * list.
     */
    private void addUserAssignment(long key, long startTime, long endTime) {
        UserAssignment userAssignment = new UserAssignment();
        userAssignment.setUserKey(key);
        userAssignment.setUsageStartTime(startTime);
        userAssignment.setUsageEndTime(endTime);
        userAssignments.add(userAssignment);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal;
import java.io.Externalizable;
import java.io.IOException;
import java.io.InvalidObjectException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.io.ObjectStreamException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.ExecutorService;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteException;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.internal.managers.checkpoint.GridCheckpointManager;
import org.apache.ignite.internal.managers.collision.GridCollisionManager;
import org.apache.ignite.internal.managers.communication.GridIoManager;
import org.apache.ignite.internal.managers.deployment.GridDeploymentManager;
import org.apache.ignite.internal.managers.discovery.GridDiscoveryManager;
import org.apache.ignite.internal.managers.eventstorage.GridEventStorageManager;
import org.apache.ignite.internal.managers.failover.GridFailoverManager;
import org.apache.ignite.internal.managers.indexing.GridIndexingManager;
import org.apache.ignite.internal.managers.loadbalancer.GridLoadBalancerManager;
import org.apache.ignite.internal.managers.swapspace.GridSwapSpaceManager;
import org.apache.ignite.internal.processors.affinity.GridAffinityProcessor;
import org.apache.ignite.internal.processors.cache.CacheConflictResolutionManager;
import org.apache.ignite.internal.processors.cache.GridCacheProcessor;
import org.apache.ignite.internal.processors.cache.binary.CacheObjectBinaryProcessorImpl;
import org.apache.ignite.internal.processors.cacheobject.IgniteCacheObjectProcessor;
import org.apache.ignite.internal.processors.clock.GridClockSource;
import org.apache.ignite.internal.processors.clock.GridClockSyncProcessor;
import org.apache.ignite.internal.processors.clock.GridJvmClockSource;
import org.apache.ignite.internal.processors.closure.GridClosureProcessor;
import org.apache.ignite.internal.processors.cluster.ClusterProcessor;
import org.apache.ignite.internal.processors.continuous.GridContinuousProcessor;
import org.apache.ignite.internal.processors.datastreamer.DataStreamProcessor;
import org.apache.ignite.internal.processors.datastructures.DataStructuresProcessor;
import org.apache.ignite.internal.processors.hadoop.HadoopProcessorAdapter;
import org.apache.ignite.internal.processors.igfs.IgfsHelper;
import org.apache.ignite.internal.processors.igfs.IgfsProcessorAdapter;
import org.apache.ignite.internal.processors.job.GridJobProcessor;
import org.apache.ignite.internal.processors.jobmetrics.GridJobMetricsProcessor;
import org.apache.ignite.internal.processors.nodevalidation.DiscoveryNodeValidationProcessor;
import org.apache.ignite.internal.processors.odbc.OdbcProcessor;
import org.apache.ignite.internal.processors.offheap.GridOffHeapProcessor;
import org.apache.ignite.internal.processors.platform.PlatformProcessor;
import org.apache.ignite.internal.processors.plugin.IgnitePluginProcessor;
import org.apache.ignite.internal.processors.port.GridPortProcessor;
import org.apache.ignite.internal.processors.query.GridQueryProcessor;
import org.apache.ignite.internal.processors.resource.GridResourceProcessor;
import org.apache.ignite.internal.processors.rest.GridRestProcessor;
import org.apache.ignite.internal.processors.schedule.IgniteScheduleProcessorAdapter;
import org.apache.ignite.internal.processors.security.GridSecurityProcessor;
import org.apache.ignite.internal.processors.segmentation.GridSegmentationProcessor;
import org.apache.ignite.internal.processors.service.GridServiceProcessor;
import org.apache.ignite.internal.processors.session.GridTaskSessionProcessor;
import org.apache.ignite.internal.processors.task.GridTaskProcessor;
import org.apache.ignite.internal.processors.timeout.GridTimeoutProcessor;
import org.apache.ignite.internal.util.IgniteExceptionRegistry;
import org.apache.ignite.internal.util.spring.IgniteSpringHelper;
import org.apache.ignite.internal.util.tostring.GridToStringExclude;
import org.apache.ignite.internal.util.tostring.GridToStringInclude;
import org.apache.ignite.internal.util.typedef.X;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.plugin.PluginNotFoundException;
import org.apache.ignite.plugin.PluginProvider;
import org.apache.ignite.thread.IgniteStripedThreadPoolExecutor;
import org.jetbrains.annotations.Nullable;
import static org.apache.ignite.IgniteSystemProperties.IGNITE_DAEMON;
import static org.apache.ignite.internal.IgniteComponentType.SPRING;
/**
* Implementation of kernal context.
*/
@GridToStringExclude
public class GridKernalContextImpl implements GridKernalContext, Externalizable {
/** Serial version UID. */
private static final long serialVersionUID = 0L;

/** Per-thread stash used during externalization (NOTE(review): presumably
 * carries the grid name between readExternal and readResolve — confirm). */
private static final ThreadLocal<String> stash = new ThreadLocal<>();

/*
 * Managers.
 * ========
 */

/** Deployment manager. */
@GridToStringExclude
private GridDeploymentManager depMgr;

/** Communication (I/O) manager. */
@GridToStringExclude
private GridIoManager ioMgr;

/** Discovery manager. */
@GridToStringExclude
private GridDiscoveryManager discoMgr;

/** Checkpoint manager. */
@GridToStringExclude
private GridCheckpointManager cpMgr;

/** Event storage manager. */
@GridToStringExclude
private GridEventStorageManager evtMgr;

/** Failover manager. */
@GridToStringExclude
private GridFailoverManager failoverMgr;

/** Collision manager. */
@GridToStringExclude
private GridCollisionManager colMgr;

/** Load balancing manager. */
@GridToStringExclude
private GridLoadBalancerManager loadMgr;

/** Security processor. */
@GridToStringExclude
private GridSecurityProcessor authProc;

/** Swap space manager. */
@GridToStringExclude
private GridSwapSpaceManager swapspaceMgr;

/** Indexing manager. */
@GridToStringExclude
private GridIndexingManager indexingMgr;

/*
 * Processors.
 * ==========
 */

/** ODBC processor. */
@GridToStringInclude
private OdbcProcessor odbcProc;

/** Query processor. */
@GridToStringInclude
private GridQueryProcessor qryProc;

/** Task processor. */
@GridToStringInclude
private GridTaskProcessor taskProc;

/** Job processor. */
@GridToStringInclude
private GridJobProcessor jobProc;

/** Timeout processor. */
@GridToStringInclude
private GridTimeoutProcessor timeProc;

/** Clock synchronization processor. */
@GridToStringInclude
private GridClockSyncProcessor clockSyncProc;

/** Resource processor. */
@GridToStringInclude
private GridResourceProcessor rsrcProc;

/** Job metrics processor. */
@GridToStringInclude
private GridJobMetricsProcessor metricsProc;

/** Closure processor. */
@GridToStringInclude
private GridClosureProcessor closProc;

/** Service processor. */
@GridToStringInclude
private GridServiceProcessor svcProc;

/** Cache processor. */
@GridToStringInclude
private GridCacheProcessor cacheProc;

/** Task session processor. */
@GridToStringInclude
private GridTaskSessionProcessor sesProc;

/** Port processor. */
@GridToStringInclude
private GridPortProcessor portProc;

/** Off-heap processor. */
@GridToStringInclude
private GridOffHeapProcessor offheapProc;

/** Schedule processor. */
@GridToStringInclude
private IgniteScheduleProcessorAdapter scheduleProc;

/** REST processor. */
@GridToStringInclude
private GridRestProcessor restProc;

/** Data streamer processor (raw type; parameterized via dataStream()). */
@GridToStringInclude
private DataStreamProcessor dataLdrProc;

/** IGFS processor. */
@GridToStringInclude
private IgfsProcessorAdapter igfsProc;

/** IGFS helper (registered via addHelper). */
@GridToStringInclude
private IgfsHelper igfsHelper;

/** Segmentation processor. */
@GridToStringInclude
private GridSegmentationProcessor segProc;

/** Affinity processor. */
@GridToStringInclude
private GridAffinityProcessor affProc;

/** Continuous processor. */
@GridToStringExclude
private GridContinuousProcessor contProc;

/** Hadoop processor. */
@GridToStringExclude
private HadoopProcessorAdapter hadoopProc;

/** Plugin processor. */
@GridToStringExclude
private IgnitePluginProcessor pluginProc;

/** Cache object processor. */
@GridToStringExclude
private IgniteCacheObjectProcessor cacheObjProc;

/** Platform processor. */
@GridToStringExclude
private PlatformProcessor platformProc;

/** Optional Spring helper; {@code null} when the spring component is not
 * available on the classpath (see constructor). */
@GridToStringExclude
private IgniteSpringHelper spring;

/** Cluster processor. */
@GridToStringExclude
private ClusterProcessor cluster;

/** Data structures processor. */
@GridToStringExclude
private DataStructuresProcessor dataStructuresProc;

/** Registered components, in registration order (see add(...)). */
@GridToStringExclude
private List<GridComponent> comps = new LinkedList<>();

/** Public executor service. */
@GridToStringExclude
protected ExecutorService execSvc;

/** System executor service. */
@GridToStringExclude
protected ExecutorService sysExecSvc;

/** P2P executor service. */
@GridToStringExclude
private ExecutorService p2pExecSvc;

/** Management executor service. */
@GridToStringExclude
private ExecutorService mgmtExecSvc;

/** IGFS executor service. */
@GridToStringExclude
private ExecutorService igfsExecSvc;

/** REST executor service. */
@GridToStringExclude
protected ExecutorService restExecSvc;

/** Callback executor service. */
@GridToStringExclude
protected IgniteStripedThreadPoolExecutor callbackExecSvc;

/** Context attributes. */
@GridToStringExclude
private Map<String, Object> attrs = new HashMap<>();

/** Grid instance this context belongs to. */
private IgniteEx grid;

/** Utility cache pool. */
private ExecutorService utilityCachePool;

/** Marshaller cache pool. */
private ExecutorService marshCachePool;

/** Grid configuration. */
private IgniteConfiguration cfg;

/** Kernal gateway. */
private GridKernalGateway gw;

/** Network segmented flag. */
private volatile boolean segFlag;

/** Time source. */
private GridClockSource clockSrc = new GridJvmClockSource();

/** Performance suggestions. */
private final GridPerformanceSuggestions perf = new GridPerformanceSuggestions();

/** Marshaller context. */
private MarshallerContextImpl marshCtx;

/** Cached local node; lazily resolved from discovery in localNodeId(). */
private ClusterNode locNode;

/** Disconnected flag. */
private volatile boolean disconnected;
/**
 * Public no-arg constructor required by {@link Externalizable}.
 */
public GridKernalContextImpl() {
    // No-op.
}
/**
 * Creates new kernal context.
 *
 * @param log Logger.
 * @param grid Grid instance managed by kernal.
 * @param cfg Grid configuration.
 * @param gw Kernal gateway.
 * @param utilityCachePool Utility cache pool.
 * @param marshCachePool Marshaller cache pool.
 * @param execSvc Public executor service.
 * @param sysExecSvc System executor service.
 * @param p2pExecSvc P2P executor service.
 * @param mgmtExecSvc Management executor service.
 * @param igfsExecSvc IGFS executor service.
 * @param restExecSvc REST executor service.
 * @param callbackExecSvc Callback executor service.
 * @param plugins Plugin providers.
 * @throws IgniteCheckedException In case of error.
 */
@SuppressWarnings("TypeMayBeWeakened")
protected GridKernalContextImpl(
    GridLoggerProxy log,
    IgniteEx grid,
    IgniteConfiguration cfg,
    GridKernalGateway gw,
    ExecutorService utilityCachePool,
    ExecutorService marshCachePool,
    ExecutorService execSvc,
    ExecutorService sysExecSvc,
    ExecutorService p2pExecSvc,
    ExecutorService mgmtExecSvc,
    ExecutorService igfsExecSvc,
    ExecutorService restExecSvc,
    IgniteStripedThreadPoolExecutor callbackExecSvc,
    List<PluginProvider> plugins) throws IgniteCheckedException {
    assert grid != null;
    assert cfg != null;
    assert gw != null;

    this.grid = grid;
    this.cfg = cfg;
    this.gw = gw;
    this.utilityCachePool = utilityCachePool;
    this.marshCachePool = marshCachePool;
    this.execSvc = execSvc;
    this.sysExecSvc = sysExecSvc;
    this.p2pExecSvc = p2pExecSvc;
    this.mgmtExecSvc = mgmtExecSvc;
    this.igfsExecSvc = igfsExecSvc;
    this.restExecSvc = restExecSvc;
    this.callbackExecSvc = callbackExecSvc;

    marshCtx = new MarshallerContextImpl(plugins);

    try {
        // Spring support is optional; it is only needed to extract
        // userVersion from META-INF/ignite.xml, so a load failure is
        // tolerated and merely logged at debug level.
        spring = SPRING.create(false);
    }
    catch (IgniteCheckedException ignored) {
        if (log != null && log.isDebugEnabled())
            log.debug("Failed to load spring component, will not be able to extract userVersion from " +
                "META-INF/ignite.xml.");
    }
}
/** {@inheritDoc} */
@Override public Iterator<GridComponent> iterator() {
    return comps.iterator();
}

/** {@inheritDoc} */
@Override public List<GridComponent> components() {
    // Read-only view: components must be registered via add(...), never by
    // mutating this list directly.
    return Collections.unmodifiableList(comps);
}
/**
 * Registers a component and includes it into the components list.
 *
 * @param comp Manager to add.
 */
public void add(GridComponent comp) {
    add(comp, true);
}
/**
 * Registers a component, routing it by its concrete type into the matching
 * strongly-typed manager/processor field.
 *
 * @param comp Manager to add.
 * @param addToList If {@code true} component is added to components list.
 */
public void add(GridComponent comp, boolean addToList) {
    assert comp != null;

    /*
     * Managers.
     * ========
     */
    if (comp instanceof GridDeploymentManager)
        depMgr = (GridDeploymentManager)comp;
    else if (comp instanceof GridIoManager)
        ioMgr = (GridIoManager)comp;
    else if (comp instanceof GridDiscoveryManager)
        discoMgr = (GridDiscoveryManager)comp;
    else if (comp instanceof GridCheckpointManager)
        cpMgr = (GridCheckpointManager)comp;
    else if (comp instanceof GridEventStorageManager)
        evtMgr = (GridEventStorageManager)comp;
    else if (comp instanceof GridFailoverManager)
        failoverMgr = (GridFailoverManager)comp;
    else if (comp instanceof GridCollisionManager)
        colMgr = (GridCollisionManager)comp;
    else if (comp instanceof GridSecurityProcessor)
        authProc = (GridSecurityProcessor)comp;
    else if (comp instanceof GridLoadBalancerManager)
        loadMgr = (GridLoadBalancerManager)comp;
    else if (comp instanceof GridSwapSpaceManager)
        swapspaceMgr = (GridSwapSpaceManager)comp;
    else if (comp instanceof GridIndexingManager)
        indexingMgr = (GridIndexingManager)comp;
    /*
     * Processors.
     * ==========
     */
    else if (comp instanceof GridTaskProcessor)
        taskProc = (GridTaskProcessor)comp;
    else if (comp instanceof GridJobProcessor)
        jobProc = (GridJobProcessor)comp;
    else if (comp instanceof GridTimeoutProcessor)
        timeProc = (GridTimeoutProcessor)comp;
    else if (comp instanceof GridClockSyncProcessor)
        clockSyncProc = (GridClockSyncProcessor)comp;
    else if (comp instanceof GridResourceProcessor)
        rsrcProc = (GridResourceProcessor)comp;
    else if (comp instanceof GridJobMetricsProcessor)
        metricsProc = (GridJobMetricsProcessor)comp;
    else if (comp instanceof GridCacheProcessor)
        cacheProc = (GridCacheProcessor)comp;
    else if (comp instanceof GridTaskSessionProcessor)
        sesProc = (GridTaskSessionProcessor)comp;
    else if (comp instanceof GridPortProcessor)
        portProc = (GridPortProcessor)comp;
    else if (comp instanceof GridClosureProcessor)
        closProc = (GridClosureProcessor)comp;
    else if (comp instanceof GridServiceProcessor)
        svcProc = (GridServiceProcessor)comp;
    else if (comp instanceof IgniteScheduleProcessorAdapter)
        scheduleProc = (IgniteScheduleProcessorAdapter)comp;
    else if (comp instanceof GridSegmentationProcessor)
        segProc = (GridSegmentationProcessor)comp;
    else if (comp instanceof GridAffinityProcessor)
        affProc = (GridAffinityProcessor)comp;
    else if (comp instanceof GridRestProcessor)
        restProc = (GridRestProcessor)comp;
    else if (comp instanceof DataStreamProcessor)
        dataLdrProc = (DataStreamProcessor)comp;
    else if (comp instanceof IgfsProcessorAdapter)
        igfsProc = (IgfsProcessorAdapter)comp;
    else if (comp instanceof GridOffHeapProcessor)
        offheapProc = (GridOffHeapProcessor)comp;
    else if (comp instanceof GridContinuousProcessor)
        contProc = (GridContinuousProcessor)comp;
    else if (comp instanceof HadoopProcessorAdapter)
        hadoopProc = (HadoopProcessorAdapter)comp;
    else if (comp instanceof IgniteCacheObjectProcessor)
        cacheObjProc = (IgniteCacheObjectProcessor)comp;
    else if (comp instanceof IgnitePluginProcessor)
        pluginProc = (IgnitePluginProcessor)comp;
    else if (comp instanceof GridQueryProcessor)
        qryProc = (GridQueryProcessor)comp;
    else if (comp instanceof OdbcProcessor)
        odbcProc = (OdbcProcessor)comp;
    else if (comp instanceof DataStructuresProcessor)
        dataStructuresProc = (DataStructuresProcessor)comp;
    else if (comp instanceof ClusterProcessor)
        cluster = (ClusterProcessor)comp;
    else if (comp instanceof PlatformProcessor)
        platformProc = (PlatformProcessor)comp;
    // Anything unrecognized (other than node validation) must be a
    // plugin-provided component.
    else if (!(comp instanceof DiscoveryNodeValidationProcessor))
        assert (comp instanceof GridPluginComponent) : "Unknown manager class: " + comp.getClass();

    if (addToList)
        comps.add(comp);
}
/**
 * Registers a helper object; currently only {@link IgfsHelper} is known.
 *
 * @param helper Helper to add.
 */
public void addHelper(Object helper) {
    assert helper != null;

    if (helper instanceof IgfsHelper)
        igfsHelper = (IgfsHelper)helper;
    else
        assert false : "Unknown helper class: " + helper.getClass();
}
/** {@inheritDoc} */
@Override public boolean isStopping() {
    // Delegate to the kernal, which owns the stop sequence.
    return ((IgniteKernal)grid).isStopping();
}
/** {@inheritDoc} */
@Override public UUID localNodeId() {
    // Resolve and cache the local node lazily; until discovery is up,
    // fall back to the configured node ID.
    ClusterNode node = locNode;

    if (node == null && discoMgr != null)
        locNode = node = discoMgr.localNode();

    return node != null ? node.id() : config().getNodeId();
}
// Typed accessors for the managers/processors registered via
// add(GridComponent, boolean) and the context's configuration objects.

/** {@inheritDoc} */
@Override public String gridName() {
    return cfg.getGridName();
}

/** {@inheritDoc} */
@Override public GridKernalGateway gateway() {
    return gw;
}

/** {@inheritDoc} */
@Override public IgniteEx grid() {
    return grid;
}

/** {@inheritDoc} */
@Override public IgniteConfiguration config() {
    return cfg;
}

/** {@inheritDoc} */
@Override public GridTaskProcessor task() {
    return taskProc;
}

/** {@inheritDoc} */
@Override public GridJobProcessor job() {
    return jobProc;
}

/** {@inheritDoc} */
@Override public GridTimeoutProcessor timeout() {
    return timeProc;
}

/** {@inheritDoc} */
@Override public GridClockSyncProcessor clockSync() {
    return clockSyncProc;
}

/** {@inheritDoc} */
@Override public GridResourceProcessor resource() {
    return rsrcProc;
}

/** {@inheritDoc} */
@Override public GridJobMetricsProcessor jobMetric() {
    return metricsProc;
}

/** {@inheritDoc} */
@Override public GridCacheProcessor cache() {
    return cacheProc;
}

/** {@inheritDoc} */
@Override public GridTaskSessionProcessor session() {
    return sesProc;
}

/** {@inheritDoc} */
@Override public GridClosureProcessor closure() {
    return closProc;
}

/** {@inheritDoc} */
@Override public GridServiceProcessor service() {
    return svcProc;
}

/** {@inheritDoc} */
@Override public GridPortProcessor ports() {
    return portProc;
}

/** {@inheritDoc} */
@Override public GridOffHeapProcessor offheap() {
    return offheapProc;
}

/** {@inheritDoc} */
@Override public IgniteScheduleProcessorAdapter schedule() {
    return scheduleProc;
}

/** {@inheritDoc} */
@Override public GridDeploymentManager deploy() {
    return depMgr;
}

/** {@inheritDoc} */
@Override public GridIoManager io() {
    return ioMgr;
}

/** {@inheritDoc} */
@Override public GridDiscoveryManager discovery() {
    return discoMgr;
}

/** {@inheritDoc} */
@Override public GridCheckpointManager checkpoint() {
    return cpMgr;
}

/** {@inheritDoc} */
@Override public GridEventStorageManager event() {
    return evtMgr;
}

/** {@inheritDoc} */
@Override public GridFailoverManager failover() {
    return failoverMgr;
}

/** {@inheritDoc} */
@Override public GridCollisionManager collision() {
    return colMgr;
}

/** {@inheritDoc} */
@Override public GridSecurityProcessor security() {
    return authProc;
}

/** {@inheritDoc} */
@Override public GridLoadBalancerManager loadBalancing() {
    return loadMgr;
}

/** {@inheritDoc} */
@Override public GridSwapSpaceManager swap() {
    return swapspaceMgr;
}

/** {@inheritDoc} */
@Override public GridIndexingManager indexing() {
    return indexingMgr;
}

/** {@inheritDoc} */
@Override public GridAffinityProcessor affinity() {
    return affProc;
}

/** {@inheritDoc} */
@Override public GridRestProcessor rest() {
    return restProc;
}

/** {@inheritDoc} */
@Override public GridSegmentationProcessor segmentation() {
    return segProc;
}

/** {@inheritDoc} */
@SuppressWarnings("unchecked")
@Override public <K, V> DataStreamProcessor<K, V> dataStream() {
    // Field is stored raw; the cast re-applies the caller's type params.
    return (DataStreamProcessor<K, V>)dataLdrProc;
}

/** {@inheritDoc} */
@Override public IgfsProcessorAdapter igfs() {
    return igfsProc;
}

/** {@inheritDoc} */
@Override public IgfsHelper igfsHelper() {
    return igfsHelper;
}

/** {@inheritDoc} */
@Override public GridContinuousProcessor continuous() {
    return contProc;
}
/** {@inheritDoc} */
@Override public HadoopProcessorAdapter hadoop() {
return hadoopProc;
}
/** {@inheritDoc} */
@Override public ExecutorService utilityCachePool() {
return utilityCachePool;
}
/** {@inheritDoc} */
@Override public ExecutorService marshallerCachePool() {
return marshCachePool;
}
/** {@inheritDoc} */
@Override public IgniteStripedThreadPoolExecutor asyncCallbackPool() {
return callbackExecSvc;
}
/** {@inheritDoc} */
@Override public IgniteCacheObjectProcessor cacheObjects() {
return cacheObjProc;
}
/** {@inheritDoc} */
@Override public GridQueryProcessor query() {
return qryProc;
}
/** {@inheritDoc} */
@Override public OdbcProcessor odbc() {
return odbcProc;
}
/** {@inheritDoc} */
@Override public DataStructuresProcessor dataStructures() {
return dataStructuresProc;
}
/** {@inheritDoc} */
@Override public IgniteLogger log(String ctgr) {
return config().getGridLogger().getLogger(ctgr);
}
/** {@inheritDoc} */
@Override public IgniteLogger log(Class<?> cls) {
return log(cls.getName());
}
/** {@inheritDoc} */
@Override public void markSegmented() {
segFlag = true;
}
/** {@inheritDoc} */
@Override public boolean segmented() {
return segFlag;
}
/** {@inheritDoc} */
@Override public GridClockSource timeSource() {
return clockSrc;
}
/**
* Sets time source. For test purposes only.
*
* @param clockSrc Time source.
*/
public void timeSource(GridClockSource clockSrc) {
this.clockSrc = clockSrc;
}
/** {@inheritDoc} */
@Override public GridPerformanceSuggestions performance() {
return perf;
}
/** {@inheritDoc} */
@Override public void printMemoryStats() {
X.println(">>> ");
X.println(">>> Grid memory stats [grid=" + gridName() + ']');
for (GridComponent comp : comps)
comp.printMemoryStats();
}
/** {@inheritDoc} */
@Override public boolean isDaemon() {
return config().isDaemon() || "true".equalsIgnoreCase(System.getProperty(IGNITE_DAEMON));
}
/** {@inheritDoc} */
@Override public String userVersion(ClassLoader ldr) {
return spring != null ? spring.userVersion(ldr, log(spring.getClass())) : U.DFLT_USER_VERSION;
}
/** {@inheritDoc} */
@Override public PluginProvider pluginProvider(String name) throws PluginNotFoundException {
    // Look the provider up by name; absence is an error for this method.
    PluginProvider provider = pluginProc.pluginProvider(name);

    if (provider != null)
        return provider;

    throw new PluginNotFoundException(name);
}
/** {@inheritDoc} */
@SuppressWarnings("unchecked")
@Nullable @Override public <T> T createComponent(Class<T> cls) {
    // Plugins get the first chance to supply a replacement component.
    T comp = pluginProc.createComponent(cls);

    if (comp != null)
        return comp;

    // Fall back to the built-in implementations for known component types.
    if (IgniteCacheObjectProcessor.class.equals(cls))
        return (T)new CacheObjectBinaryProcessorImpl(this);

    // No conflict resolution manager by default.
    if (CacheConflictResolutionManager.class.equals(cls))
        return null;

    throw new IgniteException("Unsupported component type: " + cls);
}
/**
* @return Plugin manager.
*/
@Override public IgnitePluginProcessor plugins() {
return pluginProc;
}
/** {@inheritDoc} */
@Override public void writeExternal(ObjectOutput out) throws IOException {
// Only the grid name is serialized; the context is re-resolved from the
// local Ignite instance on deserialization (see readResolve()).
U.writeString(out, grid.name());
}
/** {@inheritDoc} */
@Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
U.readString(in); // Read for compatibility only. See #readResolve().
}
/**
* Reconstructs object on unmarshalling.
*
* @return Reconstructed object.
* @throws ObjectStreamException Thrown in case of unmarshalling error.
*/
protected Object readResolve() throws ObjectStreamException {
try {
// Resolve against the Ignite instance local to this JVM rather than
// deserializing the full context state.
return IgnitionEx.localIgnite().context();
}
catch (IllegalStateException e) {
// No local Ignite instance in this thread/JVM -> invalid stream.
throw U.withCause(new InvalidObjectException(e.getMessage()), e);
}
finally {
// Always clear the thread-local stash used during deserialization.
stash.remove();
}
}
/** {@inheritDoc} */
@Override public ExecutorService getExecutorService() {
return execSvc;
}
/** {@inheritDoc} */
@Override public ExecutorService getSystemExecutorService() {
return sysExecSvc;
}
/** {@inheritDoc} */
@Override public ExecutorService getManagementExecutorService() {
return mgmtExecSvc;
}
/** {@inheritDoc} */
@Override public ExecutorService getPeerClassLoadingExecutorService() {
return p2pExecSvc;
}
/** {@inheritDoc} */
@Override public ExecutorService getIgfsExecutorService() {
return igfsExecSvc;
}
/** {@inheritDoc} */
@Override public ExecutorService getRestExecutorService() {
return restExecSvc;
}
/** {@inheritDoc} */
@Override public IgniteExceptionRegistry exceptionRegistry() {
// Registry is a process-wide singleton, not per-context state.
return IgniteExceptionRegistry.get();
}
/** {@inheritDoc} */
@Override public Object nodeAttribute(String key) {
return attrs.get(key);
}
/** {@inheritDoc} */
@Override public boolean hasNodeAttribute(String key) {
return attrs.containsKey(key);
}
/** {@inheritDoc} */
@Override public Object addNodeAttribute(String key, Object val) {
return attrs.put(key, val);
}
/** {@inheritDoc} */
@Override public Map<String, Object> nodeAttributes() {
// NOTE(review): exposes the internal mutable attribute map directly;
// callers can bypass addNodeAttribute(). Confirm this is intentional.
return attrs;
}
/** {@inheritDoc} */
@Override public ClusterProcessor cluster() {
return cluster;
}
/** {@inheritDoc} */
@Override public MarshallerContextImpl marshallerContext() {
return marshCtx;
}
/** {@inheritDoc} */
@Override public boolean clientNode() {
// Daemon nodes are treated as client nodes as well.
// NOTE(review): if isClientMode() returns a boxed Boolean, confirm it
// cannot be null here (unboxing would throw NPE).
return cfg.isClientMode() || cfg.isDaemon();
}
/** {@inheritDoc} */
@Override public boolean clientDisconnected() {
    // Lazily resolve the local node: discovery may not have started yet
    // when this is first queried.
    if (locNode == null)
        locNode = discoMgr != null ? discoMgr.localNode() : null;

    // Only a resolved client node can be in the "disconnected" state.
    // (Replaces the non-idiomatic `cond ? expr : false` ternary; behavior
    // is identical.)
    return locNode != null && locNode.isClient() && disconnected;
}
/** {@inheritDoc} */
@Override public PlatformProcessor platform() {
return platformProc;
}
/**
* @param disconnected Disconnected flag.
*/
void disconnected(boolean disconnected) {
// Package-private setter used when the client connection state changes;
// read back by clientDisconnected().
this.disconnected = disconnected;
}
/** {@inheritDoc} */
@Override public String toString() {
return S.toString(GridKernalContextImpl.class, this);
}
}
| |
/*----------------------------------------------------------------------------*/
/* Copyright (c) FIRST 2008. All Rights Reserved. */
/* Open Source Software - may be modified and shared by FRC teams. The code */
/* must be accompanied by the FIRST BSD license file in the root directory of */
/* the project. */
/*----------------------------------------------------------------------------*/
package com.milkenknights;
import com.milkenknights.InsightLT.DecimalData;
import com.milkenknights.InsightLT.InsightLT;
import com.milkenknights.InsightLT.StringData;
import edu.wpi.first.wpilibj.Compressor;
import edu.wpi.first.wpilibj.Counter;
import edu.wpi.first.wpilibj.DigitalInput;
import edu.wpi.first.wpilibj.DriverStation;
import edu.wpi.first.wpilibj.DriverStationLCD;
import edu.wpi.first.wpilibj.Encoder;
import edu.wpi.first.wpilibj.Gyro;
import edu.wpi.first.wpilibj.IterativeRobot;
import edu.wpi.first.wpilibj.Relay;
import edu.wpi.first.wpilibj.RobotDrive;
import edu.wpi.first.wpilibj.SafePWM;
import edu.wpi.first.wpilibj.SpeedController;
import edu.wpi.first.wpilibj.Talon;
import edu.wpi.first.wpilibj.Timer;
import edu.wpi.first.wpilibj.CounterBase.EncodingType;
import edu.wpi.first.wpilibj.command.Command;
import edu.wpi.first.wpilibj.command.Scheduler;
import edu.wpi.first.wpilibj.smartdashboard.SendableChooser;
import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard;
/**
* The VM is configured to automatically run this class, and to call the
* functions corresponding to each mode, as described in the IterativeRobot
* documentation. If you change the name of this class or the package after
* creating this project, you must also update the manifest file in the resource
* directory.
*/
public class Knight extends IterativeRobot {
// Constants...
// Dead zone applied to joystick axes before use.
private static final double JITTER_RANGE = 0.008;
// PWM channels for drive and mechanism speed controllers.
private static final int LEFT_MOTOR = 4;
private static final int RIGHT_MOTOR = 9;
private static final int SHOOTER_TALON = 6;
private static final int ACTUATOR_TALON = 1;
private static final int KICKER_TALON = 5;
// Compressor wiring.
private static final int COMPRESSOR_PRESSURE_SWITCH = 7;
private static final int COMPRESSOR_RELAY_CHANNEL = 1;
// Solenoid channels for the gear shifter, climbing hook and caster.
private static final int DRIVE_SOLENOID_A = 1;
private static final int DRIVE_SOLENOID_B = 2;
private static final int HOOK_SOLENOID_A = 3;
private static final int HOOK_SOLENOID_B = 4;
private static final int CASTER_A = 7;
private static final int CASTER_B = 8;
// Digital channels for counters, quadrature encoders and the auton switch.
private static final int KICKER_ENC = 8;
private static final int SHOOTER_ENC = 9;
private static final int LEFT_ENC_A = 11;
private static final int LEFT_ENC_B = 12;
private static final int RIGHT_ENC_A = 13;
private static final int RIGHT_ENC_B = 14;
private static final int GYRO = 1;
private static final int AUTON_CHECK_DI = 14;
// For slow mode
private static final double SLOW_MOD = 0.6;
// For bang bang mode
public static final double SHOOTER_RPM_HIGH = 3700;
public static final double SHOOTER_RPM_LOW = 3400;
// For voltage mode
public static final double SHOOTER_POWER_HIGH = 0.7;
public static final double SHOOTER_POWER_LOW = 0.6;
// Pseudo-enumerator for Auton options...
// NOTE(review): these are mutable public statics; they look like they
// should be declared 'final'.
public static int AUTON_CENTER = 0;
public static int AUTON_SIDE = 1;
JStick xbox; // XBox controller
JStick atk; // Logitech ATK3 controller
// True when cheesy drive is active, false for tank drive.
private boolean usingCheesy;
private DriverStationLCD lcd;
private Compressor compressor;
// Pair state "true" means high gear,
// Pair state "false" means low gear
public SolenoidPair driveGear;
// used to remember the gear that is being used
// while in and out of slow mode
private boolean normalGear;
private SolenoidPair hookClimb;
private SolenoidPair caster;
public Drive drive;
private SpeedController shooter;
private SpeedController actuator;
private SpeedController kicker;
// Shooter control-mode selector values (see teleopPeriodic()).
private static final int SHOOTER_MODE_VOLTAGE = 0;
private static final int SHOOTER_BANG_BANG = 1;
private static final int SHOOTER_PID = 2;
private static final int SHOOTER_COMBINED = 3;
private int shooterMode;
// Single-channel counters used to measure shooter/kicker wheel speed.
private Counter shooterEnc;
private Counter kickerEnc;
public Encoder leftEnc;
public Encoder rightEnc;
private Gyro gyro;
private Relay light;
Command autonomousCommand;
SendableChooser autoChooser;
// Used to determine which autonomous procedure to use
private DigitalInput autonCheck;
// stuff for the InsightLT display
private InsightLT display;
private DecimalData disp_batteryVoltage;
private StringData disp_message;
// Runs the voltage-mode shooter at the default 65% fraction.
private void defaultVoltageShooter(boolean on) {
voltageShooter(on, 0.65);
}
// Drives shooter and kicker open-loop at the given voltage fraction, or
// stops both when 'on' is false.
private void voltageShooter(boolean on, double frac) {
double output = on ? Utils.voltageSpeed(frac) : 0;
shooter.set(output);
kicker.set(output);
}
// Bang-bang speed control: both wheels get the same output computed from the
// shooter counter against targetRPM.
public void bangBangShooter(boolean on, double targetRPM) {
double shooterOutput;
if (on) {
shooterOutput = Utils.getBangBang(targetRPM, 0.6, shooterEnc);
} else {
shooterOutput = 0;
}
shooter.set(shooterOutput);
kicker.set(shooterOutput);
}
// Mixed mode: bang-bang on the shooter wheel, open-loop voltage on the kicker.
private void combinedShooter(boolean on, double frac, double targetRPM) {
if (on) {
// shooter gets bang bang at the low speed
// kicker gets 80% voltage
shooter.set(Utils.getBangBang(targetRPM,0.6,shooterEnc));
kicker.set(Utils.voltageSpeed(frac));
} else {
shooter.set(0);
kicker.set(0);
}
}
// Stops both shooter and kicker wheels.
private void shooterOff() {
shooter.set(0);
kicker.set(0);
}
// Spins the frisbee actuator at 40% while 'on' is held, otherwise stops it.
public void defaultActuator(boolean on) {
actuator.set(on ? 0.4 : 0);
}
// Wires up all robot hardware. Construction order is kept as-is; several
// objects (display, compressor) start background behavior on creation.
public Knight() {
// get robot preferences, stored on the cRIO
drive = new Drive(new Talon(LEFT_MOTOR), new Talon(RIGHT_MOTOR));
shooter = new Talon(SHOOTER_TALON);
actuator = new Talon(ACTUATOR_TALON);
kicker = new Talon(KICKER_TALON);
//shooterMode = SHOOTER_MODE_VOLTAGE;
shooterMode = SHOOTER_BANG_BANG;
xbox = new JStick(1);
xbox.setSlow(0.3);
atk = new JStick(2);
lcd = DriverStationLCD.getInstance();
usingCheesy = true;
integral_err = 0;
prev_err = 0;
compressor = new Compressor(COMPRESSOR_PRESSURE_SWITCH,COMPRESSOR_RELAY_CHANNEL);
driveGear = new SolenoidXORPair(DRIVE_SOLENOID_A,DRIVE_SOLENOID_B);
normalGear = driveGear.get();
hookClimb = new SolenoidXANDPair(HOOK_SOLENOID_A,HOOK_SOLENOID_B);
caster = new SolenoidXANDPair(CASTER_A,CASTER_B);
kickerEnc = new Counter(KICKER_ENC);
shooterEnc = new Counter(SHOOTER_ENC);
// Left encoder is reversed so both sides read positive going forward.
leftEnc = new Encoder(LEFT_ENC_A, LEFT_ENC_B, true, EncodingType.k4X);
rightEnc = new Encoder(RIGHT_ENC_A, RIGHT_ENC_B, false, EncodingType.k4X);
// inches
leftEnc.setDistancePerPulse(0.102);
rightEnc.setDistancePerPulse(0.102);
gyro = new Gyro(GYRO);
light = new Relay(2);
//autonCheck = new DigitalInput(AUTON_CHECK_DI);
// configure the display to have two lines of text
display = new InsightLT(InsightLT.TWO_ONE_LINE_ZONES);
display.startDisplay();
// add battery display
disp_batteryVoltage = new DecimalData("Bat:");
display.registerData(disp_batteryVoltage,1);
// this shows what mode the robot is in
// i.e. teleop, autonomous, disabled
disp_message = new StringData();
display.registerData(disp_message,2);
}
/**
* This function is run when the robot is first started up and should be
* used for any initialization code.
*/
public void robotInit() {
compressor.start();
// Start in high gear.
driveGear.set(true);
drive.setInvertedMotor(RobotDrive.MotorType.kRearRight, true);
drive.setInvertedMotor(RobotDrive.MotorType.kRearLeft,true);
// Start counting sensor pulses.
kickerEnc.start();
shooterEnc.start();
leftEnc.start();
rightEnc.start();
SmartDashboard.putData(Scheduler.getInstance());
//SmartDashboard.putData("Reset Drive Encoders", rightEnc.reset(), leftEnc.reset());
// Let the drive team pick the autonomous routine from the dashboard.
autoChooser = new SendableChooser();
autoChooser.addDefault("Center Autonomous", new Auton(this, AUTON_CENTER));
autoChooser.addObject("Side Autonomous", new Auton(this, AUTON_SIDE));
SmartDashboard.putData("Autonomous Mode Chooser", autoChooser);
}
//This function is called at the start of autonomous
// NOTE(review): 'timer' is declared here but never assigned anywhere in this
// file; testInit() calls timer.start() and would throw NullPointerException.
Timer timer;
double autonStart;
int frisbeesThrown;
public void autonomousInit() {
// Start whichever autonomous command was selected on the dashboard.
autonomousCommand = (Command) autoChooser.getSelected();
autonomousCommand.start();
}
/**
* This function is called periodically during autonomous
*/
double integral_err;
double prev_err;
double last_timer;
boolean frisbeeDone;
// NOTE(review): the timing/distance constants below appear to be leftovers
// from an older timer-based autonomous; autonomousPeriodic() now only runs
// the command scheduler. Confirm before removing.
final double WAIT_AFTER_ACTUATOR = 1;
final double WAIT_AFTER_SHOOTING = WAIT_AFTER_ACTUATOR+3.5;
final double DELAY_BETWEEN_FRISBEES = 2.25;
final double FRISBEE_SHOOT_TIME = 0.25;
final double DRIVE_DISTANCE = 102;
final double SLOWDOWN_TIME = 0.25;
final double DRIVE_FORWARD_TIME = 2;
public double finishedMovingForward = -1;
public void autonomousPeriodic() {
// Command-based autonomous: just pump the scheduler each loop.
Scheduler.getInstance().run();
}
public void teleopInit() {
// Turn the light relay on when operator control starts.
light.set(Relay.Value.kForward);
}
/**
 * This function is called periodically during operator control. Reads both
 * controllers, runs the drive train and the shooter state machine, and
 * publishes telemetry to the LCD, SmartDashboard and InsightLT display.
 */
public void teleopPeriodic() {
    xbox.update();
    atk.update();

    // Press A to toggle cheesy drive.
    if (xbox.isReleased(JStick.XBOX_A)) {
        usingCheesy = !usingCheesy;
    }

    // Use LB to toggle high and low gear.
    if (xbox.isReleased(JStick.XBOX_LB)) {
        driveGear.toggle();
        normalGear = !normalGear;
    }

    // Show the solenoids status.
    lcd.println(DriverStationLCD.Line.kUser3,1,driveGear.get()?"High Gear":"Low Gear ");

    // Show if the compressor is running.
    if (compressor.getPressureSwitchValue()) {
        lcd.println(DriverStationLCD.Line.kUser6,1,"Compressor is off ");
    } else {
        lcd.println(DriverStationLCD.Line.kUser6,1,"Compressor is running");
    }

    // Joystick button 1 spins the actuator.
    defaultActuator(atk.isPressed(1));

    /*
    // change shooter modes
    if (atk.isPressed(11)) {
        shooterMode = SHOOTER_MODE_VOLTAGE;
    } else if (atk.isPressed(10)) {
        shooterMode = SHOOTER_BANG_BANG;
    } else if (atk.isPressed(9)) {
        shooterMode = SHOOTER_COMBINED;
    }
    */

    // Shooter state machine: button 2 = high setting, buttons 4/5 = low
    // setting, otherwise off. Behavior of each mode is in the helpers above.
    if (shooterMode == SHOOTER_MODE_VOLTAGE) {
        if (atk.isPressed(2)) {
            defaultVoltageShooter(true);
        } else if (atk.isPressed(4) || atk.isPressed(5)) {
            voltageShooter(true, 0.6);
        } else {
            shooterOff();
        }
        //defaultVoltageShooter(atk.isPressed(2));
    } else if (shooterMode == SHOOTER_BANG_BANG) {
        if (atk.isPressed(2)) {
            bangBangShooter(true, SHOOTER_RPM_HIGH);
        } else if (atk.isPressed(4) || atk.isPressed(5)) {
            bangBangShooter(true, SHOOTER_RPM_LOW);
        } else {
            shooterOff();
        }
    } else if (shooterMode == SHOOTER_PID) {
        // TODO: shooter PID
    } else if (shooterMode == SHOOTER_COMBINED) {
        if (atk.isPressed(2)) {
            combinedShooter(true,SHOOTER_POWER_HIGH,SHOOTER_RPM_HIGH);
        } else if (atk.isPressed(4) || atk.isPressed(5)) {
            combinedShooter(true, SHOOTER_POWER_LOW,SHOOTER_RPM_LOW);
        } else {
            shooterOff();
        }
    } else {
        shooterOff();
    }

    // Toggle the hook climb.
    if (atk.isReleased(11)) {
        hookClimb.toggle();
    }

    // Toggle the caster.
    if (xbox.isReleased(JStick.XBOX_RB)) {
        caster.toggle();
    }

    //double leftStickX = JStick.removeJitter(xbox.getAxis(JStick.XBOX_LSX), JITTER_RANGE);
    double leftStickY = JStick.removeJitter(xbox.getAxis(JStick.XBOX_LSY), JITTER_RANGE);
    double rightStickX = JStick.removeJitter(xbox.getAxis(JStick.XBOX_RSX), JITTER_RANGE);
    double rightStickY = JStick.removeJitter(xbox.getAxis(JStick.XBOX_RSY), JITTER_RANGE);

    // Pulling the trigger axis past -0.5 engages slow mode.
    boolean slowMode = xbox.getAxis(JStick.XBOX_TRIG) < -0.5;

    if (slowMode) {
        //driveGear.set(false);
    } else {
        //driveGear.set(normalGear);
    }

    if (usingCheesy) {
        drive.cheesyDrive(xbox.getSlowedAxis(JStick.XBOX_LSY)*(slowMode?SLOW_MOD:1), rightStickX,
            //xbox.isPressed(JStick.XBOX_LJ)
            // If either trigger is pressed, enable quickturn
            Math.abs(xbox.getAxis(JStick.XBOX_TRIG)) > 0.5
        );
        lcd.println(DriverStationLCD.Line.kUser4,1,"cheesy drive");
    } else {
        drive.tankDrive(leftStickY*(slowMode?SLOW_MOD:1), rightStickY*(slowMode?SLOW_MOD:1));
        lcd.println(DriverStationLCD.Line.kUser4,1,"tank drive ");
    }

    // Report the active shooter mode on the LCD.
    if (shooterMode == SHOOTER_MODE_VOLTAGE) {
        lcd.println(DriverStationLCD.Line.kUser1,1,"Shooter mode:voltage ");
    } else if (shooterMode == SHOOTER_BANG_BANG) {
        lcd.println(DriverStationLCD.Line.kUser1,1,"Shooter mode:bangbang");
    } else if (shooterMode == SHOOTER_COMBINED) {
        lcd.println(DriverStationLCD.Line.kUser1,1,"Shooter mode:combined");
    } else {
        lcd.println(DriverStationLCD.Line.kUser1,1,"Shooter mode:????????");
    }

    // print encoder values to see if they're working
    lcd.println(DriverStationLCD.Line.kUser2,1,""+shooterEnc.getPeriod());
    SmartDashboard.putNumber("Shooter speed", shooterEnc.getPeriod());
    SmartDashboard.putNumber("Shooter RPM", 60/shooterEnc.getPeriod());
    SmartDashboard.putNumber("Shooter count", shooterEnc.get());
    SmartDashboard.putNumber("Kicker speed", kickerEnc.getPeriod());
    SmartDashboard.putNumber("Kicker RPM", 60/kickerEnc.getPeriod());
    // Fixed copy-paste bug: "Kicker count" previously published
    // kickerEnc.getPeriod(); it now mirrors the shooter's count line.
    SmartDashboard.putNumber("Kicker count", kickerEnc.get());
    SmartDashboard.putNumber("Left Rate", leftEnc.getRate());
    SmartDashboard.putNumber("Left Distance", leftEnc.getDistance());
    SmartDashboard.putNumber("Left Raw", leftEnc.getRaw());
    SmartDashboard.putNumber("Right Rate", rightEnc.getRate());
    SmartDashboard.putNumber("Right Distance", rightEnc.getDistance());
    SmartDashboard.putNumber("Right Raw", rightEnc.getRaw());
    SmartDashboard.putNumber("Gyro", gyro.getAngle());
    //SmartDashboard.putBoolean("Auton check", autonCheck.get());
    SmartDashboard.putNumber("Right Wheels", drive.getRight());
    SmartDashboard.putNumber("Left Wheels", drive.getLeft());
    lcd.updateLCD();

    // update the display
    disp_batteryVoltage.setData(DriverStation.getInstance().getBatteryVoltage());
    disp_message.setData("teleop");
}
public void disabledPeriodic() {
// Keep the InsightLT display fresh while disabled.
disp_batteryVoltage.setData(DriverStation.getInstance().getBatteryVoltage());
disp_message.setData("disabled");
//$wag
// Zero the drive encoders so the next mode starts from a clean distance.
leftEnc.reset();
rightEnc.reset();
/*
if (DriverStation.getInstance().isFMSAttached()) {
display.stopDisplay();
} else {
display.startDisplay();
}
*/
}
// Test-mode state.
private boolean shootTester;
private boolean pwmtest;
private SafePWM[] pwms;

/**
 * Called once when the robot enters test mode. Sets up the PWM sweep
 * fixtures and (re)starts the test timer.
 */
public void testInit() {
    // Bug fix: 'timer' is declared but never assigned anywhere else in
    // this file, so calling timer.start() directly threw a
    // NullPointerException on entering test mode.
    if (timer == null) {
        timer = new Timer();
    }
    timer.start();
    pwmtest = false;
    // One SafePWM per channel 1..10 for the PWM test harness.
    pwms = new SafePWM[10];
    for (int i = 0; i < 10; ++i) {
        pwms[i] = new SafePWM(i + 1);
    }
}
/**
* This function is called periodically during test mode
*/
public void testPeriodic() {
// NOTE(review): the entire test-mode body below is commented out, so this
// method currently does nothing each loop. Left in place for reference.
/*
xbox.update();
atk.update();
// toggle between PWM test and austin's thing
if (xbox.isPressed(JStick.XBOX_LB)) {
pwmtest = false;
}
if (xbox.isPressed(JStick.XBOX_RB)) {
pwmtest = true;
}
if (pwmtest) {
for (int i = 0; i < 10; ++i) {
if (atk.isPressed(i+1)) {
pwms[i].setRaw(143);
}
}
} else {
if (xbox.isReleased(JStick.XBOX_A)) {
shootTester = !shootTester;
lcd.println(DriverStationLCD.Line.kUser1, 1, "Shooter Tester ");
} else {
lcd.println(DriverStationLCD.Line.kUser1, 1, "Normal Tester ");
}
//Only spins shooter
shooter.set((atk.isPressed(7)) ? -1 : 0);
//Only spins the kicker
kicker.set((atk.isPressed(6)) ? 1 : 0);
//Slow start for shooting 1
if(shootTester && atk.isPressed(1)) {
if(timer.get() > 2) {
shooter.set(1);
lcd.println(DriverStationLCD.Line.kUser2, 1, "Shooter: On ");
}
if(timer.get() > 4) {
kicker.set(1);
lcd.println(DriverStationLCD.Line.kUser3, 1, "Kicker: On ");
}
if(timer.get() > 7) {
//actuator.set(1);
lcd.println(DriverStationLCD.Line.kUser4, 1, "CAM: On ");
} else {
timer.reset();
shooter.set(0);
kicker.set(0);
actuator.set(0);
lcd.println(DriverStationLCD.Line.kUser2, 1, "Shooter: Off ");
lcd.println(DriverStationLCD.Line.kUser3, 1, "Kicker: Off ");
lcd.println(DriverStationLCD.Line.kUser4, 1, "CAM: Off ");
}
}
lcd.println(DriverStationLCD.Line.kUser1, 1, "" + timer.get());
lcd.updateLCD();
}
*/
}
}
| |
package shef.nlp.supple.prolog.cafe;
import jp.ac.kobe_u.cs.prolog.lang.*;
import jp.ac.kobe_u.cs.prolog.builtin.*;
/*
* *** Please do not edit ! ***
* @(#) PRED_clean_semantics_2.java
* @procedure clean_semantics/2 in semantics.pl
*/
/*
* @version Prolog Cafe 0.8 November 2003
* @author Mutsunori Banbara (banbara@kobe-u.ac.jp)
* @author Naoyuki Tamura (tamura@kobe-u.ac.jp)
*/
// Machine-generated by Prolog Cafe from clean_semantics/2 in semantics.pl;
// do not hand-edit. This is the predicate entry point: it performs
// first-argument indexing over the compiled clause set.
public class PRED_clean_semantics_2 extends Predicate {
// Clause predicates and indexing chains shared by all dispatch subclasses.
static Predicate fail_0 = new PRED_fail_0();
static Predicate clean_semantics_2_1 = new PRED_clean_semantics_2_1();
static Predicate clean_semantics_2_2 = new PRED_clean_semantics_2_2();
static Predicate clean_semantics_2_3 = new PRED_clean_semantics_2_3();
static Predicate clean_semantics_2_4 = new PRED_clean_semantics_2_4();
static Predicate clean_semantics_2_5 = new PRED_clean_semantics_2_5();
static Predicate clean_semantics_2_6 = new PRED_clean_semantics_2_6();
static Predicate clean_semantics_2_7 = new PRED_clean_semantics_2_7();
static Predicate clean_semantics_2_8 = new PRED_clean_semantics_2_8();
static Predicate clean_semantics_2_lis = new PRED_clean_semantics_2_lis();
static Predicate clean_semantics_2_var = new PRED_clean_semantics_2_var();
public Term arg1, arg2;
public PRED_clean_semantics_2(Term a1, Term a2, Predicate cont) {
arg1 = a1;
arg2 = a2;
this.cont = cont;
}
public PRED_clean_semantics_2(){}
public void setArgument(Term[] args, Predicate cont) {
arg1 = args[0];
arg2 = args[1];
this.cont = cont;
}
public Predicate exec(Prolog engine) {
// Load arguments into the engine's argument registers, then dispatch.
engine.aregs[1] = arg1;
engine.aregs[2] = arg2;
engine.cont = cont;
return call(engine);
}
public Predicate call(Prolog engine) {
engine.setB0();
// First-argument indexing table (argument order is defined by the
// Prolog Cafe runtime's switch_on_term).
return engine.switch_on_term(
clean_semantics_2_var,
fail_0,
clean_semantics_2_1,
clean_semantics_2_2,
clean_semantics_2_lis
);
}
public int arity() { return 2; }
public String toString() {
return "clean_semantics(" + arg1 + ", " + arg2 + ")";
}
}
// Generated choice-point chains (jtry / retry / trust) used when the first
// argument is unbound ('var' chain tries clauses 1..8) or a list ('lis'
// chain tries clauses 3..8). Do not hand-edit.
class PRED_clean_semantics_2_var extends PRED_clean_semantics_2 {
static Predicate clean_semantics_2_var_1 = new PRED_clean_semantics_2_var_1();
public Predicate exec(Prolog engine) {
return engine.jtry(clean_semantics_2_1, clean_semantics_2_var_1);
}
}
class PRED_clean_semantics_2_var_1 extends PRED_clean_semantics_2 {
static Predicate clean_semantics_2_var_2 = new PRED_clean_semantics_2_var_2();
public Predicate exec(Prolog engine) {
return engine.retry(clean_semantics_2_2, clean_semantics_2_var_2);
}
}
class PRED_clean_semantics_2_var_2 extends PRED_clean_semantics_2 {
static Predicate clean_semantics_2_var_3 = new PRED_clean_semantics_2_var_3();
public Predicate exec(Prolog engine) {
return engine.retry(clean_semantics_2_3, clean_semantics_2_var_3);
}
}
class PRED_clean_semantics_2_var_3 extends PRED_clean_semantics_2 {
static Predicate clean_semantics_2_var_4 = new PRED_clean_semantics_2_var_4();
public Predicate exec(Prolog engine) {
return engine.retry(clean_semantics_2_4, clean_semantics_2_var_4);
}
}
class PRED_clean_semantics_2_var_4 extends PRED_clean_semantics_2 {
static Predicate clean_semantics_2_var_5 = new PRED_clean_semantics_2_var_5();
public Predicate exec(Prolog engine) {
return engine.retry(clean_semantics_2_5, clean_semantics_2_var_5);
}
}
class PRED_clean_semantics_2_var_5 extends PRED_clean_semantics_2 {
static Predicate clean_semantics_2_var_6 = new PRED_clean_semantics_2_var_6();
public Predicate exec(Prolog engine) {
return engine.retry(clean_semantics_2_6, clean_semantics_2_var_6);
}
}
class PRED_clean_semantics_2_var_6 extends PRED_clean_semantics_2 {
static Predicate clean_semantics_2_var_7 = new PRED_clean_semantics_2_var_7();
public Predicate exec(Prolog engine) {
return engine.retry(clean_semantics_2_7, clean_semantics_2_var_7);
}
}
class PRED_clean_semantics_2_var_7 extends PRED_clean_semantics_2 {
public Predicate exec(Prolog engine) {
// Last alternative in the chain: commit with trust.
return engine.trust(clean_semantics_2_8);
}
}
class PRED_clean_semantics_2_lis extends PRED_clean_semantics_2 {
static Predicate clean_semantics_2_lis_1 = new PRED_clean_semantics_2_lis_1();
public Predicate exec(Prolog engine) {
return engine.jtry(clean_semantics_2_3, clean_semantics_2_lis_1);
}
}
class PRED_clean_semantics_2_lis_1 extends PRED_clean_semantics_2 {
static Predicate clean_semantics_2_lis_2 = new PRED_clean_semantics_2_lis_2();
public Predicate exec(Prolog engine) {
return engine.retry(clean_semantics_2_4, clean_semantics_2_lis_2);
}
}
class PRED_clean_semantics_2_lis_2 extends PRED_clean_semantics_2 {
static Predicate clean_semantics_2_lis_3 = new PRED_clean_semantics_2_lis_3();
public Predicate exec(Prolog engine) {
return engine.retry(clean_semantics_2_5, clean_semantics_2_lis_3);
}
}
class PRED_clean_semantics_2_lis_3 extends PRED_clean_semantics_2 {
static Predicate clean_semantics_2_lis_4 = new PRED_clean_semantics_2_lis_4();
public Predicate exec(Prolog engine) {
return engine.retry(clean_semantics_2_6, clean_semantics_2_lis_4);
}
}
class PRED_clean_semantics_2_lis_4 extends PRED_clean_semantics_2 {
static Predicate clean_semantics_2_lis_5 = new PRED_clean_semantics_2_lis_5();
public Predicate exec(Prolog engine) {
return engine.retry(clean_semantics_2_7, clean_semantics_2_lis_5);
}
}
class PRED_clean_semantics_2_lis_5 extends PRED_clean_semantics_2 {
public Predicate exec(Prolog engine) {
// Last alternative in the chain: commit with trust.
return engine.trust(clean_semantics_2_8);
}
}
// Generated clause 1: clean_semantics([], []) :- !.
class PRED_clean_semantics_2_1 extends PRED_clean_semantics_2 {
static SymbolTerm s1 = SymbolTerm.makeSymbol("[]");
public Predicate exec(Prolog engine) {
Term a1, a2;
a1 = engine.aregs[1].dereference();
a2 = engine.aregs[2].dereference();
Predicate cont = engine.cont;
// Both arguments must unify with the empty list; then neck-cut.
if ( !s1.unify(a1, engine.trail) ) return engine.fail();
if ( !s1.unify(a2, engine.trail) ) return engine.fail();
return new PRED_$neck_cut_0(cont);
}
}
// Generated clause 2: first argument of the form V ^ Body. Roughly:
// clean_semantics(V^B, Out) :- buchart_gensym(e, V), clean_semantics(B, Out), !.
class PRED_clean_semantics_2_2 extends PRED_clean_semantics_2 {
static SymbolTerm f1 = SymbolTerm.makeSymbol("^", 2);
static SymbolTerm s2 = SymbolTerm.makeSymbol("e");
public Predicate exec(Prolog engine) {
Term a1, a2, a3, a4, a5;
Predicate p1, p2, p3;
a1 = engine.aregs[1].dereference();
a2 = engine.aregs[2].dereference();
Predicate cont = engine.cont;
// Match (or construct, if unbound) the ^/2 structure in arg 1.
if ( a1.isStructure() ){
if (! f1.strictEqual(((StructureTerm)a1).functor()) )
return engine.fail();
Term[] args = ((StructureTerm)a1).args();
a3 = args[0];
a4 = args[1];
} else if (a1.isVariable() ){
a3 = new VariableTerm(engine);
a4 = new VariableTerm(engine);
Term[] args = {a3, a4};
if ( !a1.unify(new StructureTerm(f1, args), engine.trail) )
return engine.fail();
} else {
return engine.fail();
}
a5 = new VariableTerm(engine);
// Continuations are chained in reverse execution order:
// gensym -> recursive clean_semantics -> cut.
p1 = new PRED_$cut_1(a5, cont);
p2 = new PRED_clean_semantics_2(a4, a2, p1);
p3 = new PRED_buchart_gensym_2(s2, a3, p2);
return new PRED_$get_level_1(a5, p3);
}
}
// Generated clause 3: first argument is a list whose (nonvar) head unifies
// with V ^ Body; gensyms V, recursively cleans Body and the tail, and
// appends the two result lists into arg 2, under a cut barrier.
class PRED_clean_semantics_2_3 extends PRED_clean_semantics_2 {
static SymbolTerm f1 = SymbolTerm.makeSymbol("^", 2);
static SymbolTerm s3 = SymbolTerm.makeSymbol("e");
public Predicate exec(Prolog engine) {
Term a1, a2, a3, a4, a5, a6, a7, a8, a9, a10;
Predicate p1, p2, p3, p4, p5, p6, p7;
a1 = engine.aregs[1].dereference();
a2 = engine.aregs[2].dereference();
Predicate cont = engine.cont;
// Destructure (or construct) the list cell in arg 1.
if ( a1.isList() ){
a3 = ((ListTerm)a1).car();
a4 = ((ListTerm)a1).cdr();
} else if ( a1.isVariable() ){
a3 = new VariableTerm(engine);
a4 = new VariableTerm(engine);
if ( !a1.unify(new ListTerm(a3, a4), engine.trail) )
return engine.fail();
} else {
return engine.fail();
}
a5 = new VariableTerm(engine);
a7 = new VariableTerm(engine);
a8 = new VariableTerm(engine);
Term[] h2 = {a7, a8};
a6 = new StructureTerm(f1, h2);
a9 = new VariableTerm(engine);
a10 = new VariableTerm(engine);
// Continuations chained in reverse execution order:
// nonvar -> unify head with ^/2 -> gensym -> clean head body ->
// clean tail -> append -> cut.
p1 = new PRED_$cut_1(a5, cont);
p2 = new PRED_append_3(a9, a10, a2, p1);
p3 = new PRED_clean_semantics_2(a4, a10, p2);
p4 = new PRED_clean_semantics_2(a8, a9, p3);
p5 = new PRED_buchart_gensym_2(s3, a7, p4);
p6 = new PRED_$unify_2(a3, a6, p5);
p7 = new PRED_nonvar_1(a3, p6);
return new PRED_$get_level_1(a5, p7);
}
}
// Generated clause 4: matches a three-element list [Cat, X, Sem] where Cat is
// one of the grammatical-category atoms below (name, head, head1, head2, det,
// adj, pronoun, count, more, less, title); flattens Sem, concatenates names,
// and unifies the result with arg 2 (a one-element list), under a cut barrier.
class PRED_clean_semantics_2_4 extends PRED_clean_semantics_2 {
static SymbolTerm s1 = SymbolTerm.makeSymbol("[]");
static SymbolTerm s2 = SymbolTerm.makeSymbol("name");
static SymbolTerm s3 = SymbolTerm.makeSymbol("head");
static SymbolTerm s4 = SymbolTerm.makeSymbol("head1");
static SymbolTerm s5 = SymbolTerm.makeSymbol("head2");
static SymbolTerm s6 = SymbolTerm.makeSymbol("det");
static SymbolTerm s7 = SymbolTerm.makeSymbol("adj");
static SymbolTerm s8 = SymbolTerm.makeSymbol("pronoun");
static SymbolTerm s9 = SymbolTerm.makeSymbol("count");
static SymbolTerm s10 = SymbolTerm.makeSymbol("more");
static SymbolTerm s11 = SymbolTerm.makeSymbol("less");
static SymbolTerm s12 = SymbolTerm.makeSymbol("title");
// s13..s23 build the constant list [name, head, ..., title] back-to-front.
static ListTerm s13 = new ListTerm(s12, s1);
static ListTerm s14 = new ListTerm(s11, s13);
static ListTerm s15 = new ListTerm(s10, s14);
static ListTerm s16 = new ListTerm(s9, s15);
static ListTerm s17 = new ListTerm(s8, s16);
static ListTerm s18 = new ListTerm(s7, s17);
static ListTerm s19 = new ListTerm(s6, s18);
static ListTerm s20 = new ListTerm(s5, s19);
static ListTerm s21 = new ListTerm(s4, s20);
static ListTerm s22 = new ListTerm(s3, s21);
static ListTerm s23 = new ListTerm(s2, s22);
public Predicate exec(Prolog engine) {
Term a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12, a13, a14;
Predicate p1, p2, p3, p4, p5, p6;
a1 = engine.aregs[1].dereference();
a2 = engine.aregs[2].dereference();
Predicate cont = engine.cont;
// Destructure arg 1 as a three-element list [a3, a5, a7].
if ( a1.isList() ){
a3 = ((ListTerm)a1).car();
a4 = ((ListTerm)a1).cdr();
} else if ( a1.isVariable() ){
a3 = new VariableTerm(engine);
a4 = new VariableTerm(engine);
if ( !a1.unify(new ListTerm(a3, a4), engine.trail) )
return engine.fail();
} else {
return engine.fail();
}
if ( a4.isList() ){
a5 = ((ListTerm)a4).car();
a6 = ((ListTerm)a4).cdr();
} else if ( a4.isVariable() ){
a5 = new VariableTerm(engine);
a6 = new VariableTerm(engine);
if ( !a4.unify(new ListTerm(a5, a6), engine.trail) )
return engine.fail();
} else {
return engine.fail();
}
if ( a6.isList() ){
a7 = ((ListTerm)a6).car();
if ( !s1.unify(((ListTerm)a6).cdr(), engine.trail) )
return engine.fail();
} else if ( a6.isVariable() ){
a7 = new VariableTerm(engine);
if ( !a6.unify(new ListTerm(a7, s1), engine.trail) )
return engine.fail();
} else {
return engine.fail();
}
// Arg 2 must be a one-element list [a8].
if ( a2.isList() ){
a8 = ((ListTerm)a2).car();
if ( !s1.unify(((ListTerm)a2).cdr(), engine.trail) )
return engine.fail();
} else if ( a2.isVariable() ){
a8 = new VariableTerm(engine);
if ( !a2.unify(new ListTerm(a8, s1), engine.trail) )
return engine.fail();
} else {
return engine.fail();
}
a9 = new VariableTerm(engine);
a10 = new VariableTerm(engine);
a11 = new VariableTerm(engine);
// a12 = [a3, a5, a11], built back-to-front.
a14 = new ListTerm(a11, s1);
a13 = new ListTerm(a5, a14);
a12 = new ListTerm(a3, a13);
// Continuations chained in reverse execution order:
// nonvar -> member check -> semflatten -> name_conc -> unify -> cut.
p1 = new PRED_$cut_1(a9, cont);
p2 = new PRED_$614646_2(a8, a12, p1);
p3 = new PRED_name_conc_2(a10, a11, p2);
p4 = new PRED_semflatten_2(a7, a10, p3);
p5 = new PRED_member_2(a3, s23, p4);
p6 = new PRED_nonvar_1(a3, p5);
return new PRED_$get_level_1(a9, p6);
}
}
// Fifth clause of clean_semantics/2 (machine-generated continuation-passing
// code).  The p1/p2 assignments are in REVERSE of execution order.
class PRED_clean_semantics_2_5 extends PRED_clean_semantics_2 {
// The empty-list atom.
static SymbolTerm s1 = SymbolTerm.makeSymbol("[]");
public Predicate exec(Prolog engine) {
Term a1, a2, a3, a4, a5, a6, a7;
Predicate p1, p2;
// Dereference the argument registers and save the continuation.
a1 = engine.aregs[1].dereference();
a2 = engine.aregs[2].dereference();
Predicate cont = engine.cont;
// Head unification: a1 = [a3|a4] (bind a fresh cons cell if a1 is unbound;
// bindings go on engine.trail; non-list, non-variable terms fail).
if ( a1.isList() ){
a3 = ((ListTerm)a1).car();
a4 = ((ListTerm)a1).cdr();
} else if ( a1.isVariable() ){
a3 = new VariableTerm(engine);
a4 = new VariableTerm(engine);
if ( !a1.unify(new ListTerm(a3, a4), engine.trail) )
return engine.fail();
} else {
return engine.fail();
}
// Head unification: a2 = [a5] (a one-element list).
if ( a2.isList() ){
a5 = ((ListTerm)a2).car();
if ( !s1.unify(((ListTerm)a2).cdr(), engine.trail) )
return engine.fail();
} else if ( a2.isVariable() ){
a5 = new VariableTerm(engine);
if ( !a2.unify(new ListTerm(a5, s1), engine.trail) )
return engine.fail();
} else {
return engine.fail();
}
// Body goals, chained back-to-front.  Actual execution order:
//   '$dummy_semantics.pl_1'(a4, _, _), lower(a3, a6),
//   '$614646'(a5, [a6|a4]), cont.
a6 = new VariableTerm(engine);
a7 = new ListTerm(a6, a4);
p1 = new PRED_$614646_2(a5, a7, cont);
p2 = new PRED_lower_2(a3, a6, p1);
return new PRED_$dummy_semantics46pl_1_3(a4, new VariableTerm(engine), new VariableTerm(engine), p2);
}
}
// Sixth clause of clean_semantics/2 (machine-generated continuation-passing
// code).  The p1..p4 assignments are in REVERSE of execution order.
class PRED_clean_semantics_2_6 extends PRED_clean_semantics_2 {
// Functor compound/1 and the empty-list atom.
static SymbolTerm f1 = SymbolTerm.makeSymbol("compound", 1);
static SymbolTerm s2 = SymbolTerm.makeSymbol("[]");
public Predicate exec(Prolog engine) {
Term a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12, a13;
Predicate p1, p2, p3, p4;
// Dereference the argument registers and save the continuation.
a1 = engine.aregs[1].dereference();
a2 = engine.aregs[2].dereference();
Predicate cont = engine.cont;
// Head unification: a1 = [a3|a4].
if ( a1.isList() ){
a3 = ((ListTerm)a1).car();
a4 = ((ListTerm)a1).cdr();
} else if ( a1.isVariable() ){
a3 = new VariableTerm(engine);
a4 = new VariableTerm(engine);
if ( !a1.unify(new ListTerm(a3, a4), engine.trail) )
return engine.fail();
} else {
return engine.fail();
}
// The head element must be (or become) the structure compound(a5):
// check/extract the functor when bound, or bind a fresh structure term.
if ( a3.isStructure() ){
if (! f1.strictEqual(((StructureTerm)a3).functor()) )
return engine.fail();
Term[] args = ((StructureTerm)a3).args();
a5 = args[0];
} else if (a3.isVariable() ){
a5 = new VariableTerm(engine);
Term[] args = {a5};
if ( !a3.unify(new StructureTerm(f1, args), engine.trail) )
return engine.fail();
} else {
return engine.fail();
}
// The compound's argument must be a list: a5 = [a6|a7].
if ( a5.isList() ){
a6 = ((ListTerm)a5).car();
a7 = ((ListTerm)a5).cdr();
} else if ( a5.isVariable() ){
a6 = new VariableTerm(engine);
a7 = new VariableTerm(engine);
if ( !a5.unify(new ListTerm(a6, a7), engine.trail) )
return engine.fail();
} else {
return engine.fail();
}
// Head unification: a2 = [a8] (a one-element list).
if ( a2.isList() ){
a8 = ((ListTerm)a2).car();
if ( !s2.unify(((ListTerm)a2).cdr(), engine.trail) )
return engine.fail();
} else if ( a2.isVariable() ){
a8 = new VariableTerm(engine);
if ( !a2.unify(new ListTerm(a8, s2), engine.trail) )
return engine.fail();
} else {
return engine.fail();
}
// Body goals, chained back-to-front.  Actual execution order:
//   nonvar(a6), '$dummy_semantics.pl_2'(a6, _, _, a9),
//   name_conc([a9|a7], a11), clean_predicate(a11, a12),
//   '$614646'(a8, [a12|a4]), cont.
a9 = new VariableTerm(engine);
a10 = new ListTerm(a9, a7);
a11 = new VariableTerm(engine);
a12 = new VariableTerm(engine);
a13 = new ListTerm(a12, a4);
p1 = new PRED_$614646_2(a8, a13, cont);
p2 = new PRED_clean_predicate_2(a11, a12, p1);
p3 = new PRED_name_conc_2(a10, a11, p2);
p4 = new PRED_$dummy_semantics46pl_2_4(a6, new VariableTerm(engine), new VariableTerm(engine), a9, p3);
return new PRED_nonvar_1(a6, p4);
}
}
// Seventh clause of clean_semantics/2 (machine-generated continuation-passing
// code).  The p1/p2 assignments are in REVERSE of execution order.
class PRED_clean_semantics_2_7 extends PRED_clean_semantics_2 {
// The empty-list atom and the "MISSING" marker atom searched for below.
static SymbolTerm s1 = SymbolTerm.makeSymbol("[]");
static SymbolTerm s2 = SymbolTerm.makeSymbol("MISSING");
public Predicate exec(Prolog engine) {
Term a1, a2, a3, a4;
Predicate p1, p2;
// Dereference the argument registers and save the continuation.
a1 = engine.aregs[1].dereference();
a2 = engine.aregs[2].dereference();
Predicate cont = engine.cont;
// Head unification: a1 = [a3|a4].
if ( a1.isList() ){
a3 = ((ListTerm)a1).car();
a4 = ((ListTerm)a1).cdr();
} else if ( a1.isVariable() ){
a3 = new VariableTerm(engine);
a4 = new VariableTerm(engine);
if ( !a1.unify(new ListTerm(a3, a4), engine.trail) )
return engine.fail();
} else {
return engine.fail();
}
// The second argument must unify with the empty list.
if ( !s1.unify(a2, engine.trail) ) return engine.fail();
// Body goals, chained back-to-front.  Actual execution order:
//   '$dummy_semantics.pl_3'(a3, _, _), '$dummy_semantics.pl_4'(a4, _, _),
//   member('MISSING', a4), cont.
p1 = new PRED_member_2(s2, a4, cont);
p2 = new PRED_$dummy_semantics46pl_4_3(a4, new VariableTerm(engine), new VariableTerm(engine), p1);
return new PRED_$dummy_semantics46pl_3_3(a3, new VariableTerm(engine), new VariableTerm(engine), p2);
}
}
// Eighth (recursive) clause of clean_semantics/2: cleans head and tail
// independently and appends the results.  Machine-generated continuation-
// passing code; p1..p4 are assigned in REVERSE of execution order.
class PRED_clean_semantics_2_8 extends PRED_clean_semantics_2 {
public Predicate exec(Prolog engine) {
Term a1, a2, a3, a4, a5, a6, a7;
Predicate p1, p2, p3, p4;
// Dereference the argument registers and save the continuation.
a1 = engine.aregs[1].dereference();
a2 = engine.aregs[2].dereference();
Predicate cont = engine.cont;
// Head unification: a1 = [a3|a4].
if ( a1.isList() ){
a3 = ((ListTerm)a1).car();
a4 = ((ListTerm)a1).cdr();
} else if ( a1.isVariable() ){
a3 = new VariableTerm(engine);
a4 = new VariableTerm(engine);
if ( !a1.unify(new ListTerm(a3, a4), engine.trail) )
return engine.fail();
} else {
return engine.fail();
}
// Body goals, chained back-to-front.  Actual execution order:
//   '$get_level'(a5), clean_semantics(a3, a6), clean_semantics(a4, a7),
//   append(a6, a7, a2), '$cut'(a5), cont.
a5 = new VariableTerm(engine);
a6 = new VariableTerm(engine);
a7 = new VariableTerm(engine);
p1 = new PRED_$cut_1(a5, cont);
p2 = new PRED_append_3(a6, a7, a2, p1);
p3 = new PRED_clean_semantics_2(a4, a7, p2);
p4 = new PRED_clean_semantics_2(a3, a6, p3);
return new PRED_$get_level_1(a5, p4);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.shell;
import org.apache.commons.exec.CommandLine;
import org.apache.commons.exec.DefaultExecutor;
import org.apache.commons.exec.ExecuteException;
import org.apache.commons.exec.ExecuteWatchdog;
import org.apache.commons.exec.PumpStreamHandler;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.zeppelin.interpreter.InterpreterContext;
import org.apache.zeppelin.interpreter.InterpreterException;
import org.apache.zeppelin.interpreter.InterpreterResult;
import org.apache.zeppelin.interpreter.InterpreterResult.Code;
import org.apache.zeppelin.interpreter.KerberosInterpreter;
import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion;
import org.apache.zeppelin.scheduler.Scheduler;
import org.apache.zeppelin.scheduler.SchedulerFactory;
/**
* Shell interpreter for Zeppelin.
*/
/**
 * Shell interpreter for Zeppelin.
 *
 * <p>Executes each paragraph's text as a shell command ({@code cmd /c} on
 * Windows, {@code bash -c} elsewhere) using Apache Commons Exec. A watchdog
 * enforces the timeout configured by {@code shell.command.timeout.millisecs},
 * and running commands can be cancelled per paragraph. When
 * {@code zeppelin.shell.auth.type=kerberos}, a {@code kinit} is performed via
 * the Kerberos login hooks inherited from {@link KerberosInterpreter}.
 */
public class ShellInterpreter extends KerberosInterpreter {
  private static final Logger LOGGER = LoggerFactory.getLogger(ShellInterpreter.class);
  private static final String TIMEOUT_PROPERTY = "shell.command.timeout.millisecs";
  // Fallback command timeout (milliseconds) used when TIMEOUT_PROPERTY is unset.
  private String defaultTimeoutProperty = "60000";
  private static final String DIRECTORY_USER_HOME = "shell.working.directory.user.home";
  private final boolean isWindows = System.getProperty("os.name").startsWith("Windows");
  // Shell launcher; note it already includes the "run this string" flag.
  private final String shell = isWindows ? "cmd /c" : "bash -c";
  // Live executors keyed by paragraph id so cancel() can kill the process.
  ConcurrentHashMap<String, DefaultExecutor> executors;

  public ShellInterpreter(Properties property) {
    super(property);
  }

  @Override
  public void open() {
    super.open();
    LOGGER.info("Command timeout property: {}", getProperty(TIMEOUT_PROPERTY));
    executors = new ConcurrentHashMap<>();
  }

  @Override
  public void close() {
    super.close();
    // Kill any still-running paragraph processes before shutting down.
    for (String executorKey : executors.keySet()) {
      DefaultExecutor executor = executors.remove(executorKey);
      if (executor != null) {
        try {
          executor.getWatchdog().destroyProcess();
        } catch (Exception e) {
          LOGGER.error("error destroying executor for paragraphId: {}", executorKey, e);
        }
      }
    }
  }

  /**
   * Runs the paragraph text as a shell command, streaming output to the
   * paragraph's output stream.
   *
   * @param originalCmd the raw paragraph text
   * @param contextInterpreter paragraph context (output stream, resource pool, id)
   * @return SUCCESS on exit code 0, INCOMPLETE on SIGTERM (timeout/cancel),
   *         ERROR otherwise
   */
  @Override
  public InterpreterResult interpret(String originalCmd, InterpreterContext contextInterpreter) {
    // Optionally substitute resource-pool references into the command text.
    String cmd = Boolean.parseBoolean(getProperty("zeppelin.shell.interpolation"))
        ? interpolate(originalCmd, contextInterpreter.getResourcePool()) : originalCmd;
    LOGGER.debug("Run shell command '{}'", cmd);
    // NOTE(review): outStream is never wired into the executor (the
    // PumpStreamHandler below writes to contextInterpreter.out), so
    // outStream.toString() is always empty; confirm whether the result/error
    // messages were meant to include captured output.
    OutputStream outStream = new ByteArrayOutputStream();
    CommandLine cmdLine = CommandLine.parse(shell);
    // The Windows CMD shell doesn't handle multiline statements;
    // they need to be delimited by '&&' instead.
    if (isWindows) {
      String[] lines = StringUtils.split(cmd, "\n");
      cmd = StringUtils.join(lines, " && ");
    }
    cmdLine.addArgument(cmd, false);
    try {
      DefaultExecutor executor = new DefaultExecutor();
      // Stream both stdout and stderr straight into the paragraph output.
      executor.setStreamHandler(new PumpStreamHandler(
          contextInterpreter.out, contextInterpreter.out));
      executor.setWatchdog(new ExecuteWatchdog(
          Long.parseLong(getProperty(TIMEOUT_PROPERTY, defaultTimeoutProperty))));
      executors.put(contextInterpreter.getParagraphId(), executor);
      if (Boolean.parseBoolean(getProperty(DIRECTORY_USER_HOME))) {
        executor.setWorkingDirectory(new File(System.getProperty("user.home")));
      }
      int exitVal = executor.execute(cmdLine);
      LOGGER.info("Paragraph {} return with exit value: {}",
          contextInterpreter.getParagraphId(), exitVal);
      return new InterpreterResult(Code.SUCCESS, outStream.toString());
    } catch (ExecuteException e) {
      int exitValue = e.getExitValue();
      LOGGER.error("Can not run {}", cmd, e);
      Code code = Code.ERROR;
      String message = outStream.toString();
      // 143 = 128 + SIGTERM(15): the watchdog timeout or cancel() killed it.
      if (exitValue == 143) {
        code = Code.INCOMPLETE;
        message += "Paragraph received a SIGTERM\n";
        LOGGER.info("The paragraph {} stopped executing: {}",
            contextInterpreter.getParagraphId(), message);
      }
      message += "ExitValue: " + exitValue;
      return new InterpreterResult(code, message);
    } catch (IOException e) {
      LOGGER.error("Can not run {}", cmd, e);
      return new InterpreterResult(Code.ERROR, e.getMessage());
    } finally {
      executors.remove(contextInterpreter.getParagraphId());
    }
  }

  /** Kills the process of the given paragraph, if one is still running. */
  @Override
  public void cancel(InterpreterContext context) {
    DefaultExecutor executor = executors.remove(context.getParagraphId());
    if (executor != null) {
      try {
        executor.getWatchdog().destroyProcess();
      } catch (Exception e) {
        LOGGER.error("error destroying executor for paragraphId: {}", context.getParagraphId(), e);
      }
    }
  }

  @Override
  public FormType getFormType() {
    return FormType.SIMPLE;
  }

  /** Shell commands report no incremental progress. */
  @Override
  public int getProgress(InterpreterContext context) {
    return 0;
  }

  /** Up to 10 paragraphs of this interpreter instance may run in parallel. */
  @Override
  public Scheduler getScheduler() {
    return SchedulerFactory.singleton().createOrGetParallelScheduler(
        ShellInterpreter.class.getName() + this.hashCode(), 10);
  }

  /** No auto-completion is provided for shell commands. */
  @Override
  public List<InterpreterCompletion> completion(String buf, int cursor,
      InterpreterContext interpreterContext) {
    return null;
  }

  @Override
  protected boolean runKerberosLogin() {
    try {
      createSecureConfiguration();
      return true;
    } catch (Exception e) {
      LOGGER.error("Unable to run kinit for zeppelin", e);
    }
    return false;
  }

  /**
   * Runs {@code kinit} with the configured keytab and principal so that
   * subsequent shell commands have a Kerberos ticket.
   *
   * @throws InterpreterException if the kinit command fails to execute
   */
  public void createSecureConfiguration() throws InterpreterException {
    Properties properties = getProperties();
    // 'shell' already ends with the command flag ("bash -c" / "cmd /c"), so
    // the kinit command string is appended directly.  The previous code added
    // a second "-c" argument, which was redundant on bash and broke the
    // invocation on Windows ("cmd /c -c ...").
    CommandLine cmdLine = CommandLine.parse(shell);
    String kinitCommand = String.format("kinit -k -t %s %s",
        properties.getProperty("zeppelin.shell.keytab.location"),
        properties.getProperty("zeppelin.shell.principal"));
    cmdLine.addArgument(kinitCommand, false);
    DefaultExecutor executor = new DefaultExecutor();
    try {
      executor.execute(cmdLine);
    } catch (Exception e) {
      LOGGER.error("Unable to run kinit for zeppelin user {}", kinitCommand, e);
      throw new InterpreterException(e);
    }
  }

  @Override
  protected boolean isKerboseEnabled() {
    // Null- and empty-safe: equalsIgnoreCase(null) is simply false.
    return "kerberos".equalsIgnoreCase(getProperty("zeppelin.shell.auth.type"));
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.usergrid.rest.management.organizations;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import javax.ws.rs.Consumes;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
import org.apache.amber.oauth2.common.exception.OAuthSystemException;
import org.apache.amber.oauth2.common.message.OAuthResponse;
import org.apache.usergrid.management.ActivationState;
import org.apache.usergrid.management.OrganizationInfo;
import org.apache.usergrid.management.export.ExportService;
import org.apache.usergrid.persistence.cassandra.CassandraService;
import org.apache.usergrid.persistence.entities.Export;
import org.apache.usergrid.rest.AbstractContextResource;
import org.apache.usergrid.rest.ApiResponse;
import org.apache.usergrid.rest.applications.ServiceResource;
import org.apache.usergrid.rest.exceptions.RedirectionException;
import org.apache.usergrid.rest.management.organizations.applications.ApplicationsResource;
import org.apache.usergrid.rest.management.organizations.users.UsersResource;
import org.apache.usergrid.rest.security.annotations.RequireOrganizationAccess;
import org.apache.usergrid.rest.utils.JSONPUtils;
import org.apache.usergrid.security.oauth.ClientCredentialsInfo;
import org.apache.usergrid.security.tokens.exceptions.TokenException;
import org.apache.usergrid.services.ServiceResults;
import com.sun.jersey.api.json.JSONWithPadding;
import com.sun.jersey.api.view.Viewable;
import static javax.servlet.http.HttpServletResponse.SC_ACCEPTED;
import static javax.servlet.http.HttpServletResponse.SC_BAD_REQUEST;
import static javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR;
import static javax.servlet.http.HttpServletResponse.SC_OK;
import static javax.ws.rs.core.MediaType.APPLICATION_JSON;
@Component("org.apache.usergrid.rest.management.organizations.OrganizationResource")
@Scope("prototype")
@Produces({
MediaType.APPLICATION_JSON, "application/javascript", "application/x-javascript", "text/ecmascript",
"application/ecmascript", "text/jscript"
})
/**
 * REST resource for a single management organization.
 *
 * <p>Exposes sub-resources for the organization's users and applications, and
 * endpoints for details, activation/confirmation flows, re-activation email,
 * the activity feed, client credentials, property updates, and scheduling or
 * inspecting S3 data exports.
 */
public class OrganizationResource extends AbstractContextResource {

  // Fix: this logger was previously created with OrganizationsResource.class
  // (the collection resource), mis-attributing every log line from this class.
  private static final Logger logger = LoggerFactory.getLogger( OrganizationResource.class );

  @Autowired
  protected ExportService exportService;

  // The organization this prototype-scoped resource instance operates on.
  OrganizationInfo organization;

  public OrganizationResource() {
  }

  /** Binds this resource instance to a concrete organization. */
  public OrganizationResource init( OrganizationInfo organization ) {
    this.organization = organization;
    return this;
  }

  @RequireOrganizationAccess
  @Path("users")
  public UsersResource getOrganizationUsers( @Context UriInfo ui ) throws Exception {
    return getSubResource( UsersResource.class ).init( organization );
  }

  @RequireOrganizationAccess
  @Path("applications")
  public ApplicationsResource getOrganizationApplications( @Context UriInfo ui ) throws Exception {
    return getSubResource( ApplicationsResource.class ).init( organization );
  }

  /** Alias for the {@code applications} sub-resource under the path {@code apps}. */
  @RequireOrganizationAccess
  @Path("apps")
  public ApplicationsResource getOrganizationApplications2( @Context UriInfo ui ) throws Exception {
    return getSubResource( ApplicationsResource.class ).init( organization );
  }

  /** Returns the organization's details as a JSON(P) payload. */
  @GET
  public JSONWithPadding getOrganizationDetails( @Context UriInfo ui,
      @QueryParam("callback") @DefaultValue("callback") String callback )
      throws Exception {
    logger.info( "Get details for organization: {}", organization.getUuid() );
    ApiResponse response = createApiResponse();
    response.setProperty( "organization", management.getOrganizationData( organization ) );
    return new JSONWithPadding( response, callback );
  }

  /**
   * Consumes an activation token and renders the matching HTML view
   * ("activate", "bad_activation_token", or "error").
   */
  @GET
  @Path("activate")
  @Produces(MediaType.TEXT_HTML)
  public Viewable activate( @Context UriInfo ui, @QueryParam("token") String token ) {
    try {
      management.handleActivationTokenForOrganization( organization.getUuid(), token );
      return handleViewable( "activate", this );
    }
    catch ( TokenException e ) {
      return handleViewable( "bad_activation_token", this );
    }
    catch ( RedirectionException e ) {
      // Redirects must propagate so the container can issue them.
      throw e;
    }
    catch ( Exception e ) {
      return handleViewable( "error", e );
    }
  }

  /**
   * Consumes a confirmation token and renders "confirm" when the organization
   * still awaits activation, otherwise "activate".
   */
  @GET
  @Path("confirm")
  @Produces(MediaType.TEXT_HTML)
  public Viewable confirm( @Context UriInfo ui, @QueryParam("token") String token ) {
    try {
      ActivationState state = management.handleActivationTokenForOrganization( organization.getUuid(), token );
      if ( state == ActivationState.CONFIRMED_AWAITING_ACTIVATION ) {
        return handleViewable( "confirm", this );
      }
      return handleViewable( "activate", this );
    }
    catch ( TokenException e ) {
      return handleViewable( "bad_activation_token", this );
    }
    catch ( RedirectionException e ) {
      throw e;
    }
    catch ( Exception e ) {
      return handleViewable( "error", e );
    }
  }

  /** Re-sends the organization activation email. */
  @GET
  @Path("reactivate")
  public JSONWithPadding reactivate( @Context UriInfo ui,
      @QueryParam("callback") @DefaultValue("callback") String callback )
      throws Exception {
    logger.info( "Send activation email for organization: {}", organization.getUuid() );
    ApiResponse response = createApiResponse();
    management.startOrganizationActivationFlow( organization );
    response.setAction( "reactivate organization" );
    return new JSONWithPadding( response, callback );
  }

  /** Returns the organization's activity feed. */
  @RequireOrganizationAccess
  @GET
  @Path("feed")
  public JSONWithPadding getFeed( @Context UriInfo ui,
      @QueryParam("callback") @DefaultValue("callback") String callback )
      throws Exception {
    ApiResponse response = createApiResponse();
    response.setAction( "get organization feed" );
    ServiceResults results = management.getOrganizationActivity( organization );
    response.setEntities( results.getEntities() );
    response.setSuccess();
    return new JSONWithPadding( response, callback );
  }

  /** Returns the organization's current OAuth client id/secret pair. */
  @RequireOrganizationAccess
  @GET
  @Path("credentials")
  public JSONWithPadding getCredentials( @Context UriInfo ui,
      @QueryParam("callback") @DefaultValue("callback") String callback )
      throws Exception {
    ApiResponse response = createApiResponse();
    response.setAction( "get organization client credentials" );
    ClientCredentialsInfo keys =
        new ClientCredentialsInfo( management.getClientIdForOrganization( organization.getUuid() ),
            management.getClientSecretForOrganization( organization.getUuid() ) );
    response.setCredentials( keys );
    return new JSONWithPadding( response, callback );
  }

  /** Rotates and returns a new client secret (the client id stays fixed). */
  @RequireOrganizationAccess
  @POST
  @Path("credentials")
  public JSONWithPadding generateCredentials( @Context UriInfo ui,
      @QueryParam("callback") @DefaultValue("callback") String callback )
      throws Exception {
    ApiResponse response = createApiResponse();
    response.setAction( "generate organization client credentials" );
    ClientCredentialsInfo credentials =
        new ClientCredentialsInfo( management.getClientIdForOrganization( organization.getUuid() ),
            management.newClientSecretForOrganization( organization.getUuid() ) );
    response.setCredentials( credentials );
    return new JSONWithPadding( response, callback );
  }

  public OrganizationInfo getOrganization() {
    return organization;
  }

  /** Updates the organization's custom properties from the request body. */
  @RequireOrganizationAccess
  @Consumes(MediaType.APPLICATION_JSON)
  @PUT
  public JSONWithPadding executePut( @Context UriInfo ui, Map<String, Object> json,
      @QueryParam("callback") @DefaultValue("callback") String callback )
      throws Exception {
    logger.debug( "OrganizationResource.executePut" );
    ApiResponse response = createApiResponse();
    response.setAction( "put" );
    response.setParams( ui.getQueryParameters() );
    Map customProperties = ( Map ) json.get( OrganizationsResource.ORGANIZATION_PROPERTIES );
    organization.setProperties( customProperties );
    management.updateOrganization( organization );
    return new JSONWithPadding( response, callback );
  }

  /**
   * Schedules an asynchronous export of this organization's data to S3.
   *
   * <p>The request body must contain a {@code properties} map holding
   * {@code storage_provider} and a {@code storage_info} map with
   * {@code bucket_location}, {@code s3_access_id} and {@code s3_key}.
   *
   * @return 202 with the export job UUID, 400 when a required field is
   *         missing, 500 on any other failure
   */
  @POST
  @Path("export")
  @Consumes(APPLICATION_JSON)
  @RequireOrganizationAccess
  public Response exportPostJson( @Context UriInfo ui,Map<String, Object> json,
      @QueryParam("callback") @DefaultValue("") String callback )
      throws OAuthSystemException {
    UUID jobUUID = null;
    Map<String, String> uuidRet = new HashMap<String, String>();
    Map<String,Object> properties;
    Map<String, Object> storage_info;
    try {
      // Missing fields are signalled with NullPointerException and mapped to
      // a 400 response by the catch block below.
      if((properties = ( Map<String, Object> ) json.get( "properties" )) == null){
        throw new NullPointerException("Could not find 'properties'");
      }
      storage_info = ( Map<String, Object> ) properties.get( "storage_info" );
      String storage_provider = ( String ) properties.get( "storage_provider" );
      if(storage_provider == null) {
        throw new NullPointerException( "Could not find field 'storage_provider'" );
      }
      if(storage_info == null) {
        throw new NullPointerException( "Could not find field 'storage_info'" );
      }
      String bucketName = ( String ) storage_info.get( "bucket_location" );
      String accessId = ( String ) storage_info.get( "s3_access_id" );
      String secretKey = ( String ) storage_info.get( "s3_key" );
      if(bucketName == null) {
        // Fix: message previously named 'bucketName', which is not the JSON
        // field the client must supply.
        throw new NullPointerException( "Could not find field 'bucket_location'" );
      }
      if(accessId == null) {
        throw new NullPointerException( "Could not find field 's3_access_id'" );
      }
      if(secretKey == null) {
        throw new NullPointerException( "Could not find field 's3_key'" );
      }
      json.put( "organizationId",organization.getUuid());
      jobUUID = exportService.schedule( json );
      uuidRet.put( "Export Entity", jobUUID.toString() );
    }
    catch ( NullPointerException e ) {
      return Response.status( SC_BAD_REQUEST ).type( JSONPUtils.jsonMediaType( callback ) )
          .entity( ServiceResource.wrapWithCallback( e.getMessage(), callback ) ).build();
    }
    catch ( Exception e ) {
      //TODO:throw descriptive error message and or include on in the response
      //TODO:fix below, it doesn't work if there is an exception. Make it look like the OauthResponse.
      return Response.status( SC_INTERNAL_SERVER_ERROR ).type( JSONPUtils.jsonMediaType( callback ) )
          .entity( ServiceResource.wrapWithCallback( e.getMessage(), callback ) ).build();
    }
    return Response.status( SC_ACCEPTED ).entity( uuidRet ).build();
  }

  /**
   * Looks up a previously scheduled export job by UUID.
   *
   * @return 200 with the Export entity, 400 if it cannot be found or loaded
   */
  @GET
  @RequireOrganizationAccess
  @Path("export/{exportEntity: [A-Fa-f0-9]{8}-[A-Fa-f0-9]{4}-[A-Fa-f0-9]{4}-[A-Fa-f0-9]{4}-[A-Fa-f0-9]{12}}")
  public Response exportGetJson( @Context UriInfo ui, @PathParam("exportEntity") UUID exportEntityUUIDStr,
      @QueryParam("callback") @DefaultValue("") String callback ) throws Exception {
    Export entity;
    try {
      entity = smf.getServiceManager( CassandraService.MANAGEMENT_APPLICATION_ID ).getEntityManager()
          .get( exportEntityUUIDStr, Export.class );
    }
    catch ( Exception e ) { //this might not be a bad request and needs better error checking
      return Response.status( SC_BAD_REQUEST ).type( JSONPUtils.jsonMediaType( callback ) )
          .entity( ServiceResource.wrapWithCallback( e.getMessage(), callback ) ).build();
    }
    if ( entity == null ) {
      return Response.status( SC_BAD_REQUEST ).build();
    }
    return Response.status( SC_OK ).entity( entity).build();
  }
}
| |
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package okhttp3.internal.http;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import okhttp3.Challenge;
import okhttp3.Cookie;
import okhttp3.CookieJar;
import okhttp3.Headers;
import okhttp3.HttpUrl;
import okhttp3.Request;
import okhttp3.Response;
import okhttp3.internal.Util;
import static java.net.HttpURLConnection.HTTP_NOT_MODIFIED;
import static java.net.HttpURLConnection.HTTP_NO_CONTENT;
import static okhttp3.internal.Util.equal;
import static okhttp3.internal.http.StatusLine.HTTP_CONTINUE;
/** Headers and utilities for internal use by OkHttp. */
public final class HttpHeaders {
private static final String TOKEN = "([^ \"=]*)";
private static final String QUOTED_STRING = "\"([^\"]*)\"";
private static final Pattern PARAMETER
= Pattern.compile(" +" + TOKEN + "=(:?" + QUOTED_STRING + "|" + TOKEN + ") *(:?,|$)");
private HttpHeaders() {
}
public static long contentLength(Response response) {
return contentLength(response.headers());
}
public static long contentLength(Headers headers) {
return stringToLong(headers.get("Content-Length"));
}
private static long stringToLong(String s) {
if (s == null) return -1;
try {
return Long.parseLong(s);
} catch (NumberFormatException e) {
return -1;
}
}
/**
* Returns true if none of the Vary headers have changed between {@code cachedRequest} and {@code
* newRequest}.
*/
public static boolean varyMatches(
Response cachedResponse, Headers cachedRequest, Request newRequest) {
for (String field : varyFields(cachedResponse)) {
if (!equal(cachedRequest.values(field), newRequest.headers(field))) return false;
}
return true;
}
/**
* Returns true if a Vary header contains an asterisk. Such responses cannot be cached.
*/
public static boolean hasVaryAll(Response response) {
return hasVaryAll(response.headers());
}
/**
* Returns true if a Vary header contains an asterisk. Such responses cannot be cached.
*/
public static boolean hasVaryAll(Headers responseHeaders) {
return varyFields(responseHeaders).contains("*");
}
private static Set<String> varyFields(Response response) {
return varyFields(response.headers());
}
/**
* Returns the names of the request headers that need to be checked for equality when caching.
*/
public static Set<String> varyFields(Headers responseHeaders) {
Set<String> result = Collections.emptySet();
for (int i = 0, size = responseHeaders.size(); i < size; i++) {
if (!"Vary".equalsIgnoreCase(responseHeaders.name(i))) continue;
String value = responseHeaders.value(i);
if (result.isEmpty()) {
result = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
}
for (String varyField : value.split(",")) {
result.add(varyField.trim());
}
}
return result;
}
/**
* Returns the subset of the headers in {@code response}'s request that impact the content of
* response's body.
*/
public static Headers varyHeaders(Response response) {
// Use the request headers sent over the network, since that's what the
// response varies on. Otherwise OkHttp-supplied headers like
// "Accept-Encoding: gzip" may be lost.
Headers requestHeaders = response.networkResponse().request().headers();
Headers responseHeaders = response.headers();
return varyHeaders(requestHeaders, responseHeaders);
}
/**
* Returns the subset of the headers in {@code requestHeaders} that impact the content of
* response's body.
*/
public static Headers varyHeaders(Headers requestHeaders, Headers responseHeaders) {
Set<String> varyFields = varyFields(responseHeaders);
if (varyFields.isEmpty()) return new Headers.Builder().build();
Headers.Builder result = new Headers.Builder();
for (int i = 0, size = requestHeaders.size(); i < size; i++) {
String fieldName = requestHeaders.name(i);
if (varyFields.contains(fieldName)) {
result.add(fieldName, requestHeaders.value(i));
}
}
return result.build();
}
/**
* Parse RFC 7617 challenges, also wrong ordered ones.
* This API is only interested in the scheme name and realm.
*/
public static List<Challenge> parseChallenges(Headers responseHeaders, String challengeHeader) {
// auth-scheme = token
// auth-param = token "=" ( token | quoted-string )
// challenge = auth-scheme 1*SP 1#auth-param
// realm = "realm" "=" realm-value
// realm-value = quoted-string
List<Challenge> challenges = new ArrayList<>();
List<String> authenticationHeaders = responseHeaders.values(challengeHeader);
for (String header : authenticationHeaders) {
int index = header.indexOf(' ');
if (index == -1) continue;
String scheme = header.substring(0, index);
String realm = null;
String charset = null;
Matcher matcher = PARAMETER.matcher(header);
for (int i = index; matcher.find(i); i = matcher.end()) {
if (header.regionMatches(true, matcher.start(1), "realm", 0, 5)) {
realm = matcher.group(3);
} else if (header.regionMatches(true, matcher.start(1), "charset", 0, 7)) {
charset = matcher.group(3);
}
if (realm != null && charset != null) {
break;
}
}
// "realm" is required.
if (realm == null) continue;
Challenge challenge = new Challenge(scheme, realm);
// If a charset is provided, RFC 7617 says it must be "UTF-8".
if (charset != null) {
if (charset.equalsIgnoreCase("UTF-8")) {
challenge = challenge.withCharset(Util.UTF_8);
} else {
continue;
}
}
challenges.add(challenge);
}
return challenges;
}
public static void receiveHeaders(CookieJar cookieJar, HttpUrl url, Headers headers) {
if (cookieJar == CookieJar.NO_COOKIES) return;
List<Cookie> cookies = Cookie.parseAll(url, headers);
if (cookies.isEmpty()) return;
cookieJar.saveFromResponse(url, cookies);
}
/** Returns true if the response must have a (possibly 0-length) body. See RFC 7231. */
public static boolean hasBody(Response response) {
  // HEAD requests never yield a body regardless of the response headers.
  if (response.request().method().equals("HEAD")) {
    return false;
  }
  int code = response.code();
  // Informational (1xx), 204 No Content and 304 Not Modified responses carry no body;
  // everything else does.
  boolean informational = code >= HTTP_CONTINUE && code < 200;
  if (!informational && code != HTTP_NO_CONTENT && code != HTTP_NOT_MODIFIED) {
    return true;
  }
  // If the Content-Length or Transfer-Encoding headers disagree with the response code, the
  // response is malformed. For best compatibility, we honor the headers.
  return contentLength(response) != -1
      || "chunked".equalsIgnoreCase(response.header("Transfer-Encoding"));
}
/**
 * Returns the next index in {@code input} at or after {@code pos} that contains a character from
 * {@code characters}. Returns the input length if none of the requested characters can be found.
 */
public static int skipUntil(String input, int pos, String characters) {
  int i = pos;
  // Advance until the current character is one of the requested delimiters.
  while (i < input.length() && characters.indexOf(input.charAt(i)) == -1) {
    i++;
  }
  return i;
}
/**
 * Returns the index of the first character in {@code input} at or after {@code pos} that is
 * neither a space nor a horizontal tab, or {@code input.length()} if only spaces/tabs remain.
 * Note that only ' ' and '\t' are treated as whitespace here; newlines and other whitespace
 * characters are not skipped.
 */
public static int skipWhitespace(String input, int pos) {
for (; pos < input.length(); pos++) {
char c = input.charAt(pos);
if (c != ' ' && c != '\t') {
break;
}
}
return pos;
}
/**
 * Returns {@code value} as a positive integer, or 0 if it is negative, or {@code defaultValue} if
 * it cannot be parsed.
 */
public static int parseSeconds(String value, int defaultValue) {
  long seconds;
  try {
    seconds = Long.parseLong(value);
  } catch (NumberFormatException e) {
    return defaultValue;
  }
  // Clamp the parsed value into the [0, Integer.MAX_VALUE] range.
  if (seconds > Integer.MAX_VALUE) return Integer.MAX_VALUE;
  if (seconds < 0) return 0;
  return (int) seconds;
}
}
| |
/**
* Copyright (C) 2013 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.analytics.math.interpolation;
import java.util.Arrays;
import com.google.common.primitives.Doubles;
import com.opengamma.analytics.math.matrix.DoubleMatrix1D;
import com.opengamma.analytics.math.matrix.DoubleMatrix2D;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.ParallelArrayBinarySort;
/**
* Cubic spline interpolation based on C.J.C. Kruger, "Constrained Cubic Spline Interpolation for Chemical Engineering Applications," 2002
*/
public class ConstrainedCubicSplineInterpolator extends PiecewisePolynomialInterpolator {
// Relative tolerance for the sanity check that each cubic piece reproduces the data value
// at the right endpoint of its interval.
private static final double ERROR = 1.e-13;
// Builds piecewise-cubic coefficients from values, intervals, slopes and first derivatives.
private final HermiteCoefficientsProvider _solver = new HermiteCoefficientsProvider();
/**
 * Interpolates the points (xValues[i], yValues[i]) with a constrained cubic spline.
 * Inputs are validated (non-null, equal lengths, at least 2 points, finite, distinct x),
 * sorted by x, and the resulting coefficients are sanity-checked against the data.
 *
 * @param xValues the x-coordinates, distinct, in any order
 * @param yValues the y-coordinates, same length as xValues
 * @return piecewise polynomial result with 4 coefficients per interval and 1 dimension
 */
@Override
public PiecewisePolynomialResult interpolate(final double[] xValues, final double[] yValues) {
ArgumentChecker.notNull(xValues, "xValues");
ArgumentChecker.notNull(yValues, "yValues");
ArgumentChecker.isTrue(xValues.length == yValues.length, "(xValues length = yValues length) should be true");
ArgumentChecker.isTrue(xValues.length > 1, "Data points should be >= 2");
final int nDataPts = xValues.length;
for (int i = 0; i < nDataPts; ++i) {
ArgumentChecker.isFalse(Double.isNaN(xValues[i]), "xValues containing NaN");
ArgumentChecker.isFalse(Double.isInfinite(xValues[i]), "xValues containing Infinity");
ArgumentChecker.isFalse(Double.isNaN(yValues[i]), "yValues containing NaN");
ArgumentChecker.isFalse(Double.isInfinite(yValues[i]), "yValues containing Infinity");
}
// O(n^2) pairwise distinctness check on the x-coordinates.
for (int i = 0; i < nDataPts - 1; ++i) {
for (int j = i + 1; j < nDataPts; ++j) {
ArgumentChecker.isFalse(xValues[i] == xValues[j], "xValues should be distinct");
}
}
// Work on sorted copies so the caller's arrays are left untouched.
final double[] xValuesSrt = Arrays.copyOf(xValues, nDataPts);
final double[] yValuesSrt = Arrays.copyOf(yValues, nDataPts);
ParallelArrayBinarySort.parallelBinarySort(xValuesSrt, yValuesSrt);
final double[] intervals = _solver.intervalsCalculator(xValuesSrt);
final double[] slopes = _solver.slopesCalculator(yValuesSrt, intervals);
final double[] first = firstDerivativeCalculator(slopes);
final double[][] coefs = _solver.solve(yValuesSrt, intervals, slopes, first);
// Sanity check: evaluating each cubic at the right end of its interval must reproduce the
// next data value to within a relative tolerance of ERROR.
for (int i = 0; i < nDataPts - 1; ++i) {
double ref = 0.;
for (int j = 0; j < 4; ++j) {
ref += coefs[i][j] * Math.pow(intervals[i], 3 - j);
ArgumentChecker.isFalse(Double.isNaN(coefs[i][j]), "Too large input");
ArgumentChecker.isFalse(Double.isInfinite(coefs[i][j]), "Too large input");
}
final double bound = Math.max(Math.abs(ref) + Math.abs(yValuesSrt[i + 1]), 1.e-1);
ArgumentChecker.isTrue(Math.abs(ref - yValuesSrt[i + 1]) < ERROR * bound, "Input is too large/small or data points are too close");
}
return new PiecewisePolynomialResult(new DoubleMatrix1D(xValuesSrt), new DoubleMatrix2D(coefs), 4, 1);
}
/**
 * Multi-dimensional variant: interpolates each row of {@code yValuesMatrix} against
 * {@code xValues} independently, then interleaves the per-dimension coefficient rows.
 *
 * @param xValues the x-coordinates, distinct
 * @param yValuesMatrix one row of y-values per dimension; row length must equal xValues length
 * @return piecewise polynomial result with {@code dim} interleaved coefficient rows per interval
 */
@Override
public PiecewisePolynomialResult interpolate(final double[] xValues, final double[][] yValuesMatrix) {
ArgumentChecker.notNull(xValues, "xValues");
ArgumentChecker.notNull(yValuesMatrix, "yValuesMatrix");
ArgumentChecker.isTrue(xValues.length == yValuesMatrix[0].length, "(xValues length = yValuesMatrix's row vector length) should be true");
ArgumentChecker.isTrue(xValues.length > 1, "Data points should be >= 2");
final int nDataPts = xValues.length;
final int yValuesLen = yValuesMatrix[0].length;
final int dim = yValuesMatrix.length;
for (int i = 0; i < nDataPts; ++i) {
ArgumentChecker.isFalse(Double.isNaN(xValues[i]), "xValues containing NaN");
ArgumentChecker.isFalse(Double.isInfinite(xValues[i]), "xValues containing Infinity");
}
for (int i = 0; i < yValuesLen; ++i) {
for (int j = 0; j < dim; ++j) {
ArgumentChecker.isFalse(Double.isNaN(yValuesMatrix[j][i]), "yValuesMatrix containing NaN");
ArgumentChecker.isFalse(Double.isInfinite(yValuesMatrix[j][i]), "yValuesMatrix containing Infinity");
}
}
for (int i = 0; i < nDataPts; ++i) {
for (int j = i + 1; j < nDataPts; ++j) {
ArgumentChecker.isFalse(xValues[i] == xValues[j], "xValues should be distinct");
}
}
// xValuesSrt is re-sorted per dimension; since the x data are identical each pass, the
// final copy returned below is the same sorted abscissa for every dimension.
double[] xValuesSrt = new double[nDataPts];
final DoubleMatrix2D[] coefMatrix = new DoubleMatrix2D[dim];
for (int i = 0; i < dim; ++i) {
xValuesSrt = Arrays.copyOf(xValues, nDataPts);
final double[] yValuesSrt = Arrays.copyOf(yValuesMatrix[i], nDataPts);
ParallelArrayBinarySort.parallelBinarySort(xValuesSrt, yValuesSrt);
final double[] intervals = _solver.intervalsCalculator(xValuesSrt);
final double[] slopes = _solver.slopesCalculator(yValuesSrt, intervals);
final double[] first = firstDerivativeCalculator(slopes);
coefMatrix[i] = new DoubleMatrix2D(_solver.solve(yValuesSrt, intervals, slopes, first));
// Same endpoint sanity check as the 1-D case, applied per dimension.
for (int k = 0; k < intervals.length; ++k) {
double ref = 0.;
for (int j = 0; j < 4; ++j) {
ref += coefMatrix[i].getData()[k][j] * Math.pow(intervals[k], 3 - j);
ArgumentChecker.isFalse(Double.isNaN(coefMatrix[i].getData()[k][j]), "Too large input");
ArgumentChecker.isFalse(Double.isInfinite(coefMatrix[i].getData()[k][j]), "Too large input");
}
final double bound = Math.max(Math.abs(ref) + Math.abs(yValuesSrt[k + 1]), 1.e-1);
ArgumentChecker.isTrue(Math.abs(ref - yValuesSrt[k + 1]) < ERROR * bound, "Input is too large/small or data points are too close");
}
}
final int nIntervals = coefMatrix[0].getNumberOfRows();
final int nCoefs = coefMatrix[0].getNumberOfColumns();
// Interleave: row (dim * i + j) holds interval i of dimension j.
final double[][] resMatrix = new double[dim * nIntervals][nCoefs];
for (int i = 0; i < nIntervals; ++i) {
for (int j = 0; j < dim; ++j) {
resMatrix[dim * i + j] = coefMatrix[j].getRowVector(i).getData();
}
}
return new PiecewisePolynomialResult(new DoubleMatrix1D(xValuesSrt), new DoubleMatrix2D(resMatrix), nCoefs, dim);
}
/**
 * Interpolates and additionally computes the sensitivity of the spline coefficients to the
 * y-values. Unlike {@link #interpolate(double[], double[])}, the inputs are NOT sorted here;
 * presumably the caller supplies x-values already in ascending order -- confirm at call sites.
 *
 * @param xValues the x-coordinates, distinct
 * @param yValues the y-coordinates, same length as xValues
 * @return result bundling the coefficients with one sensitivity matrix per interval
 */
@Override
public PiecewisePolynomialResultsWithSensitivity interpolateWithSensitivity(final double[] xValues, final double[] yValues) {
ArgumentChecker.notNull(xValues, "xValues");
ArgumentChecker.notNull(yValues, "yValues");
ArgumentChecker.isTrue(xValues.length == yValues.length, "(xValues length = yValues length) should be true");
ArgumentChecker.isTrue(xValues.length > 1, "Data points should be >= 2");
final int nDataPts = xValues.length;
for (int i = 0; i < nDataPts; ++i) {
ArgumentChecker.isFalse(Double.isNaN(xValues[i]), "xValues containing NaN");
ArgumentChecker.isFalse(Double.isInfinite(xValues[i]), "xValues containing Infinity");
ArgumentChecker.isFalse(Double.isNaN(yValues[i]), "yValues containing NaN");
ArgumentChecker.isFalse(Double.isInfinite(yValues[i]), "yValues containing Infinity");
}
for (int i = 0; i < nDataPts - 1; ++i) {
for (int j = i + 1; j < nDataPts; ++j) {
ArgumentChecker.isFalse(xValues[i] == xValues[j], "xValues should be distinct");
}
}
final double[] intervals = _solver.intervalsCalculator(xValues);
final double[] slopes = _solver.slopesCalculator(yValues, intervals);
final double[][] slopeSensitivity = _solver.slopeSensitivityCalculator(intervals);
final DoubleMatrix1D[] firstWithSensitivity = firstDerivativeWithSensitivityCalculator(slopes, slopeSensitivity);
// resMatrix[0] holds the coefficients; resMatrix[1..nDataPts-1] the per-interval sensitivities.
final DoubleMatrix2D[] resMatrix = _solver.solveWithSensitivity(yValues, intervals, slopes, slopeSensitivity, firstWithSensitivity);
// Reject any NaN/Infinity anywhere in the coefficient or sensitivity matrices.
for (int k = 0; k < nDataPts; k++) {
final DoubleMatrix2D m = resMatrix[k];
final int rows = m.getNumberOfRows();
final int cols = m.getNumberOfColumns();
for (int i = 0; i < rows; ++i) {
for (int j = 0; j < cols; ++j) {
ArgumentChecker.isTrue(Doubles.isFinite(m.getEntry(i, j)), "Matrix contains a NaN or infinite");
}
}
}
final DoubleMatrix2D coefMatrix = resMatrix[0];
// Endpoint sanity check as in interpolate(...).
for (int i = 0; i < nDataPts - 1; ++i) {
double ref = 0.;
for (int j = 0; j < 4; ++j) {
ref += coefMatrix.getData()[i][j] * Math.pow(intervals[i], 3 - j);
}
final double bound = Math.max(Math.abs(ref) + Math.abs(yValues[i + 1]), 1.e-1);
ArgumentChecker.isTrue(Math.abs(ref - yValues[i + 1]) < ERROR * bound, "Input is too large/small or data points are too close");
}
final DoubleMatrix2D[] coefSenseMatrix = new DoubleMatrix2D[nDataPts - 1];
System.arraycopy(resMatrix, 1, coefSenseMatrix, 0, nDataPts - 1);
final int nCoefs = coefMatrix.getNumberOfColumns();
return new PiecewisePolynomialResultsWithSensitivity(new DoubleMatrix1D(xValues), coefMatrix, nCoefs, 1, coefSenseMatrix);
}
/**
 * Computes first derivatives at the data points from the interval slopes using Kruger's
 * constrained rule: zero where the slope changes sign (a local extremum), otherwise the
 * harmonic-mean-like average 2*s[i]*s[i-1]/(s[i]+s[i-1]); one-sided formulas at the ends.
 */
private double[] firstDerivativeCalculator(final double[] slopes) {
final int nData = slopes.length + 1;
final double[] res = new double[nData];
for (int i = 1; i < nData - 1; ++i) {
res[i] = Math.signum(slopes[i - 1]) * Math.signum(slopes[i]) <= 0. ? 0. : 2. * slopes[i] * slopes[i - 1] / (slopes[i] + slopes[i - 1]);
}
res[0] = 1.5 * slopes[0] - 0.5 * res[1];
res[nData - 1] = 1.5 * slopes[nData - 2] - 0.5 * res[nData - 2];
return res;
}
/**
 * Computes the first derivatives together with their sensitivities to the y-values.
 * Returned layout: res[0] = first-derivative values; res[1..nData] = one sensitivity row
 * per data point (row i+1 corresponds to data point i).
 */
private DoubleMatrix1D[] firstDerivativeWithSensitivityCalculator(final double[] slopes, final double[][] slopeSensitivity) {
final int nData = slopes.length + 1;
final double[] first = new double[nData];
final double[][] sense = new double[nData][nData];
final DoubleMatrix1D[] res = new DoubleMatrix1D[nData + 1];
for (int i = 1; i < nData - 1; ++i) {
final double sign = Math.signum(slopes[i - 1]) * Math.signum(slopes[i]);
if (sign <= 0.) {
first[i] = 0.;
} else {
first[i] = 2. * slopes[i] * slopes[i - 1] / (slopes[i] + slopes[i - 1]);
}
if (sign < 0.) {
Arrays.fill(sense[i], 0.);
} else {
for (int k = 0; k < nData; ++k) {
if (Math.abs(slopes[i] + slopes[i - 1]) == 0.) {
Arrays.fill(sense[i], 0.);
} else {
// NOTE(review): at sign == 0 (one slope exactly zero) the factor of 2 is dropped,
// presumably as a one-sided limit of the derivative formula -- confirm against the
// original Kruger derivation.
if (sign == 0.) {
sense[i][k] = (slopes[i] * slopes[i] * slopeSensitivity[i - 1][k] + slopes[i - 1] * slopes[i - 1] * slopeSensitivity[i][k])
/ (slopes[i] + slopes[i - 1]) / (slopes[i] + slopes[i - 1]);
} else {
sense[i][k] = 2. * (slopes[i] * slopes[i] * slopeSensitivity[i - 1][k] + slopes[i - 1] * slopes[i - 1] * slopeSensitivity[i][k])
/ (slopes[i] + slopes[i - 1]) / (slopes[i] + slopes[i - 1]);
}
}
}
}
res[i + 1] = new DoubleMatrix1D(sense[i]);
}
// One-sided endpoint formulas for the values and their sensitivities.
first[0] = 1.5 * slopes[0] - 0.5 * first[1];
first[nData - 1] = 1.5 * slopes[nData - 2] - 0.5 * first[nData - 2];
res[0] = new DoubleMatrix1D(first);
for (int k = 0; k < nData; ++k) {
sense[0][k] = 1.5 * slopeSensitivity[0][k] - 0.5 * sense[1][k];
sense[nData - 1][k] = 1.5 * slopeSensitivity[nData - 2][k] - 0.5 * sense[nData - 2][k];
}
res[1] = new DoubleMatrix1D(sense[0]);
res[nData] = new DoubleMatrix1D(sense[nData - 1]);
return res;
}
}
| |
package org.recap.report;
import org.junit.Test;
import org.mockito.ArgumentMatcher;
import org.recap.BaseTestCase;
import org.recap.ReCAPConstants;
import org.recap.model.jpa.ReportDataEntity;
import org.recap.model.jpa.ReportEntity;
import org.recap.repository.ReportDetailRepository;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import java.io.File;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
/**
 * Integration tests for {@link ReportGenerator}: each test persists report entities and
 * verifies that a success/failure report is generated to the file system or sent via FTP.
 *
 * Created by peris on 8/17/16.
 */
public class ReportGenerator_UT extends BaseTestCase {
    @Autowired
    ReportDetailRepository reportDetailRepository;
    // Report file name used by the saved entities; cleared by generateReportWithoutFileName
    // to exercise the "no file name" path.
    private String fileName = "test.xml";
    @Value("${etl.report.directory}")
    private String reportDirectory;
    @Autowired
    ReportGenerator reportGenerator;

    @Test
    public void generateFailureReportTest() throws Exception {
        ReportEntity savedReportEntity = saveFailureReportEntity();
        String generatedReportFileName = generateReport(savedReportEntity.getCreatedDate(), "ETL",
                savedReportEntity.getType(), savedReportEntity.getInstitutionName(), ReCAPConstants.FILE_SYSTEM);
        assertReportWrittenToFileSystem(generatedReportFileName);
    }

    @Test
    public void generateSuccessReportTest() throws Exception {
        ReportEntity savedReportEntity = saveSuccessReportEntity();
        String generatedReportFileName = generateReport(savedReportEntity.getCreatedDate(), "ETL",
                savedReportEntity.getType(), savedReportEntity.getInstitutionName(), ReCAPConstants.FILE_SYSTEM);
        assertReportWrittenToFileSystem(generatedReportFileName);
    }

    @Test
    public void generateFailureReportForTwoEntity() throws Exception {
        ReportEntity savedReportEntity1 = saveFailureReportEntity();
        // A second entity saved the same day should be folded into the same report.
        saveFailureReportEntity();
        String generatedReportFileName = generateReport(savedReportEntity1.getCreatedDate(), "ETL",
                savedReportEntity1.getType(), savedReportEntity1.getInstitutionName(), ReCAPConstants.FILE_SYSTEM);
        assertReportWrittenToFileSystem(generatedReportFileName);
    }

    @Test
    public void uploadFailureReportToFTP() throws Exception {
        ReportEntity savedReportEntity = saveFailureReportEntity();
        String generatedReportFileName = generateReport(savedReportEntity.getCreatedDate(), "ETL",
                savedReportEntity.getType(), savedReportEntity.getInstitutionName(), ReCAPConstants.FTP);
        assertNotNull(generatedReportFileName);
    }

    @Test
    public void uploadSuccessReportToFTP() throws Exception {
        ReportEntity savedReportEntity = saveSuccessReportEntity();
        String generatedReportFileName = generateReport(savedReportEntity.getCreatedDate(), "ETL",
                savedReportEntity.getType(), savedReportEntity.getInstitutionName(), ReCAPConstants.FTP);
        assertNotNull(generatedReportFileName);
    }

    @Test
    public void generateReportWithoutFileName() throws Exception {
        ReportEntity savedSuccessReportEntity = saveSuccessReportEntity();
        saveSuccessReportEntity();
        // Clear the file name AFTER saving so generation runs without one.
        fileName = "";
        String generatedReportFileName = generateReport(savedSuccessReportEntity.getCreatedDate(), "ETL",
                savedSuccessReportEntity.getType(), savedSuccessReportEntity.getInstitutionName(), ReCAPConstants.FILE_SYSTEM);
        assertReportWrittenToFileSystem(generatedReportFileName);
    }

    /**
     * Asserts that a file name was returned and the corresponding file exists in the
     * configured report directory.
     */
    private void assertReportWrittenToFileSystem(String generatedReportFileName) {
        assertNotNull(generatedReportFileName);
        File directory = new File(reportDirectory);
        assertTrue(directory.isDirectory());
        assertTrue(new File(directory, generatedReportFileName).exists());
    }

    /** Builds a single header/value pair for a report. */
    private ReportDataEntity reportDataEntity(String headerName, String headerValue) {
        ReportDataEntity reportDataEntity = new ReportDataEntity();
        reportDataEntity.setHeaderName(headerName);
        reportDataEntity.setHeaderValue(headerValue);
        return reportDataEntity;
    }

    /** Persists a failure report entity with representative item-level data. */
    private ReportEntity saveFailureReportEntity() {
        ReportEntity reportEntity = new ReportEntity();
        reportEntity.setFileName(fileName);
        reportEntity.setCreatedDate(new Date());
        reportEntity.setType(ReCAPConstants.FAILURE);
        reportEntity.setInstitutionName("PUL");
        List<ReportDataEntity> reportDataEntities = new ArrayList<>();
        reportDataEntities.add(reportDataEntity(ReCAPConstants.ITEM_BARCODE, "103"));
        reportDataEntities.add(reportDataEntity(ReCAPConstants.CUSTOMER_CODE, "PA"));
        reportDataEntities.add(reportDataEntity(ReCAPConstants.LOCAL_ITEM_ID, "10412"));
        reportDataEntities.add(reportDataEntity(ReCAPConstants.OWNING_INSTITUTION, "PUL"));
        reportEntity.setReportDataEntities(reportDataEntities);
        return reportDetailRepository.save(reportEntity);
    }

    /** Persists a success report entity with representative load counts. */
    private ReportEntity saveSuccessReportEntity() {
        ReportEntity reportEntity = new ReportEntity();
        List<ReportDataEntity> reportDataEntities = new ArrayList<>();
        reportDataEntities.add(reportDataEntity(ReCAPConstants.TOTAL_RECORDS_IN_FILE, String.valueOf(10000)));
        reportDataEntities.add(reportDataEntity(ReCAPConstants.TOTAL_BIBS_LOADED, String.valueOf(10000)));
        reportDataEntities.add(reportDataEntity(ReCAPConstants.TOTAL_HOLDINGS_LOADED, String.valueOf(8000)));
        reportDataEntities.add(reportDataEntity(ReCAPConstants.TOTAL_ITEMS_LOADED, String.valueOf(12000)));
        reportDataEntities.add(reportDataEntity(ReCAPConstants.TOTAL_BIB_HOLDINGS_LOADED, String.valueOf(18000)));
        reportDataEntities.add(reportDataEntity(ReCAPConstants.TOTAL_BIB_ITEMS_LOADED, String.valueOf(22000)));
        reportDataEntities.add(reportDataEntity(ReCAPConstants.FILE_NAME, fileName));
        reportEntity.setFileName(fileName);
        reportEntity.setCreatedDate(new Date());
        reportEntity.setType(ReCAPConstants.SUCCESS);
        reportEntity.setReportDataEntities(reportDataEntities);
        reportEntity.setInstitutionName("PUL");
        return reportDetailRepository.save(reportEntity);
    }

    /**
     * Invokes the report generator for the whole calendar day containing {@code createdDate}.
     *
     * @return the generated report file name (may be null on failure)
     */
    private String generateReport(Date createdDate, String operationType, String reportType,
            String institutionName, String transmissionType) throws InterruptedException {
        Calendar cal = Calendar.getInstance();
        cal.setTime(createdDate);
        cal.set(Calendar.HOUR_OF_DAY, 0);
        cal.set(Calendar.MINUTE, 0);
        cal.set(Calendar.SECOND, 0);
        Date from = cal.getTime();
        cal.set(Calendar.HOUR_OF_DAY, 23);
        cal.set(Calendar.MINUTE, 59);
        cal.set(Calendar.SECOND, 59);
        Date to = cal.getTime();
        String generatedFileName = reportGenerator.generateReport(fileName, operationType, reportType, institutionName, from, to, transmissionType);
        // NOTE(review): fixed sleep presumably waits for an asynchronous route to finish
        // writing/transmitting the report -- confirm whether a deterministic wait exists.
        Thread.sleep(1000);
        return generatedFileName;
    }

    // NOTE(review): unused within this class; retained for backward compatibility in case
    // it is referenced elsewhere.
    class CustomArgumentMatcher extends ArgumentMatcher {
        @Override
        public boolean matches(Object argument) {
            return false;
        }
    }
}
| |
/* Copyright (C) 2013-2014 Computer Sciences Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. */
package ezbake.data.mongo;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Timer;
import com.mongodb.*;
import com.mongodb.util.JSON;
import ezbake.base.thrift.EzSecurityToken;
import ezbake.base.thrift.Visibility;
import ezbake.configuration.constants.EzBakePropertyConstants;
import ezbake.data.base.EzbakeBaseDataService;
import ezbake.data.base.thrift.PurgeItems;
import ezbake.data.base.thrift.PurgeOptions;
import ezbake.data.base.thrift.PurgeResult;
import ezbake.data.common.TokenUtils;
import ezbake.data.common.classification.ClassificationUtils;
import ezbake.data.mongo.conversion.MongoConverter;
import ezbake.data.mongo.driver.thrift.*;
import ezbake.data.mongo.helper.MongoFindHelper;
import ezbake.data.mongo.helper.MongoInsertHelper;
import ezbake.data.mongo.helper.MongoUpdateHelper;
import ezbake.data.mongo.redact.RedactHelper;
import ezbake.data.mongo.thrift.*;
import ezbake.util.AuditEvent;
import ezbake.util.AuditEventType;
import ezbakehelpers.ezconfigurationhelpers.mongo.MongoConfigurationHelper;
import ezbakehelpers.mongoutils.MongoHelper;
import org.apache.accumulo.core.security.VisibilityParseException;
import org.apache.commons.lang.StringUtils;
import org.apache.thrift.TException;
import org.apache.thrift.TProcessor;
import org.bson.types.ObjectId;
import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.util.*;
@SuppressWarnings({"rawtypes", "unchecked"})
public class EzMongoHandler extends EzbakeBaseDataService implements EzMongoDriverService.Iface {
// Thrift-driver-level handler; created and initialized in getThriftProcessor().
MongoDriverHandler driver;
private final Logger appLog = LoggerFactory.getLogger(EzMongoHandler.class);
// MongoDB client connection, established in init().
private Mongo mongo;
// Application name; prefixed onto collection names to namespace collections per app.
protected String appName;
private String appId;
protected String dbName;
private MongoConfigurationHelper mongoConfigurationHelper;
// Helpers encapsulating find/insert/update logic; created in init().
private MongoFindHelper mongoFindHelper;
private MongoInsertHelper mongoInsertHelper;
private MongoUpdateHelper mongoUpdateHelper;
private static final int DEFAULT_MONGODB_PORT = 27017;
protected DB db;
public final static String MONGODB_APP_DATABASE_NAME = "mongodb.app.database.name";
// Operation names, presumably used for permission/visibility checks elsewhere -- confirm.
public final static String READ_OPERATION = "read";
public final static String WRITE_OPERATION = "write";
public final static String DISCOVER_OPERATION = "discover";
public final static String MANAGE_OPERATION = "manage";
// Metrics
private static final String PURGE_TIMER_NAME = MetricRegistry.name(EzMongoHandler.class, "PURGE");
private static final String FIND_TIMER_NAME = MetricRegistry.name(EzMongoHandler.class, "FIND");
private static final String UPDATE_DOCUMENT_TIMER_NAME = MetricRegistry.name(EzMongoHandler.class, "UPDATE", "DOCUMENT");
private static final String INSERT_TIMER_NAME = MetricRegistry.name(EzMongoHandler.class, "INSERT");
private static final String REMOVE_TIMER_NAME = MetricRegistry.name(EzMongoHandler.class, "REMOVE");
//purge tracking collection name
private static final String PURGE_TRACKING_COLL_NAME = "purgetracker";
/**
 * Connects to MongoDB using the injected EzBake configuration and initializes the helper
 * objects, the purge-tracking collection, metrics and the audit logger.
 *
 * @throws Exception if configuration is invalid or the MongoDB connection cannot be made
 */
@SuppressWarnings("resource")
public void init() throws Exception {
    final Properties properties = getConfigurationProperties();
    mongoConfigurationHelper = new MongoConfigurationHelper(properties);
    appLog.info("ezmongo configuration: {}", properties);
    // we append the appName to the collection names to keep the apps separate within Mongo
    appName = properties.getProperty(EzBakePropertyConstants.EZBAKE_APPLICATION_NAME);
    appId = properties.getProperty(EzBakePropertyConstants.EZBAKE_SECURITY_ID);
    // see if we are connecting to MongoDB with username/password
    final String username = mongoConfigurationHelper.getMongoDBUserName();
    final String password = mongoConfigurationHelper.getMongoDBPassword();
    final String hostname = mongoConfigurationHelper.getMongoDBHostName();
    dbName = mongoConfigurationHelper.getMongoDBDatabaseName();
    // SECURITY: never log the password itself; record only whether one was supplied.
    appLog.debug("Mongo username: {} (password {})", username,
            StringUtils.isNotBlank(password) ? "set" : "not set");
    appLog.debug("Mongo host(s): {}", hostname);
    appLog.debug("Mongo dbName: {}", dbName);
    if (StringUtils.isNotBlank(username) && StringUtils.isNotBlank(password)) {
        MongoHelper mongoHelper = new MongoHelper(properties);
        mongo = mongoHelper.getMongo();
    } else {
        // connect to MongoDB that is not username/password protected
        mongo = new MongoClient(hostname);
    }
    db = mongo.getDB(dbName);
    mongoFindHelper = new MongoFindHelper(this);
    mongoInsertHelper = new MongoInsertHelper(appId);
    mongoUpdateHelper = new MongoUpdateHelper(this);
    // create purge tracking collection
    createPurgeTracker();
    // init metrics and audit logger
    initMetrics();
    initAuditLogger(EzMongoHandler.class);
    // TODO: add caching to Mongo
    // cacheManager = (CacheManager)context.getBean("cacheManager");
}
/**
 * Creates the purge-tracker collection (used to track purge activity) if it does not
 * already exist.
 *
 * @throws EzMongoBaseException if the existence check or creation fails
 */
void createPurgeTracker() throws EzMongoBaseException {
    try {
        final String finalCollectionName = getCollectionName(PURGE_TRACKING_COLL_NAME);
        if (!db.collectionExists(finalCollectionName)) {
            db.createCollection(finalCollectionName, new BasicDBObject());
            appLog.info("in createPurgeTracker, created collection: {}", finalCollectionName);
        } else {
            // Previously this logged "created" unconditionally, which was misleading when the
            // collection already existed.
            appLog.info("in createPurgeTracker, collection already exists: {}", finalCollectionName);
        }
    } catch (final Exception e) {
        throw enrichException("createPurgeTracker", e);
    }
}
/**
 * Initialize our timers so that we can gather statistics for EzBake.
 * Timers are registered with the metric registry by name and retrieved on demand later,
 * so the returned Timer instances do not need to be kept in local variables.
 */
private void initMetrics() {
    final MetricRegistry mr = getMetricRegistry();
    // Timers
    mr.timer(EzMongoHandler.PURGE_TIMER_NAME);
    mr.timer(EzMongoHandler.FIND_TIMER_NAME);
    mr.timer(EzMongoHandler.UPDATE_DOCUMENT_TIMER_NAME);
    mr.timer(EzMongoHandler.INSERT_TIMER_NAME);
    mr.timer(EzMongoHandler.REMOVE_TIMER_NAME);
    // Meters: none registered yet.
}
/**
 * Parses a comma-separated "host[:port],host[:port],..." string into server addresses.
 * Entries with an unparsable port or an unknown host are logged and skipped.
 */
private List<ServerAddress> parseMongoServerString(String mongoServers) {
    final List<ServerAddress> servers = new ArrayList<ServerAddress>();
    for (final String server : mongoServers.split(",")) {
        final String[] hostAndPort = server.split(":");
        try {
            final int port = hostAndPort.length == 2
                    ? Integer.parseInt(hostAndPort[1])
                    : DEFAULT_MONGODB_PORT;
            servers.add(new ServerAddress(hostAndPort[0], port));
        } catch (final NumberFormatException e) {
            appLog.error("Unable to parse port number from server string({})", server);
        } catch (final java.net.UnknownHostException e) {
            appLog.error("Unknown host exception when parsing server string({})", server);
        }
    }
    return servers;
}
/**
 * Health check: returns true if the database responds to a collection-name listing.
 * Any failure is deliberately swallowed and reported as an unhealthy (false) result.
 */
@Override
public boolean ping() {
    appLog.info("ping!");
    try {
        db.getCollectionNames();
        return true;
    } catch (final Exception e) {
        return false;
    }
}
/**
 * Initializes the handler and driver and returns the thrift processor.
 *
 * @return the processor, or null if initialization failed (failure is logged)
 */
@Override
public TProcessor getThriftProcessor() {
    try {
        init();
        driver = new MongoDriverHandler(this);
        driver.init();
        return new EzMongoDriverService.Processor(this);
    } catch (final Exception e) {
        // Log via SLF4J with the full stack trace instead of printStackTrace() so the
        // failure is captured in the application log; null signals failure to the caller.
        appLog.error("Failed to initialize EzMongoHandler thrift processor", e);
        return null;
    }
}
/** @return the helper encapsulating find/query logic (set in init()). */
public MongoFindHelper getMongoFindHelper() {
return mongoFindHelper;
}
/** @return the helper encapsulating insert logic (set in init()). */
public MongoInsertHelper getMongoInsertHelper() {
return mongoInsertHelper;
}
/** @return the helper encapsulating update logic (set in init()). */
public MongoUpdateHelper getMongoUpdateHelper() {
return mongoUpdateHelper;
}
/**
 * NOTE(review): always returns true -- the token is not validated here; presumably
 * authentication is enforced elsewhere in the request path. Confirm this is intentional.
 */
@Override
public boolean authenticate_driver(EzSecurityToken ezSecurityToken) throws TException {
return true;
}
/**
 * Emits an audit event of the given type for the user, attaching every entry of
 * {@code argsMap} as an event argument.
 */
protected void auditLog(EzSecurityToken userToken, AuditEventType eventType, Map<String, String> argsMap) {
    final AuditEvent event = new AuditEvent(eventType, userToken);
    for (final Map.Entry<String, String> arg : argsMap.entrySet()) {
        event.arg(arg.getKey(), arg.getValue());
    }
    logEvent(event);
}
/**
 * Null-safe string form of a Mongo object for logging; returns "null" for null input.
 */
protected String printMongoObject(Object obj) {
    // String.valueOf handles the null case ("null") exactly like the previous ternary.
    return String.valueOf(obj);
}
/** Delegates aggregation-pipeline requests to the Mongo driver handler. */
@Override
public ResultsWrapper aggregate_driver(String collection, EzAggregationRequest ezAggregationRequest, EzSecurityToken token) throws TException, EzMongoDriverException {
return driver.aggregate_driver(collection,ezAggregationRequest,token);
}
/**
 * Java-serializes {@code cursor} and stores the bytes as the response data of {@code rw}.
 *
 * @throws IOException if serialization fails
 */
protected void setResponseObjectWithCursor(ResultsWrapper rw, Cursor cursor) throws IOException {
    ByteArrayOutputStream bOut = new ByteArrayOutputStream();
    // ObjectOutputStream buffers block data internally; close (which flushes) before reading
    // the byte array, otherwise the serialized form may be truncated.
    try (ObjectOutputStream out = new ObjectOutputStream(bOut)) {
        out.writeObject(cursor);
    }
    rw.setResponseData(bOut.toByteArray());
}
/** Returns true unless {@code collection} is one of Mongo's internal/system collections. */
private boolean isNotSystemCollection(String collection) {
    final String[] systemCollections = {"$cmd", "system.indexes", "fs.chunks", "fs.files", "system.namespaces"};
    for (final String systemCollection : systemCollections) {
        if (collection.equals(systemCollection)) {
            return false;
        }
    }
    return true;
}
/** Delegates find requests to the Mongo driver handler. */
@Override
public ResultsWrapper find_driver(String collection, EzFindRequest ezFindRequest, EzSecurityToken token) throws TException, EzMongoDriverException {
return driver.find_driver(collection, ezFindRequest, token);
}
/** Delegates insert requests to the Mongo driver handler. */
@Override
public EzWriteResult insert_driver(String collection, EzInsertRequest req, EzSecurityToken token) throws TException, EzMongoDriverException {
return driver.insert_driver(collection,req,token);
}
/** Delegates update requests to the Mongo driver handler. */
@Override
public EzWriteResult update_driver(String collection, EzUpdateRequest req, EzSecurityToken token) throws TException, EzMongoDriverException {
return driver.update_driver(collection, req, token);
}
/** Delegates collection-drop requests to the Mongo driver handler. */
@Override
public ResultsWrapper drop_driver(String collection, EzSecurityToken token) throws TException, EzMongoDriverException {
return driver.drop_driver(collection,token);
}
/** Delegates index-creation requests to the Mongo driver handler. */
@Override
public EzWriteResult createIndex_driver(String collection, EzCreateIndexRequest req, EzSecurityToken token) throws TException, EzMongoDriverException {
return driver.createIndex_driver(collection, req, token);
}
/** Delegates cursor "get more" requests to the Mongo driver handler. */
@Override
public EzGetMoreResponse getMore_driver(String collection, EzGetMoreRequest req, EzSecurityToken token) throws TException, EzMongoDriverException {
return driver.getMore_driver(collection, req, token);
}
/** Delegates parallel-scan requests to the Mongo driver handler. */
@Override
public EzParallelScanResponse parallelScan_driver(String collection, EzParallelScanOptions options, EzSecurityToken token) throws EzMongoDriverException, TException {
return driver.parallelScan_driver(collection, options, token);
}
/** Delegates remove requests to the Mongo driver handler. */
@Override
public EzWriteResult remove_driver(String collection, EzRemoveRequest req, EzSecurityToken token) throws EzMongoDriverException, TException {
return driver.remove_driver(collection, req, token);
}
/** Delegates the max-BSON-object-size query to the Mongo driver handler. */
@Override
public int getMaxBsonObjectSize_driver(EzSecurityToken token) throws EzMongoDriverException, TException {
return driver.getMaxBsonObjectSize_driver(token);
}
/**
 * Java-serializes {@code ex} and attaches the bytes to the response as its Mongo exception.
 *
 * @throws TException wrapping any serialization failure (original cause preserved)
 */
private void addResponseException(EzGetMoreResponse rw, Exception ex) throws TException {
    ByteArrayOutputStream bOut = new ByteArrayOutputStream();
    try {
        // Close (which flushes) the ObjectOutputStream before reading the byte array so the
        // internally buffered block data is not lost.
        try (ObjectOutputStream out = new ObjectOutputStream(bOut)) {
            out.writeObject(ex);
        }
        rw.setMongoexception(bOut.toByteArray());
    } catch (Exception e) {
        throw new TException(e);
    }
}
/**
 * Java-serializes the list of DBObjects and returns the backing byte stream.
 *
 * @throws IOException if serialization fails
 */
protected ByteArrayOutputStream addDBCursorResult(List<DBObject> list) throws IOException {
    ByteArrayOutputStream bOut = new ByteArrayOutputStream();
    // Close (which flushes) before the caller reads the stream, otherwise the buffered
    // block data of ObjectOutputStream may be missing from the result.
    try (ObjectOutputStream out = new ObjectOutputStream(bOut)) {
        out.writeObject(list);
    }
    return bOut;
}
/**** Below are 1.3.x Mongo Dataset methods (with the IDL structs changes) ****/
/** Prefixes {@code collectionName} with the application name so collections are namespaced per app. */
public String getCollectionName(String collectionName) {
return appName + "_" + collectionName;
}
/** @return the underlying Mongo {@link DB} handle (set in init()). */
public DB getDb() {
return db;
}
/**
 * Ensures a MongoDB index exists on the given collection, audit-logging the request and
 * validating the security token first.
 *
 * NOTE(review): DBCollection.ensureIndex is deprecated in later Mongo drivers in favor of
 * createIndex -- confirm the driver version before migrating, since this class also exposes
 * a separate createIndex method.
 *
 * @param collectionName the (un-prefixed) collection name; required
 * @param jsonKeys JSON document describing the index keys
 * @param jsonOptions optional JSON document of index options; may be empty
 * @param security a valid EzSecurity token
 * @throws TException if validation/transport fails
 * @throws EzMongoBaseException if the collection name is missing or indexing fails
 */
@Override
public void ensureIndex(String collectionName, String jsonKeys, String jsonOptions, EzSecurityToken security)
throws TException, EzMongoBaseException {
try {
HashMap<String, String> auditParamsMap = new HashMap<>();
auditParamsMap.put("action", "ensureIndex");
auditParamsMap.put("collectionName", collectionName);
auditParamsMap.put("jsonKeys", jsonKeys);
auditParamsMap.put("jsonOptions", jsonOptions);
auditLog(security, AuditEventType.FileObjectCreate, auditParamsMap);
TokenUtils.validateSecurityToken(security, this.getConfigurationProperties());
if (StringUtils.isEmpty(collectionName)) {
throw new EzMongoBaseException("collectionName is required.");
}
final String finalCollectionName = getCollectionName(collectionName);
final DBObject indexKeys = (DBObject) JSON.parse(jsonKeys);
if (!StringUtils.isEmpty(jsonOptions)) {
final DBObject indexOptions = (DBObject) JSON.parse(jsonOptions);
db.getCollection(finalCollectionName).ensureIndex(indexKeys, indexOptions);
} else {
db.getCollection(finalCollectionName).ensureIndex(indexKeys);
}
appLog.info("ensured index with keys: {}, options: {}", jsonKeys, jsonOptions);
} catch (final Exception e) {
throw enrichException("ensureIndex", e);
}
}
/**
* Create MongoDB indexes for the given fields in some collection.
*
* @param collectionName The Name of the collection the document belongs to
* @param jsonKeys The Mongo index keys
* @param jsonOptions Optional options for the particular fields being indexed.
* @param security A valid EzSecurity token
* @throws TException If authentication fails
* @throws EzMongoBaseException If some other error occurs
*/
@Override
public void createIndex(String collectionName, String jsonKeys, String jsonOptions, EzSecurityToken security)
        throws TException, EzMongoBaseException {
    try {
        // Audit-log the request parameters before doing any work.
        HashMap<String, String> auditParamsMap = new HashMap<>();
        auditParamsMap.put("action", "createIndex");
        auditParamsMap.put("collectionName", collectionName);
        auditParamsMap.put("jsonKeys", jsonKeys);
        auditParamsMap.put("jsonOptions", jsonOptions);
        auditLog(security, AuditEventType.FileObjectCreate, auditParamsMap);
        TokenUtils.validateSecurityToken(security, this.getConfigurationProperties());
        if (StringUtils.isEmpty(collectionName)) {
            throw new EzMongoBaseException("collectionName is required.");
        }
        final String finalCollectionName = getCollectionName(collectionName);
        final DBObject indexKeys = (DBObject) JSON.parse(jsonKeys);
        // The 'options' object is optional
        if (StringUtils.isEmpty(jsonOptions)) {
            db.getCollection(finalCollectionName).createIndex(indexKeys);
        } else {
            final DBObject indexOptions = (DBObject) JSON.parse(jsonOptions);
            // Make sure "ns" and "name" exist in the options; without a
            // string-typed "ns" mongodb rejects the system.indexes insert with:
            // "Cannot authorize inserting into system.indexes documents without a string-typed \"ns\" field."
            // (Null-check the raw value instead of casting to String first, so a
            // non-String value cannot throw ClassCastException here.)
            if (indexOptions.get("ns") == null) {
                final String ns = dbName + "." + finalCollectionName;
                indexOptions.put("ns", ns);
                appLog.info("putting index's ns as : {}", ns);
            }
            if (indexOptions.get("name") == null) {
                // Default index name: key/value pairs joined with '_', e.g. "a_1_b_-1".
                // StringBuilder instead of repeated String concatenation in the loop.
                final StringBuilder nameBuilder = new StringBuilder();
                for (final String key : indexKeys.keySet()) {
                    if (nameBuilder.length() > 0) {
                        nameBuilder.append('_');
                    }
                    nameBuilder.append(key).append('_').append(indexKeys.get(key));
                }
                final String name = nameBuilder.toString();
                indexOptions.put("name", name);
                appLog.info("putting index's name as : {}", name);
            }
            db.getCollection(finalCollectionName).createIndex(indexKeys, indexOptions);
        }
        appLog.info("created index with keys: {}, options: {}", jsonKeys, jsonOptions);
    } catch (final Exception e) {
        throw enrichException("createIndex", e);
    }
}
/**
 * Returns every index on the app-scoped collection, each serialized as a JSON
 * string. Audits the call and validates the caller's token first.
 *
 * @param collectionName logical collection name (app prefix is added here)
 * @param security caller's EzSecurity token
 * @return one JSON string per index document
 * @throws TException if token validation fails
 * @throws EzMongoBaseException wrapping any other failure
 */
@Override
public List<String> getIndexInfo(String collectionName, EzSecurityToken security) throws TException,
        EzMongoBaseException {
    try {
        final HashMap<String, String> auditParams = new HashMap<>();
        auditParams.put("action", "getIndexInfo");
        auditParams.put("collectionName", collectionName);
        auditLog(security, AuditEventType.FileObjectAccess, auditParams);
        TokenUtils.validateSecurityToken(security, this.getConfigurationProperties());
        if (StringUtils.isEmpty(collectionName)) {
            throw new EzMongoBaseException("collectionName is required.");
        }
        final String scopedName = getCollectionName(collectionName);
        final List<String> serialized = new ArrayList<>();
        for (final DBObject indexDoc : db.getCollection(scopedName).getIndexInfo()) {
            serialized.add(JSON.serialize(indexDoc));
        }
        appLog.info("got index info results: {}", serialized);
        return serialized;
    } catch (final Exception e) {
        throw enrichException("getIndexInfo", e);
    }
}
/**
 * Inserts a single JSON document with its EzBake visibility markings.
 * The caller's token is validated, insert-time visibility checks are run via
 * mongoInsertHelper, and the insert is timed against INSERT_TIMER_NAME.
 *
 * @param collectionName logical collection name (app prefix is added here)
 * @param mongoDocument JSON document plus its Visibility markings
 * @param security caller's EzSecurity token
 * @return the generated ObjectId of the inserted document, as a String
 * @throws TException if token validation fails
 * @throws EzMongoBaseException wrapping visibility-check or DB failures
 */
@Override
public String insert(String collectionName, MongoEzbakeDocument mongoDocument,
EzSecurityToken security) throws TException, EzMongoBaseException {
try {
String document = mongoDocument.jsonDocument;
Visibility vis = mongoDocument.getVisibility();
HashMap<String, String> auditParamsMap = new HashMap<>();
auditParamsMap.put("action", "insert");
auditParamsMap.put("collectionName", collectionName);
auditParamsMap.put("mongoDocument", mongoDocument.toString());
auditLog(security, AuditEventType.FileObjectCreate, auditParamsMap);
TokenUtils.validateSecurityToken(security, this.getConfigurationProperties());
if (StringUtils.isEmpty(collectionName)) {
throw new EzMongoBaseException("collectionName is required.");
}
final DBObject dbObject = MongoConverter.toDBObject(document);
// check if the user can see the document visibility
mongoInsertHelper.checkAbilityToInsert(security, vis, dbObject, null, false, false);
final String finalCollectionName = getCollectionName(collectionName);
// Time the actual driver insert only (not the validation above).
Timer.Context context = getMetricRegistry().getTimers().get(INSERT_TIMER_NAME).time();
try {
db.getCollection(finalCollectionName).insert(dbObject);
} finally {
context.stop();
}
// The driver populates "_id" on the DBObject during insert.
final String id = ((ObjectId) dbObject.get("_id")).toString();
appLog.info("inserted - id: {}", id);
return id;
} catch (final Exception e) {
throw enrichException("insert", e);
}
}
/**
 * Runs every insert-time visibility check against the caller's token, in
 * order: required security fields, formal visibility, external community
 * visibility, then platform object visibilities. The manage-visibility check
 * on the existing object only applies to manage operations.
 *
 * @param security caller's EzSecurity token
 * @param vis visibility markings supplied with the document (each validator
 *            reads it only when fromDriver is false)
 * @param newObject document being inserted
 * @param existingObject stored document being managed; read only when isManageOperation
 * @param isManageOperation true when modifying an existing document
 * @param fromDriver true when invoked via the raw driver path, where the
 *                   security fields are already present on the DBObject
 * @throws VisibilityParseException if a visibility expression cannot be parsed
 * @throws EzMongoBaseException if any check fails
 */
public void checkAbilityToInsert(EzSecurityToken security, Visibility vis, DBObject newObject,
DBObject existingObject, boolean isManageOperation, boolean fromDriver)
throws VisibilityParseException, EzMongoBaseException {
validateRequiredFieldsForInsert(vis, newObject, fromDriver);
validateFormalVisibilityForInsert(security, vis, newObject, fromDriver);
validateExternalCommunityVisibilityForInsert(security, vis, newObject, fromDriver);
if (isManageOperation) {
validatePlatformVisibilitiesForManagingExistingDBObject(security, vis, existingObject, fromDriver);
}
validatePlatformVisibilitiesForInsertingNewDBObject(security, vis, newObject, fromDriver);
}
/**
 * Verifies the caller may "manage" (modify) an existing document: the user's
 * platform object auths must intersect the document's manage-visibility set.
 * A document with no manage-visibility set is manageable by anyone.
 *
 * @param security caller's token (source of platform auths)
 * @param vis visibility markings; read only when fromDriver is false
 * @param existingObject the stored document being managed
 * @param fromDriver true when the manage visibility comes off the stored DBObject
 * @throws EzMongoBaseException when the intersection is empty
 */
private void validatePlatformVisibilitiesForManagingExistingDBObject(EzSecurityToken security, Visibility vis,
        DBObject existingObject, boolean fromDriver) throws EzMongoBaseException {
    Set<Long> platformAuths = security.getAuthorizations().getPlatformObjectAuthorizations();
    Set<Long> platformViz = null;
    if (fromDriver) {
        // mongodb has this field saved as a List - convert it to Set.
        List manageVizList = (List) existingObject.get(RedactHelper.PLATFORM_OBJECT_MANAGE_VISIBILITY_FIELD);
        if (manageVizList != null) {
            platformViz = new HashSet<Long>(manageVizList);
        }
    } else {
        if (vis.isSetAdvancedMarkings() && vis.getAdvancedMarkings().isSetPlatformObjectVisibility()) {
            platformViz = vis.getAdvancedMarkings().getPlatformObjectVisibility().getPlatformObjectManageVisibility();
        }
    }
    // Intersect on a copy: the original retainAll'd platformViz in place, which
    // silently mutated the caller-supplied Visibility's manage-visibility set.
    Set<Long> effectiveViz = platformViz;
    if (platformAuths != null && platformViz != null) {
        effectiveViz = new HashSet<Long>(platformViz);
        effectiveViz.retainAll(platformAuths);
    }
    final boolean canManageViz = effectiveViz == null || !effectiveViz.isEmpty();
    if (!canManageViz) {
        // Record the (unmodified) required viz on the object for logging purposes.
        existingObject.put(RedactHelper.PLATFORM_OBJECT_MANAGE_VISIBILITY_FIELD, platformViz);
        final String message =
            "User does not have all the required Platform auths to manage the existing DBObject: " + platformAuths
                + ", platformViz needed: " + platformViz;
        appLog.error(message);
        throw new EzMongoBaseException(message);
    }
}
/**
 * Verifies the caller may insert the new document: the user's platform object
 * auths must intersect the document's write-visibility set. A document with
 * no write-visibility set is insertable by anyone.
 *
 * @param security caller's token (source of platform auths)
 * @param vis visibility markings; read only when fromDriver is false
 * @param newObject the document being inserted
 * @param fromDriver true when the write visibility comes off the DBObject itself
 * @throws EzMongoBaseException when the intersection is empty
 */
private void validatePlatformVisibilitiesForInsertingNewDBObject(EzSecurityToken security, Visibility vis, DBObject newObject,
        boolean fromDriver) throws EzMongoBaseException {
    Set<Long> platformAuths = security.getAuthorizations().getPlatformObjectAuthorizations();
    Set<Long> platformViz = null;
    if (fromDriver) {
        // mongodb stores this field as a List (see the manage-visibility check
        // above); the original cast it straight to Set<Long>, which would throw
        // ClassCastException on the driver path. Accept either form.
        Object writeVizObj = newObject.get(RedactHelper.PLATFORM_OBJECT_WRITE_VISIBILITY_FIELD);
        if (writeVizObj instanceof Set) {
            platformViz = (Set<Long>) writeVizObj;
        } else if (writeVizObj instanceof List) {
            platformViz = new HashSet<Long>((List) writeVizObj);
        }
    } else {
        if (vis.isSetAdvancedMarkings() && vis.getAdvancedMarkings().isSetPlatformObjectVisibility()) {
            platformViz = vis.getAdvancedMarkings().getPlatformObjectVisibility().getPlatformObjectWriteVisibility();
        }
    }
    // Intersect on a copy: the original retainAll'd platformViz in place, which
    // silently mutated the caller-supplied Visibility's write-visibility set.
    Set<Long> effectiveViz = platformViz;
    if (platformAuths != null && platformViz != null) {
        effectiveViz = new HashSet<Long>(platformViz);
        effectiveViz.retainAll(platformAuths);
    }
    final boolean canInsertViz = effectiveViz == null || !effectiveViz.isEmpty();
    if (!canInsertViz) {
        // Record the (unmodified) required viz on the object for logging purposes.
        newObject.put(RedactHelper.PLATFORM_OBJECT_WRITE_VISIBILITY_FIELD, platformViz);
        final String message =
            "User does not have all the required Platform auths to insert: " + platformAuths
                + ", platformViz needed: " + platformViz;
        appLog.error(message);
        throw new EzMongoBaseException(message);
    }
}
/**
 * Verifies the caller's external community auths satisfy the document's
 * external community visibility. On the driver path the _ezExtV field is an
 * OR-of-ANDs list-of-lists: access is granted if the token's auths contain
 * every element of at least one inner list. On the service path the boolean
 * expression from the AdvancedMarkings is evaluated instead.
 *
 * @throws EzMongoBaseException when the caller's auths do not satisfy the visibility
 * @throws VisibilityParseException if the boolean expression cannot be parsed
 */
private void validateExternalCommunityVisibilityForInsert(EzSecurityToken security, Visibility vis, DBObject dbObject, boolean fromDriver) throws EzMongoBaseException, VisibilityParseException {
if (fromDriver) {
Object extCVFieldObj = dbObject.get(RedactHelper.EXTERNAL_COMMUNITY_VISIBILITY_FIELD);
if (extCVFieldObj == null) {
// No external community restriction on this document.
return;
}
// we already have set the _ezExtV field with the double array [[ ]] format.
// iterate through the inner list elements and see if the token's auths have
// any of them as a whole.
boolean canInsertExtViz = false;
List outerList = (List)extCVFieldObj;
Set<String> tokenAuths = security.getAuthorizations().getExternalCommunityAuthorizations();
for (Object innerListObj : outerList) {
// check if the token auths have all of this inner list element
List innerList = (List)innerListObj;
Set<String> innerSet = new HashSet<String>(innerList);
if (tokenAuths.containsAll(innerSet)) {
canInsertExtViz = true;
break;
}
}
if (!canInsertExtViz) {
final String message =
"User does not have all the required External Community auths to insert: " + outerList
+ ", auths List: " + tokenAuths;
appLog.error(message);
throw new EzMongoBaseException(message);
}
} else {
if (!vis.isSetAdvancedMarkings()) {
appLog.info("validateExternalCommunityVisibilityForInsert - AdvancedMarkings is not set.");
return;
}
// check if the user can insert the External Community Visibility (boolean expression).
String externalCommunityBooleanExpression = vis.getAdvancedMarkings().getExternalCommunityVisibility();
if (!StringUtils.isEmpty(externalCommunityBooleanExpression)) {
appLog.info("checking if the user has the required classification to insert: {}", externalCommunityBooleanExpression);
final boolean canInsertExternalCommunityViz =
ClassificationUtils.confirmAuthsForAccumuloClassification(security, externalCommunityBooleanExpression, ClassificationUtils.USER_EXTERNAL_COMMUNITY_AUTHS);
if (!canInsertExternalCommunityViz) {
final String message =
"User does not have all the required External Community auths to insert: " + externalCommunityBooleanExpression;
appLog.error(message);
throw new EzMongoBaseException(message);
}
}
}
}
/**
 * Verifies the caller's formal auths satisfy the document's formal
 * visibility. On the driver path the _ezFV field is an OR-of-ANDs
 * list-of-lists: access is granted if the token's auths contain every element
 * of at least one inner list. On the service path the formal visibility
 * boolean expression is evaluated instead.
 *
 * @throws EzMongoBaseException when the caller's auths do not satisfy the visibility
 * @throws VisibilityParseException if the boolean expression cannot be parsed
 */
private void validateFormalVisibilityForInsert(
EzSecurityToken security,
Visibility vis,
DBObject dbObject,
boolean fromDriver) throws EzMongoBaseException, VisibilityParseException {
if (fromDriver) {
Object fvFieldObj = dbObject.get(RedactHelper.FORMAL_VISIBILITY_FIELD);
if (fvFieldObj == null) {
// No formal visibility restriction on this document.
return;
}
// we already have set the _ezFV field with the double array [[ ]] format.
// iterate through the inner list elements and see if the token's auths have
// any of them as a whole.
boolean canInsertBooleanExpression = false;
List outerList = (List)fvFieldObj;
Set<String> tokenAuths = security.getAuthorizations().getFormalAuthorizations();
for (Object innerListObj : outerList) {
// check if the token auths have all of this inner list element
List innerList = (List)innerListObj;
Set<String> innerSet = new HashSet<String>(innerList);
if (tokenAuths.containsAll(innerSet)) {
canInsertBooleanExpression = true;
break;
}
}
if (!canInsertBooleanExpression) {
final String message =
"User does not have all the required Formal Visibility auths to insert: " + outerList
+ ", auths List: " + tokenAuths;
appLog.error(message);
throw new EzMongoBaseException(message);
}
} else {
// check if the user can insert the Formal Visibility (Boolean expression).
String classification = vis.getFormalVisibility();
if (!StringUtils.isEmpty(classification)) {
appLog.info("checking if the user has the required classification to insert: {}", classification);
final boolean canInsertBooleanExpression = ClassificationUtils.confirmAuthsForAccumuloClassification(
security,
classification,
ClassificationUtils.USER_FORMAL_AUTHS);
if (!canInsertBooleanExpression) {
final String message =
"User does not have all the required classifications to insert: " + classification;
appLog.error(message);
throw new EzMongoBaseException(message);
}
}
}
}
/**
 * Verifies the document carries at least one EzBake security field
 * (_ezFV, _ezExtV, or _ezObjRV). On the service path (fromDriver == false)
 * the fields are first derived from the Visibility and stamped onto the
 * document; on the driver path they must already be present.
 *
 * @throws EzMongoBaseException when none of the three security fields is present
 * @throws VisibilityParseException if deriving the fields from vis fails
 */
private void validateRequiredFieldsForInsert(Visibility vis, DBObject dbObject, boolean fromDriver) throws VisibilityParseException, EzMongoBaseException {
// Check if the minimally required security fields exist:
// at least one of: _ezFV, _ezExtV, _ezObjRV
if (!fromDriver) {
RedactHelper.setSecurityFieldsInDBObject(dbObject, vis, appId);
}
Object ezFV = dbObject.get(RedactHelper.FORMAL_VISIBILITY_FIELD);
Object ezExtV = dbObject.get(RedactHelper.EXTERNAL_COMMUNITY_VISIBILITY_FIELD);
Object ezObjRV = dbObject.get(RedactHelper.PLATFORM_OBJECT_READ_VISIBILITY_FIELD);
if (ezFV == null && ezExtV == null && ezObjRV == null) {
throw new EzMongoBaseException("At least one security field (_ezFV, _ezExtV, _ezObjRV) is required for inserts.");
}
}
/**
 * Removes documents matching a JSON query, restricted to documents the caller
 * is allowed to write: the query is first run through the redact pipeline
 * (WRITE_OPERATION) to collect visible _ids, and only those are deleted.
 *
 * @param collectionName logical collection name (app prefix is added here)
 * @param jsonQuery JSON query selecting the documents to remove
 * @param security caller's EzSecurity token
 * @return the number of documents actually removed
 * @throws TException if token validation fails
 * @throws EzMongoBaseException wrapping any other failure
 */
@Override
public int remove(String collectionName, String jsonQuery, EzSecurityToken security) throws TException,
EzMongoBaseException {
try {
HashMap<String, String> auditParamsMap = new HashMap<>();
auditParamsMap.put("action", "remove");
auditParamsMap.put("collectionName", collectionName);
auditParamsMap.put("jsonQuery", jsonQuery);
auditLog(security, AuditEventType.FileObjectDelete, auditParamsMap);
TokenUtils.validateSecurityToken(security, this.getConfigurationProperties());
if (StringUtils.isEmpty(collectionName)) {
throw new EzMongoBaseException("collectionName is required.");
}
final String finalCollectionName = getCollectionName(collectionName);
int removedCount = 0;
// see if we are able to remove the data in db with user's classification in the user token
final List<DBObject> results =
mongoFindHelper.findElements(collectionName, jsonQuery, "{ _id: 1}", null, 0, 0, false, security, false, WRITE_OPERATION);
if (results.size() > 0) {
// construct a list of ObjectIds to use as the filter
final List<ObjectId> idList = new ArrayList<ObjectId>();
for (final DBObject result : results) {
appLog.info("can remove DBObject (_id): {}", result);
idList.add((ObjectId) result.get("_id"));
}
final DBObject inClause = new BasicDBObject("$in", idList);
final DBObject query = new BasicDBObject("_id", inClause);
// Time only the driver-side remove.
Timer.Context context = getMetricRegistry().getTimers().get(REMOVE_TIMER_NAME).time();
try {
final WriteResult writeResult = db.getCollection(finalCollectionName).remove(query);
appLog.info("removed - write result: {}", writeResult.toString());
removedCount = writeResult.getN();
} finally {
context.stop();
}
} else {
appLog.info("Did not find any documents to remove with the query {}", jsonQuery);
}
appLog.info("after remove, removedCount: {}", removedCount);
return removedCount;
} catch (final Exception e) {
throw enrichException("remove", e);
}
}
/**
 * Updates documents matching the query with the supplied document, delegating
 * the visibility-aware work to mongoUpdateHelper. An empty/missing query
 * defaults to "{}" (match all). The operation is timed against
 * UPDATE_DOCUMENT_TIMER_NAME.
 *
 * @param collectionName logical collection name
 * @param mongoUpdateParams query, replacement/operator document, flags and visibility
 * @param security caller's EzSecurity token
 * @return the number of documents updated
 * @throws TException if token validation fails downstream
 * @throws EzMongoBaseException wrapping any other failure
 */
@Override
public int update(String collectionName, MongoUpdateParams mongoUpdateParams, EzSecurityToken security) throws TException, EzMongoBaseException {
try {
String jsonQuery = StringUtils.isEmpty(mongoUpdateParams.jsonQuery) ? "{}" : mongoUpdateParams.jsonQuery;
String jsonDocument = mongoUpdateParams.mongoDocument.jsonDocument;
boolean updateWithOperators = mongoUpdateParams.updateWithOperators;
Visibility vis = mongoUpdateParams.mongoDocument.getVisibility();
HashMap<String, String> auditParamsMap = new HashMap<>();
auditParamsMap.put("action", "update");
auditParamsMap.put("collectionName", collectionName);
auditParamsMap.put("mongoUpdateParams", mongoUpdateParams.toString());
auditLog(security, AuditEventType.FileObjectModify, auditParamsMap);
Timer.Context context = getMetricRegistry().getTimers().get(UPDATE_DOCUMENT_TIMER_NAME).time();
int updatedCount;
try {
updatedCount =
mongoUpdateHelper.updateDocument(collectionName, jsonQuery, jsonDocument, updateWithOperators, vis, security);
appLog.info("after update, updatedCount: {}", updatedCount);
} finally {
context.stop();
}
return updatedCount;
} catch (final Exception e) {
throw enrichException("update", e);
}
}
/**
 * Counts every document in the collection visible to the caller, by running
 * getCountFromQuery with an empty (match-all) query.
 *
 * @param collectionName logical collection name
 * @param security caller's EzSecurity token
 * @return the number of visible documents
 * @throws TException if token validation fails downstream
 * @throws EzMongoBaseException wrapping any other failure
 */
@Override
public long getCount(String collectionName, EzSecurityToken security) throws TException, EzMongoBaseException {
    try {
        final HashMap<String, String> auditParams = new HashMap<>();
        auditParams.put("action", "getCount");
        auditParams.put("collectionName", collectionName);
        auditLog(security, AuditEventType.FileObjectAccess, auditParams);
        // An empty query counts all (visible) documents.
        return getCountFromQuery(collectionName, "{}", security);
    } catch (final Exception e) {
        throw enrichException("getCount", e);
    }
}
/**
 * Counts the documents matching a JSON query, applying the EzBake redact
 * aggregation pipeline so only documents visible to the caller are counted.
 *
 * @param collectionName logical collection name (app prefix is added here)
 * @param jsonQuery JSON query to match against
 * @param security caller's EzSecurity token
 * @return the number of matching, visible documents (0 when none)
 * @throws TException if token validation fails
 * @throws EzMongoBaseException wrapping any other failure
 */
@Override
public long getCountFromQuery(String collectionName, String jsonQuery, EzSecurityToken security)
        throws TException, EzMongoBaseException {
    try {
        HashMap<String, String> auditParamsMap = new HashMap<>();
        auditParamsMap.put("action", "getCountFromQuery");
        auditParamsMap.put("collectionName", collectionName);
        auditParamsMap.put("jsonQuery", jsonQuery);
        auditLog(security, AuditEventType.FileObjectAccess, auditParamsMap);
        TokenUtils.validateSecurityToken(security, this.getConfigurationProperties());
        if (StringUtils.isEmpty(collectionName)) {
            throw new EzMongoBaseException("collectionName is required.");
        }
        final DBObject queryCommand = (DBObject) JSON.parse(jsonQuery);
        final DBObject query = new BasicDBObject("$match", queryCommand);
        final String finalCollectionName = getCollectionName(collectionName);
        appLog.info("getCountFromQuery, finalCollectionName: {}, {}", finalCollectionName, jsonQuery);
        // Redact pipeline stages plus a trailing counter stage.
        final DBObject[] aggregationCommandsArray =
                mongoFindHelper.getFindAggregationCommandsArray_withcounter(0, 0, null, null, security, true, READ_OPERATION);
        final AggregationOutput aggregationOutput =
                db.getCollection(finalCollectionName).aggregate(query, aggregationCommandsArray);
        final BasicDBList resultsList = (BasicDBList) aggregationOutput.getCommandResult().get("result");
        long count = 0;
        // Guard against a missing "result" entry — the original dereferenced
        // resultsList unconditionally and could NPE (textSearch guards this case).
        if (resultsList != null && resultsList.size() > 0) {
            final BasicDBObject resultsObj = (BasicDBObject) resultsList.get(0);
            count = resultsObj.getLong("count");
        }
        return count;
    } catch (final Exception e) {
        throw enrichException("getCountFromQuery", e);
    }
}
/**
 * Runs a MongoDB $text search over the collection through the redact
 * aggregation pipeline, returning each visible match serialized as JSON.
 * Requires a text index on the collection; a null command result is treated
 * as "no text index" and raised as an error.
 *
 * @param collectionName logical collection name (app prefix is added here)
 * @param searchText the text to search for
 * @param security caller's EzSecurity token
 * @return JSON strings of the matching, visible documents
 * @throws TException if token validation fails
 * @throws EzMongoBaseException wrapping any other failure (incl. missing text index)
 */
@Override
public List<String> textSearch(String collectionName, String searchText, EzSecurityToken security)
throws TException, EzMongoBaseException {
try {
HashMap<String, String> auditParamsMap = new HashMap<>();
auditParamsMap.put("action", "textSearch");
auditParamsMap.put("collectionName", collectionName);
auditParamsMap.put("searchText", searchText);
auditLog(security, AuditEventType.FileObjectAccess, auditParamsMap);
TokenUtils.validateSecurityToken(security, this.getConfigurationProperties());
if (StringUtils.isEmpty(collectionName)) {
throw new EzMongoBaseException("collectionName is required.");
}
// Build { $match: { $text: { $search: <text> } } } as the pipeline head.
final DBObject searchObj = new BasicDBObject("$search", searchText);
final DBObject textObj = new BasicDBObject("$text", searchObj);
final DBObject query = new BasicDBObject("$match", textObj);
final String finalCollectionName = getCollectionName(collectionName);
appLog.info("textSearch, finalCollectionName: {}, query: {}", finalCollectionName, query);
final DBObject[] aggregationCommandsArray = mongoFindHelper.getFindAggregationCommandsArray(0, 0, null, null, security, READ_OPERATION);
final AggregationOutput aggregationOutput =
db.getCollection(finalCollectionName).aggregate(query, aggregationCommandsArray);
final BasicDBList commandResults = (BasicDBList) aggregationOutput.getCommandResult().get("result");
final List<String> results = new ArrayList<String>();
if (commandResults != null) {
for (final Object obj : commandResults) {
final DBObject dbo = (DBObject) obj;
results.add(JSON.serialize(dbo));
}
} else {
final String message = "Text search command results were null - there probably is no text index.";
appLog.error(message);
throw new EzMongoBaseException(message);
}
appLog.info("in textSearch, results size: {}", results.size());
return results;
} catch (final Exception e) {
throw enrichException("textSearch", e);
}
}
/**
 * Finds documents matching the supplied query/projection/sort, restricted via
 * the redact pipeline to documents the caller may read. An empty/missing
 * query defaults to "{}". The operation is timed against FIND_TIMER_NAME.
 *
 * @param collectionName logical collection name
 * @param mongoFindParams query, projection, sort, skip/limit and id-format flags
 * @param security caller's EzSecurity token
 * @return matching documents serialized as JSON strings
 * @throws EzMongoBaseException wrapping any failure
 * @throws TException if token validation fails
 */
@Override
public List<String> find(String collectionName, MongoFindParams mongoFindParams, EzSecurityToken security)
throws EzMongoBaseException, TException {
try {
String jsonQuery = StringUtils.isEmpty(mongoFindParams.jsonQuery) ? "{}" : mongoFindParams.jsonQuery;
String jsonProjection = mongoFindParams.jsonProjection;
String jsonSort = mongoFindParams.jsonSort;
int skip = mongoFindParams.skip;
int limit = mongoFindParams.limit;
boolean returnPlainObjectIdString = mongoFindParams.returnPlainObjectIdString;
HashMap<String, String> auditParamsMap = new HashMap<>();
auditParamsMap.put("action", "find");
auditParamsMap.put("collectionName", collectionName);
auditParamsMap.put("mongoFindParams", printMongoObject(mongoFindParams));
auditLog(security, AuditEventType.FileObjectAccess, auditParamsMap);
TokenUtils.validateSecurityToken(security, this.getConfigurationProperties());
if (StringUtils.isEmpty(collectionName)) {
throw new EzMongoBaseException("collectionName is required.");
}
// Time only the redacted find itself.
Timer.Context context = getMetricRegistry().getTimers().get(FIND_TIMER_NAME).time();
List<String> results = null;
try {
results = mongoFindHelper.findElements(collectionName, jsonQuery, jsonProjection, jsonSort, skip, limit,
returnPlainObjectIdString, security, true, READ_OPERATION);
appLog.info("in find, results size: {}", results.size());
} finally {
context.stop();
}
return results;
} catch (final Exception e) {
throw enrichException("find", e);
}
}
/**
 * Reports whether the app-scoped collection exists in the database.
 *
 * @param collectionName logical collection name (app prefix is added here)
 * @param security caller's EzSecurity token
 * @return true when the collection exists
 * @throws TException if token validation fails
 * @throws EzMongoBaseException wrapping any other failure
 */
@Override
public boolean collectionExists(String collectionName, EzSecurityToken security) throws TException,
        EzMongoBaseException {
    try {
        final HashMap<String, String> auditParams = new HashMap<>();
        auditParams.put("action", "collectionExists");
        auditParams.put("collectionName", collectionName);
        auditLog(security, AuditEventType.FileObjectAccess, auditParams);
        TokenUtils.validateSecurityToken(security, this.getConfigurationProperties());
        if (StringUtils.isEmpty(collectionName)) {
            throw new EzMongoBaseException("collectionName is required.");
        }
        final boolean exists = db.collectionExists(getCollectionName(collectionName));
        appLog.info("in collectionExists, collection exists: {}", exists);
        return exists;
    } catch (final Exception e) {
        throw enrichException("collectionExists", e);
    }
}
/**
 * Creates the app-scoped collection with default options.
 *
 * @param collectionName logical collection name (app prefix is added here)
 * @param security caller's EzSecurity token
 * @throws TException if token validation fails
 * @throws EzMongoBaseException wrapping any other failure
 */
@Override
public void createCollection(String collectionName, EzSecurityToken security) throws TException,
        EzMongoBaseException {
    try {
        final HashMap<String, String> auditParams = new HashMap<>();
        auditParams.put("action", "createCollection");
        auditParams.put("collectionName", collectionName);
        auditLog(security, AuditEventType.FileObjectCreate, auditParams);
        TokenUtils.validateSecurityToken(security, this.getConfigurationProperties());
        if (StringUtils.isEmpty(collectionName)) {
            throw new EzMongoBaseException("collectionName is required.");
        }
        final String scopedName = getCollectionName(collectionName);
        // Explicit creation with an empty options document.
        db.createCollection(scopedName, new BasicDBObject());
        appLog.info("in createCollection, created collection: {}", scopedName);
    } catch (final Exception e) {
        throw enrichException("createCollection", e);
    }
}
/**
 * Drops the app-scoped collection.
 *
 * @param collectionName logical collection name (app prefix is added here)
 * @param security caller's EzSecurity token
 * @throws TException if token validation fails
 * @throws EzMongoBaseException wrapping any other failure
 */
@Override
public void dropCollection(String collectionName, EzSecurityToken security) throws TException,
        EzMongoBaseException {
    try {
        final HashMap<String, String> auditParams = new HashMap<>();
        auditParams.put("action", "dropCollection");
        auditParams.put("collectionName", collectionName);
        auditLog(security, AuditEventType.FileObjectDelete, auditParams);
        TokenUtils.validateSecurityToken(security, this.getConfigurationProperties());
        if (StringUtils.isEmpty(collectionName)) {
            throw new EzMongoBaseException("collectionName is required.");
        }
        final String scopedName = getCollectionName(collectionName);
        db.getCollection(scopedName).drop();
        appLog.info("in dropCollection, dropped collection: {}", scopedName);
    } catch (final Exception e) {
        throw enrichException("dropCollection", e);
    }
}
/**
 * Returns the distinct values of a field among documents the caller may read:
 * the query first runs through the redact pipeline to collect visible _ids,
 * then distinct() is evaluated over only those documents. An empty/missing
 * query defaults to match-all.
 *
 * @param collectionName logical collection name
 * @param mongoDistinctParams the field name and optional JSON query
 * @param securityToken caller's EzSecurity token
 * @return distinct values converted to Strings; empty when nothing is visible
 * @throws TException if token validation fails downstream
 * @throws EzMongoBaseException wrapping any other failure
 */
@Override
public List<String> distinct(String collectionName, MongoDistinctParams mongoDistinctParams, EzSecurityToken securityToken) throws TException, EzMongoBaseException {
List<String> results = new ArrayList<>();
String field = mongoDistinctParams.getField();
String jsonQuery = mongoDistinctParams.getQuery();
if (StringUtils.isEmpty(jsonQuery)) {
jsonQuery = "{ }";
}
try {
// call the redact pipeline
List<DBObject> findResults = mongoFindHelper.findElements(collectionName, jsonQuery, null, null, 0, 0,
false, securityToken, false, READ_OPERATION);
if (findResults.size() > 0) {
// construct a list of ObjectIds to use as the filter
final List<ObjectId> idList = new ArrayList<ObjectId>();
for (final DBObject findResult : findResults) {
appLog.info("can find DBObject (_id): {}", findResult);
idList.add((ObjectId) findResult.get("_id"));
}
final DBObject inClause = new BasicDBObject("$in", idList);
final DBObject query = new BasicDBObject("_id", inClause);
final String finalCollectionName = getCollectionName(collectionName);
// get the distinct results from the redacted list
List distinctResults = db.getCollection(finalCollectionName).distinct(field, query);
// convert to Strings
results = mongoFindHelper.addMongoResultsToList(distinctResults, false, true);
appLog.info("Distinct values for field {}, jsonQuery {}: {}", field, jsonQuery, results);
}
} catch (Exception e) {
throw enrichException("distinct", e);
}
return results;
}
/* Currently not used - if you want to create indexes dynamically, use the ezmongo CLI.
private void createIndexes() throws Exception {
InputStream inputStream = null;
Resource resource = null;
try {
resource = new ClassPathResource("indexes.txt");
inputStream = resource.getInputStream();
final Scanner scanner0 = new Scanner(inputStream);
DBCollection collection = null;
while (scanner0.hasNext()) {
final String index = scanner0.nextLine().trim();
collection = null;
if (index.startsWith("#") || index.length() == 0 || "".equals(index)) {
// skip over the comments or empty lines
continue;
} else {
final int spaceIdx = index.indexOf(" ");
final String collectionName = index.substring(0, spaceIdx).trim();
final String indexJson = index.substring(spaceIdx).trim();
if (collectionName.startsWith("{")) {
// either someone didn't put in a collection name with the index
// JSON or they started the collection name with the "{"
// log it and continue
appLog.info("Invalid collection name to create index: {}", collectionName);
continue;
}
final String finalCollectionName = getCollectionName(collectionName);
if (!mongoTemplate.collectionExists(finalCollectionName)) {
collection = mongoTemplate.createCollection(finalCollectionName);
} else {
collection = mongoTemplate.getCollection(finalCollectionName);
}
// check the index JSON
// TODO: this works but it's probably better to use a regex
// to retrieve the parts of the JSON that we want
if (indexJson.indexOf("},") != -1) {
final String[] tokens = indexJson.split("},");
final DBObject keys = retrieveDBObject(tokens[0]);
final DBObject optionsIn = retrieveDBObject(tokens[1]);
collection.ensureIndex(keys, optionsIn);
} else {
// only have the keys portion
final DBObject keys = retrieveDBObject(indexJson);;
collection.ensureIndex(keys);
}
}
}
} catch (final IOException ioe) {
appLog.error("Encountered an IOException: " + ioe.getCause());
} catch (final Exception e) {
e.printStackTrace();
} finally {
if (inputStream != null) {
try {
inputStream.close();
} catch (final IOException ignored) {
}
}
}
}
private DBObject retrieveDBObject(String jsonString) {
final DBObject dbo = new BasicDBObject();
String s = jsonString.trim();
if (s.startsWith("{")) {
s = s.substring(1);
}
if (s.endsWith("}")) {
s = s.substring(0, s.indexOf("}"));
}
final String[] tokens = s.split(",");
for (final String token : tokens) {
final String[] keyValue = token.split(":");
if (keyValue[1].toLowerCase().trim().equalsIgnoreCase("true")
|| keyValue[1].toLowerCase().trim().equalsIgnoreCase("false")) {
dbo.put(keyValue[0], keyValue[1]);
} else {
dbo.put(keyValue[0], Integer.parseInt(keyValue[1].trim()));
}
}
return dbo;
}
*/
/**
* A method to add detail to the Thrift-serialized Exception.
*
* Since the underlying MongoDB instance may throw some unknown instance of java.lang.Exception in our usage, we
* catch the base type in this handler and send its classname and message back to the client.
*
* @param step the name of the operation during which the exception was caught
* @param e the caught exception
* @return an EzMongoBaseException whose message includes the original exception's class name and message
*/
private EzMongoBaseException enrichException(String step, Exception e) {
    // Build the prefix once so the log line and the client-facing message match.
    final String prefix = "Caught exception " + e.getClass().getCanonicalName() + " in " + step + ": ";
    appLog.error(prefix, e);
    return new EzMongoBaseException(prefix + e.getMessage());
}
/**
 * Looks up which collection a given purge id was recorded against in the
 * app's purge tracking collection.
 *
 * @param purgeId the purge id to look up
 * @return the tracked collection name, or null when no single record matches
 */
String getCollNameOfPurgeId(long purgeId) {
    String collectionName = null;
    // Typed collections instead of the raw Set/HashSet the original used.
    Set<Long> purgeIds = new HashSet<>();
    purgeIds.add(purgeId);
    appLog.info("Get Collection Name of Purge Id {} in Purge Tracker Collection", purgeId);
    DBObject query = new BasicDBObject();
    query.put(RedactHelper.APP_ID_FIELD, appId);
    query.put(RedactHelper.PURGE_ID, new BasicDBObject("$in", purgeIds));
    String trackingPurgeCollName = getCollectionName(PURGE_TRACKING_COLL_NAME);
    DBCursor cursor = db.getCollection(trackingPurgeCollName).find(query);
    // Only trust an unambiguous, single-record match.
    if (cursor.length() == 1) {
        appLog.info("Found a Record getting collection name");
        collectionName = (String) cursor.one().get(RedactHelper.PURGE_TRACKING_COLL_FIELD);
    }
    appLog.info("getCollNameOfPurgeId, returning Collection Name {}", collectionName);
    return collectionName;
}
/**
 * Records which collection a purge id is operating on in the app's purge
 * tracking collection: inserts a new tracking record when the purge id is not
 * yet present, otherwise updates the existing record.
 *
 * @param purgeId id of the purge operation
 * @param purgeCollName name of the collection currently being purged
 * @param token caller's token, used on the insert path
 */
void updatePurgeTracker(long purgeId, String purgeCollName, EzSecurityToken token) {
    appLog.info("updatePurgeTracker with Purge Id {} and Collection Name {}", purgeId, purgeCollName);
    String trackingPurgeCollName = getCollectionName(PURGE_TRACKING_COLL_NAME);
    DBObject query = new BasicDBObject();
    query.put(RedactHelper.APP_ID_FIELD, appId);
    query.put(RedactHelper.PURGE_ID, purgeId);
    DBCursor cursor = db.getCollection(trackingPurgeCollName).find(query);
    if (cursor.length() == 0) {
        // insert a new tracking record
        appLog.info("No Records Found for Purge Id {} in Purge Collection {}", purgeId, trackingPurgeCollName);
        try {
            JSONObject jobj = new JSONObject();
            jobj.put(RedactHelper.PURGE_ID, purgeId);
            jobj.put(RedactHelper.PURGE_TRACKING_COLL_FIELD, purgeCollName);
            jobj.put(RedactHelper.APP_ID_FIELD, appId);
            appLog.info("Dumping JSON before Insert : {}", jobj.toString());
            this.insert(PURGE_TRACKING_COLL_NAME, new MongoEzbakeDocument(jobj.toString(),
                    new Visibility().setFormalVisibility("S")), token);
        } catch (TException e) {
            // Log with the full stack trace instead of printStackTrace(), so the
            // failure is visible in the application log.
            appLog.error("Error inserting the purge tracking record!", e);
        } catch (org.codehaus.jettison.json.JSONException je) {
            appLog.error("Error updating the purge record!", je);
        }
    } else {
        // update the existing tracking record
        // (original log line had only one {} for two arguments, dropping purgeId)
        appLog.info("Updating Purge Collection {} with Purge Id {}", PURGE_TRACKING_COLL_NAME, purgeId);
        DBObject content = new BasicDBObject();
        content.put(RedactHelper.PURGE_IDS_FIELD, purgeId);
        content.put(RedactHelper.PURGE_TRACKING_COLL_FIELD, purgeCollName);
        mongoUpdateHelper.updateContent(getCollectionName(PURGE_TRACKING_COLL_NAME), query, content, false, false);
    }
}
/**
 * Thrift entry point for purging items. Audits the request, validates the
 * caller's security token, then delegates to the internal batched purge.
 *
 * @param items   the purge id and the set of item ids to purge
 * @param options purge options; may be null
 * @param token   caller's security token
 * @return result describing which ids were purged and which were left unpurged
 * @throws TException if token validation fails
 */
@Override
public PurgeResult purge(PurgeItems items, PurgeOptions options, EzSecurityToken token) throws TException {
// Audit the purge request before doing any work.
HashMap<String, String> auditParamsMap = new HashMap<>();
auditParamsMap.put("action", "purge");
auditParamsMap.put("purgeItems", items.toString());
auditParamsMap.put("purgeOptions", printMongoObject(options));
auditLog(token, AuditEventType.FileObjectDelete, auditParamsMap);
TokenUtils.validateSecurityToken(token, getConfigurationProperties());
// NOTE(review): when options is null the batch size defaults to 0, which
// makes the batched purge loop in the private purge() a no-op — confirm
// this is the intended behavior for callers that omit options.
int batchSize = options == null ? 0 : options.getBatchSize();
// Time the whole purge with the registered metrics timer.
final Timer.Context context = getMetricRegistry().getTimers().get(PURGE_TIMER_NAME).time();
try {
return purge(items.getPurgeId(), items.getItems(), batchSize, token);
} finally {
context.stop();
}
}
/**
 * Batched purge implementation. Walks the application's collections in sorted
 * order, resuming from the collection recorded in the purge tracker for this
 * purge id (if any), and deletes up to {@code batchSize} matching documents.
 * Composite documents are never deleted and are reported as unpurged.
 *
 * @param id        purge id (used to resume and to update the tracker)
 * @param toPurge   item ids to purge
 * @param batchSize maximum number of documents examined in this invocation
 * @param token     security token forwarded to the tracker update
 * @return result with purged/unpurged id sets and a finished flag
 */
private PurgeResult purge(long id, Set<Long> toPurge, int batchSize, EzSecurityToken token) {
appLog.info("Purging ID {} with batchSize {} with items:\n {}", id, batchSize, toPurge);
final PurgeResult result = new PurgeResult(false);
final Set<Long> purged = new HashSet<>();
final Set<Long> unpurged = new HashSet<>();
Set<String> collectionNames = db.getCollectionNames();
// Sort the collection names so resume order is deterministic.
List<String> collNamesSortedList = MongoConverter.asSortedList(collectionNames);
// Get the collection name at which to resume the purge, if one was recorded.
String purgeStartCollName = getCollNameOfPurgeId(id);
appLog.info("Collection Name of Collection in Purge Tracker: " + purgeStartCollName + " For Purge Id: " + id);
// NOTE(review): if the tracked collection no longer exists, indexOf() below
// returns -1 and listIterator(-1) throws IndexOutOfBoundsException — confirm
// the tracked name is always present in the current collection list.
ListIterator<String> collNamesIterator = null;
if ( purgeStartCollName != null ) {
collNamesIterator = collNamesSortedList.listIterator(collNamesSortedList.indexOf(purgeStartCollName));
} else {
collNamesIterator = collNamesSortedList.listIterator();
}
// Number of documents examined so far in this batch.
int batchTracker = 0;
while ( (collNamesIterator.hasNext()) && (batchTracker < batchSize) ) {
String collectionName = collNamesIterator.next();
// Skip system collections.
if (!isNotSystemCollection(collectionName)) {
continue;
}
// Find this app's documents whose ids are in the purge set.
DBObject query = new BasicDBObject();
query.put(RedactHelper.APP_ID_FIELD, appId);
query.put(RedactHelper.ID_FIELD, new BasicDBObject("$in", toPurge));
DBCursor cursor = db.getCollection(collectionName).find(query);
while ( (cursor.hasNext()) && (batchTracker < batchSize) ) {
DBObject dbObject = cursor.next();
appLog.info("Purge candidate dbObject: {}", dbObject);
long purgeId = (Long)dbObject.get(RedactHelper.ID_FIELD);
Object composite = dbObject.get(RedactHelper.COMPOSITE_FIELD);
if (composite != null && (Boolean)composite) {
// Composite items are never deleted; report them as unpurged.
appLog.info("Composite item cannot be purged: _id {} ", dbObject.get("_id"));
unpurged.add(purgeId);
} else {
appLog.info("Purging item _id {} and Purge Id {}", dbObject.get("_id"), purgeId);
purged.add(purgeId);
db.getCollection(collectionName).remove(dbObject);
}
batchTracker++;
}
// If the batch limit stopped us mid-cursor, more work remains.
// NOTE(review): this flag is overwritten on every collection iteration, so
// it only reflects the last collection visited — confirm that is intended.
if ( cursor.hasNext() ) {
result.setIsFinished(false);
} else {
result.setIsFinished(true);
}
// Record where this purge stopped so a later invocation can resume.
updatePurgeTracker(id,collectionName,token);
}
result.setPurged(purged);
result.setUnpurged(unpurged);
appLog.info("Size of Purged Items {}", result.getPurged().size());
return result;
}
}
| |
package edu.hku.sdb.crypto;
import java.math.BigInteger;
import java.security.SecureRandom;
import java.util.HashMap;
import org.apache.hadoop.io.Text;
import thep.paillier.EncryptedInteger;
import thep.paillier.PrivateKey;
import thep.paillier.PublicKey;
import thep.paillier.exceptions.BigIntegerClassNotValid;
/**
 * Cryptographic helper routines for the SDB secure-database scheme: random
 * prime and co-prime generation, multiplicative secret-sharing encryption,
 * item-key derivation, a CRT-accelerated modular exponentiation, Paillier
 * helpers, SIES encryption, and the client side of the SDB key-update
 * protocol.
 *
 * <p>Serialized values use radix-36 strings ({@link Character#MAX_RADIX}).
 */
public class Crypto {

    /** Certainty parameter for primality tests (not used by the current generators). */
    public static int defaultCertainty = 100;
    public static int FIVE_HUNDRED_AND_TWELVE = 512;
    public static int ONE_THOUSAND_TWENTY_FOUR = 1024;
    public static int EIGHTY = 80;
    public static int THIRTY_TWO = 32;
    /** Default bit length of generated primes. */
    public static int defaultPrimeLength = FIVE_HUNDRED_AND_TWELVE;
    /** Bit length of "short" random values. */
    public static int defaultRandLengthShort = EIGHTY;
    /** Default bit length of random values and Paillier keys. */
    public static int defaultRandLength = ONE_THOUSAND_TWENTY_FOUR;
    // Cache of g^(x mod totient) mod n values keyed by "g::x::n".
    // NOTE(review): HashMap is not thread-safe, so concurrent calls to
    // generateItemKeyOp2 could corrupt this cache. The declared type is part
    // of the public interface and is kept unchanged — confirm single-threaded
    // use or guard access externally.
    public static HashMap<String, BigInteger> modPowMap = new HashMap<String, BigInteger>();

    /**
     * @return a random probable prime with bit length {@link #defaultPrimeLength}
     */
    public static BigInteger generateRandPrime() {
        return BigInteger.probablePrime(defaultPrimeLength, new SecureRandom());
    }

    /**
     * @param length bit length of the prime to generate
     * @return a random probable prime of the given bit length
     */
    public static BigInteger generateRandPrime(int length) {
        return BigInteger.probablePrime(length, new SecureRandom());
    }

    /**
     * @param p a prime factor
     * @param q the other prime factor
     * @return Euler's totient of n = p*q, i.e. (p-1)*(q-1)
     */
    public static BigInteger evaluateTotient(BigInteger p, BigInteger q) {
        return (p.subtract(BigInteger.ONE))
                .multiply(q.subtract(BigInteger.ONE));
    }

    /**
     * @param numBits maximum bit length of the result
     * @return a uniformly distributed random number in [0, 2^numBits - 1]
     */
    private static BigInteger generatePositiveRand(int numBits) {
        return new BigInteger(numBits, new SecureRandom());
    }

    /**
     * Generates a positive random number of {@link #defaultRandLength} bits
     * that is less than n = p*q and co-prime with both n and totient(n).
     *
     * @return a random positive big integer co-prime with n and totient(n)
     */
    public static BigInteger generatePositiveRand(BigInteger p, BigInteger q) {
        return generatePositiveRandInternal(p, q, defaultRandLength);
    }

    /** Same as {@link #generatePositiveRand(BigInteger, BigInteger)} with an explicit bit length. */
    public static BigInteger generatePositiveRand(BigInteger p, BigInteger q, int numBits) {
        return generatePositiveRandInternal(p, q, numBits);
    }

    /** Short ({@link #defaultRandLengthShort}-bit) variant of {@link #generatePositiveRand(BigInteger, BigInteger)}. */
    public static BigInteger generatePositiveRandShort(BigInteger p, BigInteger q) {
        return generatePositiveRandInternal(p, q, defaultRandLengthShort);
    }

    /** Extra-short (32-bit) variant of {@link #generatePositiveRand(BigInteger, BigInteger)}. */
    public static BigInteger generatePositiveRandExShort(BigInteger p, BigInteger q) {
        return generatePositiveRandInternal(p, q, THIRTY_TWO);
    }

    /**
     * Draws random numbers of the given bit length until one is strictly
     * positive, less than n = p*q, and co-prime with both n and totient(n).
     */
    private static BigInteger generatePositiveRandInternal(BigInteger p, BigInteger q, int numBits) {
        BigInteger n = p.multiply(q);
        BigInteger totient = Crypto.evaluateTotient(p, q);
        while (true) {
            BigInteger r = generatePositiveRand(numBits);
            // Co-primality checked via gcd, replacing the original
            // exception-driven control flow (unused modInverse results caught
            // via ArithmeticException); modInverse throws exactly when the gcd
            // is not 1, so the set of accepted values is unchanged.
            if (!r.gcd(n).equals(BigInteger.ONE) || !r.gcd(totient).equals(BigInteger.ONE)) {
                continue;
            }
            // Sign/range test via signum() and compareTo's sign, instead of
            // comparing compareTo's result to the exact values 1 / -1.
            if (r.signum() > 0 && r.compareTo(n) < 0) {
                return r;
            }
        }
    }

    /**
     * Derives an item key from a column key <m, x> and row id r:
     * key = m * g^(r*x mod totient(n)) mod n, where n = p*q.
     *
     * @param m m value of the column key, less than p*q
     * @param x x value of the column key, less than p*q
     * @param r row id, less than p*q
     * @param g generator
     * @param p prime factor of n
     * @param q prime factor of n
     * @return the item key
     */
    private static BigInteger generateItemKey(BigInteger m, BigInteger x,
            BigInteger r, BigInteger g, BigInteger p, BigInteger q) {
        BigInteger n = p.multiply(q);
        BigInteger totient = Crypto.evaluateTotient(p, q);
        BigInteger power = r.multiply(x).mod(totient);
        BigInteger grx = g.modPow(power, n);
        return (m.multiply(grx)).mod(n);
    }

    /**
     * Optimized item-key derivation: caches g^(x mod totient) mod n per
     * (g, x, n) triple and finishes the exponentiation with the CRT-based
     * {@link #modPow(BigInteger, BigInteger, BigInteger, BigInteger)}.
     */
    public static BigInteger generateItemKeyOp2(BigInteger m, BigInteger x,
            BigInteger r, BigInteger g, BigInteger n, BigInteger totient, BigInteger p, BigInteger q) {
        String key = g.toString() + "::" + x.toString() + "::" + n.toString();
        BigInteger gx;
        if (!modPowMap.containsKey(key)) {
            // NOTE(review): check-then-put is not atomic; harmless for a
            // deterministic value, but racy under concurrent use (see the
            // field-level note on modPowMap).
            gx = g.modPow(x.mod(totient), n);
            modPowMap.put(key, gx);
        } else {
            gx = modPowMap.get(key);
        }
        BigInteger power = r.mod(totient);
        BigInteger grx = Crypto.modPow(gx, power, p, q);
        return (m.multiply(grx)).mod(n);
    }

    /**
     * Computes base^power mod (p*q) using CRT recombination (Garner's
     * formula): result = bp + p * ((bq - bp) * p^-1 mod q), where
     * bp = base^power mod p and bq = base^power mod q. Requires p and q to be
     * co-prime so that p is invertible modulo q.
     */
    public static BigInteger modPow(BigInteger base, BigInteger power, BigInteger p, BigInteger q) {
        BigInteger basePowerModQ = base.modPow(power, q);
        BigInteger basePowerModP = base.modPow(power, p);
        BigInteger pInverseQ = p.modInverse(q);
        return (basePowerModQ.subtract(basePowerModP).multiply(pInverseQ)).mod(q).multiply(p).add(basePowerModP);
    }

    /**
     * Encrypts a plaintext with its item key using multiplicative secret
     * sharing: c = m * k^-1 mod n.
     *
     * @param plainText plaintext to encrypt
     * @param itemKey   item key, less than n and invertible mod n
     * @param n         modulus
     * @return the ciphertext
     */
    public static BigInteger encrypt(BigInteger plainText, BigInteger itemKey,
            BigInteger n) {
        BigInteger keyInverse = itemKey.modInverse(n);
        return (plainText.multiply(keyInverse)).mod(n);
    }

    /**
     * Decrypts a ciphertext with its item key using multiplicative secret
     * sharing: m = c * k mod n.
     *
     * @param ciphertext ciphertext to decrypt, less than n
     * @param itemKey    item key, less than n
     * @param n          modulus
     * @return the plaintext
     */
    public static BigInteger decrypt(BigInteger ciphertext, BigInteger itemKey,
            BigInteger n) {
        return (ciphertext.multiply(itemKey)).mod(n);
    }

    /**
     * (Deprecated due to poor performance of the open-source package.)
     * Paillier encryption via the thep.paillier package.
     *
     * @return the ciphertext, or {@code null} on failure
     */
    private static BigInteger PaillierEncrypt(BigInteger plaintext, BigInteger n) {
        try {
            return new EncryptedInteger(plaintext, new PublicKey(defaultRandLength,
                    n)).getCipherVal();
        } catch (BigIntegerClassNotValid e) {
            // NOTE(review): failure is swallowed and null is returned; kept
            // for behavioral compatibility — callers must handle null.
            e.printStackTrace();
        }
        return null;
    }

    /**
     * Direct Paillier encryption: c = (n+1)^m * r^n mod n^2, with a fresh
     * random r of {@link #defaultRandLengthShort} bits.
     */
    private static BigInteger paillierEncrypt(BigInteger plaintext, BigInteger n, BigInteger nPlusOne, BigInteger nSquared) {
        BigInteger r = generatePositiveRand(defaultRandLengthShort);
        BigInteger cipherText = nPlusOne.modPow(plaintext, nSquared);
        BigInteger x = r.modPow(n, nSquared);
        return cipherText.multiply(x).mod(nSquared);
    }

    /**
     * Paillier decryption via the thep.paillier package.
     *
     * @return the plaintext, or {@code null} on failure
     */
    private static BigInteger PaillierDecrypt(BigInteger ciphertext,
            BigInteger p, BigInteger q) {
        PrivateKey privateKey = new PrivateKey(defaultRandLength, p, q);
        try {
            return new EncryptedInteger(ciphertext).decrypt(privateKey);
        } catch (BigIntegerClassNotValid e) {
            // NOTE(review): failure is swallowed and null is returned; kept
            // for behavioral compatibility — callers must handle null.
            e.printStackTrace();
        }
        return null;
    }

    /** SIES encryption: c = (K * m + ki) mod p. */
    public static BigInteger SIESEncrypt(BigInteger plainText, BigInteger K, BigInteger ki, BigInteger p) {
        return K.multiply(plainText).add(ki).mod(p);
    }

    /**
     * SIES decryption: plain = (c - x) * m^-1 mod n, the inverse of
     * {@link #SIESEncrypt} with K passed as {@code m} and ki as {@code x}.
     */
    public static BigInteger SIESDecrypt(BigInteger cipherText, BigInteger m, BigInteger x, BigInteger n) {
        BigInteger KInverse = m.modInverse(n);
        return (cipherText.subtract(x).mod(n)).multiply(KInverse).mod(n);
    }

    /**
     * Client side of the SDB key-update protocol: re-keys column A to the
     * target column key C via the helper column key S.
     *
     * @param ma m value of the original column key A, less than p*q
     * @param mc m value of the target column key C, less than p*q
     * @param ms m value of column key S, less than p*q
     * @param xa x value of the original column key A, less than p*q
     * @param xc x value of the target column key C, less than p*q
     * @param xs x value of column key S, less than p*q
     * @param p  prime factor of n
     * @param q  prime factor of n
     * @return the [new_p, new_q] pair for the key update
     */
    public static BigInteger[] keyUpdateClient(BigInteger ma, BigInteger mc,
            BigInteger ms, BigInteger xa, BigInteger xc, BigInteger xs,
            BigInteger p, BigInteger q) {
        BigInteger[] newPQ = new BigInteger[2];
        BigInteger totient = Crypto.evaluateTotient(p, q);
        BigInteger n = p.multiply(q);
        // new_p = xs^-1 * (xc - xa) mod totient(n)
        BigInteger xsInverse = xs.modInverse(totient);
        BigInteger xcMinusXa = xc.subtract(xa).mod(totient);
        BigInteger newP = (xsInverse.multiply(xcMinusXa)).mod(totient);
        // new_q = ma * ms^new_p * mc^-1 mod n
        BigInteger msp = Crypto.modPow(ms, newP, p, q);
        BigInteger mcInverse = mc.modInverse(n);
        BigInteger newQ = ((ma.mod(n)).multiply(msp).multiply(mcInverse)).mod(n);
        newPQ[0] = newP;
        newPQ[1] = newQ;
        return newPQ;
    }

    /** Parses a radix-36 string into a BigInteger. */
    public static BigInteger getSecureBigInt(String cipherString) {
        return new BigInteger(cipherString, Character.MAX_RADIX);
    }

    /** Parses a radix-36 Hadoop Text value into a BigInteger. */
    public static BigInteger getSecureBigInt(Text cipherString) {
        return new BigInteger(cipherString.toString(), Character.MAX_RADIX);
    }

    /** Serializes a BigInteger as a radix-36 Hadoop Text value. */
    public static Text getSecureText(BigInteger cipherString) {
        return new Text(cipherString.toString(Character.MAX_RADIX));
    }

    /** Serializes a BigInteger as a radix-36 string. */
    public static String getSecureString(BigInteger cipherString) {
        return cipherString.toString(Character.MAX_RADIX);
    }
}
| |
package com.ezardlabs.dethsquare;
import com.ezardlabs.dethsquare.util.GameListeners;
import com.ezardlabs.dethsquare.util.GameListeners.UpdateListener;
import java.util.ArrayList;
/**
 * Axis-aligned rectangular collision component. Colliders are partitioned
 * into static colliders (indexed in a quadtree), normal movable colliders,
 * and trigger colliders (which fire onTriggerEnter callbacks instead of
 * blocking movement).
 */
public final class Collider extends BoundedComponent {
    static {
        // Rebuild the static-collider quadtree on the next engine update after
        // a static collider is added post-init().
        GameListeners.addUpdateListener(new UpdateListener() {
            @Override
            public void onUpdate() {
                if (rebuildQuadTree) {
                    qt.init(staticColliders.toArray(new Collider[staticColliders.size()]));
                    rebuildQuadTree = false;
                }
            }
        });
    }
    public static final ArrayList<Collider> normalColliders = new ArrayList<>();
    public static final ArrayList<Collider> staticColliders = new ArrayList<>();
    public static final ArrayList<Collider> triggerColliders = new ArrayList<>();
    public static QuadTree<Collider> qt = new QuadTree<>(20);
    private static boolean inited = false;
    private static boolean rebuildQuadTree = false;
    /** Bounds of this collider before the most recent move. */
    public final RectF lastBounds = new RectF();
    private final float height;
    private final float width;
    // Scratch list of potential collision candidates returned by the quadtree.
    ArrayList<Collider> possible = new ArrayList<>();
    // Reusable destination array for snapshotting triggerColliders.
    private Collider[] triggers = new Collider[0];
    boolean isTrigger = false;
    /** Side of a collider involved in a collision. */
    public enum CollisionLocation {
        TOP,
        RIGHT,
        BOTTOM,
        LEFT
    }
    /** Immutable description of a single collision event. */
    public class Collision {
        /**
         * The {@link Collider} that was hit
         */
        public final Collider collider;
        /**
         * The {@link GameObject} of the {@link Collider} that was hit
         */
        public final GameObject gameObject;
        /**
         * The side of the {@link Collider} that was involved in the collision
         */
        public final CollisionLocation location;
        /**
         * The speed at which the collision occurred
         */
        public final float speed;
        /**
         * The {@link Transform} of the {@link Collider} that was hit
         */
        public final Transform transform;
        /**
         * The {@link Rigidbody} of the {@link Collider} that was hit
         */
        public final Rigidbody rigidbody;
        Collision(Collider collider, GameObject gameObject, Transform transform,
                Rigidbody rigidbody, CollisionLocation location, float speed) {
            this.collider = collider;
            this.gameObject = gameObject;
            this.transform = transform;
            this.rigidbody = rigidbody;
            this.location = location;
            this.speed = speed;
        }
    }
    /** Creates a non-trigger collider of the given size. */
    public Collider(float width, float height) {
        this(width, height, false);
    }
    /**
     * Creates a collider of the given size.
     *
     * @param isTrigger when true this collider does not block movement and
     *                  only fires trigger callbacks
     */
    public Collider(float width, float height, boolean isTrigger) {
        this.width = width;
        this.height = height;
        this.isTrigger = isTrigger;
    }
    /** Builds the static-collider quadtree; call once after scene setup. */
    public static void init() {
        qt.init(staticColliders.toArray(new Collider[staticColliders.size()]));
        inited = true;
    }
    /** Clears all collider registries and resets the quadtree. */
    static void clearAll() {
        normalColliders.clear();
        staticColliders.clear();
        triggerColliders.clear();
        qt = new QuadTree<>(20);
        inited = false;
    }
    /** Registers this collider in the appropriate registries and computes its bounds. */
    public void start() {
        if (gameObject.isStatic) {
            staticColliders.add(this);
            if (inited) {
                // Added after init(): schedule a quadtree rebuild.
                rebuildQuadTree = true;
            }
        }
        if (isTrigger) addTrigger();
        if (!gameObject.isStatic && !isTrigger) normalColliders.add(this);
        if (gameObject.isStatic && !isTrigger) gameObject.setTag("solid");
        recalculateBounds();
    }
    private void addTrigger() {
        triggerColliders.add(this);
        triggers = new Collider[triggerColliders.size()];
    }
    private void removeTrigger() {
        triggerColliders.remove(this);
        triggers = new Collider[triggerColliders.size()];
    }
    /** Unregisters this collider from the registries it was added to. */
    public void destroy() {
        if (gameObject.isStatic) staticColliders.remove(this);
        if (isTrigger) removeTrigger();
        if (!gameObject.isStatic && !isTrigger) normalColliders.remove(this);
    }
    /**
     * Attempts to move this collider by (x, y), resolving collisions against
     * nearby colliders. Triggers move freely and only run trigger checks.
     * The vertical component is resolved before the horizontal component.
     *
     * @param x horizontal displacement
     * @param y vertical displacement
     */
    void move(float x, float y) {
        if (isTrigger) {
            // Triggers do not collide; just move and fire trigger checks.
            transform.position.x += x;
            transform.position.y += y;
            recalculateBounds();
            triggerCheck();
            return;
        }
        possible.clear();
        // Remember the pre-move bounds for the collision callbacks below.
        lastBounds.left = bounds.left;
        lastBounds.top = bounds.top;
        lastBounds.right = bounds.right;
        lastBounds.bottom = bounds.bottom;
        // Temporarily expand the bounds to cover the whole movement sweep so
        // the quadtree returns every collider the move could touch.
        if (x > 0) bounds.right += x;
        if (x < 0) bounds.left += x;
        if (y > 0) bounds.bottom += y;
        if (y < 0) bounds.top += y;
        QuadTree.retrieve(possible, qt, this);
        bounds.left = lastBounds.left;
        bounds.top = lastBounds.top;
        bounds.right = lastBounds.right;
        bounds.bottom = lastBounds.bottom;
        if (possible.isEmpty()) {
            // Nothing nearby: move freely.
            transform.position.x += x;
            transform.position.y += y;
        } else {
            // Resolve the vertical component first.
            transform.position.y += y;
            recalculateBounds();
            Collider c;
            for (int i = 0; i < possible.size(); i++) {
                c = possible.get(i);
                if (c != this && c != null && !c.isTrigger && RectF.intersects(bounds, c.bounds)) {
                    if (y > 0 && bounds.bottom > c.bounds.top) {
                        transform.position.y = Math.round(c.bounds.top - bounds.height());
                        gameObject.onCollision(
                                new Collision(c, c.gameObject, c.transform, c.gameObject.rigidbody,
                                        CollisionLocation.BOTTOM, y));
                        if (gameObject.rigidbody != null) gameObject.rigidbody.velocity.y = 0;
                    } else if (y < 0 && bounds.top < c.bounds.bottom) {
                        transform.position.y = Math.round(c.bounds.bottom);
                        if (transform.position.y != lastBounds.top) {
                            gameObject.onCollision(new Collision(c, c.gameObject, c.transform,
                                    c.gameObject.rigidbody, CollisionLocation.TOP, y));
                        }
                        // Fixed: guard against a null rigidbody, matching the
                        // downward branch above (previously threw a
                        // NullPointerException for objects moving up without a
                        // Rigidbody component).
                        if (gameObject.rigidbody != null) gameObject.rigidbody.velocity.y = 0;
                    }
                    recalculateBounds();
                }
            }
            // Then resolve the horizontal component.
            transform.position.x += x;
            recalculateBounds();
            for (int i = 0; i < possible.size(); i++) {
                c = possible.get(i);
                if (c != this && c != null && !c.isTrigger && RectF.intersects(bounds, c.bounds)) {
                    if (x > 0 && bounds.right > c.bounds.left) {
                        transform.position.x = Math.round(c.bounds.left - bounds.width());
                        if (transform.position.x != lastBounds.left) {
                            gameObject.onCollision(new Collision(c, c.gameObject, c.transform,
                                    c.gameObject.rigidbody, CollisionLocation.RIGHT, x));
                        }
                    } else if (x < 0 && bounds.left < c.bounds.right) {
                        transform.position.x = Math.round(c.bounds.right);
                        if (transform.position.x != lastBounds.left) {
                            gameObject.onCollision(new Collision(c, c.gameObject, c.transform,
                                    c.gameObject.rigidbody, CollisionLocation.LEFT, x));
                        }
                    }
                    recalculateBounds();
                }
            }
        }
        recalculateBounds();
        triggerCheck();
    }
    /** Recomputes this collider's bounds from its transform position and size. */
    public void recalculateBounds() {
        bounds.left = transform.position.x;
        bounds.top = transform.position.y;
        bounds.right = transform.position.x + width;
        bounds.bottom = transform.position.y + height;
    }
    /**
     * Fires onTriggerEnter callbacks: a trigger checks against quadtree
     * candidates, normal colliders, and other triggers; a non-trigger checks
     * only against triggers.
     */
    public void triggerCheck() {
        if (isTrigger) {
            possible.clear();
            QuadTree.retrieve(possible, qt, this);
            for (BoundedComponent bc : possible) {
                if (RectF.intersects(bounds, bc.bounds)) {
                    gameObject.onTriggerEnter((Collider) bc);
                }
            }
            for (Collider c : normalColliders) {
                if (c != this && RectF.intersects(bounds, c.bounds)) {
                    gameObject.onTriggerEnter(c);
                }
            }
            // Iterate over a snapshot so callbacks may mutate triggerColliders.
            for (Collider c : triggerColliders.toArray(triggers)) {
                if (c != this && c != null && RectF.intersects(bounds, c.bounds)) {
                    c.gameObject.onTriggerEnter(this);
                    gameObject.onTriggerEnter(c);
                }
            }
        } else {
            for (Collider c : triggerColliders) {
                if (c != this && RectF.intersects(bounds, c.bounds)) {
                    c.gameObject.onTriggerEnter(this);
                }
            }
        }
    }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.waf.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/waf-regional-2016-11-28/ListLoggingConfigurations"
* target="_top">AWS API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListLoggingConfigurationsResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /** The logging configurations returned by this call. */
    private java.util.List<LoggingConfiguration> loggingConfigurations;

    /**
     * Pagination marker: present when more logging configurations exist than
     * the request's {@code Limit}; pass it as {@code NextMarker} on the next
     * {@code ListLoggingConfigurations} request to continue listing.
     */
    private String nextMarker;

    /**
     * @return the logging configurations returned by this call
     */
    public java.util.List<LoggingConfiguration> getLoggingConfigurations() {
        return loggingConfigurations;
    }

    /**
     * Replaces the stored logging configurations with a copy of the given
     * collection, or clears them when {@code null}.
     *
     * @param loggingConfigurations the logging configurations to store
     */
    public void setLoggingConfigurations(java.util.Collection<LoggingConfiguration> loggingConfigurations) {
        this.loggingConfigurations = (loggingConfigurations == null)
                ? null
                : new java.util.ArrayList<LoggingConfiguration>(loggingConfigurations);
    }

    /**
     * Appends the given logging configurations to the stored list, creating
     * the list first if necessary. Use
     * {@link #setLoggingConfigurations(java.util.Collection)} or
     * {@link #withLoggingConfigurations(java.util.Collection)} to replace the
     * existing values instead.
     *
     * @param loggingConfigurations the logging configurations to append
     * @return this object, so calls can be chained
     */
    public ListLoggingConfigurationsResult withLoggingConfigurations(LoggingConfiguration... loggingConfigurations) {
        if (this.loggingConfigurations == null) {
            this.loggingConfigurations = new java.util.ArrayList<LoggingConfiguration>(loggingConfigurations.length);
        }
        java.util.Collections.addAll(this.loggingConfigurations, loggingConfigurations);
        return this;
    }

    /**
     * Replaces the stored logging configurations with a copy of the given
     * collection.
     *
     * @param loggingConfigurations the logging configurations to store
     * @return this object, so calls can be chained
     */
    public ListLoggingConfigurationsResult withLoggingConfigurations(java.util.Collection<LoggingConfiguration> loggingConfigurations) {
        setLoggingConfigurations(loggingConfigurations);
        return this;
    }

    /**
     * Sets the pagination marker to use in a follow-up request.
     *
     * @param nextMarker the marker value from a previous response
     */
    public void setNextMarker(String nextMarker) {
        this.nextMarker = nextMarker;
    }

    /**
     * @return the pagination marker, or {@code null} when no more results exist
     */
    public String getNextMarker() {
        return this.nextMarker;
    }

    /**
     * Sets the pagination marker to use in a follow-up request.
     *
     * @param nextMarker the marker value from a previous response
     * @return this object, so calls can be chained
     */
    public ListLoggingConfigurationsResult withNextMarker(String nextMarker) {
        setNextMarker(nextMarker);
        return this;
    }

    /**
     * Returns a string representation of this object, useful for testing and
     * debugging; only non-null fields are included.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getLoggingConfigurations() != null) {
            sb.append("LoggingConfigurations: ").append(getLoggingConfigurations()).append(",");
        }
        if (getNextMarker() != null) {
            sb.append("NextMarker: ").append(getNextMarker());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof ListLoggingConfigurationsResult)) {
            return false;
        }
        ListLoggingConfigurationsResult other = (ListLoggingConfigurationsResult) obj;
        // Objects.equals covers the null-xor and equals checks of the
        // generated code in one call per field.
        return java.util.Objects.equals(other.getLoggingConfigurations(), this.getLoggingConfigurations())
                && java.util.Objects.equals(other.getNextMarker(), this.getNextMarker());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based accumulation (seed 1, null -> 0)
        // as the generated implementation, so the hash value is unchanged.
        return java.util.Objects.hash(getLoggingConfigurations(), getNextMarker());
    }

    @Override
    public ListLoggingConfigurationsResult clone() {
        try {
            return (ListLoggingConfigurationsResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() even though we're Cloneable!", e);
        }
    }
}
| |
/*
* Copyright (c) 2010-2013 Evolveum
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.evolveum.midpoint.model.impl.lens;
import java.util.ArrayList;
import java.util.List;
import com.evolveum.midpoint.prism.Objectable;
import com.evolveum.midpoint.schema.DeltaConvertor;
import com.evolveum.midpoint.schema.result.OperationResult;
import com.evolveum.midpoint.util.exception.CommunicationException;
import com.evolveum.midpoint.util.exception.ConfigurationException;
import com.evolveum.midpoint.util.exception.ObjectNotFoundException;
import com.evolveum.midpoint.xml.ns._public.common.common_3.*;
import com.evolveum.midpoint.xml.ns._public.model.model_context_3.LensElementContextType;
import com.evolveum.midpoint.xml.ns._public.model.model_context_3.LensObjectDeltaOperationType;
import com.evolveum.prism.xml.ns._public.types_3.ObjectDeltaType;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.Validate;
import com.evolveum.midpoint.common.crypto.CryptoUtil;
import com.evolveum.midpoint.model.api.context.ModelElementContext;
import com.evolveum.midpoint.model.api.context.SynchronizationPolicyDecision;
import com.evolveum.midpoint.model.common.expression.ObjectDeltaObject;
import com.evolveum.midpoint.prism.PrismContext;
import com.evolveum.midpoint.prism.PrismObject;
import com.evolveum.midpoint.prism.PrismObjectDefinition;
import com.evolveum.midpoint.prism.delta.ChangeType;
import com.evolveum.midpoint.prism.delta.ItemDelta;
import com.evolveum.midpoint.prism.delta.ObjectDelta;
import com.evolveum.midpoint.prism.delta.PropertyDelta;
import com.evolveum.midpoint.schema.util.ShadowUtil;
import com.evolveum.midpoint.util.exception.SchemaException;
/**
* @author semancik
*
*/
public abstract class LensElementContext<O extends ObjectType> implements ModelElementContext<O> {
// Serialization version id for this context class.
private static final long serialVersionUID = 1649567559396392861L;
// Object state as it was when the operation started (cloned from the loaded
// object in setLoadedObject when not an ADD).
private PrismObject<O> objectOld;
// Most recently loaded state; transient, so not serialized.
private transient PrismObject<O> objectCurrent;
// Presumably the computed state after applying deltas — see getObjectAny().
private PrismObject<O> objectNew;
// Delta explicitly requested by the operation's initiator.
private ObjectDelta<O> primaryDelta;
// Delta computed by the system during processing.
private ObjectDelta<O> secondaryDelta;
// Deltas already executed for this context.
private List<LensObjectDeltaOperation<O>> executedDeltas = new ArrayList<LensObjectDeltaOperation<O>>();
// Compile-time type of the object this context describes.
private Class<O> objectTypeClass;
// OID of the object; also stamped onto newly created primary deltas
// (see swallowToPrimaryDelta).
private String oid = null;
// Iteration counter and token used by iterative value computation.
private int iteration;
private String iterationToken;
/**
 * Initial intent regarding the account. It indicates what the initiator of the operation WANTS TO DO with the
 * context.
 * If set to null then the decision is left to "the engine". Null is also a typical value
 * when the context is created. It may be pre-set under some circumstances, e.g. if an account is being unlinked.
 */
private SynchronizationIntent synchronizationIntent;
// Whether the context data is fresh; transient, so not serialized.
private transient boolean isFresh = false;
// The lens context that owns this element context (never null, see constructor).
private LensContext<? extends ObjectType> lensContext;
// Cached object definition; transient, so not serialized.
private transient PrismObjectDefinition<O> objectDefinition = null;
/**
 * Creates an element context for the given object type within the given
 * lens context.
 *
 * @param objectTypeClass type of the object this context describes; must not be null
 * @param lensContext     owning lens context; must not be null
 */
public LensElementContext(Class<O> objectTypeClass, LensContext<? extends ObjectType> lensContext) {
super();
Validate.notNull(objectTypeClass, "Object class is null");
Validate.notNull(lensContext, "Lens context is null");
this.lensContext = lensContext;
this.objectTypeClass = objectTypeClass;
}
/** @return the current iteration number */
public int getIteration() {
return iteration;
}
/** Sets the current iteration number. */
public void setIteration(int iteration) {
this.iteration = iteration;
}
/** @return the current iteration token */
public String getIterationToken() {
return iterationToken;
}
/** Sets the current iteration token. */
public void setIterationToken(String iterationToken) {
this.iterationToken = iterationToken;
}
/** @return the initiator's intent for this context; null means "let the engine decide" */
public SynchronizationIntent getSynchronizationIntent() {
return synchronizationIntent;
}
/** Sets the initiator's intent for this context. */
public void setSynchronizationIntent(SynchronizationIntent synchronizationIntent) {
this.synchronizationIntent = synchronizationIntent;
}
/** @return the lens context that owns this element context */
public LensContext<? extends ObjectType> getLensContext() {
return lensContext;
}
/** @return the prism context from the owning lens context, never null */
protected PrismContext getNotNullPrismContext() {
return getLensContext().getNotNullPrismContext();
}
/** @return the compile-time type of the object this context describes */
@Override
public Class<O> getObjectTypeClass() {
return objectTypeClass;
}
/** @return the prism context from the owning lens context (may be null) */
public PrismContext getPrismContext() {
return lensContext.getPrismContext();
}
/** @return the object state as it was when the operation started */
@Override
public PrismObject<O> getObjectOld() {
return objectOld;
}
/** Sets the object state as it was when the operation started. */
public void setObjectOld(PrismObject<O> objectOld) {
this.objectOld = objectOld;
}
/** @return the most recently loaded object state */
public PrismObject<O> getObjectCurrent() {
return objectCurrent;
}
/** Sets the most recently loaded object state. */
public void setObjectCurrent(PrismObject<O> objectCurrent) {
this.objectCurrent = objectCurrent;
}
/**
 * Returns whichever object state is available, preferring the newest:
 * new, then current, then old. May return null when no state is known.
 */
public PrismObject<O> getObjectAny() {
    return objectNew != null
            ? objectNew
            : (objectCurrent != null ? objectCurrent : objectOld);
}
/**
 * Sets current and possibly also old object. This method is used with
 * freshly loaded object. The object is set as current object.
 * If the old object was not initialized yet (and if it should be initialized)
 * then the object is also set as old object.
 */
public void setLoadedObject(PrismObject<O> object) {
    setObjectCurrent(object);
    // For ADD operations there is no "old" state; otherwise keep an independent
    // snapshot (clone) so later modifications of "current" do not leak into "old".
    if (objectOld == null && !isAdd()) {
        setObjectOld(object.clone());
    }
}
/** Returns the projected future object state (result of recompute()); may be null. */
@Override
public PrismObject<O> getObjectNew() {
    return objectNew;
}

public void setObjectNew(PrismObject<O> objectNew) {
    this.objectNew = objectNew;
}
/** Returns the delta explicitly requested by the initiator of the operation; may be null. */
@Override
public ObjectDelta<O> getPrimaryDelta() {
    return primaryDelta;
}

public void setPrimaryDelta(ObjectDelta<O> primaryDelta) {
    this.primaryDelta = primaryDelta;
}

/**
 * Merges the given delta into the primary delta, or installs it as the
 * primary delta when none exists yet.
 */
public void addPrimaryDelta(ObjectDelta<O> delta) throws SchemaException {
    if (primaryDelta == null) {
        primaryDelta = delta;
    } else {
        primaryDelta.merge(delta);
    }
}
/** Returns the computed (secondary) delta; may be null. */
@Override
public ObjectDelta<O> getSecondaryDelta() {
    return secondaryDelta;
}

@Override
public void setSecondaryDelta(ObjectDelta<O> secondaryDelta) {
    this.secondaryDelta = secondaryDelta;
}

/**
 * Merges the given delta into the secondary delta, or installs it as the
 * secondary delta when none exists yet.
 */
public void addSecondaryDelta(ObjectDelta<O> delta) throws SchemaException {
    if (secondaryDelta == null) {
        secondaryDelta = delta;
    } else {
        secondaryDelta.merge(delta);
    }
}
/**
 * Adds the item delta into the primary delta, creating an empty MODIFY delta
 * (carrying this context's OID) if no primary delta exists yet.
 */
public void swallowToPrimaryDelta(ItemDelta<?> itemDelta) throws SchemaException {
    primaryDelta = ensureModifyDelta(primaryDelta);
    primaryDelta.swallow(itemDelta);
}

/**
 * Adds the item delta into the secondary delta, creating an empty MODIFY delta
 * (carrying this context's OID) if no secondary delta exists yet.
 */
public void swallowToSecondaryDelta(ItemDelta<?> itemDelta) throws SchemaException {
    secondaryDelta = ensureModifyDelta(secondaryDelta);
    secondaryDelta.swallow(itemDelta);
}

// Returns the given delta, or a fresh empty MODIFY delta with this context's OID when it is null.
// Extracted to remove the duplicated create-if-null logic of the two swallowTo* methods above.
private ObjectDelta<O> ensureModifyDelta(ObjectDelta<O> delta) {
    if (delta == null) {
        delta = new ObjectDelta<O>(getObjectTypeClass(), ChangeType.MODIFY, getPrismContext());
        delta.setOid(oid);
    }
    return delta;
}
/** True when the primary or the secondary delta is an ADD delta. */
public boolean isAdd() {
    return ObjectDelta.isAdd(getPrimaryDelta()) || ObjectDelta.isAdd(getSecondaryDelta());
}

/** True when the primary or the secondary delta is a MODIFY delta. */
public boolean isModify() {
    return ObjectDelta.isModify(getPrimaryDelta()) || ObjectDelta.isModify(getSecondaryDelta());
}

/** True when the primary or the secondary delta is a DELETE delta. */
public boolean isDelete() {
    return ObjectDelta.isDelete(getPrimaryDelta()) || ObjectDelta.isDelete(getSecondaryDelta());
}
/** Returns the live list of deltas that were already executed (both audited and not). */
@Override
public List<LensObjectDeltaOperation<O>> getExecutedDeltas() {
    return executedDeltas;
}

/**
 * Returns executed deltas filtered by their audited flag.
 * A null argument means "no filter": the live backing list is returned directly,
 * while a non-null argument yields a new filtered list.
 */
List<LensObjectDeltaOperation<O>> getExecutedDeltas(Boolean audited) {
    if (audited == null) {
        return executedDeltas;
    }
    List<LensObjectDeltaOperation<O>> deltas = new ArrayList<LensObjectDeltaOperation<O>>();
    for (LensObjectDeltaOperation<O> delta: executedDeltas) {
        // Boxed comparison: 'audited' is unboxed here; safe because null was handled above.
        if (delta.isAudited() == audited) {
            deltas.add(delta);
        }
    }
    return deltas;
}

/** Marks every executed delta as audited. */
public void markExecutedDeltasAudited() {
    for(LensObjectDeltaOperation<O> executedDelta: executedDeltas) {
        executedDelta.setAudited(true);
    }
}

public void addToExecutedDeltas(LensObjectDeltaOperation<O> executedDelta) {
    executedDeltas.add(executedDelta);
}
/**
 * Returns user delta, both primary and secondary (merged together).
 * The returned object is (kind of) immutable. Changing it may do strange things (but most likely the changes will be lost).
 */
public ObjectDelta<O> getDelta() throws SchemaException {
    return ObjectDelta.union(primaryDelta, getSecondaryDelta());
}

/** Bundles old object, merged delta, and new object into a single ObjectDeltaObject. */
public ObjectDeltaObject<O> getObjectDeltaObject() throws SchemaException {
    return new ObjectDeltaObject<O>(objectOld, getDelta(), objectNew);
}
/**
 * Returns the OID of the object, lazily determining (and caching) it from the
 * object states and deltas when it was not set explicitly. May return null.
 */
@Override
public String getOid() {
    if (oid == null) {
        oid = determineOid();
    }
    return oid;
}

/**
 * Looks for an OID in priority order: old object, current object, new object,
 * primary delta, secondary delta. Returns null when none of them carries an OID.
 */
public String determineOid() {
    if (getObjectOld() != null && getObjectOld().getOid() != null) {
        return getObjectOld().getOid();
    }
    if (getObjectCurrent() != null && getObjectCurrent().getOid() != null) {
        return getObjectCurrent().getOid();
    }
    if (getObjectNew() != null && getObjectNew().getOid() != null) {
        return getObjectNew().getOid();
    }
    if (getPrimaryDelta() != null && getPrimaryDelta().getOid() != null) {
        return getPrimaryDelta().getOid();
    }
    if (getSecondaryDelta() != null && getSecondaryDelta().getOid() != null) {
        return getSecondaryDelta().getOid();
    }
    return null;
}
/**
 * Sets oid to the field but also to the deltas (if applicable).
 */
public void setOid(String oid) {
    this.oid = oid;
    if (primaryDelta != null) {
        primaryDelta.setOid(oid);
    }
    if (secondaryDelta != null) {
        secondaryDelta.setOid(oid);
    }
    // Note: only objectNew is updated; objectOld/objectCurrent keep their original OIDs.
    if (objectNew != null) {
        objectNew.setOid(oid);
    }
}
/**
 * Returns the prism definition of the object, resolving it lazily from the first
 * available object state (old, current, new) and falling back to the schema
 * registry lookup by compile-time class. The result is cached.
 */
public PrismObjectDefinition<O> getObjectDefinition() {
    if (objectDefinition == null) {
        if (objectOld != null) {
            objectDefinition = objectOld.getDefinition();
        } else if (objectCurrent != null) {
            objectDefinition = objectCurrent.getDefinition();
        } else if (objectNew != null) {
            objectDefinition = objectNew.getDefinition();
        } else {
            objectDefinition = getNotNullPrismContext().getSchemaRegistry().findObjectDefinitionByCompileTimeClass(getObjectTypeClass());
        }
    }
    return objectDefinition;
}
/** True when the context data are up to date; reset() clears this flag. */
public boolean isFresh() {
    return isFresh;
}

public void setFresh(boolean isFresh) {
    this.isFresh = isFresh;
}
/**
 * Recomputes objectNew by applying the merged delta (primary + secondary) to the
 * base state (current object, or old object when no current is available).
 * With no delta, objectNew simply becomes the base state.
 */
public void recompute() throws SchemaException {
    PrismObject<O> base = objectCurrent;
    if (base == null) {
        base = objectOld;
    }
    ObjectDelta<O> delta = getDelta();
    if (delta == null) {
        // No change
        objectNew = base;
        return;
    }
    objectNew = delta.computeChangedObject(base);
}
/**
 * Make the context as clean as new. Except for the executed deltas and other "traces" of
 * what was already done and cannot be undone. Also the configuration items that were loaded may remain.
 * This is used to restart the context computation but keep the trace of what was already done.
 */
public void reset() {
    secondaryDelta = null;
    isFresh = false;
}
/** Checks consistency with no extra context description. */
public void checkConsistence() {
    checkConsistence(null);
}

/**
 * Checks the internal consistency of every object state and delta held by this
 * context. Failures surface as IllegalArgumentException/IllegalStateException
 * with the context description appended.
 */
public void checkConsistence(String contextDesc) {
    if (getObjectOld() != null) {
        checkConsistence(getObjectOld(), "old "+getElementDesc() , contextDesc);
    }
    if (getObjectCurrent() != null) {
        checkConsistence(getObjectCurrent(), "current "+getElementDesc() , contextDesc);
    }
    if (primaryDelta != null) {
        // requireOid=false: the primary delta is never required to carry an OID here.
        checkConsistence(primaryDelta, false, getElementDesc()+" primary delta in "+this + (contextDesc == null ? "" : " in " +contextDesc));
    }
    if (secondaryDelta != null) {
        boolean requireOid = isRequireSecondardyDeltaOid();
        // Secondary delta may not have OID yet (as it may relate to ADD primary delta that doesn't have OID yet)
        checkConsistence(secondaryDelta, requireOid, getElementDesc()+" secondary delta in "+this + (contextDesc == null ? "" : " in " +contextDesc));
    }
    if (getObjectNew() != null) {
        checkConsistence(getObjectNew(), "new "+getElementDesc(), contextDesc);
    }
}

// Checks a single delta, rethrowing with the context description appended; ADD deltas
// additionally get their embedded object-to-add checked.
private void checkConsistence(ObjectDelta<O> delta, boolean requireOid, String contextDesc) {
    try {
        delta.checkConsistence(requireOid, true, true);
    } catch (IllegalArgumentException e) {
        throw new IllegalArgumentException(e.getMessage()+"; in "+contextDesc, e);
    } catch (IllegalStateException e) {
        throw new IllegalStateException(e.getMessage()+"; in "+contextDesc, e);
    }
    if (delta.isAdd()) {
        checkConsistence(delta.getObjectToAdd(), "add object", contextDesc);
    }
}

// The secondary delta needs an OID only when there is no primary delta that could supply one.
// NOTE(review): "Secondardy" is a typo, but this is protected API possibly overridden by
// subclasses elsewhere - renaming would be a breaking change, so it is kept as is.
protected boolean isRequireSecondardyDeltaOid() {
    return primaryDelta == null;
}
/**
 * Checks consistency of a single prism object: the object's own consistency,
 * the presence of its definition, and (for shadows) shadow-specific invariants.
 * Exceptions are rethrown with an element/context description appended.
 */
protected void checkConsistence(PrismObject<O> object, String elementDesc, String contextDesc) {
    String desc = elementDesc+" in "+this + (contextDesc == null ? "" : " in " +contextDesc);
    try {
        object.checkConsistence(true);
    } catch (IllegalArgumentException e) {
        throw new IllegalArgumentException(e.getMessage()+"; in "+desc, e);
    } catch (IllegalStateException e) {
        throw new IllegalStateException(e.getMessage()+"; in "+desc, e);
    }
    if (object.getDefinition() == null) {
        throw new IllegalStateException("No "+getElementDesc()+" definition "+desc);
    }
    O objectType = object.asObjectable();
    if (objectType instanceof ShadowType) {
        // Unchecked cast is safe here: the instanceof check above guarantees a ShadowType payload.
        ShadowUtil.checkConsistence((PrismObject<? extends ShadowType>) object, desc);
    }
}
/**
 * Cleans up the contexts by removing secondary deltas and other working state. The context after cleanup
 * should be the same as originally requested.
 */
public void cleanup() {
    secondaryDelta = null;
}
/** Normalizes every non-null object state and delta held by this context (in place). */
public void normalize() {
    if (objectNew != null) {
        objectNew.normalize();
    }
    if (objectOld != null) {
        objectOld.normalize();
    }
    if (objectCurrent != null) {
        objectCurrent.normalize();
    }
    if (primaryDelta != null) {
        primaryDelta.normalize();
    }
    if (secondaryDelta != null) {
        secondaryDelta.normalize();
    }
}
/** Applies the given prism context to every non-null object state and delta (in place). */
public void adopt(PrismContext prismContext) throws SchemaException {
    if (objectNew != null) {
        prismContext.adopt(objectNew);
    }
    if (objectOld != null) {
        prismContext.adopt(objectOld);
    }
    if (objectCurrent != null) {
        prismContext.adopt(objectCurrent);
    }
    if (primaryDelta != null) {
        prismContext.adopt(primaryDelta);
    }
    if (secondaryDelta != null) {
        prismContext.adopt(secondaryDelta);
    }
    // TODO: object definition?
}
/** Creates a copy of this context bound to the given (possibly cloned) lens context. */
public abstract LensElementContext<O> clone(LensContext<? extends ObjectType> lensContext);

/**
 * Copies this context's state into the clone; subclass clone() implementations call this.
 * Objects and deltas are deep-cloned; objectDefinition is shared (de-facto immutable).
 * NOTE(review): executedDeltas is not copied here - confirm whether that is intentional.
 */
protected void copyValues(LensElementContext<O> clone, LensContext lensContext) {
    clone.lensContext = lensContext;
    // This is de-facto immutable
    clone.objectDefinition = this.objectDefinition;
    clone.objectNew = cloneObject(this.objectNew);
    clone.objectOld = cloneObject(this.objectOld);
    clone.objectCurrent = cloneObject(this.objectCurrent);
    clone.objectTypeClass = this.objectTypeClass;
    clone.oid = this.oid;
    clone.primaryDelta = cloneDelta(this.primaryDelta);
    clone.secondaryDelta = cloneDelta(this.secondaryDelta);
    clone.isFresh = this.isFresh;
    clone.iteration = this.iteration;
    clone.iterationToken = this.iterationToken;
}
// Null-safe clone of a delta.
private ObjectDelta<O> cloneDelta(ObjectDelta<O> thisDelta) {
    return thisDelta != null ? thisDelta.clone() : null;
}

// Null-safe clone of a prism object.
private PrismObject<O> cloneObject(PrismObject<O> thisObject) {
    return thisObject != null ? thisObject.clone() : null;
}
/**
 * Serializes this context's state into the given bean form (for persistence/export).
 * Executed deltas are appended to the bean's existing list.
 */
public void storeIntoLensElementContextType(LensElementContextType lensElementContextType) throws SchemaException {
    lensElementContextType.setObjectOld(objectOld != null ? objectOld.asObjectable() : null);
    lensElementContextType.setObjectNew(objectNew != null ? objectNew.asObjectable() : null);
    lensElementContextType.setPrimaryDelta(primaryDelta != null ? DeltaConvertor.toObjectDeltaType(primaryDelta) : null);
    lensElementContextType.setSecondaryDelta(secondaryDelta != null ? DeltaConvertor.toObjectDeltaType(secondaryDelta) : null);
    // Parameterized loop variable (the field is List<LensObjectDeltaOperation<O>>);
    // the original used the raw type LensObjectDeltaOperation, causing an unchecked warning.
    for (LensObjectDeltaOperation<O> executedDelta : executedDeltas) {
        lensElementContextType.getExecutedDeltas().add(executedDelta.toLensObjectDeltaOperationType());
    }
    lensElementContextType.setObjectTypeClass(objectTypeClass != null ? objectTypeClass.getName() : null);
    lensElementContextType.setOid(oid);
    lensElementContextType.setIteration(iteration);
    lensElementContextType.setIterationToken(iterationToken);
    lensElementContextType.setSynchronizationIntent(synchronizationIntent != null ? synchronizationIntent.toSynchronizationIntentType() : null);
}
/**
 * Restores this context's state from its serialized bean form.
 * Provisioning definitions are re-applied to objects/deltas that relate to shadows
 * or resources (see fixProvisioningTypeInObject/Delta), presumably because those
 * definitions are not fully reconstructible from the serialized form alone.
 */
public void retrieveFromLensElementContextType(LensElementContextType lensElementContextType, OperationResult result) throws SchemaException, ConfigurationException, ObjectNotFoundException, CommunicationException {
    ObjectType objectTypeOld = lensElementContextType.getObjectOld();
    this.objectOld = objectTypeOld != null ? objectTypeOld.asPrismObject() : null;
    fixProvisioningTypeInObject(this.objectOld, result);
    ObjectType objectTypeNew = lensElementContextType.getObjectNew();
    this.objectNew = objectTypeNew != null ? objectTypeNew.asPrismObject() : null;
    fixProvisioningTypeInObject(this.objectNew, result);
    // Reference object used when fixing deltas: prefer "new", fall back to "old"; may be null.
    ObjectType object = objectTypeNew != null ? objectTypeNew : objectTypeOld;
    ObjectDeltaType primaryDeltaType = lensElementContextType.getPrimaryDelta();
    // NOTE(review): raw ObjectDelta cast - unchecked, kept from the original.
    this.primaryDelta = primaryDeltaType != null ? (ObjectDelta) DeltaConvertor.createObjectDelta(primaryDeltaType, lensContext.getPrismContext()) : null;
    fixProvisioningTypeInDelta(this.primaryDelta, object, result);
    ObjectDeltaType secondaryDeltaType = lensElementContextType.getSecondaryDelta();
    this.secondaryDelta = secondaryDeltaType != null ? (ObjectDelta) DeltaConvertor.createObjectDelta(secondaryDeltaType, lensContext.getPrismContext()) : null;
    fixProvisioningTypeInDelta(this.secondaryDelta, object, result);
    for (LensObjectDeltaOperationType eDeltaOperationType : lensElementContextType.getExecutedDeltas()) {
        LensObjectDeltaOperation objectDeltaOperation = LensObjectDeltaOperation.fromLensObjectDeltaOperationType(eDeltaOperationType, lensContext.getPrismContext());
        if (objectDeltaOperation.getObjectDelta() != null) {
            fixProvisioningTypeInDelta(objectDeltaOperation.getObjectDelta(), object, result);
        }
        this.executedDeltas.add(objectDeltaOperation);
    }
    this.oid = lensElementContextType.getOid();
    this.iteration = lensElementContextType.getIteration() != null ? lensElementContextType.getIteration() : 0;
    this.iterationToken = lensElementContextType.getIterationToken();
    this.synchronizationIntent = SynchronizationIntent.fromSynchronizationIntentType(lensElementContextType.getSynchronizationIntent());
    // note: objectTypeClass is already converted (used in the constructor)
}
// Re-applies provisioning definitions to a delta when it concerns a ShadowType or
// ResourceType; other object types are left untouched.
protected void fixProvisioningTypeInDelta(ObjectDelta<O> delta, Objectable object, OperationResult result) throws SchemaException, ObjectNotFoundException, CommunicationException, ConfigurationException {
    if (delta != null && delta.getObjectTypeClass() != null && (ShadowType.class.isAssignableFrom(delta.getObjectTypeClass()) || ResourceType.class.isAssignableFrom(delta.getObjectTypeClass()))) {
        lensContext.getProvisioningService().applyDefinition(delta, object, result);
    }
}

// Same as above, but for a whole object rather than a delta.
private void fixProvisioningTypeInObject(PrismObject<O> object, OperationResult result) throws SchemaException, ObjectNotFoundException, CommunicationException, ConfigurationException {
    if (object != null && object.getCompileTimeClass() != null && (ShadowType.class.isAssignableFrom(object.getCompileTimeClass()) || ResourceType.class.isAssignableFrom(object.getCompileTimeClass()))) {
        lensContext.getProvisioningService().applyDefinition(object, result);
    }
}
/** Verifies that every non-null object state and delta has its protected values encrypted. */
public void checkEncrypted() {
    if (objectNew != null) {
        CryptoUtil.checkEncrypted(objectNew);
    }
    if (objectOld != null) {
        CryptoUtil.checkEncrypted(objectOld);
    }
    if (objectCurrent != null) {
        CryptoUtil.checkEncrypted(objectCurrent);
    }
    if (primaryDelta != null) {
        CryptoUtil.checkEncrypted(primaryDelta);
    }
    if (secondaryDelta != null) {
        CryptoUtil.checkEncrypted(secondaryDelta);
    }
}
/** Fallback description used when no object state is available at all. */
protected abstract String getElementDefaultDesc();

/**
 * Returns a human-friendly description of the element, derived from the first
 * available object state (new, old, current) or the subclass default.
 */
protected String getElementDesc() {
    PrismObject<O> object = getObjectNew();
    if (object == null) {
        object = getObjectOld();
    }
    if (object == null) {
        object = getObjectCurrent();
    }
    if (object == null) {
        return getElementDefaultDesc();
    }
    return object.toDebugType();
}

// Capitalized element description used as a debug-dump section title.
protected String getDebugDumpTitle() {
    return StringUtils.capitalize(getElementDesc());
}

protected String getDebugDumpTitle(String suffix) {
    return getDebugDumpTitle()+" "+suffix;
}

/** Human-readable name of this context element (subclass-specific). */
public abstract String getHumanReadableName();
}
| |
/*
* Copyright 2006 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import static com.google.javascript.jscomp.ProcessClosurePrimitives.BASE_CLASS_ERROR;
import static com.google.javascript.jscomp.ProcessClosurePrimitives.CLOSURE_DEFINES_ERROR;
import static com.google.javascript.jscomp.ProcessClosurePrimitives.DUPLICATE_NAMESPACE_ERROR;
import static com.google.javascript.jscomp.ProcessClosurePrimitives.EXPECTED_OBJECTLIT_ERROR;
import static com.google.javascript.jscomp.ProcessClosurePrimitives.FUNCTION_NAMESPACE_ERROR;
import static com.google.javascript.jscomp.ProcessClosurePrimitives.GOOG_BASE_CLASS_ERROR;
import static com.google.javascript.jscomp.ProcessClosurePrimitives.INVALID_ARGUMENT_ERROR;
import static com.google.javascript.jscomp.ProcessClosurePrimitives.INVALID_CSS_RENAMING_MAP;
import static com.google.javascript.jscomp.ProcessClosurePrimitives.INVALID_DEFINE_NAME_ERROR;
import static com.google.javascript.jscomp.ProcessClosurePrimitives.INVALID_PROVIDE_ERROR;
import static com.google.javascript.jscomp.ProcessClosurePrimitives.INVALID_STYLE_ERROR;
import static com.google.javascript.jscomp.ProcessClosurePrimitives.LATE_PROVIDE_ERROR;
import static com.google.javascript.jscomp.ProcessClosurePrimitives.MISSING_DEFINE_ANNOTATION;
import static com.google.javascript.jscomp.ProcessClosurePrimitives.MISSING_PROVIDE_ERROR;
import static com.google.javascript.jscomp.ProcessClosurePrimitives.NON_STRING_PASSED_TO_SET_CSS_NAME_MAPPING_ERROR;
import static com.google.javascript.jscomp.ProcessClosurePrimitives.NULL_ARGUMENT_ERROR;
import static com.google.javascript.jscomp.ProcessClosurePrimitives.TOO_MANY_ARGUMENTS_ERROR;
import static com.google.javascript.jscomp.ProcessClosurePrimitives.XMODULE_REQUIRE_ERROR;
import com.google.javascript.rhino.Node;
/**
* Tests for {@link ProcessClosurePrimitives}.
*
*/
public class ProcessClosurePrimitivesTest extends CompilerTestCase {
// When non-null, this code is parsed separately and spliced in before (additionalCode)
// or after (additionalEndCode) the main input, and the pass is run twice (see getProcessor).
private String additionalCode;
private String additionalEndCode;
// When true, the first statement of the injected script is flagged with Node.IS_NAMESPACE.
private boolean addAdditionalNamespace;
// Mirrors the ProcessClosurePrimitives constructor flag: keep goog.require() calls in output.
private boolean preserveGoogRequires;
// When true, getOptions() promotes USE_OF_GOOG_BASE diagnostics to errors.
private boolean banGoogBase;
public ProcessClosurePrimitivesTest() {
    // Source positions matter in these tests; verify line numbers on every compile.
    enableLineNumberCheck(true);
}
/**
 * Resets all per-test flags to their defaults before each test.
 * NOTE(review): does not call super.setUp() - confirm the base class does not rely on it.
 */
@Override protected void setUp() {
    additionalCode = null;
    additionalEndCode = null;
    addAdditionalNamespace = false;
    preserveGoogRequires = false;
    banGoogBase = false;
    compareJsDoc = false;
}
/** Returns the base options, optionally escalating goog.base usage to an error. */
@Override
protected CompilerOptions getOptions() {
    CompilerOptions options = super.getOptions();
    if (banGoogBase) {
        options.setWarningLevel(
            DiagnosticGroups.USE_OF_GOOG_BASE, CheckLevel.ERROR);
    }
    return options;
}
/**
 * Returns the pass under test. In the simple case this is ProcessClosurePrimitives
 * directly. When additionalCode/additionalEndCode is set, a wrapper pass runs the
 * primitives pass, injects the extra code at the start/end of the AST, and then
 * runs the primitives pass a second time (first run with errors OFF, second with
 * errors ON so only the combined result is checked).
 */
@Override public CompilerPass getProcessor(final Compiler compiler) {
    if ((additionalCode == null) && (additionalEndCode == null)) {
        return new ProcessClosurePrimitives(
            compiler, null, CheckLevel.ERROR, preserveGoogRequires);
    } else {
        return new CompilerPass() {
            @Override
            public void process(Node externs, Node root) {
                // Process the original code.
                new ProcessClosurePrimitives(
                    compiler, null, CheckLevel.OFF, preserveGoogRequires)
                    .process(externs, root);
                // Inject additional code at the beginning.
                if (additionalCode != null) {
                    SourceFile file =
                        SourceFile.fromCode("additionalcode", additionalCode);
                    Node scriptNode = root.getFirstChild();
                    Node newScriptNode = new CompilerInput(file).getAstRoot(compiler);
                    if (addAdditionalNamespace) {
                        newScriptNode.getFirstChild()
                            .putBooleanProp(Node.IS_NAMESPACE, true);
                    }
                    // Move statements from the parsed script into the main script,
                    // back-to-front so they end up before the original first child.
                    while (newScriptNode.getLastChild() != null) {
                        Node lastChild = newScriptNode.getLastChild();
                        newScriptNode.removeChild(lastChild);
                        scriptNode.addChildBefore(lastChild, scriptNode.getFirstChild());
                    }
                }
                // Inject additional code at the end.
                if (additionalEndCode != null) {
                    SourceFile file =
                        SourceFile.fromCode("additionalendcode", additionalEndCode);
                    Node scriptNode = root.getFirstChild();
                    Node newScriptNode = new CompilerInput(file).getAstRoot(compiler);
                    if (addAdditionalNamespace) {
                        newScriptNode.getFirstChild()
                            .putBooleanProp(Node.IS_NAMESPACE, true);
                    }
                    // Front-to-back move preserves the injected statements' order at the tail.
                    while (newScriptNode.getFirstChild() != null) {
                        Node firstChild = newScriptNode.getFirstChild();
                        newScriptNode.removeChild(firstChild);
                        scriptNode.addChildToBack(firstChild);
                    }
                }
                // Process the tree a second time.
                new ProcessClosurePrimitives(
                    compiler, null, CheckLevel.ERROR, preserveGoogRequires)
                    .process(externs, root);
            }
        };
    }
}
// Run each test's pass exactly once; the pass rewrites its input, so repeated
// application would not be a faithful check. TODO confirm against base-class default.
@Override public int getNumRepetitions() {
    return 1;
}
// goog.provide('a.b.c') expands to a chain of namespace object literals.
public void testSimpleProvides() {
    test("goog.provide('foo');",
        "var foo={};");
    test("goog.provide('foo.bar');",
        "var foo={}; foo.bar={};");
    test("goog.provide('foo.bar.baz');",
        "var foo={}; foo.bar={}; foo.bar.baz={};");
    test("goog.provide('foo.bar.baz.boo');",
        "var foo={}; foo.bar={}; foo.bar.baz={}; foo.bar.baz.boo={};");
    test("goog.provide('goog.bar');",
        "goog.bar={};"); // goog is special-cased
}

// Multiple provides share already-created parent namespaces.
public void testMultipleProvides() {
    test("goog.provide('foo.bar'); goog.provide('foo.baz');",
        "var foo={}; foo.bar={}; foo.baz={};");
    test("goog.provide('foo.bar.baz'); goog.provide('foo.boo.foo');",
        "var foo={}; foo.bar={}; foo.bar.baz={}; foo.boo={}; foo.boo.foo={};");
    test("goog.provide('foo.bar.baz'); goog.provide('foo.bar.boo');",
        "var foo={}; foo.bar={}; foo.bar.baz={}; foo.bar.boo={};");
    test("goog.provide('foo.bar.baz'); goog.provide('goog.bar.boo');",
        "var foo={}; foo.bar={}; foo.bar.baz={}; goog.bar={}; " +
        "goog.bar.boo={};");
}

// A provide followed by an explicit assignment drops the synthesized object literal.
public void testRemovalOfProvidedObjLit() {
    test("goog.provide('foo'); foo = 0;",
        "var foo = 0;");
    test("goog.provide('foo'); foo = {a: 0};",
        "var foo = {a: 0};");
    test("goog.provide('foo'); foo = function(){};",
        "var foo = function(){};");
    test("goog.provide('foo'); var foo = 0;",
        "var foo = 0;");
    test("goog.provide('foo'); var foo = {a: 0};",
        "var foo = {a: 0};");
    test("goog.provide('foo'); var foo = function(){};",
        "var foo = function(){};");
    test("goog.provide('foo.bar.Baz'); foo.bar.Baz=function(){};",
        "var foo={}; foo.bar={}; foo.bar.Baz=function(){};");
    test("goog.provide('foo.bar.moo'); foo.bar.moo={E:1,S:2};",
        "var foo={}; foo.bar={}; foo.bar.moo={E:1,S:2};");
    test("goog.provide('foo.bar.moo'); foo.bar.moo={E:1}; foo.bar.moo={E:2};",
        "var foo={}; foo.bar={}; foo.bar.moo={E:1}; foo.bar.moo={E:2};");
}
// Declaring the provided name as a function (not an assignment) is an error.
public void testProvidedDeclaredFunctionError() {
    test("goog.provide('foo'); function foo(){}",
        null, FUNCTION_NAMESPACE_ERROR);
}

// The synthesized declaration is dropped when the name is assigned (possibly multiple times).
public void testRemovalMultipleAssignment1() {
    test("goog.provide('foo'); foo = 0; foo = 1",
        "var foo = 0; foo = 1;");
}

public void testRemovalMultipleAssignment2() {
    test("goog.provide('foo'); var foo = 0; foo = 1",
        "var foo = 0; foo = 1;");
}

public void testRemovalMultipleAssignment3() {
    test("goog.provide('foo'); foo = 0; var foo = 1",
        "foo = 0; var foo = 1;");
}

public void testRemovalMultipleAssignment4() {
    test("goog.provide('foo.bar'); foo.bar = 0; foo.bar = 1",
        "var foo = {}; foo.bar = 0; foo.bar = 1");
}

// Assignments inside a function body do not replace the namespace declaration.
public void testNoRemovalFunction1() {
    test("goog.provide('foo'); function f(){foo = 0}",
        "var foo = {}; function f(){foo = 0}");
}

public void testNoRemovalFunction2() {
    test("goog.provide('foo'); function f(){var foo = 0}",
        "var foo = {}; function f(){var foo = 0}");
}

// Conditional assignments in both branches of an if can still subsume the declaration.
public void testRemovalMultipleAssignmentInIf1() {
    test("goog.provide('foo'); if (true) { var foo = 0 } else { foo = 1 }",
        "if (true) { var foo = 0 } else { foo = 1 }");
}

public void testRemovalMultipleAssignmentInIf2() {
    test("goog.provide('foo'); if (true) { foo = 0 } else { var foo = 1 }",
        "if (true) { foo = 0 } else { var foo = 1 }");
}

public void testRemovalMultipleAssignmentInIf3() {
    test("goog.provide('foo'); if (true) { foo = 0 } else { foo = 1 }",
        "if (true) { var foo = 0 } else { foo = 1 }");
}

public void testRemovalMultipleAssignmentInIf4() {
    test("goog.provide('foo.bar');" +
        "if (true) { foo.bar = 0 } else { foo.bar = 1 }",
        "var foo = {}; if (true) { foo.bar = 0 } else { foo.bar = 1 }");
}
// A user declaration alongside a provide leads to a double declaration in the output.
public void testMultipleDeclarationError1() {
    String rest = "if (true) { foo.bar = 0 } else { foo.bar = 1 }";
    test("goog.provide('foo.bar');" + "var foo = {};" + rest,
        "var foo = {};" + "var foo = {};" + rest);
}

public void testMultipleDeclarationError2() {
    test("goog.provide('foo.bar');" +
        "if (true) { var foo = {}; foo.bar = 0 } else { foo.bar = 1 }",
        "var foo = {};" +
        "if (true) {" +
        " var foo = {}; foo.bar = 0" +
        "} else {" +
        " foo.bar = 1" +
        "}");
}

public void testMultipleDeclarationError3() {
    test("goog.provide('foo.bar');" +
        "if (true) { foo.bar = 0 } else { var foo = {}; foo.bar = 1 }",
        "var foo = {};" +
        "if (true) {" +
        " foo.bar = 0" +
        "} else {" +
        " var foo = {}; foo.bar = 1" +
        "}");
}

// A provide occurring after the name was already declared duplicates the declaration.
public void testProvideAfterDeclarationError() {
    test("var x = 42; goog.provide('x');",
        "var x = 42; var x = {}");
}

// Malformed goog.provide() calls each produce a specific diagnostic.
public void testProvideErrorCases() {
    test("goog.provide();", "", NULL_ARGUMENT_ERROR);
    test("goog.provide(5);", "", INVALID_ARGUMENT_ERROR);
    test("goog.provide([]);", "", INVALID_ARGUMENT_ERROR);
    test("goog.provide({});", "", INVALID_ARGUMENT_ERROR);
    test("goog.provide('foo', 'bar');", "", TOO_MANY_ARGUMENTS_ERROR);
    test("goog.provide('foo'); goog.provide('foo');", "",
        DUPLICATE_NAMESPACE_ERROR);
    test("goog.provide('foo.bar'); goog.provide('foo'); goog.provide('foo');",
        "", DUPLICATE_NAMESPACE_ERROR);
}
// Satisfied goog.require() calls are removed; non-goog .require calls are untouched.
public void testRemovalOfRequires() {
    test("goog.provide('foo'); goog.require('foo');",
        "var foo={};");
    test("goog.provide('foo.bar'); goog.require('foo.bar');",
        "var foo={}; foo.bar={};");
    test("goog.provide('foo.bar.baz'); goog.require('foo.bar.baz');",
        "var foo={}; foo.bar={}; foo.bar.baz={};");
    test("goog.provide('foo'); var x = 3; goog.require('foo'); something();",
        "var foo={}; var x = 3; something();");
    testSame("foo.require('foo.bar');");
}

// With the preserve flag set, goog.require() calls stay in the output.
public void testPreserveGoogRequires() {
    preserveGoogRequires = true;
    test("goog.provide('foo'); goog.require('foo');",
        "var foo={}; goog.require('foo');");
    test("goog.provide('foo'); goog.require('foo'); var a = {};",
        "var foo = {}; goog.require('foo'); var a = {};");
}

// Malformed goog.require() calls each produce a specific diagnostic.
public void testRequireErrorCases() {
    test("goog.require();", "", NULL_ARGUMENT_ERROR);
    test("goog.require(5);", "", INVALID_ARGUMENT_ERROR);
    test("goog.require([]);", "", INVALID_ARGUMENT_ERROR);
    test("goog.require({});", "", INVALID_ARGUMENT_ERROR);
}

// Requiring a namespace that is only provided later is flagged.
public void testLateProvides() {
    test("goog.require('foo'); goog.provide('foo');",
        "var foo={};", LATE_PROVIDE_ERROR);
    test("goog.require('foo.bar'); goog.provide('foo.bar');",
        "var foo={}; foo.bar={};", LATE_PROVIDE_ERROR);
    test("goog.provide('foo.bar'); goog.require('foo'); goog.provide('foo');",
        "var foo={}; foo.bar={};", LATE_PROVIDE_ERROR);
}

// Requires of never-provided (or case-mismatched) namespaces are flagged.
public void testMissingProvides() {
    test("goog.require('foo');",
        "", MISSING_PROVIDE_ERROR);
    test("goog.provide('foo'); goog.require('Foo');",
        "var foo={};", MISSING_PROVIDE_ERROR);
    test("goog.provide('foo'); goog.require('foo.bar');",
        "var foo={};", MISSING_PROVIDE_ERROR);
    test("goog.provide('foo'); var EXPERIMENT_FOO = true; " +
        "if (EXPERIMENT_FOO) {goog.require('foo.bar');}",
        "var foo={}; var EXPERIMENT_FOO = true; if (EXPERIMENT_FOO) {}",
        MISSING_PROVIDE_ERROR);
}

// goog.addDependency registers its provided names as forward-declared types.
public void testAddDependency() {
    test("goog.addDependency('x.js', ['A', 'B'], []);", "0");
    Compiler compiler = getLastCompiler();
    assertTrue(compiler.getTypeRegistry().isForwardDeclaredType("A"));
    assertTrue(compiler.getTypeRegistry().isForwardDeclaredType("B"));
    assertFalse(compiler.getTypeRegistry().isForwardDeclaredType("C"));
}

// goog.forwardDeclare takes exactly one string argument.
public void testForwardDeclarations() {
    test("goog.forwardDeclare('A.B')", "");
    Compiler compiler = getLastCompiler();
    assertTrue(compiler.getTypeRegistry().isForwardDeclaredType("A.B"));
    assertFalse(compiler.getTypeRegistry().isForwardDeclaredType("C.D"));
    test("goog.forwardDeclare();", "",
        ProcessClosurePrimitives.INVALID_FORWARD_DECLARE);
    test("goog.forwardDeclare('A.B', 'C.D');", "",
        ProcessClosurePrimitives.INVALID_FORWARD_DECLARE);
}
// A valid mapping object literal is consumed and exposed as the compiler's renaming map.
public void testValidSetCssNameMapping() {
    test("goog.setCssNameMapping({foo:'bar',\"biz\":'baz'});", "");
    CssRenamingMap map = getLastCompiler().getCssRenamingMap();
    assertNotNull(map);
    assertEquals("bar", map.get("foo"));
    assertEquals("baz", map.get("biz"));
}

// The optional second argument selects the mapping style (BY_PART / BY_WHOLE).
public void testValidSetCssNameMappingWithType() {
    test("goog.setCssNameMapping({foo:'bar',\"biz\":'baz'}, 'BY_PART');", "");
    CssRenamingMap map = getLastCompiler().getCssRenamingMap();
    assertNotNull(map);
    assertEquals("bar", map.get("foo"));
    assertEquals("baz", map.get("biz"));
    test("goog.setCssNameMapping({foo:'bar',biz:'baz','biz-foo':'baz-bar'}," +
        " 'BY_WHOLE');", "");
    map = getLastCompiler().getCssRenamingMap();
    assertNotNull(map);
    assertEquals("bar", map.get("foo"));
    assertEquals("baz", map.get("biz"));
    assertEquals("baz-bar", map.get("biz-foo"));
}

public void testSetCssNameMappingNonStringValueReturnsError() {
    // Make sure the argument is an object literal.
    test("var BAR = {foo:'bar'}; goog.setCssNameMapping(BAR);", "",
        EXPECTED_OBJECTLIT_ERROR);
    test("goog.setCssNameMapping([]);", "",
        EXPECTED_OBJECTLIT_ERROR);
    test("goog.setCssNameMapping(false);", "",
        EXPECTED_OBJECTLIT_ERROR);
    test("goog.setCssNameMapping(null);", "",
        EXPECTED_OBJECTLIT_ERROR);
    test("goog.setCssNameMapping(undefined);", "",
        EXPECTED_OBJECTLIT_ERROR);
    // Make sure all values of the object literal are string literals.
    test("var BAR = 'bar'; goog.setCssNameMapping({foo:BAR});", "",
        NON_STRING_PASSED_TO_SET_CSS_NAME_MAPPING_ERROR);
    test("goog.setCssNameMapping({foo:6});", "",
        NON_STRING_PASSED_TO_SET_CSS_NAME_MAPPING_ERROR);
    test("goog.setCssNameMapping({foo:false});", "",
        NON_STRING_PASSED_TO_SET_CSS_NAME_MAPPING_ERROR);
    test("goog.setCssNameMapping({foo:null});", "",
        NON_STRING_PASSED_TO_SET_CSS_NAME_MAPPING_ERROR);
    test("goog.setCssNameMapping({foo:undefined});", "",
        NON_STRING_PASSED_TO_SET_CSS_NAME_MAPPING_ERROR);
}

public void testSetCssNameMappingValidity() {
    // Make sure that the keys don't have -'s
    test("goog.setCssNameMapping({'a': 'b', 'a-a': 'c'})", "", null,
        INVALID_CSS_RENAMING_MAP);
    // In full mode, we check that map(a-b)=map(a)-map(b)
    test("goog.setCssNameMapping({'a': 'b', 'a-a': 'c'}, 'BY_WHOLE')", "", null,
        INVALID_CSS_RENAMING_MAP);
    // Unknown mapping type
    test("goog.setCssNameMapping({foo:'bar'}, 'UNKNOWN');", "",
        INVALID_STYLE_ERROR);
}
// Requiring a namespace provided in a sibling (non-dependency) module is an error.
public void testBadCrossModuleRequire() {
    test(
        createModuleStar(
            "",
            "goog.provide('goog.ui');",
            "goog.require('goog.ui');"),
        new String[] {
            "",
            "goog.ui = {};",
            ""
        },
        null,
        XMODULE_REQUIRE_ERROR);
}

// Requiring from the root module (a real dependency) is fine.
public void testGoodCrossModuleRequire1() {
    test(
        createModuleStar(
            "goog.provide('goog.ui');",
            "",
            "goog.require('goog.ui');"),
        new String[] {
            "goog.ui = {};",
            "",
            "",
        });
}

// Provide and require in the same module is also fine.
public void testGoodCrossModuleRequire2() {
    test(
        createModuleStar(
            "",
            "",
            "goog.provide('goog.ui'); goog.require('goog.ui');"),
        new String[] {
            "",
            "",
            "goog.ui = {};",
        });
}
// Tests providing additional code with non-overlapping var namespace.
public void testSimpleAdditionalProvide() {
    additionalCode = "goog.provide('b.B'); b.B = {};";
    test("goog.provide('a.A'); a.A = {};",
        "var b={};b.B={};var a={};a.A={};");
}

// Same as above, but with the additional code added after the original.
public void testSimpleAdditionalProvideAtEnd() {
    additionalEndCode = "goog.provide('b.B'); b.B = {};";
    test("goog.provide('a.A'); a.A = {};",
        "var a={};a.A={};var b={};b.B={};");
}

// Tests providing additional code with non-overlapping dotted namespace.
public void testSimpleDottedAdditionalProvide() {
    additionalCode = "goog.provide('a.b.B'); a.b.B = {};";
    test("goog.provide('c.d.D'); c.d.D = {};",
        "var a={};a.b={};a.b.B={};var c={};c.d={};c.d.D={};");
}

// Tests providing additional code with overlapping var namespace.
public void testOverlappingAdditionalProvide() {
    additionalCode = "goog.provide('a.B'); a.B = {};";
    test("goog.provide('a.A'); a.A = {};",
        "var a={};a.B={};a.A={};");
}

// Tests providing additional code with overlapping var namespace.
public void testOverlappingAdditionalProvideAtEnd() {
    additionalEndCode = "goog.provide('a.B'); a.B = {};";
    test("goog.provide('a.A'); a.A = {};",
        "var a={};a.A={};a.B={};");
}

// Tests providing additional code with overlapping dotted namespace.
public void testOverlappingDottedAdditionalProvide() {
    additionalCode = "goog.provide('a.b.B'); a.b.B = {};";
    test("goog.provide('a.b.C'); a.b.C = {};",
        "var a={};a.b={};a.b.B={};a.b.C={};");
}

// Tests that a require of additional code generates no error.
public void testRequireOfAdditionalProvide() {
    additionalCode = "goog.provide('b.B'); b.B = {};";
    test("goog.require('b.B'); goog.provide('a.A'); a.A = {};",
        "var b={};b.B={};var a={};a.A={};");
}

// Tests that a require not in additional code generates (only) one error.
public void testMissingRequireWithAdditionalProvide() {
    additionalCode = "goog.provide('b.B'); b.B = {};";
    test("goog.require('b.C'); goog.provide('a.A'); a.A = {};",
        "var b={};b.B={};var a={};a.A={};",
        MISSING_PROVIDE_ERROR);
}

// Tests that a require in additional code generates no error.
public void testLateRequire() {
    additionalEndCode = "goog.require('a.A');";
    test("goog.provide('a.A'); a.A = {};",
        "var a={};a.A={};");
}

// Tests a case where code is reordered after processing provides and then
// provides are processed again.
public void testReorderedProvides() {
    additionalCode = "a.B = {};"; // as if a.B was after a.A originally
    addAdditionalNamespace = true;
    test("goog.provide('a.A'); a.A = {};",
        "var a={};a.B={};a.A={};");
}

// Another version of above.
public void testReorderedProvides2() {
    additionalEndCode = "a.B = {};";
    addAdditionalNamespace = true;
    test("goog.provide('a.A'); a.A = {};",
        "var a={};a.A={};a.B={};");
}
// --- Provide-ordering tests: how namespace stubs interact with an explicit
// --- (re)definition of the provided name.
// Provide a name before the definition of the class providing the
// parent namespace.
public void testProvideOrder1() {
additionalEndCode = "";
addAdditionalNamespace = false;
// TODO(johnlenz): This test confirms that the constructor (a.b) isn't
// improperly removed, but this result isn't really what we want as the
// reassign of a.b removes the definition of "a.b.c".
test("goog.provide('a.b');" +
"goog.provide('a.b.c');" +
"a.b.c;" +
"a.b = function(x,y) {};",
"var a = {};" +
"a.b = {};" +
"a.b.c = {};" +
"a.b.c;" +
"a.b = function(x,y) {};");
}
// Provide a name after the definition of the class providing the
// parent namespace.
public void testProvideOrder2() {
additionalEndCode = "";
addAdditionalNamespace = false;
// TODO(johnlenz): This test confirms that the constructor (a.b) isn't
// improperly removed, but this result isn't really what we want as
// namespace placeholders for a.b and a.b.c remain.
test("goog.provide('a.b');" +
"goog.provide('a.b.c');" +
"a.b = function(x,y) {};" +
"a.b.c;",
"var a = {};" +
"a.b = {};" +
"a.b.c = {};" +
"a.b = function(x,y) {};" +
"a.b.c;");
}
// Provide a name after the definition of the class providing the
// parent namespace.
public void testProvideOrder3a() {
test("goog.provide('a.b');" +
"a.b = function(x,y) {};" +
"goog.provide('a.b.c');" +
"a.b.c;",
"var a = {};" +
"a.b = function(x,y) {};" +
"a.b.c = {};" +
"a.b.c;");
}
public void testProvideOrder3b() {
additionalEndCode = "";
addAdditionalNamespace = false;
// This tests a cleanly provided name, below a function namespace.
test("goog.provide('a.b');" +
"a.b = function(x,y) {};" +
"goog.provide('a.b.c');" +
"a.b.c;",
"var a = {};" +
"a.b = function(x,y) {};" +
"a.b.c = {};" +
"a.b.c;");
}
// Provides rooted in the pre-existing "goog" object get no new var.
public void testProvideOrder4a() {
test("goog.provide('goog.a');" +
"goog.provide('goog.a.b');" +
"if (x) {" +
" goog.a.b = 1;" +
"} else {" +
" goog.a.b = 2;" +
"}",
"goog.a={};" +
"if(x)" +
" goog.a.b=1;" +
"else" +
" goog.a.b=2;");
}
public void testProvideOrder4b() {
additionalEndCode = "";
addAdditionalNamespace = false;
// This tests a cleanly provided name, below a namespace.
test("goog.provide('goog.a');" +
"goog.provide('goog.a.b');" +
"if (x) {" +
" goog.a.b = 1;" +
"} else {" +
" goog.a.b = 2;" +
"}",
"goog.a={};" +
"if(x)" +
" goog.a.b=1;" +
"else" +
" goog.a.b=2;");
}
// 'class' is a reserved word and may not be a provided name segment.
public void testInvalidProvide() {
test("goog.provide('a.class');", null, INVALID_PROVIDE_ERROR);
}
// Template used by the goog.base/Foo.base tests below: a constructor plus
// one prototype method whose body is injected via String.format.
private static final String METHOD_FORMAT =
"function Foo() {} Foo.prototype.method = function() { %s };";
// Canonical goog.inherits call that makes Foo a subclass of BaseFoo.
private static final String FOO_INHERITS =
"goog.inherits(Foo, BaseFoo);";
// --- goog.base(): malformed uses are reported as GOOG_BASE_CLASS_ERROR;
// --- well-formed uses are rewritten into direct superclass calls.
public void testInvalidGoogBase1() {
test("goog.base(this, 'method');", null, GOOG_BASE_CLASS_ERROR);
}
public void testInvalidGoogBase2() {
test("function Foo() {}" +
"Foo.method = function() {" +
" goog.base(this, 'method');" +
"};", null, GOOG_BASE_CLASS_ERROR);
}
public void testInvalidGoogBase3() {
test(String.format(METHOD_FORMAT, "goog.base();"),
null, GOOG_BASE_CLASS_ERROR);
}
public void testInvalidGoogBase4() {
test(String.format(METHOD_FORMAT, "goog.base(this, 'bar');"),
null, GOOG_BASE_CLASS_ERROR);
}
public void testInvalidGoogBase5() {
test(String.format(METHOD_FORMAT, "goog.base('foo', 'method');"),
null, GOOG_BASE_CLASS_ERROR);
}
public void testInvalidGoogBase6() {
test(String.format(METHOD_FORMAT, "goog.base.call(null, this, 'method');"),
null, GOOG_BASE_CLASS_ERROR);
}
public void testInvalidGoogBase7() {
test("function Foo() { goog.base(this); }",
null, GOOG_BASE_CLASS_ERROR);
}
public void testInvalidGoogBase8() {
test("var Foo = function() { goog.base(this); }",
null, GOOG_BASE_CLASS_ERROR);
}
public void testInvalidGoogBase9() {
test("var goog = {}; goog.Foo = function() { goog.base(this); }",
null, GOOG_BASE_CLASS_ERROR);
}
public void testValidGoogBase1() {
test(String.format(METHOD_FORMAT, "goog.base(this, 'method');"),
String.format(METHOD_FORMAT, "Foo.superClass_.method.call(this)"));
}
public void testValidGoogBase2() {
test(String.format(METHOD_FORMAT, "goog.base(this, 'method', 1, 2);"),
String.format(METHOD_FORMAT,
"Foo.superClass_.method.call(this, 1, 2)"));
}
public void testValidGoogBase3() {
test(String.format(METHOD_FORMAT, "return goog.base(this, 'method');"),
String.format(METHOD_FORMAT,
"return Foo.superClass_.method.call(this)"));
}
public void testValidGoogBase4() {
test("function Foo() { goog.base(this, 1, 2); }" + FOO_INHERITS,
"function Foo() { BaseFoo.call(this, 1, 2); } " + FOO_INHERITS);
}
public void testValidGoogBase5() {
test("var Foo = function() { goog.base(this, 1); };" + FOO_INHERITS,
"var Foo = function() { BaseFoo.call(this, 1); }; " + FOO_INHERITS);
}
public void testValidGoogBase6() {
test("var goog = {}; goog.Foo = function() { goog.base(this); }; " +
"goog.inherits(goog.Foo, goog.BaseFoo);",
"var goog = {}; goog.Foo = function() { goog.BaseFoo.call(this); }; " +
"goog.inherits(goog.Foo, goog.BaseFoo);");
}
// When banGoogBase is set, even a well-formed goog.base call is an error.
public void testBanGoogBase() {
banGoogBase = true;
testSame("function Foo() { goog.base(this, 1, 2); }" + FOO_INHERITS,
ProcessClosurePrimitives.USE_OF_GOOG_BASE, true);
}
// --- Foo.base(): class-qualified base calls require a visible
// --- goog.inherits and a correct argument shape.
// NOTE(review): the numbering skips testInvalidBase6 (5 -> 7).
public void testInvalidBase1() {
test(
"var Foo = function() {};" + FOO_INHERITS +
"Foo.base(this, 'method');", null, BASE_CLASS_ERROR);
}
public void testInvalidBase2() {
test("function Foo() {}" + FOO_INHERITS +
"Foo.method = function() {" +
" Foo.base(this, 'method');" +
"};", null, BASE_CLASS_ERROR);
}
public void testInvalidBase3() {
test(String.format(FOO_INHERITS + METHOD_FORMAT, "Foo.base();"),
null, BASE_CLASS_ERROR);
}
public void testInvalidBase4() {
test(String.format(FOO_INHERITS + METHOD_FORMAT, "Foo.base(this, 'bar');"),
null, BASE_CLASS_ERROR);
}
public void testInvalidBase5() {
test(String.format(FOO_INHERITS + METHOD_FORMAT,
"Foo.base('foo', 'method');"),
null, BASE_CLASS_ERROR);
}
public void testInvalidBase7() {
test("function Foo() { Foo.base(this); };" + FOO_INHERITS,
null, BASE_CLASS_ERROR);
}
public void testInvalidBase8() {
test("var Foo = function() { Foo.base(this); };" + FOO_INHERITS,
null, BASE_CLASS_ERROR);
}
public void testInvalidBase9() {
test("var goog = {}; goog.Foo = function() { goog.Foo.base(this); };"
+ FOO_INHERITS,
null, BASE_CLASS_ERROR);
}
public void testInvalidBase10() {
test("function Foo() { Foo.base(this); }" + FOO_INHERITS,
null, BASE_CLASS_ERROR);
}
public void testInvalidBase11() {
test("function Foo() { Foo.base(this, 'method'); }" + FOO_INHERITS,
null, BASE_CLASS_ERROR);
}
public void testInvalidBase12() {
test("function Foo() { Foo.base(this, 1, 2); }" + FOO_INHERITS,
null, BASE_CLASS_ERROR);
}
public void testInvalidBase13() {
test(
"function Bar(){ Bar.base(this, 'constructor'); }" +
"goog.inherits(Bar, Goo);" +
"function Foo(){ Bar.base(this, 'constructor'); }" + FOO_INHERITS,
null, BASE_CLASS_ERROR);
}
public void testValidBase1() {
test(FOO_INHERITS
+ String.format(METHOD_FORMAT, "Foo.base(this, 'method');"),
FOO_INHERITS
+ String.format(METHOD_FORMAT, "Foo.superClass_.method.call(this)"));
}
public void testValidBase2() {
test(FOO_INHERITS
+ String.format(METHOD_FORMAT, "Foo.base(this, 'method', 1, 2);"),
FOO_INHERITS
+ String.format(METHOD_FORMAT,
"Foo.superClass_.method.call(this, 1, 2)"));
}
public void testValidBase3() {
test(FOO_INHERITS
+ String.format(METHOD_FORMAT, "return Foo.base(this, 'method');"),
FOO_INHERITS
+ String.format(METHOD_FORMAT,
"return Foo.superClass_.method.call(this)"));
}
public void testValidBase4() {
test("function Foo() { Foo.base(this, 'constructor', 1, 2); }"
+ FOO_INHERITS,
"function Foo() { BaseFoo.call(this, 1, 2); } " + FOO_INHERITS);
}
public void testValidBase5() {
test("var Foo = function() { Foo.base(this, 'constructor', 1); };"
+ FOO_INHERITS,
"var Foo = function() { BaseFoo.call(this, 1); }; " + FOO_INHERITS);
}
public void testValidBase6() {
test("var goog = {}; goog.Foo = function() {" +
"goog.Foo.base(this, 'constructor'); }; " +
"goog.inherits(goog.Foo, goog.BaseFoo);",
"var goog = {}; goog.Foo = function() { goog.BaseFoo.call(this); }; " +
"goog.inherits(goog.Foo, goog.BaseFoo);");
}
public void testValidBase7() {
// No goog.inherits, so this is probably a different 'base' function.
testSame(""
+ "var a = function() {"
+ " a.base(this, 'constructor');"
+ "};");
}
// --- Namespace stubs for provides that are split across independent
// --- modules must land in the earliest module that needs them.
public void testImplicitAndExplicitProvide() {
test("var goog = {}; " +
"goog.provide('goog.foo.bar'); goog.provide('goog.foo');",
"var goog = {}; goog.foo = {}; goog.foo.bar = {};");
}
public void testImplicitProvideInIndependentModules() {
test(
createModuleStar(
"",
"goog.provide('apps.A');",
"goog.provide('apps.B');"),
new String[] {
"var apps = {};",
"apps.A = {};",
"apps.B = {};",
});
}
public void testImplicitProvideInIndependentModules2() {
test(
createModuleStar(
"goog.provide('apps');",
"goog.provide('apps.foo.A');",
"goog.provide('apps.foo.B');"),
new String[] {
"var apps = {}; apps.foo = {};",
"apps.foo.A = {};",
"apps.foo.B = {};",
});
}
public void testImplicitProvideInIndependentModules3() {
test(
createModuleStar(
"var goog = {};",
"goog.provide('goog.foo.A');",
"goog.provide('goog.foo.B');"),
new String[] {
"var goog = {}; goog.foo = {};",
"goog.foo.A = {};",
"goog.foo.B = {};",
});
}
public void testProvideInIndependentModules1() {
test(
createModuleStar(
"goog.provide('apps');",
"goog.provide('apps.foo');",
"goog.provide('apps.foo.B');"),
new String[] {
"var apps = {}; apps.foo = {};",
"",
"apps.foo.B = {};",
});
}
public void testProvideInIndependentModules2() {
// TODO(nicksantos): Make this an error.
test(
createModuleStar(
"goog.provide('apps');",
"goog.provide('apps.foo'); apps.foo = {};",
"goog.provide('apps.foo.B');"),
new String[] {
"var apps = {};",
"apps.foo = {};",
"apps.foo.B = {};",
});
}
public void testProvideInIndependentModules2b() {
// TODO(nicksantos): Make this an error.
test(
createModuleStar(
"goog.provide('apps');",
"goog.provide('apps.foo'); apps.foo = function() {};",
"goog.provide('apps.foo.B');"),
new String[] {
"var apps = {};",
"apps.foo = function() {};",
"apps.foo.B = {};",
});
}
public void testProvideInIndependentModules3() {
test(
createModuleStar(
"goog.provide('apps');",
"goog.provide('apps.foo.B');",
"goog.provide('apps.foo'); goog.require('apps.foo');"),
new String[] {
"var apps = {}; apps.foo = {};",
"apps.foo.B = {};",
"",
});
}
public void testProvideInIndependentModules3b() {
// TODO(nicksantos): Make this an error.
test(
createModuleStar(
"goog.provide('apps');",
"goog.provide('apps.foo.B');",
"goog.provide('apps.foo'); apps.foo = function() {}; " +
"goog.require('apps.foo');"),
new String[] {
"var apps = {};",
"apps.foo.B = {};",
"apps.foo = function() {};",
});
}
public void testProvideInIndependentModules4() {
// Regression test for bug 261:
// http://code.google.com/p/closure-compiler/issues/detail?id=261
test(
createModuleStar(
"goog.provide('apps');",
"goog.provide('apps.foo.bar.B');",
"goog.provide('apps.foo.bar.C');"),
new String[] {
"var apps = {};apps.foo = {};apps.foo.bar = {}",
"apps.foo.bar.B = {};",
"apps.foo.bar.C = {};",
});
}
// Requiring the base 'goog' namespace itself is reported as missing.
public void testRequireOfBaseGoog() {
test("goog.require('goog');",
"", MISSING_PROVIDE_ERROR);
}
// Synthesized namespace stubs must carry source positions pointing back
// at the original goog.provide call.
public void testSourcePositionPreservation() {
test("goog.provide('foo.bar.baz');",
"var foo = {};" +
"foo.bar = {};" +
"foo.bar.baz = {};");
Node root = getLastCompiler().getRoot();
Node fooDecl = findQualifiedNameNode("foo", root);
Node fooBarDecl = findQualifiedNameNode("foo.bar", root);
Node fooBarBazDecl = findQualifiedNameNode("foo.bar.baz", root);
assertEquals(1, fooDecl.getLineno());
assertEquals(14, fooDecl.getCharno());
assertEquals(1, fooBarDecl.getLineno());
assertEquals(18, fooBarDecl.getCharno());
assertEquals(1, fooBarBazDecl.getLineno());
assertEquals(22, fooBarBazDecl.getCharno());
}
// A provided name that is declared as a @typedef gets no {} stub.
public void testNoStubForProvidedTypedef() {
test("goog.provide('x'); /** @typedef {number} */ var x;", "var x;");
}
public void testNoStubForProvidedTypedef2() {
test("goog.provide('x.y'); /** @typedef {number} */ x.y;",
"var x = {}; x.y;");
}
public void testNoStubForProvidedTypedef4() {
test("goog.provide('x.y.z'); /** @typedef {number} */ x.y.z;",
"var x = {}; x.y = {}; x.y.z;");
}
public void testProvideRequireSameFile() {
test("goog.provide('x');\ngoog.require('x');", "var x = {};");
}
// goog.define rewrites to a plain (annotated) assignment.
public void testDefineCases() {
String jsdoc = "/** @define {number} */\n";
test(jsdoc + "goog.define('name', 1);", jsdoc + "var name = 1");
test(jsdoc + "goog.define('ns.name', 1);", jsdoc + "ns.name = 1");
}
public void testDefineErrorCases() {
String jsdoc = "/** @define {number} */\n";
test("goog.define('name', 1);", "", MISSING_DEFINE_ANNOTATION);
test(jsdoc + "goog.define('name.2', 1);", "", INVALID_DEFINE_NAME_ERROR);
test(jsdoc + "goog.define();", "", NULL_ARGUMENT_ERROR);
test(jsdoc + "goog.define('value');", "", NULL_ARGUMENT_ERROR);
test(jsdoc + "goog.define(5);", "", INVALID_ARGUMENT_ERROR);
}
// Accepted CLOSURE_DEFINES value forms: string, boolean, decimal, hex and
// negative number literals.
public void testDefineValues() {
testSame("var CLOSURE_DEFINES = {'FOO': 'string'};");
testSame("var CLOSURE_DEFINES = {'FOO': true};");
testSame("var CLOSURE_DEFINES = {'FOO': false};");
testSame("var CLOSURE_DEFINES = {'FOO': 1};");
testSame("var CLOSURE_DEFINES = {'FOO': 0xABCD};");
// Fixed typo: this previously read "CLOSURE_DEFINESS", a name the pass
// never inspects, so the negative-number case was vacuous.
testSame("var CLOSURE_DEFINES = {'FOO': -1};");
}
// CLOSURE_DEFINES values must be literal constants: variable references
// and computed expressions are rejected with CLOSURE_DEFINES_ERROR.
public void testDefineValuesErrors() {
testSame("var CLOSURE_DEFINES = {'FOO': a};",
CLOSURE_DEFINES_ERROR, true);
testSame("var CLOSURE_DEFINES = {'FOO': 0+1};",
CLOSURE_DEFINES_ERROR, true);
testSame("var CLOSURE_DEFINES = {'FOO': 'value' + 'value'};",
CLOSURE_DEFINES_ERROR, true);
testSame("var CLOSURE_DEFINES = {'FOO': !true};",
CLOSURE_DEFINES_ERROR, true);
testSame("var CLOSURE_DEFINES = {'FOO': -true};",
CLOSURE_DEFINES_ERROR, true);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package test.poa;
import static org.junit.Assert.assertTrue;
import java.util.Properties;
import org.omg.CORBA.*;
import org.omg.PortableServer.*;
import org.omg.PortableServer.POAPackage.*;
import org.omg.PortableServer.POAManagerPackage.*;
/**
 * Exercises POA::destroy semantics: a destroyed POA disappears from its
 * parent, children are destroyed recursively, and with wait_for_completion
 * the call blocks until in-flight requests finish.
 */
public final class TestDestroy extends test.common.TestBase {
    /**
     * Servant whose aMethod lingers long enough that the test can invoke
     * POA::destroy while the request is still being dispatched.
     */
    final static class Test_impl2 extends TestPOA {
        private POA poa_;
        private boolean called_ = false; // aMethod has been entered
        private boolean finished_ = false; // aMethod has run to completion

        Test_impl2(POA poa) {
            poa_ = poa;
        }

        synchronized public void aMethod() {
            called_ = true;
            notify(); // wake blockUntilCalled()
            try {
                wait(1000); // linger so destroy() overlaps this request
            } catch (InterruptedException ex) {
                Thread.currentThread().interrupt(); // preserve interrupt status
            }
            finished_ = true;
        }

        public POA _default_POA() {
            return poa_;
        }

        /** Blocks the caller until aMethod has started executing. */
        synchronized void blockUntilCalled() {
            while (!called_) {
                try {
                    wait();
                } catch (InterruptedException ex) {
                    // Deliberately swallowed: the loop re-checks called_.
                }
            }
        }

        /** Returns true once aMethod has completed. */
        synchronized boolean callComplete() {
            return finished_;
        }
    }

    /** Invokes the (deliberately slow) aMethod from a separate thread. */
    final static class LongCaller extends Thread {
        private Test t_;

        LongCaller(Test t) {
            t_ = t;
        }

        public void run() {
            try {
                t_.aMethod();
            } catch (SystemException se) {
                System.err.println(se.getMessage());
                se.printStackTrace();
            }
        }
    }

    //
    // This is a more rigorous test of POA::destroy. We want to ensure
    // that the POA isn't destroyed during a method call.
    //
    static void TestDestroyThreaded(ORB orb, POA root) {
        POAManager rootMgr = root.the_POAManager();
        assertTrue(rootMgr != null);
        Policy[] policies = new Policy[1];
        policies[0] = root
                .create_implicit_activation_policy(org.omg.PortableServer.ImplicitActivationPolicyValue.IMPLICIT_ACTIVATION);
        //
        // Create child POA
        //
        POA poa = null;
        try {
            poa = root.create_POA("poa1", rootMgr, policies);
        } catch (InvalidPolicy ex) {
            throw new RuntimeException(ex); // chain the cause for diagnosis
        } catch (AdapterAlreadyExists ex) {
            throw new RuntimeException(ex);
        }
        Test_impl2 impl = new Test_impl2(poa);
        Test t = impl._this(orb);
        Thread thr = new LongCaller(t);
        thr.start();
        impl.blockUntilCalled();
        //
        // Test: Destroy the POA while a method call is active
        //
        poa.destroy(true, true);
        //
        // The destroy call shouldn't return until the aMethod call is
        // complete
        //
        assertTrue(impl.callComplete());
        while (thr.isAlive()) {
            try {
                thr.join();
            } catch (InterruptedException ex) {
                // Deliberately swallowed: keep waiting for the thread to die.
            }
        }
    }

    /**
     * Tests destroy() of idle POAs: the parent forgets a destroyed child,
     * and destroying a POA also destroys its descendants.
     */
    static void TestDestroyBlocking(ORB orb, POA root) {
        Policy[] policies = new Policy[0];
        POA poa;
        POAManager rootMgr = root.the_POAManager();
        assertTrue(rootMgr != null);
        //
        // Create child POA
        //
        try {
            poa = root.create_POA("poa1", rootMgr, policies);
        } catch (InvalidPolicy ex) {
            throw new RuntimeException(ex);
        } catch (AdapterAlreadyExists ex) {
            throw new RuntimeException(ex);
        }
        //
        // Test: destroy
        //
        poa.destroy(true, true);
        //
        // Ensure parent no longer knows about child
        //
        try {
            root.find_POA("poa1", false);
            assertTrue(false); // find_POA should not have succeeded
        } catch (AdapterNonExistent ex) {
            // expected
        }
        //
        // Create child POA
        //
        try {
            poa = root.create_POA("poa1", rootMgr, policies);
        } catch (AdapterAlreadyExists ex) {
            throw new RuntimeException(ex);
        } catch (InvalidPolicy ex) {
            throw new RuntimeException(ex);
        }
        //
        // Create child of child POA
        //
        try {
            poa.create_POA("child1", rootMgr, policies);
        } catch (AdapterAlreadyExists ex) {
            throw new RuntimeException(ex);
        } catch (InvalidPolicy ex) {
            throw new RuntimeException(ex);
        }
        //
        // Test: destroy - should destroy poa1 and poa1/child1
        //
        poa.destroy(true, true);
        //
        // Ensure parent no longer knows about child
        //
        try {
            root.find_POA("poa1", false);
            assertTrue(false); // find_POA should not have succeeded
        } catch (AdapterNonExistent ex) {
            // expected
        }
        //
        // XXX Test: etherealize w/ servant manager
        //
    }

    /** Entry point: boots a Yoko ORB and runs both destroy tests. */
    public static void main(String[] args) {
        java.util.Properties props = new Properties();
        props.putAll(System.getProperties());
        props.put("org.omg.CORBA.ORBClass", "org.apache.yoko.orb.CORBA.ORB");
        props.put("org.omg.CORBA.ORBSingletonClass",
                "org.apache.yoko.orb.CORBA.ORBSingleton");
        int status = 0;
        ORB orb = null;
        try {
            //
            // Create ORB
            //
            orb = ORB.init(args, props);
            POA root = TestUtil.GetRootPOA(orb);
            POAManager rootMgr = root.the_POAManager();
            assertTrue(rootMgr != null);
            try {
                rootMgr.activate();
            } catch (AdapterInactive ex) {
                throw new RuntimeException(ex);
            }
            System.out.print("Testing POA::destroy... ");
            System.out.flush();
            TestDestroyBlocking(orb, root);
            TestDestroyThreaded(orb, root);
            System.out.println("Done!");
        } catch (SystemException ex) {
            ex.printStackTrace();
            status = 1;
        }
        if (orb != null) {
            try {
                orb.destroy();
            } catch (SystemException ex) {
                ex.printStackTrace();
                status = 1;
            }
        }
        System.exit(status);
    }
}
| |
package com.bt.pi.core.dht;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import java.io.File;
import org.apache.commons.io.FileUtils;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import com.bt.pi.core.continuation.UpdateResolver;
import com.bt.pi.core.entity.Deletable;
import com.bt.pi.core.entity.PiEntity;
import com.bt.pi.core.entity.PiEntityBase;
import com.bt.pi.core.id.KoalaIdFactory;
import com.bt.pi.core.id.PId;
import com.bt.pi.core.node.KoalaNode;
/**
 * Integration tests of BlockingDhtWriter against a live in-process Koala
 * node: writeIfAbsent() stores only when no live record exists, but a
 * record whose Deletable payload is flagged deleted may be overwritten.
 */
public class DhtReaderWriterTest {
    private static ClassPathXmlApplicationContext classPathXmlApplicationContext;
    private static KoalaIdFactory koalaIdFactory;
    private static DhtClientFactory dhtClientFactory;
    private static KoalaNode koalaNode;

    /** Boots the Spring context and starts the DHT node shared by all tests. */
    @BeforeClass
    public static void beforeClass() throws Exception {
        classPathXmlApplicationContext = new ClassPathXmlApplicationContext("applicationContext-p2p-core-integration.xml");
        koalaNode = (KoalaNode) classPathXmlApplicationContext.getBean("koalaNode");
        koalaIdFactory = (KoalaIdFactory) classPathXmlApplicationContext.getBean("koalaIdFactory");
        dhtClientFactory = (DhtClientFactory) classPathXmlApplicationContext.getBean("dhtClientFactory");
        koalaNode.start();
    }

    /** Stops the node and removes the on-disk state it created. */
    @AfterClass
    public static void afterClass() throws Exception {
        koalaNode.stop();
        classPathXmlApplicationContext.destroy();
        String nodeId = FileUtils.readFileToString(new File("var/run/nodeId.txt"));
        FileUtils.deleteQuietly(new File("storage" + nodeId));
        FileUtils.deleteQuietly(new File("var"));
    }

    @Test
    public void testWriteIfAbsent() {
        // setup
        PiEntity piEntity = new MyDhtRWPiEntity();
        PId id = koalaIdFactory.buildPId(piEntity.getUrl());
        BlockingDhtWriter blockingWriter = dhtClientFactory.createBlockingWriter();
        // act
        blockingWriter.writeIfAbsent(id, piEntity);
        // assert: first write succeeds, so the written value is reported
        assertEquals(piEntity, blockingWriter.getValueWritten());
    }

    @Test
    public void testWriteIfAbsentTwice() {
        // setup
        PiEntity piEntity = new MyDhtRWPiEntity();
        PId id = koalaIdFactory.buildPId(piEntity.getUrl());
        // act
        BlockingDhtWriter blockingWriter1 = dhtClientFactory.createBlockingWriter();
        blockingWriter1.writeIfAbsent(id, piEntity);
        // assert
        assertEquals(piEntity, blockingWriter1.getValueWritten());
        // act: a second writeIfAbsent against the same id must be a no-op
        BlockingDhtWriter blockingWriter2 = dhtClientFactory.createBlockingWriter();
        blockingWriter2.writeIfAbsent(id, piEntity);
        // assert
        assertNull(blockingWriter2.getValueWritten());
    }

    @Test
    public void testWriteIfAbsentTwiceOnDeletedDeletable() {
        // setup
        MyDeletablePiEntity piEntity = new MyDeletablePiEntity();
        piEntity.setDeleted(true);
        PId id = koalaIdFactory.buildPId(piEntity.getUrl());
        // act
        BlockingDhtWriter blockingWriter1 = dhtClientFactory.createBlockingWriter();
        blockingWriter1.writeIfAbsent(id, piEntity);
        // assert
        assertEquals(piEntity, blockingWriter1.getValueWritten());
        // act: the stored record is flagged deleted, so this write proceeds
        BlockingDhtWriter blockingWriter2 = dhtClientFactory.createBlockingWriter();
        piEntity.setDeleted(false);
        blockingWriter2.writeIfAbsent(id, piEntity);
        // assert
        assertEquals(piEntity, blockingWriter2.getValueWritten());
    }

    @Test
    public void shouldNotFailToUpdatePiEntityIfItExistsAndDeleted() {
        // setup
        // 1.
        System.err.println("Create a new Hello entity");
        final MyDeletablePiEntity piEntity0 = new MyDeletablePiEntity();
        PId id0 = koalaIdFactory.buildPId(piEntity0.getUrl());
        for (int i = 0; i < 20; i++) {
            dhtClientFactory.createBlockingWriter().update(id0, null, new UpdateResolver<MyDeletablePiEntity>() {
                @Override
                public MyDeletablePiEntity update(MyDeletablePiEntity existingEntity, MyDeletablePiEntity requestedEntity) {
                    piEntity0.setMessage("Hello:" + System.currentTimeMillis());
                    return piEntity0;
                }
            });
        }
        // 2.
        System.err.println("Deleting Hello entity");
        dhtClientFactory.createBlockingWriter().update(id0, null, new UpdateResolver<MyDeletablePiEntity>() {
            @Override
            public MyDeletablePiEntity update(MyDeletablePiEntity existingEntity, MyDeletablePiEntity requestedEntity) {
                existingEntity.setMessage("Hello Deleted");
                existingEntity.setDeleted(true);
                return existingEntity;
            }
        });
        // 3.
        System.err.println("Create another new Hello entity");
        // act
        MyDeletablePiEntity piEntity2 = new MyDeletablePiEntity();
        piEntity2.setDeleted(false);
        piEntity2.setMessage("World");
        BlockingDhtWriter blockingWriter2 = dhtClientFactory.createBlockingWriter();
        blockingWriter2.writeIfAbsent(id0, piEntity2);
        // assert
        assertEquals(piEntity2, blockingWriter2.getValueWritten());
    }

    @Test
    public void testWriteIfAbsentTwiceOnNotDeletedDeletable() {
        // setup
        MyDeletablePiEntity piEntity = new MyDeletablePiEntity();
        piEntity.setDeleted(false);
        PId id = koalaIdFactory.buildPId(piEntity.getUrl());
        // act
        BlockingDhtWriter blockingWriter1 = dhtClientFactory.createBlockingWriter();
        blockingWriter1.writeIfAbsent(id, piEntity);
        // assert
        assertEquals(piEntity, blockingWriter1.getValueWritten());
        // act: record exists and is live, so the second write must be refused
        BlockingDhtWriter blockingWriter2 = dhtClientFactory.createBlockingWriter();
        blockingWriter2.writeIfAbsent(id, piEntity);
        // assert
        assertNull(blockingWriter2.getValueWritten());
    }

    /** Minimal entity used by the plain read/write tests. */
    public static class MyDhtRWPiEntity extends PiEntityBase {
        @Override
        public String getType() {
            return getClass().getSimpleName();
        }

        // NOTE(review): returns a different URL on each call (per-invocation
        // timestamp); callers must capture the value once, as the tests do.
        @Override
        public String getUrl() {
            return getClass().getSimpleName() + ":" + System.currentTimeMillis();
        }

        @Override
        public String getUriScheme() {
            return "mdrwpe";
        }
    }

    /** Deletable entity whose URL is fixed at construction time. */
    public static class MyDeletablePiEntity extends PiEntityBase implements Deletable {
        private boolean deleted = false;
        private String url;
        private String message;

        public MyDeletablePiEntity() {
            this.url = getClass().getSimpleName() + ":" + System.currentTimeMillis();
        }

        public String getMessage() {
            return message;
        }

        public void setMessage(String aMessage) {
            message = aMessage;
        }

        @Override
        public String getType() {
            return getClass().getSimpleName();
        }

        @Override
        public String getUrl() {
            return url;
        }

        @Override
        public boolean isDeleted() {
            return deleted;
        }

        @Override
        public void setDeleted(boolean b) {
            this.deleted = b;
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj) {
                return true;
            }
            // Was an unchecked cast: null or a foreign type made equals()
            // throw instead of returning false.
            if (!(obj instanceof MyDeletablePiEntity)) {
                return false;
            }
            MyDeletablePiEntity other = (MyDeletablePiEntity) obj;
            return other.url.equals(url) && (other.deleted == deleted);
        }

        // equals() is overridden, so hashCode() must be consistent with it.
        @Override
        public int hashCode() {
            return 31 * url.hashCode() + (deleted ? 1 : 0);
        }

        @Override
        public String toString() {
            return "MyDeletablePiEntity [deleted=" + deleted + ", message=" + message + ", url=" + url + ", version=" + this.getVersion() + "]";
        }

        @Override
        public String getUriScheme() {
            return "mdpe";
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.testclient;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.pulsar.broker.TimeAverageBrokerData;
import org.apache.pulsar.broker.loadbalance.impl.ModularLoadManagerImpl;
import org.apache.pulsar.policies.data.loadbalancer.LoadReport;
import org.apache.pulsar.policies.data.loadbalancer.LocalBrokerData;
import org.apache.pulsar.policies.data.loadbalancer.ResourceUsage;
import org.apache.pulsar.policies.data.loadbalancer.SystemResourceUsage;
import org.apache.pulsar.testclient.utils.FixedColumnLengthTableMaker;
import org.apache.zookeeper.WatchedEvent;
import org.apache.zookeeper.Watcher;
import org.apache.zookeeper.ZooKeeper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.beust.jcommander.JCommander;
import com.beust.jcommander.Parameter;
import com.google.gson.Gson;
/**
* Monitors brokers and prints to the console information about their system resource usages, their topic and bundle
* counts, their message rates, and other metrics.
*/
public class BrokerMonitor {
private static final Logger log = LoggerFactory.getLogger(BrokerMonitor.class);
private static final String BROKER_ROOT = "/loadbalance/brokers";
private static final int ZOOKEEPER_TIMEOUT_MILLIS = 30000;
private static final int GLOBAL_STATS_PRINT_PERIOD_MILLIS = 60000;
private final ZooKeeper zkClient;
private static final Gson gson = new Gson();
// Fields common for message rows.
private static final List<Object> MESSAGE_FIELDS = Arrays.asList("MSG/S IN", "MSG/S OUT", "TOTAL", "KB/S IN",
"KB/S OUT", "TOTAL");
// Fields common for system rows.
private static final List<Object> SYSTEM_FIELDS = Arrays.asList("CPU %", "MEMORY %", "DIRECT %", "BW IN %",
"BW OUT %", "MAX %");
private static final Object[] SYSTEM_ROW = makeSystemRow("SYSTEM");
private static final Object[] COUNT_ROW = { "COUNT", "TOPIC", "BUNDLE", "PRODUCER", "CONSUMER", "BUNDLE +",
"BUNDLE -" };
private static final Object[] LATEST_ROW = makeMessageRow("LATEST");
private static final Object[] SHORT_ROW = makeMessageRow("SHORT");
private static final Object[] LONG_ROW = makeMessageRow("LONG");
private static final Object[] RAW_SYSTEM_ROW = makeSystemRow("RAW SYSTEM");
private static final Object[] ALLOC_SYSTEM_ROW = makeSystemRow("ALLOC SYSTEM");
private static final Object[] RAW_MESSAGE_ROW = makeMessageRow("RAW MSG");
private static final Object[] ALLOC_MESSAGE_ROW = makeMessageRow("ALLOC MSG");
private static final Object[] GLOBAL_HEADER = { "BROKER", "BUNDLE", "MSG/S", "LONG/S", "KB/S", "MAX %" };
private final Map<String, Object> loadData;
private static final FixedColumnLengthTableMaker localTableMaker = new FixedColumnLengthTableMaker();
static {
// Makes the table length about 120.
localTableMaker.elementLength = 14;
localTableMaker.decimalFormatter = "%.2f";
}
private static final FixedColumnLengthTableMaker globalTableMaker = new FixedColumnLengthTableMaker();
static {
globalTableMaker.decimalFormatter = "%.2f";
globalTableMaker.topBorder = '*';
globalTableMaker.bottomBorder = '*';
// Make broker column substantially longer than other columns.
globalTableMaker.lengthFunction = column -> column == 0 ? 60 : 12;
}
// Take advantage of repeated labels in message rows.
// Builds a table row: the given label followed by the shared
// MESSAGE_FIELDS column headings.
private static Object[] makeMessageRow(final String firstElement) {
    final List<Object> row = new ArrayList<>(MESSAGE_FIELDS.size() + 1);
    row.add(firstElement);
    row.addAll(MESSAGE_FIELDS);
    return row.toArray();
}
// Take advantage of repeated labels in system rows.
// Builds a table row: the given label followed by the shared
// SYSTEM_FIELDS column headings.
private static Object[] makeSystemRow(final String firstElement) {
    final List<Object> row = new ArrayList<>(SYSTEM_FIELDS.size() + 1);
    row.add(firstElement);
    row.addAll(SYSTEM_FIELDS);
    return row.toArray();
}
// Helper method to initialize rows.
// Copies the given elements into row starting at index 1, leaving the
// row label in slot 0 untouched.
private static void initRow(final Object[] row, final Object... elements) {
    for (int i = 0; i < elements.length; ++i) {
        row[i + 1] = elements[i];
    }
}
// Helper method to initialize rows which hold message data.
// Column order: msg/s in, msg/s out, msg/s total, then throughputs
// converted from bytes/s to KB/s: in, out, total.
private static void initMessageRow(final Object[] row, final double messageRateIn, final double messageRateOut,
final double messageThroughputIn, final double messageThroughputOut) {
initRow(row, messageRateIn, messageRateOut, messageRateIn + messageRateOut, messageThroughputIn / 1024,
messageThroughputOut / 1024, (messageThroughputIn + messageThroughputOut) / 1024);
}
// Prints one aggregated table covering every known broker plus a TOTAL row.
// Holds the loadData lock for the whole pass since watcher callbacks mutate the map concurrently.
private void printGlobalData() {
synchronized (loadData) {
// 1 header row, 1 total row, and loadData.size() rows for brokers.
Object[][] rows = new Object[loadData.size() + 2][];
rows[0] = GLOBAL_HEADER;
int totalBundles = 0;
double totalThroughput = 0;
double totalMessageRate = 0;
double totalLongTermMessageRate = 0;
double maxMaxUsage = 0;
int i = 1;
for (final Map.Entry<String, Object> entry : loadData.entrySet()) {
final String broker = entry.getKey();
final Object data = entry.getValue();
rows[i] = new Object[GLOBAL_HEADER.length];
rows[i][0] = broker;
int numBundles;
double messageRate;
double longTermMessageRate;
double messageThroughput;
double maxUsage;
if (data instanceof LoadReport) {
// SimpleLoadManagerImpl broker: everything comes from the LoadReport itself.
final LoadReport loadReport = (LoadReport) data;
numBundles = (int) loadReport.getNumBundles();
messageRate = loadReport.getMsgRateIn() + loadReport.getMsgRateOut();
longTermMessageRate = loadReport.getAllocatedMsgRateIn() + loadReport.getAllocatedMsgRateOut();
// Bytes -> KB.
messageThroughput = (loadReport.getAllocatedBandwidthIn() + loadReport.getAllocatedBandwidthOut())
/ 1024;
final SystemResourceUsage systemResourceUsage = loadReport.getSystemResourceUsage();
// Max percent usage across CPU, memory, direct memory and both bandwidth directions.
maxUsage = Math.max(
Math.max(
Math.max(systemResourceUsage.getCpu().percentUsage(),
systemResourceUsage.getMemory().percentUsage()),
Math.max(systemResourceUsage.getDirectMemory().percentUsage(),
systemResourceUsage.getBandwidthIn().percentUsage())),
systemResourceUsage.getBandwidthOut().percentUsage());
} else if (data instanceof LocalBrokerData) {
// ModularLoadManagerImpl broker: long-term rates live in a separate time-average ZNode.
final LocalBrokerData localData = (LocalBrokerData) data;
numBundles = localData.getNumBundles();
messageRate = localData.getMsgRateIn() + localData.getMsgRateOut();
final String timeAveragePath = ModularLoadManagerImpl.TIME_AVERAGE_BROKER_ZPATH + "/" + broker;
try {
final TimeAverageBrokerData timeAverageData = gson.fromJson(
new String(zkClient.getData(timeAveragePath, false, null)), TimeAverageBrokerData.class);
longTermMessageRate = timeAverageData.getLongTermMsgRateIn() + timeAverageData.getLongTermMsgRateOut();
} catch ( Exception x ) {
throw new RuntimeException(x);
}
messageThroughput = (localData.getMsgThroughputIn() + localData.getMsgThroughputOut()) / 1024;
// NOTE(review): this branch uses getMaxResourceUsage() raw while the LoadReport branch uses
// percentUsage() values -- the two appear to be on different scales (printBrokerData multiplies
// getMaxResourceUsage() by 100); confirm whether a * 100 is missing here.
maxUsage = localData.getMaxResourceUsage();
} else {
// loadData only ever receives LoadReport or LocalBrokerData (see BrokerDataWatcher.printData).
throw new AssertionError("Unreachable code");
}
rows[i][1] = numBundles;
rows[i][2] = messageRate;
rows[i][3] = messageThroughput;
rows[i][4] = longTermMessageRate;
rows[i][5] = maxUsage;
totalBundles += numBundles;
totalMessageRate += messageRate;
totalLongTermMessageRate += longTermMessageRate;
totalThroughput += messageThroughput;
maxMaxUsage = Math.max(maxUsage, maxMaxUsage);
++i;
}
final int finalRow = loadData.size() + 1;
rows[finalRow] = new Object[GLOBAL_HEADER.length];
rows[finalRow][0] = "TOTAL";
rows[finalRow][1] = totalBundles;
rows[finalRow][2] = totalMessageRate;
// NOTE(review): columns 3 and 4 here hold long-term rate then throughput, but the per-broker
// rows above store throughput in column 3 and long-term rate in column 4. One of the two
// orderings must disagree with GLOBAL_HEADER; confirm against the header definition and fix.
rows[finalRow][3] = totalLongTermMessageRate;
rows[finalRow][4] = totalThroughput;
rows[finalRow][5] = maxMaxUsage;
final String table = globalTableMaker.make(rows);
log.info("Overall Broker Data:\n{}", table);
}
}
// This watcher initializes data watchers whenever a new broker is found.
private class BrokerWatcher implements Watcher {
private final ZooKeeper zkClient;
// Broker names seen at the last update; diffed against the current children to report gains/losses.
private Set<String> brokers;
private BrokerWatcher(final ZooKeeper zkClient) {
this.zkClient = zkClient;
this.brokers = Collections.emptySet();
}
/**
* Creates a watch for a newly acquired broker so that its data is printed whenever it is updated.
*
* @param event
* The watched event.
*/
public synchronized void process(final WatchedEvent event) {
try {
if (event.getType() == Event.EventType.NodeChildrenChanged) {
updateBrokers(event.getPath());
}
} catch (Exception ex) {
throw new RuntimeException(ex);
}
}
// Inform the user of any broker gains and losses and put watches on newly acquired brokers.
private synchronized void updateBrokers(final String path) {
final Set<String> newBrokers = new HashSet<>();
try {
// Passing "this" re-arms the child watch for the next NodeChildrenChanged event
// (ZooKeeper watches are one-shot).
newBrokers.addAll(zkClient.getChildren(path, this));
} catch (Exception ex) {
throw new RuntimeException(ex);
}
for (String oldBroker : brokers) {
if (!newBrokers.contains(oldBroker)) {
log.info("Lost broker: " + oldBroker);
synchronized (loadData) {
// Stop including lost broker in global stats.
loadData.remove(oldBroker);
}
}
}
for (String newBroker : newBrokers) {
if (!brokers.contains(newBroker)) {
log.info("Gained broker: " + newBroker);
// printData installs a data watch on the new broker's ZNode as a side effect.
final BrokerDataWatcher brokerDataWatcher = new BrokerDataWatcher(zkClient);
brokerDataWatcher.printData(path + "/" + newBroker);
}
}
this.brokers = newBrokers;
}
}
// This watcher prints tabular data for a broker after its ZNode is updated.
private class BrokerDataWatcher implements Watcher {
private final ZooKeeper zkClient;
private BrokerDataWatcher(final ZooKeeper zkClient) {
this.zkClient = zkClient;
}
// Given the path to a broker ZNode, return the broker host name (the last path component).
private String brokerNameFromPath(final String path) {
return path.substring(path.lastIndexOf('/') + 1);
}
/**
* Print the local and historical broker data in a tabular format, and put this back as a watcher.
*
* @param event
* The watched event.
*/
public synchronized void process(final WatchedEvent event) {
try {
if (event.getType() == Event.EventType.NodeDataChanged) {
printData(event.getPath());
}
} catch (Exception ex) {
throw new RuntimeException(ex);
}
}
// Usage as a percentage of limit, clamped to [0, 100]; returns 0 when limit is not positive
// or usage is negative.
private double percentUsage(final double usage, final double limit) {
return limit > 0 && usage >= 0 ? 100 * Math.min(1, usage / limit) : 0;
}
// Decide whether this broker is running SimpleLoadManagerImpl or ModularLoadManagerImpl and then print the data
// accordingly.
private synchronized void printData(final String path) {
final String broker = brokerNameFromPath(path);
String jsonString;
try {
// Passing "this" re-arms the data watch for the next NodeDataChanged event.
jsonString = new String(zkClient.getData(path, this, null));
} catch (Exception ex) {
throw new RuntimeException(ex);
}
// Use presence of the String "allocated" to determine if this is using SimpleLoadManagerImpl.
if (jsonString.contains("allocated")) {
printLoadReport(broker, gson.fromJson(jsonString, LoadReport.class));
} else {
final LocalBrokerData localBrokerData = gson.fromJson(jsonString, LocalBrokerData.class);
// Long-term averages live in a separate ZNode maintained by the modular load manager.
final String timeAveragePath = ModularLoadManagerImpl.TIME_AVERAGE_BROKER_ZPATH + "/" + broker;
try {
final TimeAverageBrokerData timeAverageData = gson.fromJson(
new String(zkClient.getData(timeAveragePath, false, null)), TimeAverageBrokerData.class);
printBrokerData(broker, localBrokerData, timeAverageData);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
// Print the load report in a tabular form for a broker running SimpleLoadManagerImpl.
// Also registers the report in loadData so printGlobalData() includes this broker.
private synchronized void printLoadReport(final String broker, final LoadReport loadReport) {
loadData.put(broker, loadReport);
// Initialize the constant rows.
final Object[][] rows = new Object[10][];
rows[0] = COUNT_ROW;
rows[2] = RAW_SYSTEM_ROW;
rows[4] = ALLOC_SYSTEM_ROW;
rows[6] = RAW_MESSAGE_ROW;
rows[8] = ALLOC_MESSAGE_ROW;
// First column is a label, so start at the second column at index 1.
// Client count row.
rows[1] = new Object[COUNT_ROW.length];
initRow(rows[1], loadReport.getNumTopics(), loadReport.getNumBundles(), loadReport.getNumProducers(),
loadReport.getNumConsumers(), loadReport.getBundleGains().size(),
loadReport.getBundleLosses().size());
// Raw system row.
final SystemResourceUsage systemResourceUsage = loadReport.getSystemResourceUsage();
final ResourceUsage cpu = systemResourceUsage.getCpu();
final ResourceUsage memory = systemResourceUsage.getMemory();
final ResourceUsage directMemory = systemResourceUsage.getDirectMemory();
final ResourceUsage bandwidthIn = systemResourceUsage.getBandwidthIn();
final ResourceUsage bandwidthOut = systemResourceUsage.getBandwidthOut();
final double maxUsage = Math.max(
Math.max(Math.max(cpu.percentUsage(), memory.percentUsage()),
Math.max(directMemory.percentUsage(), bandwidthIn.percentUsage())),
bandwidthOut.percentUsage());
rows[3] = new Object[RAW_SYSTEM_ROW.length];
initRow(rows[3], cpu.percentUsage(), memory.percentUsage(), directMemory.percentUsage(),
bandwidthIn.percentUsage(), bandwidthOut.percentUsage(), maxUsage);
// Allocated system row.
rows[5] = new Object[ALLOC_SYSTEM_ROW.length];
final double allocatedCpuUsage = percentUsage(loadReport.getAllocatedCPU(), cpu.limit);
final double allocatedMemoryUsage = percentUsage(loadReport.getAllocatedMemory(), memory.limit);
final double allocatedBandwidthInUsage = percentUsage(loadReport.getAllocatedBandwidthIn(),
bandwidthIn.limit);
final double allocatedBandwidthOutUsage = percentUsage(loadReport.getAllocatedBandwidthOut(),
bandwidthOut.limit);
final double maxAllocatedUsage = Math.max(
Math.max(Math.max(allocatedCpuUsage, allocatedMemoryUsage), allocatedBandwidthInUsage),
allocatedBandwidthOutUsage);
// Direct memory has no allocated counterpart: its cell is null and it is excluded from the max above.
initRow(rows[5], allocatedCpuUsage, allocatedMemoryUsage, null, allocatedBandwidthInUsage,
allocatedBandwidthOutUsage, maxAllocatedUsage);
// Raw message row.
rows[7] = new Object[RAW_MESSAGE_ROW.length];
initMessageRow(rows[7], loadReport.getMsgRateIn(), loadReport.getMsgRateOut(), bandwidthIn.usage,
bandwidthOut.usage);
// Allocated message row.
rows[9] = new Object[ALLOC_MESSAGE_ROW.length];
initMessageRow(rows[9], loadReport.getAllocatedMsgRateIn(), loadReport.getAllocatedMsgRateOut(),
loadReport.getAllocatedBandwidthIn(), loadReport.getAllocatedBandwidthOut());
final String table = localTableMaker.make(rows);
log.info("\nLoad Report for {}:\n{}\n", broker, table);
}
// Print the broker data in a tabular form for a broker using ModularLoadManagerImpl.
// Also registers the data in loadData so printGlobalData() includes this broker.
private synchronized void printBrokerData(final String broker, final LocalBrokerData localBrokerData,
final TimeAverageBrokerData timeAverageData) {
loadData.put(broker, localBrokerData);
// Initialize the constant rows.
final Object[][] rows = new Object[10][];
rows[0] = SYSTEM_ROW;
rows[2] = COUNT_ROW;
rows[4] = LATEST_ROW;
rows[6] = SHORT_ROW;
rows[8] = LONG_ROW;
// First column is a label, so start at the second column at index 1.
// System row.
rows[1] = new Object[SYSTEM_ROW.length];
initRow(rows[1], localBrokerData.getCpu().percentUsage(), localBrokerData.getMemory().percentUsage(),
localBrokerData.getDirectMemory().percentUsage(), localBrokerData.getBandwidthIn().percentUsage(),
localBrokerData.getBandwidthOut().percentUsage(), localBrokerData.getMaxResourceUsage() * 100);
// Count row.
rows[3] = new Object[COUNT_ROW.length];
initRow(rows[3], localBrokerData.getNumTopics(), localBrokerData.getNumBundles(),
localBrokerData.getNumProducers(), localBrokerData.getNumConsumers(),
localBrokerData.getLastBundleGains().size(), localBrokerData.getLastBundleLosses().size());
// Latest message data row.
rows[5] = new Object[LATEST_ROW.length];
initMessageRow(rows[5], localBrokerData.getMsgRateIn(), localBrokerData.getMsgRateOut(),
localBrokerData.getMsgThroughputIn(), localBrokerData.getMsgThroughputOut());
// Short-term message data row.
rows[7] = new Object[SHORT_ROW.length];
initMessageRow(rows[7], timeAverageData.getShortTermMsgRateIn(), timeAverageData.getShortTermMsgRateOut(),
timeAverageData.getShortTermMsgThroughputIn(), timeAverageData.getShortTermMsgThroughputOut());
// Long-term message data row.
rows[9] = new Object[LONG_ROW.length];
initMessageRow(rows[9], timeAverageData.getLongTermMsgRateIn(), timeAverageData.getLongTermMsgRateOut(),
timeAverageData.getLongTermMsgThroughputIn(), timeAverageData.getLongTermMsgThroughputOut());
final String table = localTableMaker.make(rows);
log.info("\nBroker Data for {}:\n{}\n", broker, table);
}
}
// JCommander arguments class: holds the required ZooKeeper connect string parsed in main().
private static class Arguments {
@Parameter(names = { "--connect-string" }, description = "Zookeeper connect string", required = true)
public String connectString = null;
}
/**
 * Create a broker monitor backed by the given ZooKeeper client.
 *
 * @param zkClient
 *            Client to create this from.
 */
public BrokerMonitor(final ZooKeeper zkClient) {
    this.zkClient = zkClient;
    this.loadData = new ConcurrentHashMap<>();
}
/**
 * Start the broker monitoring procedure: watch the broker root, then periodically print
 * the aggregated load table. This method does not return normally.
 */
public void start() {
    try {
        final BrokerWatcher rootWatcher = new BrokerWatcher(zkClient);
        rootWatcher.updateBrokers(BROKER_ROOT);
        for (;;) {
            Thread.sleep(GLOBAL_STATS_PRINT_PERIOD_MILLIS);
            printGlobalData();
        }
    } catch (final Exception ex) {
        throw new RuntimeException(ex);
    }
}
/**
 * Run a monitor from command line arguments.
 *
 * @param args
 *            Arguments for the monitor; requires --connect-string.
 */
public static void main(String[] args) {
    try {
        final Arguments arguments = new Arguments();
        new JCommander(arguments).parse(args);
        final ZooKeeper zkClient =
                new ZooKeeper(arguments.connectString, ZOOKEEPER_TIMEOUT_MILLIS, null);
        new BrokerMonitor(zkClient).start();
    } catch (final Exception ex) {
        throw new RuntimeException(ex);
    }
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager.recovery;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.security.PrivilegedExceptionAction;
import java.util.LinkedList;
import java.util.Queue;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.records.Version;
import org.apache.hadoop.yarn.server.records.impl.pb.VersionPBImpl;
import org.apache.hadoop.yarn.server.resourcemanager.recovery.records.ApplicationStateData;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.junit.Assert;
import org.junit.Test;
/**
 * Tests for {@code FileSystemRMStateStore} backed by a MiniDFSCluster: normal state-store
 * operation, corrupted-entry cleanup, version handling, secure (admin-check) mode, and
 * client retry across a NameNode restart.
 */
public class TestFSRMStateStore extends RMStateStoreTestBase {
public static final Log LOG = LogFactory.getLog(TestFSRMStateStore.class);
// Shared helper; the base-class override hooks below (modifyAppState, ...) read it.
private TestFSRMStateStoreTester fsTester;
// Wires a FileSystemRMStateStore to the MiniDFSCluster and exposes test hooks.
class TestFSRMStateStoreTester implements RMStateStoreHelper {
Path workingDirPathURI;
TestFileSystemRMStore store;
MiniDFSCluster cluster;
boolean adminCheckEnable;
// Store subclass that exposes protected paths and asserts the init/start lifecycle.
class TestFileSystemRMStore extends FileSystemRMStateStore {
TestFileSystemRMStore(Configuration conf) throws Exception {
init(conf);
// fs must only be created by start(), not init().
Assert.assertNull(fs);
assertTrue(workingDirPathURI.equals(fsWorkingPath));
start();
Assert.assertNotNull(fs);
}
public Path getVersionNode() {
return new Path(new Path(workingDirPathURI, ROOT_DIR_NAME), VERSION_NODE);
}
public Version getCurrentVersion() {
return CURRENT_VERSION_INFO;
}
public Path getAppDir(String appId) {
Path rootDir = new Path(workingDirPathURI, ROOT_DIR_NAME);
Path appRootDir = new Path(rootDir, RM_APP_ROOT);
Path appDir = new Path(appRootDir, appId);
return appDir;
}
}
public TestFSRMStateStoreTester(MiniDFSCluster cluster, boolean adminCheckEnable) throws Exception {
Path workingDirPath = new Path("/yarn/Test");
this.adminCheckEnable = adminCheckEnable;
this.cluster = cluster;
FileSystem fs = cluster.getFileSystem();
fs.mkdirs(workingDirPath);
Path clusterURI = new Path(cluster.getURI());
workingDirPathURI = new Path(clusterURI, workingDirPath);
fs.close();
}
@Override
public RMStateStore getRMStateStore() throws Exception {
YarnConfiguration conf = new YarnConfiguration();
conf.set(YarnConfiguration.FS_RM_STATE_STORE_URI,
workingDirPathURI.toString());
conf.set(YarnConfiguration.FS_RM_STATE_STORE_RETRY_POLICY_SPEC,
"100,6000");
conf.setInt(YarnConfiguration.FS_RM_STATE_STORE_NUM_RETRIES, 8);
conf.setLong(YarnConfiguration.FS_RM_STATE_STORE_RETRY_INTERVAL_MS,
900L);
if (adminCheckEnable) {
conf.setBoolean(
YarnConfiguration.YARN_INTERMEDIATE_DATA_ENCRYPTION, true);
}
this.store = new TestFileSystemRMStore(conf);
// Retry settings must come from the configuration set above.
Assert.assertEquals(store.getNumRetries(), 8);
Assert.assertEquals(store.getRetryInterval(), 900L);
Assert.assertTrue(store.fs.getConf() == store.fsConf);
// startInternal() must replace the FileSystem instance but keep using fsConf.
FileSystem previousFs = store.fs;
store.startInternal();
Assert.assertTrue(store.fs != previousFs);
Assert.assertTrue(store.fs.getConf() == store.fsConf);
return store;
}
@Override
public boolean isFinalStateValid() throws Exception {
FileSystem fs = cluster.getFileSystem();
FileStatus[] files = fs.listStatus(workingDirPathURI);
return files.length == 1;
}
@Override
public void writeVersion(Version version) throws Exception {
store.updateFile(store.getVersionNode(), ((VersionPBImpl)
version)
.getProto().toByteArray(), false);
}
@Override
public Version getCurrentVersion() throws Exception {
return store.getCurrentVersion();
}
public boolean appExists(RMApp app) throws IOException {
FileSystem fs = cluster.getFileSystem();
Path nodePath =
store.getAppDir(app.getApplicationId().toString());
return fs.exists(nodePath);
}
}
@Test(timeout = 60000)
public void testFSRMStateStore() throws Exception {
HdfsConfiguration conf = new HdfsConfiguration();
MiniDFSCluster cluster =
new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
try {
fsTester = new TestFSRMStateStoreTester(cluster, false);
// If the state store is FileSystemRMStateStore then add corrupted entry.
// It should discard the entry and remove it from file system.
FSDataOutputStream fsOut = null;
FileSystemRMStateStore fileSystemRMStateStore =
(FileSystemRMStateStore) fsTester.getRMStateStore();
String appAttemptIdStr3 = "appattempt_1352994193343_0001_000003";
ApplicationAttemptId attemptId3 =
ConverterUtils.toApplicationAttemptId(appAttemptIdStr3);
Path appDir =
fsTester.store.getAppDir(attemptId3.getApplicationId().toString());
Path tempAppAttemptFile =
new Path(appDir, attemptId3.toString() + ".tmp");
fsOut = fileSystemRMStateStore.fs.create(tempAppAttemptFile, false);
fsOut.write("Some random data ".getBytes());
fsOut.close();
testRMAppStateStore(fsTester);
// The corrupted .tmp entry must have been discarded during state load.
Assert.assertFalse(fsTester.workingDirPathURI
.getFileSystem(conf).exists(tempAppAttemptFile));
testRMDTSecretManagerStateStore(fsTester);
testCheckVersion(fsTester);
testEpoch(fsTester);
testAppDeletion(fsTester);
testDeleteStore(fsTester);
testRemoveApplication(fsTester);
testAMRMTokenSecretManagerStateStore(fsTester);
testReservationStateStore(fsTester);
} finally {
cluster.shutdown();
}
}
@Test(timeout = 60000)
public void testHDFSRMStateStore() throws Exception {
final HdfsConfiguration conf = new HdfsConfiguration();
// "yarn" (group "admin") owns /yarn; the store runs as yarn while the verifier runs as the
// HDFS superuser to prove even the superuser cannot read the protected files.
UserGroupInformation yarnAdmin =
UserGroupInformation.createUserForTesting("yarn",
new String[]{"admin"});
final MiniDFSCluster cluster =
new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
cluster.getFileSystem().mkdir(new Path("/yarn"),
FsPermission.valueOf("-rwxrwxrwx"));
cluster.getFileSystem().setOwner(new Path("/yarn"), "yarn", "admin");
final UserGroupInformation hdfsAdmin = UserGroupInformation.getCurrentUser();
final StoreStateVerifier verifier = new StoreStateVerifier() {
@Override
void afterStoreApp(final RMStateStore store, final ApplicationId appId) {
try {
// Wait for things to settle
Thread.sleep(5000);
hdfsAdmin.doAs(
new PrivilegedExceptionAction<Void>() {
@Override
public Void run() throws Exception {
verifyFilesUnreadablebyHDFS(cluster,
((FileSystemRMStateStore) store).getAppDir
(appId));
return null;
}
});
} catch (Exception e) {
throw new RuntimeException(e);
}
}
@Override
void afterStoreAppAttempt(final RMStateStore store,
final ApplicationAttemptId appAttId) {
try {
// Wait for things to settle
Thread.sleep(5000);
hdfsAdmin.doAs(
new PrivilegedExceptionAction<Void>() {
@Override
public Void run() throws Exception {
verifyFilesUnreadablebyHDFS(cluster,
((FileSystemRMStateStore) store)
.getAppAttemptDir(appAttId));
return null;
}
});
} catch (Exception e) {
throw new RuntimeException(e);
}
}
};
try {
yarnAdmin.doAs(new PrivilegedExceptionAction<Void>() {
@Override
public Void run() throws Exception {
// adminCheckEnable=true turns on intermediate-data encryption in getRMStateStore().
fsTester = new TestFSRMStateStoreTester(cluster, true);
testRMAppStateStore(fsTester, verifier);
return null;
}
});
} finally {
cluster.shutdown();
}
}
// Breadth-first walk under root asserting every file is unreadable even by the HDFS superuser.
private void verifyFilesUnreadablebyHDFS(MiniDFSCluster cluster,
Path root) throws Exception{
DistributedFileSystem fs = cluster.getFileSystem();
Queue<Path> paths = new LinkedList<>();
paths.add(root);
while (!paths.isEmpty()) {
Path p = paths.poll();
FileStatus stat = fs.getFileStatus(p);
if (!stat.isDirectory()) {
try {
LOG.warn("\n\n ##Testing path [" + p + "]\n\n");
fs.open(p);
Assert.fail("Super user should not be able to read ["+ UserGroupInformation.getCurrentUser() + "] [" + p.getName() + "]");
} catch (AccessControlException e) {
Assert.assertTrue(e.getMessage().contains("superuser is not allowed to perform this operation"));
} catch (Exception e) {
Assert.fail("Should get an AccessControlException here");
}
}
if (stat.isDirectory()) {
FileStatus[] ls = fs.listStatus(p);
for (FileStatus f : ls) {
paths.add(f.getPath());
}
}
}
}
@Test(timeout = 60000)
public void testCheckMajorVersionChange() throws Exception {
HdfsConfiguration conf = new HdfsConfiguration();
MiniDFSCluster cluster =
new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
try {
// Tester whose store reports an impossibly large major version, with version
// persistence stubbed out in memory.
fsTester = new TestFSRMStateStoreTester(cluster, false) {
Version VERSION_INFO = Version.newInstance(Integer.MAX_VALUE, 0);
@Override
public Version getCurrentVersion() throws Exception {
return VERSION_INFO;
}
@Override
public RMStateStore getRMStateStore() throws Exception {
YarnConfiguration conf = new YarnConfiguration();
conf.set(YarnConfiguration.FS_RM_STATE_STORE_URI,
workingDirPathURI.toString());
conf.set(YarnConfiguration.FS_RM_STATE_STORE_RETRY_POLICY_SPEC,
"100,6000");
this.store = new TestFileSystemRMStore(conf) {
Version storedVersion = null;
@Override
public Version getCurrentVersion() {
return VERSION_INFO;
}
@Override
protected synchronized Version loadVersion() throws Exception {
return storedVersion;
}
@Override
protected synchronized void storeVersion() throws Exception {
storedVersion = VERSION_INFO;
}
};
return store;
}
};
// default version
RMStateStore store = fsTester.getRMStateStore();
Version defaultVersion = fsTester.getCurrentVersion();
store.checkVersion();
Assert.assertEquals(defaultVersion, store.loadVersion());
} finally {
cluster.shutdown();
}
}
@Override
protected void modifyAppState() throws Exception {
// imitate appAttemptFile1 is still .new, but old one is deleted
String appAttemptIdStr1 = "appattempt_1352994193343_0001_000001";
ApplicationAttemptId attemptId1 =
ConverterUtils.toApplicationAttemptId(appAttemptIdStr1);
Path appDir =
fsTester.store.getAppDir(attemptId1.getApplicationId().toString());
Path appAttemptFile1 =
new Path(appDir, attemptId1.toString() + ".new");
FileSystemRMStateStore fileSystemRMStateStore =
(FileSystemRMStateStore) fsTester.getRMStateStore();
fileSystemRMStateStore.renameFile(appAttemptFile1,
new Path(appAttemptFile1.getParent(),
appAttemptFile1.getName() + ".new"));
}
@Override
protected void modifyRMDelegationTokenState() throws Exception {
// imitate dt file is still .new, but old one is deleted
Path nodeCreatePath =
fsTester.store.getNodePath(fsTester.store.rmDTSecretManagerRoot,
FileSystemRMStateStore.DELEGATION_TOKEN_PREFIX + 0);
FileSystemRMStateStore fileSystemRMStateStore =
(FileSystemRMStateStore) fsTester.getRMStateStore();
fileSystemRMStateStore.renameFile(nodeCreatePath,
new Path(nodeCreatePath.getParent(),
nodeCreatePath.getName() + ".new"));
}
@Test (timeout = 30000)
public void testFSRMStateStoreClientRetry() throws Exception {
HdfsConfiguration conf = new HdfsConfiguration();
MiniDFSCluster cluster =
new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
cluster.waitActive();
try {
TestFSRMStateStoreTester fsTester = new TestFSRMStateStoreTester(cluster, false);
final RMStateStore store = fsTester.getRMStateStore();
store.setRMDispatcher(new TestDispatcher());
final AtomicBoolean assertionFailedInThread = new AtomicBoolean(false);
// Stop the NameNode so the store write below must retry until it comes back.
cluster.shutdownNameNodes();
Thread clientThread = new Thread() {
@Override
public void run() {
try {
store.storeApplicationStateInternal(
ApplicationId.newInstance(100L, 1),
ApplicationStateData.newInstance(111, 111, "user", null,
RMAppState.ACCEPTED, "diagnostics", 333, null));
} catch (Exception e) {
assertionFailedInThread.set(true);
e.printStackTrace();
}
}
};
Thread.sleep(2000);
clientThread.start();
cluster.restartNameNode();
clientThread.join();
// The write must have succeeded via the retry policy, not thrown.
Assert.assertFalse(assertionFailedInThread.get());
} finally {
cluster.shutdown();
}
}
}
| |
/*
* Copyright 2012-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.features.apple.project;
import com.facebook.buck.apple.xcode.XCScheme;
import com.facebook.buck.apple.xcode.XCScheme.AdditionalActions;
import com.facebook.buck.apple.xcode.xcodeproj.PBXTarget;
import com.facebook.buck.core.util.log.Logger;
import com.facebook.buck.io.MoreProjectFilesystems;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Charsets;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Path;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import org.w3c.dom.DOMImplementation;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
/**
* Collects target references and generates an xcscheme.
*
* <p>To register entries in the scheme, clients must add:
*
* <ul>
* <li>associations between buck targets and Xcode targets
* <li>associations between Xcode targets and the projects that contain them
* </ul>
*
* <p>Both of these values can be pulled out of {@link ProjectGenerator}.
*/
class SchemeGenerator {
private static final Logger LOG = Logger.get(SchemeGenerator.class);
// Inputs captured at construction time; writeScheme() combines them into the .xcscheme.
private final ProjectFilesystem projectFilesystem;
private final Optional<PBXTarget> primaryTarget;
private final ImmutableSet<PBXTarget> orderedBuildTargets;
private final ImmutableSet<PBXTarget> orderedBuildTestTargets;
private final ImmutableSet<PBXTarget> orderedRunTestTargets;
private final String schemeName;
private final Path outputDirectory;
private final boolean parallelizeBuild;
private final boolean wasCreatedForAppExtension;
private final Optional<String> runnablePath;
private final Optional<String> remoteRunnablePath;
private final ImmutableMap<SchemeActionType, String> actionConfigNames;
private final ImmutableMap<PBXTarget, Path> targetToProjectPathMap;
// Populated by writeScheme(); exposed via getOutputScheme() for tests.
private Optional<XCScheme> outputScheme = Optional.empty();
private final Optional<XCScheme.LaunchAction.WatchInterface> watchInterface;
private final Optional<String> notificationPayloadFile;
private final XCScheme.LaunchAction.LaunchStyle launchStyle;
// Per-action environment variables, keyed by scheme action type.
private final Optional<ImmutableMap<SchemeActionType, ImmutableMap<String, String>>>
environmentVariables;
// Per-action pre/post shell commands, keyed by scheme action type and pre/post placement.
private Optional<
ImmutableMap<
SchemeActionType, ImmutableMap<XCScheme.AdditionalActions, ImmutableList<String>>>>
additionalSchemeActions;
/**
 * Captures everything needed to emit an .xcscheme; no I/O happens until {@code writeScheme()}.
 * An absent {@code wasCreatedForAppExtension} is treated as false.
 */
public SchemeGenerator(
ProjectFilesystem projectFilesystem,
Optional<PBXTarget> primaryTarget,
ImmutableSet<PBXTarget> orderedBuildTargets,
ImmutableSet<PBXTarget> orderedBuildTestTargets,
ImmutableSet<PBXTarget> orderedRunTestTargets,
String schemeName,
Path outputDirectory,
boolean parallelizeBuild,
Optional<Boolean> wasCreatedForAppExtension,
Optional<String> runnablePath,
Optional<String> remoteRunnablePath,
ImmutableMap<SchemeActionType, String> actionConfigNames,
ImmutableMap<PBXTarget, Path> targetToProjectPathMap,
Optional<ImmutableMap<SchemeActionType, ImmutableMap<String, String>>> environmentVariables,
Optional<
ImmutableMap<
SchemeActionType,
ImmutableMap<XCScheme.AdditionalActions, ImmutableList<String>>>>
additionalSchemeActions,
XCScheme.LaunchAction.LaunchStyle launchStyle,
Optional<XCScheme.LaunchAction.WatchInterface> watchInterface,
Optional<String> notificationPayloadFile) {
this.projectFilesystem = projectFilesystem;
this.primaryTarget = primaryTarget;
this.watchInterface = watchInterface;
this.launchStyle = launchStyle;
this.orderedBuildTargets = orderedBuildTargets;
this.orderedBuildTestTargets = orderedBuildTestTargets;
this.orderedRunTestTargets = orderedRunTestTargets;
this.schemeName = schemeName;
this.outputDirectory = outputDirectory;
this.parallelizeBuild = parallelizeBuild;
this.wasCreatedForAppExtension = wasCreatedForAppExtension.orElse(false);
this.runnablePath = runnablePath;
this.remoteRunnablePath = remoteRunnablePath;
this.actionConfigNames = actionConfigNames;
this.targetToProjectPathMap = targetToProjectPathMap;
this.environmentVariables = environmentVariables;
this.additionalSchemeActions = additionalSchemeActions;
this.notificationPayloadFile = notificationPayloadFile;
LOG.debug(
"Generating scheme with build targets %s, test build targets %s, test bundle targets %s",
orderedBuildTargets, orderedBuildTestTargets, orderedRunTestTargets);
}
/**
 * Looks up the pre/post shell commands configured for the given scheme action and wraps each
 * command in a {@code SchemePrePostAction} bound to the primary buildable (when present).
 *
 * @return the wrapped actions, or empty when no commands are configured for this
 *     action/placement combination.
 */
@VisibleForTesting
// NOTE(review): @VisibleForTesting on a private method has no widening effect; kept to preserve intent.
private Optional<ImmutableList<XCScheme.SchemePrePostAction>> additionalCommandsForSchemeAction(
    SchemeActionType schemeActionType,
    XCScheme.AdditionalActions actionType,
    Optional<XCScheme.BuildableReference> primaryTarget) {
  // Optional.map already turns a null mapper result into Optional.empty(), so the original
  // ofNullable/filter(isPresent)/orElse(empty) chain collapses to two plain map calls.
  return this.additionalSchemeActions
      .map(actionsByType -> actionsByType.get(schemeActionType))
      .map(commandsByPlacement -> commandsByPlacement.get(actionType))
      .map(
          commands ->
              commands.stream()
                  .map(command -> new XCScheme.SchemePrePostAction(primaryTarget, command))
                  .collect(ImmutableList.toImmutableList()));
}
/** Returns the directory the .xcscheme file is written into (test hook). */
@VisibleForTesting
Path getOutputDirectory() {
return outputDirectory;
}
/** Returns the scheme produced by the last writeScheme() call, or empty before any call (test hook). */
@VisibleForTesting
Optional<XCScheme> getOutputScheme() {
return outputScheme;
}
public Path writeScheme() throws IOException {
Map<PBXTarget, XCScheme.BuildableReference> buildTargetToBuildableReferenceMap =
new HashMap<>();
for (PBXTarget target : Iterables.concat(orderedBuildTargets, orderedBuildTestTargets)) {
String blueprintName = target.getProductName();
if (blueprintName == null) {
blueprintName = target.getName();
}
Path outputPath = outputDirectory.getParent();
String buildableReferencePath;
Path projectPath = Objects.requireNonNull(targetToProjectPathMap.get(target));
if (outputPath == null) {
// Root directory project
buildableReferencePath = projectPath.toString();
} else {
buildableReferencePath = outputPath.relativize(projectPath).toString();
}
XCScheme.BuildableReference buildableReference =
new XCScheme.BuildableReference(
buildableReferencePath,
Objects.requireNonNull(target.getGlobalID()),
target.getProductReference() != null
? target.getProductReference().getName()
: Objects.requireNonNull(target.getProductName()),
blueprintName);
buildTargetToBuildableReferenceMap.put(target, buildableReference);
}
Optional<XCScheme.BuildableReference> primaryBuildReference = Optional.empty();
if (primaryTarget.isPresent()) {
primaryBuildReference =
Optional.ofNullable(buildTargetToBuildableReferenceMap.get(primaryTarget.get()));
}
XCScheme.BuildAction buildAction =
new XCScheme.BuildAction(
parallelizeBuild,
additionalCommandsForSchemeAction(
SchemeActionType.BUILD,
XCScheme.AdditionalActions.PRE_SCHEME_ACTIONS,
primaryBuildReference),
additionalCommandsForSchemeAction(
SchemeActionType.BUILD,
XCScheme.AdditionalActions.POST_SCHEME_ACTIONS,
primaryBuildReference));
// For aesthetic reasons put all non-test build actions before all test build actions.
for (PBXTarget target : orderedBuildTargets) {
addBuildActionForBuildTarget(
buildTargetToBuildableReferenceMap.get(target),
XCScheme.BuildActionEntry.BuildFor.DEFAULT,
buildAction);
}
for (PBXTarget target : orderedBuildTestTargets) {
addBuildActionForBuildTarget(
buildTargetToBuildableReferenceMap.get(target),
XCScheme.BuildActionEntry.BuildFor.TEST_ONLY,
buildAction);
}
ImmutableMap<SchemeActionType, ImmutableMap<String, String>> envVariables = ImmutableMap.of();
if (environmentVariables.isPresent()) {
envVariables = environmentVariables.get();
}
XCScheme.TestAction testAction =
new XCScheme.TestAction(
Objects.requireNonNull(actionConfigNames.get(SchemeActionType.TEST)),
Optional.ofNullable(envVariables.get(SchemeActionType.TEST)),
additionalCommandsForSchemeAction(
SchemeActionType.TEST, AdditionalActions.PRE_SCHEME_ACTIONS, primaryBuildReference),
additionalCommandsForSchemeAction(
SchemeActionType.TEST,
AdditionalActions.POST_SCHEME_ACTIONS,
primaryBuildReference));
for (PBXTarget target : orderedRunTestTargets) {
XCScheme.BuildableReference buildableReference =
buildTargetToBuildableReferenceMap.get(target);
XCScheme.TestableReference testableReference =
new XCScheme.TestableReference(buildableReference);
testAction.addTestableReference(testableReference);
}
Optional<XCScheme.LaunchAction> launchAction = Optional.empty();
Optional<XCScheme.ProfileAction> profileAction = Optional.empty();
if (primaryTarget.isPresent()) {
XCScheme.BuildableReference primaryBuildableReference =
buildTargetToBuildableReferenceMap.get(primaryTarget.get());
if (primaryBuildableReference != null) {
launchAction =
Optional.of(
new XCScheme.LaunchAction(
primaryBuildableReference,
Objects.requireNonNull(actionConfigNames.get(SchemeActionType.LAUNCH)),
runnablePath,
remoteRunnablePath,
watchInterface,
launchStyle,
Optional.ofNullable(envVariables.get(SchemeActionType.LAUNCH)),
additionalCommandsForSchemeAction(
SchemeActionType.LAUNCH,
AdditionalActions.PRE_SCHEME_ACTIONS,
primaryBuildReference),
additionalCommandsForSchemeAction(
SchemeActionType.LAUNCH,
AdditionalActions.POST_SCHEME_ACTIONS,
primaryBuildReference),
notificationPayloadFile));
profileAction =
Optional.of(
new XCScheme.ProfileAction(
primaryBuildableReference,
Objects.requireNonNull(actionConfigNames.get(SchemeActionType.PROFILE)),
Optional.ofNullable(envVariables.get(SchemeActionType.PROFILE)),
additionalCommandsForSchemeAction(
SchemeActionType.PROFILE,
AdditionalActions.PRE_SCHEME_ACTIONS,
primaryBuildReference),
additionalCommandsForSchemeAction(
SchemeActionType.PROFILE,
AdditionalActions.POST_SCHEME_ACTIONS,
primaryBuildReference)));
}
}
XCScheme.AnalyzeAction analyzeAction =
new XCScheme.AnalyzeAction(
Objects.requireNonNull(actionConfigNames.get(SchemeActionType.ANALYZE)),
additionalCommandsForSchemeAction(
SchemeActionType.ANALYZE,
AdditionalActions.PRE_SCHEME_ACTIONS,
primaryBuildReference),
additionalCommandsForSchemeAction(
SchemeActionType.ANALYZE,
AdditionalActions.POST_SCHEME_ACTIONS,
primaryBuildReference));
XCScheme.ArchiveAction archiveAction =
new XCScheme.ArchiveAction(
Objects.requireNonNull(actionConfigNames.get(SchemeActionType.ARCHIVE)),
additionalCommandsForSchemeAction(
SchemeActionType.ARCHIVE,
AdditionalActions.PRE_SCHEME_ACTIONS,
primaryBuildReference),
additionalCommandsForSchemeAction(
SchemeActionType.ARCHIVE,
AdditionalActions.POST_SCHEME_ACTIONS,
primaryBuildReference));
XCScheme scheme =
new XCScheme(
schemeName,
wasCreatedForAppExtension,
Optional.of(buildAction),
Optional.of(testAction),
launchAction,
profileAction,
Optional.of(analyzeAction),
Optional.of(archiveAction));
outputScheme = Optional.of(scheme);
Path schemeDirectory = outputDirectory.resolve("xcshareddata/xcschemes");
projectFilesystem.mkdirs(schemeDirectory);
Path schemePath = schemeDirectory.resolve(schemeName + ".xcscheme");
try (ByteArrayOutputStream outputStream = new ByteArrayOutputStream()) {
serializeScheme(scheme, outputStream);
String contentsToWrite = outputStream.toString();
if (MoreProjectFilesystems.fileContentsDiffer(
new ByteArrayInputStream(contentsToWrite.getBytes(Charsets.UTF_8)),
schemePath,
projectFilesystem)) {
projectFilesystem.writeContentsToPath(outputStream.toString(), schemePath);
}
}
return schemePath;
}
private static void addBuildActionForBuildTarget(
XCScheme.BuildableReference buildableReference,
EnumSet<XCScheme.BuildActionEntry.BuildFor> buildFor,
XCScheme.BuildAction buildAction) {
XCScheme.BuildActionEntry entry = new XCScheme.BuildActionEntry(buildableReference, buildFor);
buildAction.addBuildAction(entry);
}
public static Element serializeBuildableReference(
Document doc, XCScheme.BuildableReference buildableReference) {
Element refElem = doc.createElement("BuildableReference");
refElem.setAttribute("BuildableIdentifier", "primary");
refElem.setAttribute("BlueprintIdentifier", buildableReference.getBlueprintIdentifier());
refElem.setAttribute("BuildableName", buildableReference.getBuildableName());
refElem.setAttribute("BlueprintName", buildableReference.getBlueprintName());
String referencedContainer = "container:" + buildableReference.getContainerRelativePath();
refElem.setAttribute("ReferencedContainer", referencedContainer);
return refElem;
}
public static Element serializeEnvironmentVariables(
Document doc, ImmutableMap<String, String> environmentVariables) {
Element rootElement = doc.createElement("EnvironmentVariables");
for (String variableKey : environmentVariables.keySet()) {
Element variableElement = doc.createElement("EnvironmentVariable");
variableElement.setAttribute("key", variableKey);
variableElement.setAttribute("value", environmentVariables.get(variableKey));
variableElement.setAttribute("isEnabled", "YES");
rootElement.appendChild(variableElement);
}
return rootElement;
}
public static Element serializeBuildAction(Document doc, XCScheme.BuildAction buildAction) {
Element buildActionElem = doc.createElement("BuildAction");
serializePrePostActions(doc, buildAction, buildActionElem);
buildActionElem.setAttribute(
"parallelizeBuildables", buildAction.getParallelizeBuild() ? "YES" : "NO");
buildActionElem.setAttribute(
"buildImplicitDependencies", buildAction.getParallelizeBuild() ? "YES" : "NO");
Element buildActionEntriesElem = doc.createElement("BuildActionEntries");
buildActionElem.appendChild(buildActionEntriesElem);
for (XCScheme.BuildActionEntry entry : buildAction.getBuildActionEntries()) {
Element entryElem = doc.createElement("BuildActionEntry");
buildActionEntriesElem.appendChild(entryElem);
EnumSet<XCScheme.BuildActionEntry.BuildFor> buildFor = entry.getBuildFor();
boolean buildForRunning = buildFor.contains(XCScheme.BuildActionEntry.BuildFor.RUNNING);
entryElem.setAttribute("buildForRunning", buildForRunning ? "YES" : "NO");
boolean buildForTesting = buildFor.contains(XCScheme.BuildActionEntry.BuildFor.TESTING);
entryElem.setAttribute("buildForTesting", buildForTesting ? "YES" : "NO");
boolean buildForProfiling = buildFor.contains(XCScheme.BuildActionEntry.BuildFor.PROFILING);
entryElem.setAttribute("buildForProfiling", buildForProfiling ? "YES" : "NO");
boolean buildForArchiving = buildFor.contains(XCScheme.BuildActionEntry.BuildFor.ARCHIVING);
entryElem.setAttribute("buildForArchiving", buildForArchiving ? "YES" : "NO");
boolean buildForAnalyzing = buildFor.contains(XCScheme.BuildActionEntry.BuildFor.ANALYZING);
entryElem.setAttribute("buildForAnalyzing", buildForAnalyzing ? "YES" : "NO");
Element refElem = serializeBuildableReference(doc, entry.getBuildableReference());
entryElem.appendChild(refElem);
}
return buildActionElem;
}
public static Element serializeTestAction(Document doc, XCScheme.TestAction testAction) {
Element testActionElem = doc.createElement("TestAction");
serializePrePostActions(doc, testAction, testActionElem);
// unless otherwise specified, use the Launch scheme's env variables like the xcode default
testActionElem.setAttribute("shouldUseLaunchSchemeArgsEnv", "YES");
Element testablesElem = doc.createElement("Testables");
testActionElem.appendChild(testablesElem);
for (XCScheme.TestableReference testable : testAction.getTestables()) {
Element testableElem = doc.createElement("TestableReference");
testablesElem.appendChild(testableElem);
testableElem.setAttribute("skipped", "NO");
Element refElem = serializeBuildableReference(doc, testable.getBuildableReference());
testableElem.appendChild(refElem);
}
if (testAction.getEnvironmentVariables().isPresent()) {
// disable the default override that makes Test use Launch's environment variables
testActionElem.setAttribute("shouldUseLaunchSchemeArgsEnv", "NO");
Element environmentVariablesElement =
serializeEnvironmentVariables(doc, testAction.getEnvironmentVariables().get());
testActionElem.appendChild(environmentVariablesElement);
}
return testActionElem;
}
public static Element serializeLaunchAction(Document doc, XCScheme.LaunchAction launchAction) {
Element launchActionElem = doc.createElement("LaunchAction");
serializePrePostActions(doc, launchAction, launchActionElem);
Optional<String> runnablePath = launchAction.getRunnablePath();
Optional<String> remoteRunnablePath = launchAction.getRemoteRunnablePath();
if (remoteRunnablePath.isPresent()) {
Element remoteRunnableElem = doc.createElement("RemoteRunnable");
remoteRunnableElem.setAttribute("runnableDebuggingMode", "2");
remoteRunnableElem.setAttribute("BundleIdentifier", "com.apple.springboard");
remoteRunnableElem.setAttribute("RemotePath", remoteRunnablePath.get());
launchActionElem.appendChild(remoteRunnableElem);
Element refElem = serializeBuildableReference(doc, launchAction.getBuildableReference());
remoteRunnableElem.appendChild(refElem);
// Yes, this appears to be duplicated in Xcode as well..
Element refElem2 = serializeBuildableReference(doc, launchAction.getBuildableReference());
launchActionElem.appendChild(refElem2);
} else if (runnablePath.isPresent()) {
Element pathRunnableElem = doc.createElement("PathRunnable");
launchActionElem.appendChild(pathRunnableElem);
pathRunnableElem.setAttribute("FilePath", runnablePath.get());
} else {
Element productRunnableElem = doc.createElement("BuildableProductRunnable");
launchActionElem.appendChild(productRunnableElem);
Element refElem = serializeBuildableReference(doc, launchAction.getBuildableReference());
productRunnableElem.appendChild(refElem);
}
Optional<XCScheme.LaunchAction.WatchInterface> watchInterface =
launchAction.getWatchInterface();
if (watchInterface.isPresent()) {
Optional<String> watchInterfaceValue = Optional.empty();
switch (watchInterface.get()) {
case MAIN:
// excluded from scheme
case COMPLICATION:
watchInterfaceValue = Optional.of("32");
break;
case DYNAMIC_NOTIFICATION:
watchInterfaceValue = Optional.of("8");
break;
case STATIC_NOTIFICATION:
watchInterfaceValue = Optional.of("16");
break;
}
if (watchInterfaceValue.isPresent()) {
launchActionElem.setAttribute("launchAutomaticallySubstyle", watchInterfaceValue.get());
}
}
Optional<String> notificationPayloadFile = launchAction.getNotificationPayloadFile();
if (notificationPayloadFile.isPresent()) {
launchActionElem.setAttribute("notificationPayloadFile", notificationPayloadFile.get());
}
XCScheme.LaunchAction.LaunchStyle launchStyle = launchAction.getLaunchStyle();
launchActionElem.setAttribute(
"launchStyle", launchStyle == XCScheme.LaunchAction.LaunchStyle.AUTO ? "0" : "1");
if (launchAction.getEnvironmentVariables().isPresent()) {
Element environmentVariablesElement =
serializeEnvironmentVariables(doc, launchAction.getEnvironmentVariables().get());
launchActionElem.appendChild(environmentVariablesElement);
}
return launchActionElem;
}
public static Element serializeProfileAction(Document doc, XCScheme.ProfileAction profileAction) {
Element profileActionElem = doc.createElement("ProfileAction");
serializePrePostActions(doc, profileAction, profileActionElem);
// unless otherwise specified, use the Launch scheme's env variables like the xcode default
profileActionElem.setAttribute("shouldUseLaunchSchemeArgsEnv", "YES");
Element productRunnableElem = doc.createElement("BuildableProductRunnable");
profileActionElem.appendChild(productRunnableElem);
Element refElem = serializeBuildableReference(doc, profileAction.getBuildableReference());
productRunnableElem.appendChild(refElem);
if (profileAction.getEnvironmentVariables().isPresent()) {
// disable the default override that makes Profile use Launch's environment variables
profileActionElem.setAttribute("shouldUseLaunchSchemeArgsEnv", "NO");
Element environmentVariablesElement =
serializeEnvironmentVariables(doc, profileAction.getEnvironmentVariables().get());
profileActionElem.appendChild(environmentVariablesElement);
}
return profileActionElem;
}
private static void serializePrePostActions(
Document doc, XCScheme.SchemeAction action, Element actionElem) {
if (action.getPreActions().isPresent()) {
actionElem.appendChild(
serializePrePostAction(doc, "PreActions", action.getPreActions().get()));
}
if (action.getPostActions().isPresent()) {
actionElem.appendChild(
serializePrePostAction(doc, "PostActions", action.getPostActions().get()));
}
}
private static Element serializePrePostAction(
Document doc, String tagName, ImmutableList<XCScheme.SchemePrePostAction> actions) {
Element executionActions = doc.createElement(tagName);
for (XCScheme.SchemePrePostAction action : actions) {
executionActions.appendChild(serializeExecutionAction(doc, action));
}
return executionActions;
}
public static Element serializeExecutionAction(
Document doc, XCScheme.SchemePrePostAction action) {
Element executionAction = doc.createElement("ExecutionAction");
executionAction.setAttribute(
"ActionType",
"Xcode.IDEStandardExecutionActionsCore.ExecutionActionType.ShellScriptAction");
Element actionContent = doc.createElement("ActionContent");
actionContent.setAttribute("title", "Run Script");
actionContent.setAttribute("scriptText", action.getCommand());
actionContent.setAttribute("shellToInvoke", "/bin/bash");
Optional<XCScheme.BuildableReference> buildableReference = action.getBuildableReference();
if (buildableReference.isPresent()) {
Element environmentBuildable = doc.createElement("EnvironmentBuildable");
environmentBuildable.appendChild(serializeBuildableReference(doc, buildableReference.get()));
actionContent.appendChild(environmentBuildable);
}
executionAction.appendChild(actionContent);
return executionAction;
}
private static void serializeScheme(XCScheme scheme, OutputStream stream) {
DocumentBuilder docBuilder;
Transformer transformer;
try {
docBuilder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
transformer = TransformerFactory.newInstance().newTransformer();
} catch (ParserConfigurationException | TransformerConfigurationException e) {
throw new RuntimeException(e);
}
DOMImplementation domImplementation = docBuilder.getDOMImplementation();
Document doc = domImplementation.createDocument(null, "Scheme", null);
doc.setXmlVersion("1.0");
Element rootElem = doc.getDocumentElement();
rootElem.setAttribute("LastUpgradeVersion", "9999");
if (scheme.getWasCreatedForExtension()) {
rootElem.setAttribute("wasCreatedForAppExtension", "YES");
}
rootElem.setAttribute("version", "1.7");
Optional<XCScheme.BuildAction> buildAction = scheme.getBuildAction();
if (buildAction.isPresent()) {
Element buildActionElem = serializeBuildAction(doc, buildAction.get());
rootElem.appendChild(buildActionElem);
}
Optional<XCScheme.TestAction> testAction = scheme.getTestAction();
if (testAction.isPresent()) {
Element testActionElem = serializeTestAction(doc, testAction.get());
testActionElem.setAttribute(
"buildConfiguration", scheme.getTestAction().get().getBuildConfiguration());
rootElem.appendChild(testActionElem);
}
Optional<XCScheme.LaunchAction> launchAction = scheme.getLaunchAction();
if (launchAction.isPresent()) {
Element launchActionElem = serializeLaunchAction(doc, launchAction.get());
launchActionElem.setAttribute(
"buildConfiguration", launchAction.get().getBuildConfiguration());
rootElem.appendChild(launchActionElem);
} else {
Element launchActionElem = doc.createElement("LaunchAction");
launchActionElem.setAttribute("buildConfiguration", "Debug");
rootElem.appendChild(launchActionElem);
}
Optional<XCScheme.ProfileAction> profileAction = scheme.getProfileAction();
if (profileAction.isPresent()) {
Element profileActionElem = serializeProfileAction(doc, profileAction.get());
profileActionElem.setAttribute(
"buildConfiguration", profileAction.get().getBuildConfiguration());
rootElem.appendChild(profileActionElem);
} else {
Element profileActionElem = doc.createElement("ProfileAction");
profileActionElem.setAttribute("buildConfiguration", "Release");
rootElem.appendChild(profileActionElem);
}
Optional<XCScheme.AnalyzeAction> analyzeAction = scheme.getAnalyzeAction();
if (analyzeAction.isPresent()) {
Element analyzeActionElem = doc.createElement("AnalyzeAction");
serializePrePostActions(doc, analyzeAction.get(), analyzeActionElem);
analyzeActionElem.setAttribute(
"buildConfiguration", analyzeAction.get().getBuildConfiguration());
rootElem.appendChild(analyzeActionElem);
} else {
Element analyzeActionElem = doc.createElement("AnalyzeAction");
analyzeActionElem.setAttribute("buildConfiguration", "Debug");
rootElem.appendChild(analyzeActionElem);
}
Optional<XCScheme.ArchiveAction> archiveAction = scheme.getArchiveAction();
if (archiveAction.isPresent()) {
Element archiveActionElem = doc.createElement("ArchiveAction");
serializePrePostActions(doc, archiveAction.get(), archiveActionElem);
archiveActionElem.setAttribute(
"buildConfiguration", archiveAction.get().getBuildConfiguration());
archiveActionElem.setAttribute("revealArchiveInOrganizer", "YES");
rootElem.appendChild(archiveActionElem);
} else {
Element archiveActionElem = doc.createElement("ArchiveAction");
archiveActionElem.setAttribute("buildConfiguration", "Release");
rootElem.appendChild(archiveActionElem);
}
// write out
DOMSource source = new DOMSource(doc);
StreamResult result = new StreamResult(stream);
try {
transformer.transform(source, result);
} catch (TransformerException e) {
throw new RuntimeException(e);
}
}
}
| |
/*
* #%L
* SparkCommerce Open Admin Platform
* %%
* Copyright (C) 2009 - 2013 Spark Commerce
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package org.sparkcommerce.openadmin.server.service;
import org.sparkcommerce.common.exception.ServiceException;
import org.sparkcommerce.common.presentation.client.SupportedFieldType;
import org.sparkcommerce.openadmin.dto.ClassMetadata;
import org.sparkcommerce.openadmin.dto.CriteriaTransferObject;
import org.sparkcommerce.openadmin.dto.DynamicResultSet;
import org.sparkcommerce.openadmin.dto.Entity;
import org.sparkcommerce.openadmin.dto.FilterAndSortCriteria;
import org.sparkcommerce.openadmin.dto.Property;
import org.sparkcommerce.openadmin.dto.SectionCrumb;
import org.sparkcommerce.openadmin.server.domain.PersistencePackageRequest;
import org.sparkcommerce.openadmin.server.service.persistence.PersistenceResponse;
import org.sparkcommerce.openadmin.web.form.entity.EntityForm;
import java.util.List;
import java.util.Map;
/**
 * Service facade used by the open admin module to read, persist, and inspect entities and
 * their (sub)collections via {@link PersistencePackageRequest}s.
 *
 * @author Andre Azzolini (apazzolini)
 */
public interface AdminEntityService {
    /**
     * Returns class metadata for the given request object.
     *
     * @param request the persistence package request describing the target entity class
     * @return a {@link PersistenceResponse} carrying the ClassMetadata for the given request
     * @throws ServiceException
     */
    public PersistenceResponse getClassMetadata(PersistencePackageRequest request)
            throws ServiceException;
    /**
     * Returns the DynamicResultSet containing the total records for the query and the currently fetched Entity[].
     *
     * @param request the persistence package request describing the fetch
     * @return a {@link PersistenceResponse} carrying the DynamicResultSet
     * @throws ServiceException
     */
    public PersistenceResponse getRecords(PersistencePackageRequest request)
            throws ServiceException;
    /**
     * Returns a specific record for the given request and primary key id/property.
     *
     * @param request the persistence package request describing the target entity
     * @param id the primary key value of the record to load
     * @param cmd class metadata for the entity being fetched
     * @param isCollectionRequest whether or not this record request was initiated from a collection on a parent entity
     * @return a {@link PersistenceResponse} carrying the Entity
     * @throws ServiceException
     */
    public PersistenceResponse getRecord(PersistencePackageRequest request, String id, ClassMetadata cmd, boolean isCollectionRequest)
            throws ServiceException;
    /**
     * Persists the given entity.
     *
     * @param entityForm the submitted form describing the new entity
     * @param customCriteria custom persistence criteria forwarded to the persistence layer
     * @param sectionCrumb the admin section breadcrumb trail for this operation
     * @return a {@link PersistenceResponse} carrying the persisted Entity
     * @throws ServiceException
     */
    public PersistenceResponse addEntity(EntityForm entityForm, String[] customCriteria, List<SectionCrumb> sectionCrumb)
            throws ServiceException;
    /**
     * Updates the given entity.
     *
     * @param entityForm the submitted form describing the updated entity state
     * @param customCriteria custom persistence criteria forwarded to the persistence layer
     * @param sectionCrumb the admin section breadcrumb trail for this operation
     * @return a {@link PersistenceResponse} carrying the persisted Entity
     * @throws ServiceException
     */
    public PersistenceResponse updateEntity(EntityForm entityForm, String[] customCriteria, List<SectionCrumb> sectionCrumb)
            throws ServiceException;
    /**
     * Removes the given entity.
     *
     * @param entityForm the submitted form identifying the entity to remove
     * @param customCriteria custom persistence criteria forwarded to the persistence layer
     * @param sectionCrumb the admin section breadcrumb trail for this operation
     * @throws ServiceException
     */
    public PersistenceResponse removeEntity(EntityForm entityForm, String[] customCriteria, List<SectionCrumb> sectionCrumb)
            throws ServiceException;
    /**
     * Thin layer on top of {@link DynamicEntityService#add(org.sparkcommerce.openadmin.dto.PersistencePackage)} that
     * swallows all {@link ValidationException}s that could be thrown and still just returns a {@link PersistenceResponse}
     * with the {@link Entity} that failed validation.
     *
     * @param request the persistence package request describing the add
     * @return the resulting {@link PersistenceResponse}
     * @throws ServiceException if there were exceptions other than a {@link ValidationException} that was thrown as a
     * result of the attempted add
     */
    public PersistenceResponse add(PersistencePackageRequest request) throws ServiceException;
    /**
     * Thin layer on top of {@link DynamicEntityService#update(org.sparkcommerce.openadmin.dto.PersistencePackage)}.
     *
     * @param request the persistence package request describing the update
     * @return the resulting {@link PersistenceResponse}
     * @throws ServiceException if there were exceptions other than a {@link ValidationException} that was thrown as a
     * result of the attempted update
     */
    public PersistenceResponse update(PersistencePackageRequest request) throws ServiceException;
    /**
     * Thin layer on top of {@link DynamicEntityService#inspect(org.sparkcommerce.openadmin.dto.PersistencePackage)}.
     *
     * @param request the persistence package request describing the inspect
     * @return the resulting {@link PersistenceResponse}
     * @throws ServiceException
     */
    public PersistenceResponse inspect(PersistencePackageRequest request) throws ServiceException;
    /**
     * Thin layer on top of {@link DynamicEntityService#remove(org.sparkcommerce.openadmin.dto.PersistencePackage)}.
     *
     * @param request the persistence package request describing the removal
     * @return the resulting {@link PersistenceResponse}
     * @throws ServiceException
     */
    public PersistenceResponse remove(PersistencePackageRequest request) throws ServiceException;
    /**
     * Thin layer on top of {@link DynamicEntityService#fetch(org.sparkcommerce.openadmin.dto.PersistencePackage, org.sparkcommerce.openadmin.dto.CriteriaTransferObject)}.
     * This will glean and create a {@link CriteriaTransferObject} from {@link PersistencePackageRequest#getFilterAndSortCriteria()}
     * to pass to {@link DynamicEntityService}.
     *
     * @param request the persistence package request describing the fetch
     * @return the resulting {@link PersistenceResponse}
     * @throws ServiceException
     */
    public PersistenceResponse fetch(PersistencePackageRequest request) throws ServiceException;
    /**
     * Gets an Entity representing a specific collection item.
     *
     * @param containingClassMetadata metadata for the class that owns the collection
     * @param containingEntity the entity instance that owns the collection
     * @param collectionProperty the property on the containing entity that holds the collection
     * @param collectionItemId the id of the item within the collection
     * @param sectionCrumb the admin section breadcrumb trail for this operation
     * @return a {@link PersistenceResponse} carrying the Entity
     * @throws ServiceException
     */
    public PersistenceResponse getAdvancedCollectionRecord(ClassMetadata containingClassMetadata, Entity containingEntity,
            Property collectionProperty, String collectionItemId, List<SectionCrumb> sectionCrumb)
            throws ServiceException;
    /**
     * Returns the DynamicResultSet representing the records that belong to the specified collectionProperty for the
     * given containingClass and the primary key for the containingClass.
     *
     * @param containingClassMetadata metadata for the class that owns the collection
     * @param containingEntity the entity instance that owns the collection
     * @param collectionProperty the property on the containing entity that holds the collection
     * @param fascs filter and sort criteria to apply to the collection fetch
     * @param startIndex the inclusive start index of the requested page
     * @param maxIndex the maximum index of the requested page
     * @param sectionCrumb the admin section breadcrumb trail for this operation
     * @return a {@link PersistenceResponse} carrying the DynamicResultSet
     * @throws ServiceException
     */
    public PersistenceResponse getRecordsForCollection(ClassMetadata containingClassMetadata, Entity containingEntity,
            Property collectionProperty, FilterAndSortCriteria[] fascs, Integer startIndex, Integer maxIndex, List<SectionCrumb> sectionCrumb)
            throws ServiceException;
    /**
     * The same as the other getRecordsForCollection method, except that this one allows the caller to explicitly
     * set the id value that will be used in the fetch instead of delegating to {@link #getContextSpecificRelationshipId}.
     *
     * @param containingClassMetadata metadata for the class that owns the collection
     * @param containingEntity the entity instance that owns the collection
     * @param collectionProperty the property on the containing entity that holds the collection
     * @param fascs filter and sort criteria to apply to the collection fetch
     * @param startIndex the inclusive start index of the requested page
     * @param maxIndex the maximum index of the requested page
     * @param idValueOverride the explicit id value to use for the fetch
     * @param sectionCrumb the admin section breadcrumb trail for this operation
     * @return the PersistenceResponse
     * @throws ServiceException
     */
    public PersistenceResponse getRecordsForCollection(ClassMetadata containingClassMetadata, Entity containingEntity,
            Property collectionProperty, FilterAndSortCriteria[] fascs, Integer startIndex, Integer maxIndex,
            String idValueOverride, List<SectionCrumb> sectionCrumb) throws ServiceException;
    /**
     * Returns all records for all subcollections of the specified request and its primary key.
     *
     * @param ppr the persistence package request identifying the containing entity class
     * @param containingEntity the entity instance whose subcollections are fetched
     * @param sectionCrumb the admin section breadcrumb trail for this operation
     * @return all Entity[] for all collections for the specified containingClass, keyed by collection property name
     * @throws ServiceException
     *
     * @see #getRecordsForCollection(ClassMetadata, Entity, Property, FilterAndSortCriteria[], Integer, Integer, List)
     */
    public Map<String, DynamicResultSet> getRecordsForAllSubCollections(PersistencePackageRequest ppr,
            Entity containingEntity, List<SectionCrumb> sectionCrumb)
            throws ServiceException;
    /**
     * Adds an item into the specified collection.
     *
     * @param entityForm the submitted form describing the new collection item
     * @param mainMetadata metadata for the class that owns the collection
     * @param field the collection property on the parent entity
     * @param parentEntity the entity instance that owns the collection
     * @param sectionCrumb the admin section breadcrumb trail for this operation
     * @return a {@link PersistenceResponse} carrying the persisted Entity
     * @throws ServiceException
     * @throws ClassNotFoundException
     */
    public PersistenceResponse addSubCollectionEntity(EntityForm entityForm, ClassMetadata mainMetadata, Property field,
            Entity parentEntity, List<SectionCrumb> sectionCrumb)
            throws ServiceException, ClassNotFoundException;
    /**
     * Updates the specified collection item.
     *
     * @param entityForm the submitted form describing the updated collection item
     * @param mainMetadata metadata for the class that owns the collection
     * @param field the collection property on the parent entity
     * @param parentEntity the entity instance that owns the collection
     * @param collectionItemId the id of the item within the collection
     * @param sectionCrumb the admin section breadcrumb trail for this operation
     * @return a {@link PersistenceResponse} carrying the persisted Entity
     * @throws ServiceException
     * @throws ClassNotFoundException
     */
    public PersistenceResponse updateSubCollectionEntity(EntityForm entityForm, ClassMetadata mainMetadata, Property field,
            Entity parentEntity, String collectionItemId, List<SectionCrumb> sectionCrumb)
            throws ServiceException, ClassNotFoundException;
    /**
     * Removes the given item from the specified collection.
     *
     * @param mainMetadata metadata for the class that owns the collection
     * @param field the collection property on the parent entity
     * @param parentEntity the entity instance that owns the collection
     * @param itemId the id of the item within the collection
     * @param priorKey - only needed for Map type collections
     * @param sectionCrumb the admin section breadcrumb trail for this operation
     * @throws ServiceException
     */
    public PersistenceResponse removeSubCollectionEntity(ClassMetadata mainMetadata, Property field, Entity parentEntity, String itemId,
            String priorKey, List<SectionCrumb> sectionCrumb)
            throws ServiceException;
    /**
     * Returns the appropriate id to use for the given entity/metadata and prefix when dealing with collections. For
     * example, on the Product screen, we display associated media. However, this media is actually owned by the Sku entity,
     * which means its property name is "defaultSku.skuMedia". In this case, when wanting to look up media for this product,
     * we cannot use the id of the product. Instead, we need to use the id of the sku.
     *
     * @param cmd metadata for the entity class being displayed
     * @param entity the entity instance whose relationship id is needed
     * @param propertyName the (possibly dotted) property name of the collection
     * @return the id to be used for this relationship
     */
    public String getContextSpecificRelationshipId(ClassMetadata cmd, Entity entity, String propertyName);
    /**
     * Returns the name of the property in this ClassMetadata that has field type set to {@link SupportedFieldType#ID}.
     *
     * @param cmd metadata for the entity class to examine
     * @return the id property name
     * @throws ServiceException
     */
    public String getIdProperty(ClassMetadata cmd) throws ServiceException;
}
| |
/**
*
*/
package fr.imie.tpjdbc.DAO;
import java.sql.Connection;
import java.sql.Date;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;
import java.util.ArrayList;
import java.util.List;
import fr.imie.tpjdbc.AJDBC;
import fr.imie.tpjdbc.AbstractFactory;
import fr.imie.tpjdbc.DTO.PersonneDTO;
import fr.imie.tpjdbc.DTO.PromotionDTO;
/**
* @author imie
*
*/
public class PersonneDAO extends AJDBC implements IPersonneDAO {
private static PersonneDAO instance = null;
private Connection connection = null;
	/** Private to enforce the singleton pattern; obtain the instance via {@link #getInstance()}. */
	private PersonneDAO() {
	}
public static synchronized PersonneDAO getInstance() {
if (instance == null) {
instance = new PersonneDAO();
}
return instance;
}
/*
* (non-Javadoc)
*
* @see fr.imie.tpjdbc.DAO.IPersonneDAO#findAll()
*/
@Override
public List<PersonneDTO> findAll() {
Statement statement = null;
ResultSet resultSet = null;
List<PersonneDTO> retour = new ArrayList<PersonneDTO>();
try {
statement = connection.createStatement();
resultSet = statement
.executeQuery("select id, nom, prenom, datenaiss, tel, promotion_id, password from personne");
while (resultSet.next()) {
retour.add(buildDTO(resultSet));
}
} catch (SQLException e) {
throw new RuntimeException("erreure applicative", e);
} finally {
try {
if (resultSet != null && !resultSet.isClosed()) {
resultSet.close();
}
if (statement != null && !statement.isClosed()) {
statement.close();
}
} catch (SQLException e) {
throw new RuntimeException("erreure applicative", e);
}
}
return retour;
}
@Override
public PersonneDTO findById(PersonneDTO dto) {
PreparedStatement preparedStatement = null;
ResultSet resultSet = null;
PersonneDTO retour = null;
try {
preparedStatement = connection
.prepareStatement("select nom, prenom, id, datenaiss,tel,promotion_id,password from personne where id=?");
preparedStatement.setInt(1, dto.getId());
resultSet = preparedStatement.executeQuery();
if (resultSet.next()) {
retour = buildDTO(resultSet);
}
} catch (SQLException e) {
throw new RuntimeException("erreure applicative", e);
} finally {
try {
if (resultSet != null && !resultSet.isClosed()) {
resultSet.close();
}
if (preparedStatement != null && !preparedStatement.isClosed()) {
preparedStatement.close();
}
} catch (SQLException e) {
throw new RuntimeException("erreure applicative", e);
}
}
return retour;
}
private PersonneDTO buildDTO(ResultSet resultSet) throws SQLException {
PersonneDTO retour;
retour = new PersonneDTO();
retour.setId(resultSet.getInt("id"));
retour.setNom(resultSet.getString("nom"));
retour.setPrenom(resultSet.getString("prenom"));
retour.setDateNaiss(resultSet.getDate("datenaiss"));
retour.setTel(resultSet.getString("tel"));
retour.setPassword(resultSet.getString("password"));
PromotionDTO linkedPromotion = new PromotionDTO();
linkedPromotion.setId(resultSet.getInt("promotion_id"));
retour.setPromotionDTO(linkedPromotion);
return retour;
}
@Override
public PersonneDTO insert(PersonneDTO dto) {
PreparedStatement preparedStatement = null;
ResultSet resultSet = null;
PersonneDTO retour = null;
try {
preparedStatement = connection
.prepareStatement("insert into personne(nom, password, prenom, datenaiss,tel) values(?,?,?,?,?) returning nom, password, prenom, datenaiss,tel,id,promotion_id");
preparedStatement.setString(1, dto.getNom());
preparedStatement.setString(2, dto.getPassword());
preparedStatement.setString(3, dto.getPrenom());
Date dateNaiss = null;
if (dto.getDateNaiss() != null) {
dateNaiss = new Date(dto.getDateNaiss().getTime());
}
preparedStatement.setDate(4, dateNaiss);
preparedStatement.setString(5, dto.getTel());
resultSet = preparedStatement.executeQuery();
if (resultSet.next()) {
retour = buildDTO(resultSet);
}
} catch (SQLException e) {
throw new RuntimeException("erreure applicative", e);
} finally {
try {
if (resultSet != null && !resultSet.isClosed()) {
resultSet.close();
}
if (preparedStatement != null && !preparedStatement.isClosed()) {
preparedStatement.close();
}
} catch (SQLException e) {
throw new RuntimeException("erreure applicative", e);
}
}
return retour;
}
@Override
public PersonneDTO update(PersonneDTO dto) {
if (dto.getNom().equals("test")) {
throw new RuntimeException("pour de rire");
}
PreparedStatement preparedStatement = null;
ResultSet resultSet = null;
PersonneDTO retour = null;
try {
preparedStatement = connection
.prepareStatement("update personne set nom=?, prenom=?, datenaiss=?,tel=?, promotion_id=?, password=? where id=? returning nom, prenom, datenaiss,tel,id,promotion_id,password");
preparedStatement.setString(1, dto.getNom());
preparedStatement.setString(2, dto.getPrenom());
Date dateNaiss = null;
if (dto.getDateNaiss() != null) {
dateNaiss = new Date(dto.getDateNaiss().getTime());
}
preparedStatement.setDate(3, dateNaiss);
preparedStatement.setString(4, dto.getTel());
if (dto.getPromotionDTO() != null) {
preparedStatement.setInt(5, dto.getPromotionDTO().getId());
} else {
preparedStatement.setNull(5, Types.INTEGER);
}
preparedStatement.setString(6, dto.getPassword());
preparedStatement.setInt(7, dto.getId());
resultSet = preparedStatement.executeQuery();
if (resultSet.next()) {
retour = buildDTO(resultSet);
}
} catch (SQLException e) {
throw new RuntimeException("erreure applicative", e);
} finally {
try {
if (resultSet != null && !resultSet.isClosed()) {
resultSet.close();
}
if (preparedStatement != null && !preparedStatement.isClosed()) {
preparedStatement.close();
}
} catch (SQLException e) {
throw new RuntimeException("erreure applicative", e);
}
}
return retour;
}
@Override
public PersonneDTO update(PersonneDTO dto, Connection connectionCaller) {
return null;
}
@Override
public void delete(PersonneDTO dto) {
PreparedStatement preparedStatement = null;
ResultSet resultSet = null;
try {
preparedStatement = connection
.prepareStatement("delete from personne where id =?");
preparedStatement.setInt(1, dto.getId());
preparedStatement.executeUpdate();
} catch (SQLException e) {
throw new RuntimeException("erreure applicative", e);
} finally {
try {
if (resultSet != null && !resultSet.isClosed()) {
resultSet.close();
}
if (preparedStatement != null && !preparedStatement.isClosed()) {
preparedStatement.close();
}
} catch (SQLException e) {
throw new RuntimeException("erreure applicative", e);
}
}
}
@Override
public List<PersonneDTO> findByDTO(PersonneDTO findParameter) {
Statement statement = null;
ResultSet resultSet = null;
List<PersonneDTO> retour = new ArrayList<PersonneDTO>();
try {
statement = connection.createStatement();
String query = "select id, password, nom, prenom, datenaiss, tel, promotion_id from personne";
Boolean firstConstraint = true;
if (findParameter.getPromotionDTO() != null) {
query = query.concat(String.format(" %s promotion_id ='%s' ",
firstConstraint ? "where" : "and",
findParameter.getId()));
firstConstraint = false;
}
if (findParameter.getNom() != null) {
query = query.concat(String.format(" %s nom ='%s' ",
firstConstraint ? "where" : "and",
findParameter.getNom()));
firstConstraint = false;
}
if (findParameter.getPassword() != null) {
query = query.concat(String.format(" %s password ='%s' ",
firstConstraint ? "where" : "and",
findParameter.getPassword()));
firstConstraint = false;
}
resultSet = statement.executeQuery(query);
while (resultSet.next()) {
retour.add(buildDTO(resultSet));
}
} catch (SQLException e) {
throw new RuntimeException("erreure applicative", e);
} finally {
try {
if (resultSet != null && !resultSet.isClosed()) {
resultSet.close();
}
if (statement != null && !statement.isClosed()) {
statement.close();
}
} catch (SQLException e) {
throw new RuntimeException("erreure applicative", e);
}
}
return retour;
}
@Override
public void setConnection(Connection connection) {
this.connection = connection;
}
@Override
public Connection getConnection() {
return this.connection;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
*/
package org.apache.polygene.library.sql.generator.grammar.factories;
import org.apache.polygene.library.sql.generator.grammar.builders.query.ColumnsBuilder;
import org.apache.polygene.library.sql.generator.grammar.builders.query.FromBuilder;
import org.apache.polygene.library.sql.generator.grammar.builders.query.GroupByBuilder;
import org.apache.polygene.library.sql.generator.grammar.builders.query.OrderByBuilder;
import org.apache.polygene.library.sql.generator.grammar.builders.query.QueryBuilder;
import org.apache.polygene.library.sql.generator.grammar.builders.query.QuerySpecificationBuilder;
import org.apache.polygene.library.sql.generator.grammar.builders.query.SimpleQueryBuilder;
import org.apache.polygene.library.sql.generator.grammar.common.NonBooleanExpression;
import org.apache.polygene.library.sql.generator.grammar.common.SetQuantifier;
import org.apache.polygene.library.sql.generator.grammar.common.ValueExpression;
import org.apache.polygene.library.sql.generator.grammar.literals.SQLFunctionLiteral;
import org.apache.polygene.library.sql.generator.grammar.query.GroupByClause;
import org.apache.polygene.library.sql.generator.grammar.query.LimitSpecification;
import org.apache.polygene.library.sql.generator.grammar.query.OffsetSpecification;
import org.apache.polygene.library.sql.generator.grammar.query.Ordering;
import org.apache.polygene.library.sql.generator.grammar.query.OrdinaryGroupingSet;
import org.apache.polygene.library.sql.generator.grammar.query.QueryExpression;
import org.apache.polygene.library.sql.generator.grammar.query.QueryExpressionBody;
import org.apache.polygene.library.sql.generator.grammar.query.QueryExpressionBody.EmptyQueryExpressionBody;
import org.apache.polygene.library.sql.generator.grammar.query.QuerySpecification;
import org.apache.polygene.library.sql.generator.grammar.query.RowDefinition;
import org.apache.polygene.library.sql.generator.grammar.query.RowSubQuery;
import org.apache.polygene.library.sql.generator.grammar.query.RowValueConstructor;
import org.apache.polygene.library.sql.generator.grammar.query.SortSpecification;
import org.apache.polygene.library.sql.generator.grammar.query.TableValueConstructor;
import org.apache.polygene.library.sql.generator.vendor.SQLVendor;
/**
* A factory, which creates builders and syntax elements for SQL queries ({@code SELECT} statements). This factory may
* be obtained from {@link SQLVendor}.
*
* @author Stanislav Muhametsin
* @see SQLVendor
* @see QueryExpression
* @see QuerySpecification
*/
public interface QueryFactory
{
/**
 * Creates a new query, which has the specified body as an actual query.
 *
 * @param body The actual query to use.
 * @return The new {@link QueryExpression}.
 */
QueryExpression createQuery( QueryExpressionBody body );
/**
 * Creates a builder to build query specifications ({@code SELECT} expressions).
 *
 * @return The new {@link QuerySpecificationBuilder}.
 */
QuerySpecificationBuilder querySpecificationBuilder();
/**
 * <p>
 * Creates a builder for the columns in {@code SELECT} expressions.
 * </p>
 * <p>
 * Calling this method is equivalent to calling {@link #columnsBuilder(SetQuantifier)} and passing
 * {@link SetQuantifier#ALL} as argument.
 * </p>
 *
 * @return The new {@link ColumnsBuilder}.
 */
ColumnsBuilder columnsBuilder();
/**
 * Creates a builder for columns in {@code SELECT} expressions, which has specified set quantifier initially.
 *
 * @param setQuantifier The set quantifier to use.
 * @return The new {@link ColumnsBuilder}.
 */
ColumnsBuilder columnsBuilder( SetQuantifier setQuantifier );
/**
 * <p>
 * Creates a builder to build queries with capability for {@code UNION}, {@code INTERSECT}, and {@code EXCEPT} set
 * operations.
 * </p>
 * <p>
 * Calling this method is equivalent to calling {@link #queryBuilder(QueryExpressionBody)} and passing
 * {@link EmptyQueryExpressionBody} as argument.
 * </p>
 *
 * @return The new {@link QueryBuilder}.
 */
QueryBuilder queryBuilder();
/**
 * Creates a builder to build queries with capability for {@code UNION}, {@code INTERSECT}, and {@code EXCEPT} set
 * operations.
 *
 * @param query The initial query for builder.
 * @return The new {@link QueryBuilder}.
 */
QueryBuilder queryBuilder( QueryExpressionBody query );
/**
 * Creates a builder for {@code GROUP BY} clause.
 *
 * @return The new {@link GroupByBuilder}.
 */
GroupByBuilder groupByBuilder();
/**
 * Creates a builder for {@code FROM} clause.
 *
 * @return The new {@link FromBuilder}.
 */
FromBuilder fromBuilder();
/**
 * Creates a new grouping element, which has some expressions as grouping columns.
 *
 * @param expressions The expressions to use.
 * @return The new {@link OrdinaryGroupingSet}.
 * @see GroupByClause
 */
OrdinaryGroupingSet groupingElement( NonBooleanExpression... expressions );
/**
 * Creates a new sort specification for {@code ORDER BY} clause.
 *
 * @param expression The expression for column.
 * @param ordering The ordering to use.
 * @return The new {@link SortSpecification}.
 */
SortSpecification sortSpec( ValueExpression expression, Ordering ordering );
/**
 * Creates a builder for {@code ORDER BY} clause.
 *
 * @return The new {@link OrderByBuilder}.
 */
OrderByBuilder orderByBuilder();
/**
 * Creates a builder for simple queries.
 *
 * @return The new {@link SimpleQueryBuilder}.
 */
SimpleQueryBuilder simpleQueryBuilder();
/**
 * Creates a new {@code VALUES} expression in query.
 *
 * @param rows The rows for {@code VALUES} expression.
 * @return The new {@link TableValueConstructor}.
 * @see RowValueConstructor
 * @see RowSubQuery
 * @see RowDefinition
 */
TableValueConstructor values( RowValueConstructor... rows );
/**
 * Creates a new subquery for a row for {@code VALUES} expression in query.
 *
 * @param subQuery The query to return the row.
 * @return The new {@link RowSubQuery}.
 */
RowSubQuery rowSubQuery( QueryExpression subQuery );
/**
 * Creates a new row for {@code VALUES} expression in query.
 *
 * @param elements The elements for the row.
 * @return The new {@link RowDefinition}.
 */
RowDefinition row( ValueExpression... elements );
/**
 * Returns a query for calling a SQL function with schema. The query is
 * {@code SELECT * FROM schemaName.functionName(params...)}.
 *
 * @param schemaName The name of the schema where SQL function resides.
 * @param function The SQL function to call.
 * @return A query returning the results of calling SQL function.
 */
QueryExpression callFunction( String schemaName, SQLFunctionLiteral function );
/**
 * Returns a query for calling a SQL function without a schema. The query is
 * {@code SELECT * FROM functionName(params...)}. Calling this method is equivalent to calling
 * {@link #callFunction(String, SQLFunctionLiteral)} and passing {@code null} as first argument.
 *
 * @param function The function to call.
 * @return A query returning the results of calling SQL function.
 */
QueryExpression callFunction( SQLFunctionLiteral function );
/**
 * Creates a new {@code OFFSET <n> ROWS} syntax element for query.
 *
 * @param offset The offset amount.
 * @return A new {@code OFFSET <n> ROWS} syntax element.
 */
OffsetSpecification offset( NonBooleanExpression offset );
/**
 * Creates a new {@code FETCH FIRST <n> ROWS ONLY} syntax element for query.
 *
 * @param count The limit amount.
 * @return A new {@code FETCH FIRST <n> ROWS ONLY} syntax element.
 */
LimitSpecification limit( NonBooleanExpression count );
}
| |
/*
* Copyright 2015 Torridity.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.tor.tribes.ui.views;
import de.tor.tribes.control.GenericManagerListener;
import de.tor.tribes.control.ManageableType;
import de.tor.tribes.io.DataHolder;
import de.tor.tribes.io.UnitHolder;
import de.tor.tribes.types.Attack;
import de.tor.tribes.types.StandardAttack;
import de.tor.tribes.types.ext.Village;
import de.tor.tribes.ui.editors.DateSpinEditor;
import de.tor.tribes.ui.editors.NoteIconCellEditor;
import de.tor.tribes.ui.editors.UnitCellEditor;
import de.tor.tribes.ui.editors.VillageCellEditor;
import de.tor.tribes.ui.models.DoItYourselfAttackTableModel;
import de.tor.tribes.ui.renderer.*;
import de.tor.tribes.ui.windows.AbstractDSWorkbenchFrame;
import de.tor.tribes.util.Constants;
import de.tor.tribes.util.GlobalOptions;
import de.tor.tribes.util.ImageUtils;
import de.tor.tribes.util.JOptionPaneHelper;
import de.tor.tribes.util.PropertyHelper;
import de.tor.tribes.util.attack.AttackManager;
import de.tor.tribes.util.attack.StandardAttackManager;
import de.tor.tribes.util.bb.AttackListFormatter;
import de.tor.tribes.util.translation.TranslationManager;
import de.tor.tribes.util.translation.Translator;
import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.HeadlessException;
import java.awt.Point;
import java.awt.Toolkit;
import java.awt.datatransfer.DataFlavor;
import java.awt.datatransfer.StringSelection;
import java.awt.datatransfer.UnsupportedFlavorException;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.KeyEvent;
import java.awt.geom.GeneralPath;
import java.awt.image.BufferedImage;
import java.io.IOException;
import java.util.Date;
import java.util.LinkedList;
import java.util.List;
import java.util.StringTokenizer;
import javax.swing.*;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import org.apache.commons.configuration2.Configuration;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.jdesktop.swingx.decorator.CompoundHighlighter;
import org.jdesktop.swingx.decorator.HighlightPredicate;
import org.jdesktop.swingx.decorator.HighlighterFactory;
import org.jdesktop.swingx.decorator.PainterHighlighter;
import org.jdesktop.swingx.painter.AbstractLayoutPainter.HorizontalAlignment;
import org.jdesktop.swingx.painter.AbstractLayoutPainter.VerticalAlignment;
import org.jdesktop.swingx.painter.ImagePainter;
import org.jdesktop.swingx.painter.MattePainter;
import org.jdesktop.swingx.table.TableColumnExt;
/**
* @author Torridity
*/
public class DSWorkbenchDoItYourselfAttackPlaner extends AbstractDSWorkbenchFrame implements GenericManagerListener, ActionListener, ListSelectionListener {
@Override
public void valueChanged(ListSelectionEvent e) {
    // Feedback is (as before) only emitted while the user is still adjusting
    // the selection; a finished selection produces no message.
    if (!e.getValueIsAdjusting()) {
        return;
    }
    int selectedRows = jAttackTable.getSelectedRowCount();
    if (selectedRows == 0) {
        return;
    }
    String label = (selectedRows == 1)
            ? trans.get("Angriffgewahlt")
            : trans.get("Angriffegewahlt");
    showInfo(selectedRows + label);
}
/**
 * Kinds of transfer handled by {@code transferSelection}: cut/copy the
 * selected attacks to the internal clipboard, paste from it, or export them
 * as BB code to the system clipboard.
 */
public enum TRANSFER_TYPE {
CUT_TO_INTERNAL_CLIPBOARD, COPY_TO_INTERNAL_CLIPBOARD, FROM_INTERNAL_CLIPBOARD, BB_TO_CLIPBOARD
}
/**
 * Manager callback for "all data changed"; delegates to the group-aware
 * variant with {@code null} (no specific group).
 */
@Override
public void dataChangedEvent() {
dataChangedEvent(null);
}
/**
 * Refreshes the attack table after the underlying attack data changed.
 * The group argument is not evaluated here; the whole table model is refired.
 */
@Override
public void dataChangedEvent(String pGroup) {
((DoItYourselfAttackTableModel) jAttackTable.getModel()).fireTableDataChanged();
}
@Override
public void actionPerformed(ActionEvent e) {
    String command = e.getActionCommand();
    if (command == null) {
        return;
    }
    // Dispatch the clipboard-style commands registered in the constructor.
    switch (command) {
        case "Copy":
            transferSelection(TRANSFER_TYPE.COPY_TO_INTERNAL_CLIPBOARD);
            break;
        case "Cut":
            transferSelection(TRANSFER_TYPE.CUT_TO_INTERNAL_CLIPBOARD);
            break;
        case "Paste":
            transferSelection(TRANSFER_TYPE.FROM_INTERNAL_CLIPBOARD);
            break;
        case "Delete":
            deleteSelection(true);
            break;
        case "BBCopy":
            transferSelection(TRANSFER_TYPE.BB_TO_CLIPBOARD);
            break;
        default:
            // unknown commands are ignored, as in the original if/else chain
            break;
    }
}
// UI string translator for this view (the bundle key keeps the historical
// "DoItYourselflAttackPlaner" spelling).
private static Translator trans = TranslationManager.getTranslator("ui.views.DoItYourselflAttackPlaner");
// class logger
private static final Logger logger = LogManager.getLogger("DoItYourselflAttackPlaner");
// lazily created singleton instance, see getSingleton()
private static DSWorkbenchDoItYourselfAttackPlaner SINGLETON = null;
/**
 * Creates the attack planer frame: builds the generated form, installs the
 * attack table model, wires the clipboard/BB-code keyboard shortcuts, starts
 * the countdown refresh thread and hooks up the help system.
 */
DSWorkbenchDoItYourselfAttackPlaner() {
initComponents();
jAttackTable.setModel(new DoItYourselfAttackTableModel());
jAttackTable.getSelectionModel().addListSelectionListener(DSWorkbenchDoItYourselfAttackPlaner.this);
// default both arrival-time fields to "now"
jArriveTime.setDate(new Date());
jNewArriveSpinner.setDate(new Date());
capabilityInfoPanel1.addActionListener(this);
// keyboard shortcuts dispatched through actionPerformed().
// NOTE(review): ActionEvent.CTRL_MASK is deprecated in newer JDKs in favour
// of InputEvent.CTRL_DOWN_MASK — confirm the targeted JDK before changing.
KeyStroke copy = KeyStroke.getKeyStroke(KeyEvent.VK_C, ActionEvent.CTRL_MASK, false);
KeyStroke bbCopy = KeyStroke.getKeyStroke(KeyEvent.VK_B, ActionEvent.CTRL_MASK, false);
KeyStroke paste = KeyStroke.getKeyStroke(KeyEvent.VK_V, ActionEvent.CTRL_MASK, false);
KeyStroke cut = KeyStroke.getKeyStroke(KeyEvent.VK_X, ActionEvent.CTRL_MASK, false);
KeyStroke delete = KeyStroke.getKeyStroke(KeyEvent.VK_DELETE, 0, false);
jAttackTable.registerKeyboardAction(DSWorkbenchDoItYourselfAttackPlaner.this, "Copy", copy, JComponent.WHEN_ANCESTOR_OF_FOCUSED_COMPONENT);
jAttackTable.registerKeyboardAction(DSWorkbenchDoItYourselfAttackPlaner.this, "Cut", cut, JComponent.WHEN_ANCESTOR_OF_FOCUSED_COMPONENT);
jAttackTable.registerKeyboardAction(DSWorkbenchDoItYourselfAttackPlaner.this, "Paste", paste, JComponent.WHEN_ANCESTOR_OF_FOCUSED_COMPONENT);
jAttackTable.registerKeyboardAction(DSWorkbenchDoItYourselfAttackPlaner.this, "Delete", delete, JComponent.WHEN_ANCESTOR_OF_FOCUSED_COMPONENT);
jAttackTable.registerKeyboardAction(DSWorkbenchDoItYourselfAttackPlaner.this, "BBCopy", bbCopy, JComponent.WHEN_ANCESTOR_OF_FOCUSED_COMPONENT);
// swallow the table's default "find" action: this view has no search
jAttackTable.getActionMap().put("find", new AbstractAction() {
@Override
public void actionPerformed(ActionEvent e) {
//no find
}
});
// background thread periodically triggering updateCountdown()
DoItYourselfCountdownThread thread = new DoItYourselfCountdownThread();
thread.start();
// <editor-fold defaultstate="collapsed" desc=" Init HelpSystem ">
if (!Constants.DEBUG) {
GlobalOptions.getHelpBroker().enableHelpKey(getRootPane(), "pages.manual_attack_planer", GlobalOptions.getHelpBroker().getHelpSet());
}
// </editor-fold>
pack();
}
/**
 * Sends the frame behind other windows; drops the "always on top" flag and
 * notifies listeners first so the window can actually move back.
 */
@Override
public void toBack() {
jAlwaysOnTopBox.setSelected(false);
fireAttackFrameOnTopEvent(null);
super.toBack();
}
/**
 * Persists view-specific settings (always-on-top flag and table layout)
 * under this view's property prefix.
 *
 * @param pConfig configuration to write into
 */
@Override
public void storeCustomProperties(Configuration pConfig) {
pConfig.setProperty(getPropertyPrefix() + ".alwaysOnTop", jAlwaysOnTopBox.isSelected());
PropertyHelper.storeTableProperties(jAttackTable, pConfig, getPropertyPrefix());
}
/**
 * Restores the settings written by {@link #storeCustomProperties}.
 *
 * @param pConfig configuration to read from
 */
@Override
public void restoreCustomProperties(Configuration pConfig) {
try {
jAlwaysOnTopBox.setSelected(pConfig.getBoolean(getPropertyPrefix() + ".alwaysOnTop"));
} catch (Exception ignored) {
// property absent (e.g. first start): keep the checkbox default
}
setAlwaysOnTop(jAlwaysOnTopBox.isSelected());
PropertyHelper.restoreTableProperties(jAttackTable, pConfig, getPropertyPrefix());
}
/**
 * @return the prefix under which this view stores its configuration keys
 */
@Override
public String getPropertyPrefix() {
return "manual.attack.planer.view";
}
/**
 * Returns the unique instance of this frame, creating it lazily.
 * Synchronized so concurrent first calls cannot create two frames.
 *
 * @return the singleton instance
 */
public static synchronized DSWorkbenchDoItYourselfAttackPlaner getSingleton() {
if (SINGLETON == null) {
SINGLETON = new DSWorkbenchDoItYourselfAttackPlaner();
}
return SINGLETON;
}
/**
 * (Re)initializes the attack table — renderers, editors, highlighters — and
 * populates the unit and attack-type combo boxes from the loaded game data.
 */
@Override
public void resetView() {
AttackManager.getSingleton().addManagerListener(this);
//setup renderer and general view
// ((DoItYourselfAttackTableModel) jAttackTable.getModel()).clear();
// extra highlight on columns 0,1,2,3,6 combined with alternate row striping
HighlightPredicate.ColumnHighlightPredicate colu = new HighlightPredicate.ColumnHighlightPredicate(0, 1, 2, 3, 6);
jAttackTable.setRowHeight(24);
jAttackTable.getTableHeader().setDefaultRenderer(new DefaultTableHeaderRenderer());
jAttackTable.setHighlighters(new CompoundHighlighter(colu, HighlighterFactory.createAlternateStriping(Constants.DS_ROW_A, Constants.DS_ROW_B)));
jAttackTable.setColumnControlVisible(true);
// per-type cell editors/renderers (unit, village, note icon, date, countdown)
jAttackTable.setDefaultEditor(UnitHolder.class, new UnitCellEditor());
jAttackTable.setDefaultEditor(Village.class, new VillageCellEditor());
jAttackTable.setDefaultRenderer(UnitHolder.class, new UnitCellRenderer());
jAttackTable.setDefaultRenderer(Integer.class, new NoteIconCellRenderer(NoteIconCellRenderer.ICON_TYPE.NOTE));
jAttackTable.setDefaultRenderer(Date.class, new ColoredDateCellRenderer());
jAttackTable.setDefaultRenderer(Long.class, new ColoredCoutdownCellRenderer());
jAttackTable.setDefaultEditor(Date.class, new DateSpinEditor());
jAttackTable.setDefaultEditor(Integer.class, new NoteIconCellEditor(NoteIconCellEditor.ICON_TYPE.NOTE));
// paint a small dark-green triangle in the top-right corner of editable cells
BufferedImage back = ImageUtils.createCompatibleBufferedImage(5, 5, BufferedImage.BITMASK);
Graphics2D g = back.createGraphics();
GeneralPath p = new GeneralPath();
p.moveTo(0, 0);
p.lineTo(5, 0);
p.lineTo(5, 5);
p.closePath();
g.setColor(Color.GREEN.darker());
g.fill(p);
g.dispose();
jAttackTable.addHighlighter(new PainterHighlighter(HighlightPredicate.EDITABLE, new ImagePainter(back, HorizontalAlignment.RIGHT, VerticalAlignment.TOP)));
// two separate models: same units, one per combo box
DefaultComboBoxModel model = new DefaultComboBoxModel();
DefaultComboBoxModel model2 = new DefaultComboBoxModel();
for (UnitHolder unit : DataHolder.getSingleton().getUnits()) {
model.addElement(unit);
model2.addElement(unit);
}
jUnitBox.setModel(model);
jUnitComboBox.setModel(model2);
// "ram" is the default pre-selected unit in both boxes
jUnitBox.setSelectedItem(DataHolder.getSingleton().getUnitByPlainName("ram"));
jUnitComboBox.setSelectedItem(DataHolder.getSingleton().getUnitByPlainName("ram"));
jUnitBox.setRenderer(new UnitListCellRenderer());
jAttackTypeComboBox.setRenderer(new StandardAttackListCellRenderer());
DefaultComboBoxModel typeModel = new DefaultComboBoxModel();
for (ManageableType t : StandardAttackManager.getSingleton().getAllElements()) {
StandardAttack a = (StandardAttack) t;
typeModel.addElement(a);
}
jAttackTypeComboBox.setModel(typeModel);
jUnitComboBox.setRenderer(new UnitListCellRenderer());
// default coordinates in the middle of the map
jSourceVillage.setValue(new Point(500, 500));
jTargetVillage.setValue(new Point(500, 500));
SwingUtilities.invokeLater(() -> {
jSourceVillage.updateUI();
jTargetVillage.updateUI();
});
}
/**
 * Repaints only the "Verbleibend" (remaining time) column so the visible
 * countdowns refresh without redrawing the whole table. Called periodically
 * by the countdown thread started in the constructor.
 */
protected void updateCountdown() {
    TableColumnExt col = jAttackTable.getColumnExt(trans.getRaw("ui.models.DoItYourselfAttackTableModel.Verbleibend"));
    if (!col.isVisible()) {
        return;
    }
    // x offset of the countdown column = sum of the widths of all visible
    // columns preceding it
    int startX = 0;
    for (int i = 0; i < jAttackTable.getColumnCount(); i++) {
        TableColumnExt current = jAttackTable.getColumnExt(i);
        if (current.equals(col)) {
            break;
        }
        if (current.isVisible()) {
            startX += current.getWidth();
        }
    }
    // BUG FIX: repaint(x, y, width, height) expects a width as third argument;
    // the original passed startX + col.getWidth() and thus repainted a region
    // roughly twice as wide as the column itself.
    jAttackTable.repaint(startX, 0, col.getWidth(), jAttackTable.getHeight());
}
/**
 * Shows a success message in the collapsible info panel (green background).
 *
 * @param pMessage text to display
 */
public void showSuccess(String pMessage) {
infoPanel.setCollapsed(false);
jXLabel1.setBackgroundPainter(new MattePainter(Color.GREEN));
jXLabel1.setForeground(Color.BLACK);
jXLabel1.setText(pMessage);
}
/**
 * Shows a neutral informational message in the collapsible info panel
 * (frame background color).
 *
 * @param pMessage text to display
 */
public void showInfo(String pMessage) {
infoPanel.setCollapsed(false);
jXLabel1.setBackgroundPainter(new MattePainter(getBackground()));
jXLabel1.setForeground(Color.BLACK);
jXLabel1.setText(pMessage);
}
/**
 * Shows an error message in the collapsible info panel (red background,
 * white text).
 *
 * @param pMessage text to display
 */
public void showError(String pMessage) {
infoPanel.setCollapsed(false);
jXLabel1.setBackgroundPainter(new MattePainter(Color.RED));
jXLabel1.setForeground(Color.WHITE);
jXLabel1.setText(pMessage);
}
/**
* This method is called from within the constructor to initialize the form. WARNING: Do NOT modify this code. The content of this
* method is always regenerated by the Form Editor.
*/
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
java.awt.GridBagConstraints gridBagConstraints;
jAlwaysOnTopBox = new javax.swing.JCheckBox();
jPanel4 = new javax.swing.JPanel();
jPanel3 = new javax.swing.JPanel();
jLabel7 = new javax.swing.JLabel();
jAdeptTimeButton = new javax.swing.JButton();
jLabel8 = new javax.swing.JLabel();
jUnitComboBox = new javax.swing.JComboBox();
jAdeptUnitButton = new javax.swing.JButton();
jLabel9 = new javax.swing.JLabel();
jAttackTypeComboBox = new javax.swing.JComboBox();
jAdeptTypeButton = new javax.swing.JButton();
jNewArriveSpinner = new de.tor.tribes.ui.components.DateTimeField();
jPanel2 = new javax.swing.JPanel();
jLabel1 = new javax.swing.JLabel();
jLabel2 = new javax.swing.JLabel();
jLabel3 = new javax.swing.JLabel();
jLabel4 = new javax.swing.JLabel();
jUnitBox = new javax.swing.JComboBox();
jButton1 = new javax.swing.JButton();
jArriveTime = new de.tor.tribes.ui.components.DateTimeField();
jSourceVillage = new de.tor.tribes.ui.components.CoordinateSpinner();
jTargetVillage = new de.tor.tribes.ui.components.CoordinateSpinner();
jPanel5 = new javax.swing.JPanel();
jScrollPane2 = new javax.swing.JScrollPane();
jAttackTable = new org.jdesktop.swingx.JXTable();
infoPanel = new org.jdesktop.swingx.JXCollapsiblePane();
jXLabel1 = new org.jdesktop.swingx.JXLabel();
capabilityInfoPanel1 = new de.tor.tribes.ui.components.CapabilityInfoPanel();
setTitle(trans.get("ManuellerAngriffsplaner"));
getContentPane().setLayout(new java.awt.GridBagLayout());
jAlwaysOnTopBox.setText(trans.get("ImmerimVordergrund"));
jAlwaysOnTopBox.addItemListener(new java.awt.event.ItemListener() {
public void itemStateChanged(java.awt.event.ItemEvent evt) {
fireAttackFrameOnTopEvent(evt);
}
});
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 1;
gridBagConstraints.gridy = 1;
gridBagConstraints.anchor = java.awt.GridBagConstraints.EAST;
gridBagConstraints.insets = new java.awt.Insets(5, 5, 5, 5);
getContentPane().add(jAlwaysOnTopBox, gridBagConstraints);
jPanel4.setLayout(new java.awt.GridBagLayout());
jPanel3.setBorder(javax.swing.BorderFactory.createTitledBorder(trans.get("Anpassen")));
jPanel3.setOpaque(false);
jPanel3.setPreferredSize(new java.awt.Dimension(350, 152));
jPanel3.setLayout(new java.awt.GridBagLayout());
jLabel7.setText(trans.get("Ankunftszeit"));
jLabel7.setMaximumSize(new java.awt.Dimension(80, 25));
jLabel7.setMinimumSize(new java.awt.Dimension(80, 25));
jLabel7.setPreferredSize(new java.awt.Dimension(80, 25));
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 0;
gridBagConstraints.gridy = 0;
gridBagConstraints.fill = java.awt.GridBagConstraints.VERTICAL;
gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
gridBagConstraints.insets = new java.awt.Insets(5, 5, 5, 5);
jPanel3.add(jLabel7, gridBagConstraints);
jAdeptTimeButton.setIcon(new javax.swing.ImageIcon(getClass().getResource("/res/checkbox.png"))); // NOI18N
jAdeptTimeButton.setToolTipText(trans.get("Ankunfszeit_markierten"));
jAdeptTimeButton.setMaximumSize(new java.awt.Dimension(25, 25));
jAdeptTimeButton.setMinimumSize(new java.awt.Dimension(25, 25));
jAdeptTimeButton.setPreferredSize(new java.awt.Dimension(25, 25));
jAdeptTimeButton.addMouseListener(new java.awt.event.MouseAdapter() {
public void mouseClicked(java.awt.event.MouseEvent evt) {
fireAdeptEvent(evt);
}
});
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 2;
gridBagConstraints.gridy = 0;
gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
gridBagConstraints.insets = new java.awt.Insets(5, 5, 5, 5);
jPanel3.add(jAdeptTimeButton, gridBagConstraints);
jLabel8.setText(trans.get("Einheit"));
jLabel8.setMaximumSize(new java.awt.Dimension(80, 25));
jLabel8.setMinimumSize(new java.awt.Dimension(80, 25));
jLabel8.setPreferredSize(new java.awt.Dimension(80, 25));
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 0;
gridBagConstraints.gridy = 1;
gridBagConstraints.fill = java.awt.GridBagConstraints.VERTICAL;
gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
gridBagConstraints.insets = new java.awt.Insets(5, 5, 5, 5);
jPanel3.add(jLabel8, gridBagConstraints);
jUnitComboBox.setMinimumSize(new java.awt.Dimension(23, 25));
jUnitComboBox.setPreferredSize(new java.awt.Dimension(28, 25));
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 1;
gridBagConstraints.gridy = 1;
gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
gridBagConstraints.weightx = 1.0;
gridBagConstraints.insets = new java.awt.Insets(5, 5, 5, 5);
jPanel3.add(jUnitComboBox, gridBagConstraints);
jAdeptUnitButton.setIcon(new javax.swing.ImageIcon(getClass().getResource("/res/checkbox.png"))); // NOI18N
jAdeptUnitButton.setToolTipText(trans.get("Einheit_markierten"));
jAdeptUnitButton.setMaximumSize(new java.awt.Dimension(25, 25));
jAdeptUnitButton.setMinimumSize(new java.awt.Dimension(25, 25));
jAdeptUnitButton.setPreferredSize(new java.awt.Dimension(25, 25));
jAdeptUnitButton.addMouseListener(new java.awt.event.MouseAdapter() {
public void mouseClicked(java.awt.event.MouseEvent evt) {
fireAdeptEvent(evt);
}
});
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 2;
gridBagConstraints.gridy = 1;
gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
gridBagConstraints.insets = new java.awt.Insets(5, 5, 5, 5);
jPanel3.add(jAdeptUnitButton, gridBagConstraints);
jLabel9.setText(trans.get("Angriffstyp"));
jLabel9.setMaximumSize(new java.awt.Dimension(80, 25));
jLabel9.setMinimumSize(new java.awt.Dimension(80, 25));
jLabel9.setPreferredSize(new java.awt.Dimension(80, 25));
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 0;
gridBagConstraints.gridy = 2;
gridBagConstraints.fill = java.awt.GridBagConstraints.VERTICAL;
gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
gridBagConstraints.insets = new java.awt.Insets(5, 5, 5, 5);
jPanel3.add(jLabel9, gridBagConstraints);
jAttackTypeComboBox.setMinimumSize(new java.awt.Dimension(23, 25));
jAttackTypeComboBox.setPreferredSize(new java.awt.Dimension(28, 25));
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 1;
gridBagConstraints.gridy = 2;
gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
gridBagConstraints.weightx = 1.0;
gridBagConstraints.insets = new java.awt.Insets(5, 5, 5, 5);
jPanel3.add(jAttackTypeComboBox, gridBagConstraints);
jAdeptTypeButton.setIcon(new javax.swing.ImageIcon(getClass().getResource("/res/checkbox.png"))); // NOI18N
jAdeptTypeButton.setToolTipText(trans.get("Angriffstyp_markierten"));
jAdeptTypeButton.setMaximumSize(new java.awt.Dimension(25, 25));
jAdeptTypeButton.setMinimumSize(new java.awt.Dimension(25, 25));
jAdeptTypeButton.setPreferredSize(new java.awt.Dimension(25, 25));
jAdeptTypeButton.addMouseListener(new java.awt.event.MouseAdapter() {
public void mouseClicked(java.awt.event.MouseEvent evt) {
fireAdeptEvent(evt);
}
});
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 2;
gridBagConstraints.gridy = 2;
gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
gridBagConstraints.insets = new java.awt.Insets(5, 5, 5, 5);
jPanel3.add(jAdeptTypeButton, gridBagConstraints);
jNewArriveSpinner.setMinimumSize(new java.awt.Dimension(64, 25));
jNewArriveSpinner.setPreferredSize(new java.awt.Dimension(258, 25));
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 1;
gridBagConstraints.gridy = 0;
gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
gridBagConstraints.weightx = 1.0;
gridBagConstraints.insets = new java.awt.Insets(5, 5, 5, 5);
jPanel3.add(jNewArriveSpinner, gridBagConstraints);
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 1;
gridBagConstraints.gridy = 1;
gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH;
gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
gridBagConstraints.weightx = 0.7;
gridBagConstraints.insets = new java.awt.Insets(5, 5, 5, 5);
jPanel4.add(jPanel3, gridBagConstraints);
jPanel2.setBorder(javax.swing.BorderFactory.createTitledBorder(trans.get("NeuerAngriff")));
jPanel2.setOpaque(false);
jPanel2.setLayout(new java.awt.GridBagLayout());
jLabel1.setText(trans.get("Herkunft"));
jLabel1.setMaximumSize(new java.awt.Dimension(80, 25));
jLabel1.setMinimumSize(new java.awt.Dimension(80, 25));
jLabel1.setPreferredSize(new java.awt.Dimension(80, 25));
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 0;
gridBagConstraints.gridy = 0;
gridBagConstraints.fill = java.awt.GridBagConstraints.VERTICAL;
gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
gridBagConstraints.insets = new java.awt.Insets(5, 5, 5, 5);
jPanel2.add(jLabel1, gridBagConstraints);
jLabel2.setText(trans.get("Ziel"));
jLabel2.setMaximumSize(new java.awt.Dimension(80, 25));
jLabel2.setMinimumSize(new java.awt.Dimension(80, 25));
jLabel2.setPreferredSize(new java.awt.Dimension(80, 25));
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 0;
gridBagConstraints.gridy = 1;
gridBagConstraints.fill = java.awt.GridBagConstraints.VERTICAL;
gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
gridBagConstraints.insets = new java.awt.Insets(5, 5, 5, 5);
jPanel2.add(jLabel2, gridBagConstraints);
jLabel3.setText(trans.get("Einheit"));
jLabel3.setMaximumSize(new java.awt.Dimension(80, 25));
jLabel3.setMinimumSize(new java.awt.Dimension(80, 25));
jLabel3.setPreferredSize(new java.awt.Dimension(80, 25));
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 0;
gridBagConstraints.gridy = 3;
gridBagConstraints.fill = java.awt.GridBagConstraints.VERTICAL;
gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
gridBagConstraints.insets = new java.awt.Insets(5, 5, 5, 5);
jPanel2.add(jLabel3, gridBagConstraints);
jLabel4.setText(trans.get("Ankunft"));
jLabel4.setMaximumSize(new java.awt.Dimension(80, 25));
jLabel4.setMinimumSize(new java.awt.Dimension(80, 25));
jLabel4.setPreferredSize(new java.awt.Dimension(80, 25));
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 0;
gridBagConstraints.gridy = 2;
gridBagConstraints.fill = java.awt.GridBagConstraints.VERTICAL;
gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
gridBagConstraints.insets = new java.awt.Insets(5, 5, 5, 5);
jPanel2.add(jLabel4, gridBagConstraints);
jUnitBox.setToolTipText(trans.get("LangsamsteEinheit"));
jUnitBox.setMinimumSize(new java.awt.Dimension(23, 25));
jUnitBox.setPreferredSize(new java.awt.Dimension(28, 25));
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 1;
gridBagConstraints.gridy = 3;
gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
gridBagConstraints.weightx = 1.0;
gridBagConstraints.insets = new java.awt.Insets(5, 5, 5, 5);
jPanel2.add(jUnitBox, gridBagConstraints);
jButton1.setIcon(new javax.swing.ImageIcon(getClass().getResource("/res/add.gif"))); // NOI18N
jButton1.setText(trans.get("Add"));
jButton1.addMouseListener(new java.awt.event.MouseAdapter() {
public void mouseClicked(java.awt.event.MouseEvent evt) {
fireAddAttackEvent(evt);
}
});
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 1;
gridBagConstraints.gridy = 4;
gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
gridBagConstraints.weightx = 1.0;
gridBagConstraints.insets = new java.awt.Insets(5, 5, 5, 5);
jPanel2.add(jButton1, gridBagConstraints);
jArriveTime.setMinimumSize(new java.awt.Dimension(64, 25));
jArriveTime.setPreferredSize(new java.awt.Dimension(258, 25));
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 1;
gridBagConstraints.gridy = 2;
gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
gridBagConstraints.weightx = 1.0;
gridBagConstraints.insets = new java.awt.Insets(5, 5, 5, 5);
jPanel2.add(jArriveTime, gridBagConstraints);
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 1;
gridBagConstraints.gridy = 0;
gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
gridBagConstraints.weightx = 1.0;
gridBagConstraints.insets = new java.awt.Insets(5, 5, 5, 5);
jPanel2.add(jSourceVillage, gridBagConstraints);
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 1;
gridBagConstraints.gridy = 1;
gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
gridBagConstraints.weightx = 1.0;
gridBagConstraints.insets = new java.awt.Insets(5, 5, 5, 5);
jPanel2.add(jTargetVillage, gridBagConstraints);
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 0;
gridBagConstraints.gridy = 1;
gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
gridBagConstraints.weightx = 0.5;
gridBagConstraints.insets = new java.awt.Insets(5, 5, 5, 5);
jPanel4.add(jPanel2, gridBagConstraints);
jPanel5.setLayout(new java.awt.BorderLayout());
jAttackTable.setModel(new javax.swing.table.DefaultTableModel(
new Object [][] {
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null}
},
new String [] {
"Title 1", "Title 2", "Title 3", "Title 4"
}
));
jScrollPane2.setViewportView(jAttackTable);
jPanel5.add(jScrollPane2, java.awt.BorderLayout.CENTER);
infoPanel.setCollapsed(true);
infoPanel.setInheritAlpha(false);
jXLabel1.setText(trans.get("KeineMeldung"));
jXLabel1.setOpaque(true);
jXLabel1.addMouseListener(new java.awt.event.MouseAdapter() {
public void mouseReleased(java.awt.event.MouseEvent evt) {
jXLabel1fireHideInfoEvent(evt);
}
});
infoPanel.add(jXLabel1, java.awt.BorderLayout.CENTER);
jPanel5.add(infoPanel, java.awt.BorderLayout.SOUTH);
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 0;
gridBagConstraints.gridy = 0;
gridBagConstraints.gridwidth = 2;
gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH;
gridBagConstraints.weightx = 1.0;
gridBagConstraints.weighty = 1.0;
gridBagConstraints.insets = new java.awt.Insets(5, 5, 5, 5);
jPanel4.add(jPanel5, gridBagConstraints);
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 0;
gridBagConstraints.gridy = 0;
gridBagConstraints.gridwidth = 2;
gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH;
gridBagConstraints.weightx = 1.0;
gridBagConstraints.weighty = 1.0;
getContentPane().add(jPanel4, gridBagConstraints);
capabilityInfoPanel1.setSearchable(false);
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 0;
gridBagConstraints.gridy = 1;
gridBagConstraints.insets = new java.awt.Insets(5, 5, 5, 5);
getContentPane().add(capabilityInfoPanel1, gridBagConstraints);
pack();
}// </editor-fold>//GEN-END:initComponents
// Toggles the frame's "always on top" window flag when the corresponding checkbox changes.
private void fireAttackFrameOnTopEvent(java.awt.event.ItemEvent evt) {//GEN-FIRST:event_fireAttackFrameOnTopEvent
    // Flip the current window state rather than reading the checkbox selection.
    setAlwaysOnTop(!isAlwaysOnTop());
}//GEN-LAST:event_fireAttackFrameOnTopEvent
// Validates the form inputs (source, target, arrival, unit) and adds a new
// attack to the manual attack plan.
private void fireAddAttackEvent(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_fireAddAttackEvent
    Village source = jSourceVillage.getVillage();
    if (source == null) {
        showError(trans.get("KeingueltigesHerkunftsdorf"));
        return;
    }
    Village target = jTargetVillage.getVillage();
    if (target == null) {
        showError(trans.get("KeingueltigesZieldorf"));
        return;
    }
    Date arrive = jArriveTime.getSelectedDate();
    // Fix: also guard against a missing selection -- the previous code threw
    // a NullPointerException on 'arrive.getTime()' if no date was set.
    if (arrive == null || arrive.getTime() < System.currentTimeMillis()) {
        showError(trans.get("AnkunftszeitVergangenheit"));
        return;
    }
    UnitHolder unit = (UnitHolder) jUnitBox.getSelectedItem();
    // NOTE(review): 'type' is derived from the unit but never passed to
    // addAttack() below -- it looks like an addAttack overload taking the
    // attack type was intended here. TODO: confirm against AttackManager's API.
    int type = Attack.NO_TYPE;
    if (unit.equals(DataHolder.getSingleton().getUnitByPlainName("snob"))) {
        type = Attack.SNOB_TYPE;
    } else if (unit.equals(DataHolder.getSingleton().getUnitByPlainName("ram"))) {
        type = Attack.CLEAN_TYPE;
    }
    AttackManager.getSingleton().addAttack(source, target, unit, arrive, AttackManager.MANUAL_ATTACK_PLAN);
    showSuccess(trans.get("AngriffHinzufuegen"));
}//GEN-LAST:event_fireAddAttackEvent
// Applies a bulk change to every attack selected in the table. Which property
// is adapted (arrival time, unit, or attack type) depends on the button that
// fired the mouse event.
private void fireAdeptEvent(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_fireAdeptEvent
    int[] rows = jAttackTable.getSelectedRows();
    if (rows == null || rows.length == 0) {
        showInfo(trans.get("KeineAngriffegewaehlt"));
        return;
    }
    if (evt.getSource() == jAdeptTimeButton) {
        // NOTE(review): getSelectedDate() looks like it may return null when no
        // date is set, which would NPE on getTime() below -- TODO confirm the
        // DateTimeField contract.
        Date newArrive = jNewArriveSpinner.getSelectedDate();
        if (newArrive.getTime() < System.currentTimeMillis()) {
            showError(trans.get("AnkunftzeitdarfnichtVergangenheit"));
            return;
        }
        // Walk the selection backwards so remaining view indices stay valid.
        for (int r = rows.length - 1; r >= 0; r--) {
            // Map the (possibly sorted/filtered) view index to the model index.
            int row = jAttackTable.convertRowIndexToModel(rows[r]);
            Attack a = (Attack) AttackManager.getSingleton().getDoItYourselfAttacks().get(row);
            a.setArriveTime(newArrive);
        }
    } else if (evt.getSource() == jAdeptUnitButton) {
        UnitHolder newUnit = (UnitHolder) jUnitComboBox.getSelectedItem();
        if (newUnit == null) {
            showError(trans.get("keineeinheit"));
            return;
        }
        for (int r = rows.length - 1; r >= 0; r--) {
            int row = jAttackTable.convertRowIndexToModel(rows[r]);
            Attack a = (Attack) AttackManager.getSingleton().getDoItYourselfAttacks().get(row);
            a.setUnit(newUnit);
        }
    } else if (evt.getSource() == jAdeptTypeButton) {
        StandardAttack newType = (StandardAttack) jAttackTypeComboBox.getSelectedItem();
        if (newType == null) {
            showError(trans.get("KeinAngriffstyp"));
            return;
        }
        for (int r = rows.length - 1; r >= 0; r--) {
            int row = jAttackTable.convertRowIndexToModel(rows[r]);
            Attack a = (Attack) AttackManager.getSingleton().getDoItYourselfAttacks().get(row);
            a.setType(newType.getIcon());
            // Also reset the troops to match the newly chosen attack type.
            a.setTroopsByType();
        }
    }
    // Persist the modifications and refresh dependent views.
    AttackManager.getSingleton().revalidate(true);
}//GEN-LAST:event_fireAdeptEvent
// Collapses the info panel when the user clicks its message label.
private void jXLabel1fireHideInfoEvent(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_jXLabel1fireHideInfoEvent
    infoPanel.setCollapsed(true);
}//GEN-LAST:event_jXLabel1fireHideInfoEvent
/**
 * Dispatches a clipboard transfer of the current table selection to the
 * matching transfer handler.
 *
 * @param pType the kind of transfer to perform
 */
public void transferSelection(TRANSFER_TYPE pType) {
    switch (pType) {
        case COPY_TO_INTERNAL_CLIPBOARD:
            // plain copy, selection stays in the plan
            copyToInternalClipboard();
            break;
        case CUT_TO_INTERNAL_CLIPBOARD:
            // copy, then remove the selection from the plan
            cutToInternalClipboard();
            break;
        case FROM_INTERNAL_CLIPBOARD:
            // paste previously copied attacks back into the plan
            copyFromInternalClipboard();
            break;
        case BB_TO_CLIPBOARD:
            // export the selection as BB code for forum posts
            copyBBToExternalClipboardEvent();
            break;
        default:
            // other transfer types are not supported by this frame
            break;
    }
}
@Override
public void fireVillagesDraggedEvent(List<Village> pVillages, Point pDropLocation) {
    // Intentional no-op: dropping villages onto this frame is not supported.
}
/**
 * Removes all currently selected attacks from the manual attack plan.
 *
 * @param pAsk if {@code true}, ask the user for confirmation first
 * @return {@code true} if the selection was removed, {@code false} if the
 *         user aborted or nothing was selected while asking
 */
public boolean deleteSelection(boolean pAsk) {
    List<Attack> selectedAttacks = getSelectedAttacks();
    if (pAsk) {
        String countText = (selectedAttacks.size() == 1)
                ? trans.get("Angriff")
                : (selectedAttacks.size() + trans.get("Angriffe"));
        String question = countText + trans.get("wirklichloeschen");
        boolean confirmed = !selectedAttacks.isEmpty()
                && JOptionPaneHelper.showQuestionConfirmBox(this, question, trans.get("Angriffeloeschen"),
                        trans.get("Nein"), trans.get("Ja")) == JOptionPane.YES_OPTION;
        if (!confirmed) {
            return false;
        }
    }
    // Abort any in-progress cell edit before the underlying data changes.
    jAttackTable.editingCanceled(new ChangeEvent(this));
    AttackManager.getSingleton().removeElements(AttackManager.MANUAL_ATTACK_PLAN, selectedAttacks);
    ((DoItYourselfAttackTableModel) jAttackTable.getModel()).fireTableDataChanged();
    showSuccess(selectedAttacks.size() + trans.get("Angriffegeloescht"));
    return true;
}
/**
 * Serializes the selected attacks (internal representation, one per line)
 * and puts the result on the system clipboard.
 *
 * @return {@code true} on success, {@code false} if clipboard access failed
 */
private boolean copyToInternalClipboard() {
    List<Attack> selection = getSelectedAttacks();
    StringBuilder buffer = new StringBuilder();
    for (Attack attack : selection) {
        buffer.append(attack.toInternalRepresentation()).append("\n");
    }
    int copied = selection.size();
    try {
        StringSelection content = new StringSelection(buffer.toString());
        Toolkit.getDefaultToolkit().getSystemClipboard().setContents(content, null);
        showSuccess(copied + ((copied == 1) ? trans.get("Angriffkopiert") : trans.get("Angriffekopiert")));
        return true;
    } catch (HeadlessException hex) {
        showError(trans.get("Error_copie"));
        return false;
    }
}
/**
 * Cuts the selected attacks: copies them to the clipboard and, only if the
 * copy succeeded, deletes them from the plan without asking again.
 */
private void cutToInternalClipboard() {
    int selectionSize = getSelectedAttacks().size();
    boolean cutOk = copyToInternalClipboard() && deleteSelection(false);
    if (cutOk) {
        String suffix = (selectionSize == 1)
                ? trans.get("Angriffausgeschnitten")
                : trans.get("Angriffeausgeschnitten");
        showSuccess(selectionSize + suffix);
    } else {
        showError(trans.get("Error_cut_attack"));
    }
}
/**
 * Inserts attacks from the system clipboard. Each clipboard line is expected
 * to hold one attack in its internal representation; unparsable lines are
 * skipped silently.
 */
private void copyFromInternalClipboard() {
    try {
        // Fix: getContents() may legally return null for an empty clipboard;
        // the previous code dereferenced it unchecked, which could throw an
        // uncaught NullPointerException (only UnsupportedFlavorException and
        // IOException were handled).
        java.awt.datatransfer.Transferable contents =
                Toolkit.getDefaultToolkit().getSystemClipboard().getContents(null);
        if (contents == null) {
            showError(trans.get("Error_paste_attack"));
            return;
        }
        String data = (String) contents.getTransferData(DataFlavor.stringFlavor);
        String[] lines = data.split("\n");
        int cnt = 0;
        // Suspend manager notifications while inserting in bulk.
        AttackManager.getSingleton().invalidate();
        for (String line : lines) {
            Attack a = Attack.fromInternalRepresentation(line);
            if (a != null) {
                AttackManager.getSingleton().addManagedElement(AttackManager.MANUAL_ATTACK_PLAN, a);
                cnt++;
            }
        }
        showSuccess(cnt + ((cnt == 1) ? trans.get("Angriffeingefuegt") : trans.get("Angriffeeingefuegt")));
    } catch (UnsupportedFlavorException | IOException ufe) {
        logger.error("Failed to copy attacks from internal clipboard", ufe);
        showError(trans.get("Error_paste_attack"));
    }
    // Refresh the table and re-enable notifications even after a parse failure.
    ((DoItYourselfAttackTableModel) jAttackTable.getModel()).fireTableDataChanged();
    AttackManager.getSingleton().revalidate();
}
/**
 * Converts the selected attacks to BB code and copies the result to the
 * system clipboard, warning the user first if the generated code is very
 * large.
 */
private void copyBBToExternalClipboardEvent() {
    try {
        List<Attack> attacks = getSelectedAttacks();
        if (attacks.isEmpty()) {
            // Fix: the raw message key was shown before; resolve it through
            // the translator like every other user-facing message in this class.
            showInfo(trans.get("no_reports_insert"));
            return;
        }
        String b = AttackListFormatter.AttackListToBBCodes(this, attacks, trans.get("Angriffsplan"));
        // Rough size heuristic: every BB tag starts with '[', so the token
        // count approximates the number of tags.
        StringTokenizer t = new StringTokenizer(b, "[");
        int cnt = t.countTokens();
        if (cnt > 1000) {
            // very large BB export -- let the user abort before copying
            if (JOptionPaneHelper.showQuestionConfirmBox(this, trans.get("BB_report_tausend"), trans.get("tomutch_BBCode"), trans.get("Nein"), trans.get("Ja")) == JOptionPane.NO_OPTION) {
                return;
            }
        }
        Toolkit.getDefaultToolkit().getSystemClipboard().setContents(new StringSelection(b), null);
        showSuccess(trans.get("Daten_Zwischenablage"));
    } catch (Exception e) {
        logger.error("Failed to copy data to clipboard", e);
        showError(trans.get("Error_copy"));
    }
}
/**
 * Collects the attacks behind the table's current selection.
 *
 * @return the selected attacks; empty if nothing is selected
 */
private List<Attack> getSelectedAttacks() {
    final List<Attack> selectedAttacks = new LinkedList<>();
    int[] selectedRows = jAttackTable.getSelectedRows();
    // Fix: the original condition used '&&', so a null array passed the guard
    // and caused a NullPointerException in the loop below. Treat null and
    // empty identically.
    if (selectedRows == null || selectedRows.length < 1) {
        return selectedAttacks;
    }
    for (int selectedRow : selectedRows) {
        // Translate the view index to the model index before resolving the attack.
        Attack a = (Attack) AttackManager.getSingleton().getAllElements(AttackManager.MANUAL_ATTACK_PLAN).get(jAttackTable.convertRowIndexToModel(selectedRow));
        if (a != null) {
            selectedAttacks.add(a);
        }
    }
    return selectedAttacks;
}
// Variables declaration - do not modify//GEN-BEGIN:variables
private de.tor.tribes.ui.components.CapabilityInfoPanel capabilityInfoPanel1;
private org.jdesktop.swingx.JXCollapsiblePane infoPanel;
private javax.swing.JButton jAdeptTimeButton;
private javax.swing.JButton jAdeptTypeButton;
private javax.swing.JButton jAdeptUnitButton;
private javax.swing.JCheckBox jAlwaysOnTopBox;
private de.tor.tribes.ui.components.DateTimeField jArriveTime;
private org.jdesktop.swingx.JXTable jAttackTable;
private javax.swing.JComboBox jAttackTypeComboBox;
private javax.swing.JButton jButton1;
private javax.swing.JLabel jLabel1;
private javax.swing.JLabel jLabel2;
private javax.swing.JLabel jLabel3;
private javax.swing.JLabel jLabel4;
private javax.swing.JLabel jLabel7;
private javax.swing.JLabel jLabel8;
private javax.swing.JLabel jLabel9;
private de.tor.tribes.ui.components.DateTimeField jNewArriveSpinner;
private javax.swing.JPanel jPanel2;
private javax.swing.JPanel jPanel3;
private javax.swing.JPanel jPanel4;
private javax.swing.JPanel jPanel5;
private javax.swing.JScrollPane jScrollPane2;
private de.tor.tribes.ui.components.CoordinateSpinner jSourceVillage;
private de.tor.tribes.ui.components.CoordinateSpinner jTargetVillage;
private javax.swing.JComboBox jUnitBox;
private javax.swing.JComboBox jUnitComboBox;
private org.jdesktop.swingx.JXLabel jXLabel1;
// End of variables declaration//GEN-END:variables
}
/**
 * Daemon thread that periodically refreshes the countdown display of the
 * "do it yourself" attack planner while its frame is visible.
 */
class DoItYourselfCountdownThread extends Thread {

    public DoItYourselfCountdownThread() {
        setName("DoItYourselfCountdownUpdater");
        setPriority(MIN_PRIORITY);
        // Daemon: must not keep the JVM alive on shutdown.
        setDaemon(true);
    }

    @Override
    public void run() {
        while (true) {
            try {
                if (DSWorkbenchDoItYourselfAttackPlaner.getSingleton().isVisible()) {
                    // Visible: update frequently for a smooth countdown.
                    DSWorkbenchDoItYourselfAttackPlaner.getSingleton().updateCountdown();
                    sleep(100);
                } else {
                    // Hidden: poll slowly to save CPU.
                    sleep(1000);
                }
            } catch (InterruptedException ie) {
                // Fix: interruption was previously swallowed together with all
                // other exceptions, making the thread impossible to stop.
                // Restore the interrupt flag and terminate the loop.
                Thread.currentThread().interrupt();
                return;
            } catch (Exception ignored) {
                // Best effort: any other update failure is ignored and the
                // loop simply tries again on the next tick.
            }
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.distributedlog.impl.metadata;
import static com.google.common.base.Charsets.UTF_8;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static org.apache.distributedlog.DistributedLogConstants.EMPTY_BYTES;
import static org.apache.distributedlog.DistributedLogConstants.UNASSIGNED_LOGSEGMENT_SEQNO;
import static org.apache.distributedlog.metadata.LogMetadata.ALLOCATION_PATH;
import static org.apache.distributedlog.metadata.LogMetadata.LAYOUT_VERSION;
import static org.apache.distributedlog.metadata.LogMetadata.LOCK_PATH;
import static org.apache.distributedlog.metadata.LogMetadata.LOGSEGMENTS_PATH;
import static org.apache.distributedlog.metadata.LogMetadata.MAX_TXID_PATH;
import static org.apache.distributedlog.metadata.LogMetadata.READ_LOCK_PATH;
import static org.apache.distributedlog.metadata.LogMetadata.VERSION_PATH;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Optional;
import com.google.common.collect.Lists;
import java.io.IOException;
import java.net.URI;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.CancellationException;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import org.apache.bookkeeper.common.concurrent.FutureEventListener;
import org.apache.bookkeeper.common.concurrent.FutureUtils;
import org.apache.bookkeeper.common.util.OrderedScheduler;
import org.apache.bookkeeper.stats.StatsLogger;
import org.apache.bookkeeper.versioning.LongVersion;
import org.apache.bookkeeper.versioning.Versioned;
import org.apache.distributedlog.DistributedLogConfiguration;
import org.apache.distributedlog.DistributedLogConstants;
import org.apache.distributedlog.LogSegmentMetadata;
import org.apache.distributedlog.ZooKeeperClient;
import org.apache.distributedlog.ZooKeeperClient.ZooKeeperConnectionException;
import org.apache.distributedlog.common.util.PermitManager;
import org.apache.distributedlog.common.util.SchedulerUtils;
import org.apache.distributedlog.exceptions.DLInterruptedException;
import org.apache.distributedlog.exceptions.InvalidStreamNameException;
import org.apache.distributedlog.exceptions.LockCancelledException;
import org.apache.distributedlog.exceptions.LockingException;
import org.apache.distributedlog.exceptions.LogExistsException;
import org.apache.distributedlog.exceptions.LogNotFoundException;
import org.apache.distributedlog.exceptions.UnexpectedException;
import org.apache.distributedlog.exceptions.ZKException;
import org.apache.distributedlog.impl.ZKLogSegmentMetadataStore;
import org.apache.distributedlog.lock.DistributedLock;
import org.apache.distributedlog.lock.SessionLockFactory;
import org.apache.distributedlog.lock.ZKDistributedLock;
import org.apache.distributedlog.lock.ZKSessionLockFactory;
import org.apache.distributedlog.logsegment.LogSegmentMetadataStore;
import org.apache.distributedlog.metadata.LogMetadata;
import org.apache.distributedlog.metadata.LogMetadataForReader;
import org.apache.distributedlog.metadata.LogMetadataForWriter;
import org.apache.distributedlog.metadata.LogStreamMetadataStore;
import org.apache.distributedlog.util.DLUtils;
import org.apache.distributedlog.util.Transaction;
import org.apache.distributedlog.util.Utils;
import org.apache.distributedlog.zk.LimitedPermitManager;
import org.apache.distributedlog.zk.ZKTransaction;
import org.apache.zookeeper.AsyncCallback;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.KeeperException.Code;
import org.apache.zookeeper.Op;
import org.apache.zookeeper.Op.Create;
import org.apache.zookeeper.Op.Delete;
import org.apache.zookeeper.OpResult;
import org.apache.zookeeper.ZKUtil;
import org.apache.zookeeper.ZooKeeper;
import org.apache.zookeeper.common.PathUtils;
import org.apache.zookeeper.data.ACL;
import org.apache.zookeeper.data.Stat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* zookeeper based {@link LogStreamMetadataStore}.
*/
public class ZKLogStreamMetadataStore implements LogStreamMetadataStore {
private static final Logger LOG = LoggerFactory.getLogger(ZKLogStreamMetadataStore.class);
private final String clientId;
private final DistributedLogConfiguration conf;
private final ZooKeeperClient zooKeeperClient;
private final OrderedScheduler scheduler;
private final StatsLogger statsLogger;
private final LogSegmentMetadataStore logSegmentStore;
private final LimitedPermitManager permitManager;
// lock
private SessionLockFactory lockFactory;
private OrderedScheduler lockStateExecutor;
/**
 * Creates a ZooKeeper based log stream metadata store.
 *
 * @param clientId client id used when creating distributed locks
 * @param conf distributedlog configuration
 * @param zkc zookeeper client
 * @param scheduler ordered scheduler used for async operations
 * @param statsLogger stats logger for lock/metadata stats
 */
public ZKLogStreamMetadataStore(String clientId,
                                DistributedLogConfiguration conf,
                                ZooKeeperClient zkc,
                                OrderedScheduler scheduler,
                                StatsLogger statsLogger) {
    this.clientId = clientId;
    this.conf = conf;
    // assign zooKeeperClient before building the segment store, which reads it
    this.zooKeeperClient = zkc;
    this.scheduler = scheduler;
    this.statsLogger = statsLogger;
    // create the log segment metadata store and the permit manager (used for log segment rolling)
    this.logSegmentStore = new ZKLogSegmentMetadataStore(conf, zooKeeperClient, scheduler);
    this.permitManager = new LimitedPermitManager(
        conf.getLogSegmentRollingConcurrency(),
        1,
        TimeUnit.MINUTES,
        scheduler);
    // register the permit manager for zookeeper session events
    // (presumably so permits react to session expiry -- see LimitedPermitManager)
    this.zooKeeperClient.register(permitManager);
}
/**
 * Returns the scheduler that drives lock state transitions, lazily
 * constructing it on first demand.
 *
 * @param createIfNull whether to build the scheduler if it does not exist yet
 * @return the lock state scheduler, or {@code null} if absent and not created
 */
private synchronized OrderedScheduler getLockStateExecutor(boolean createIfNull) {
    if (lockStateExecutor == null && createIfNull) {
        lockStateExecutor = OrderedScheduler.newSchedulerBuilder()
            .name("DLM-LockState")
            .numThreads(conf.getNumLockStateThreads())
            .build();
    }
    return lockStateExecutor;
}
/**
 * Returns the session lock factory, lazily constructing it (together with
 * the lock state scheduler it depends on) on first demand.
 *
 * @param createIfNull whether to build the factory if it does not exist yet
 * @return the lock factory, or {@code null} if absent and not created
 */
private synchronized SessionLockFactory getLockFactory(boolean createIfNull) {
    if (lockFactory == null && createIfNull) {
        lockFactory = new ZKSessionLockFactory(
            zooKeeperClient,
            clientId,
            getLockStateExecutor(createIfNull),
            conf.getZKNumRetries(),
            conf.getLockTimeoutMilliSeconds(),
            conf.getZKRetryBackoffStartMillis(),
            statsLogger);
    }
    return lockFactory;
}
/**
 * Closes the metadata store: unregisters and closes the permit manager,
 * closes the log segment store, then shuts down the lock state scheduler.
 */
@Override
public void close() throws IOException {
    // stop receiving zookeeper session events before closing the permit manager
    this.zooKeeperClient.unregister(permitManager);
    this.permitManager.close();
    this.logSegmentStore.close();
    // pass 'false' so a scheduler is not created just to be shut down
    SchedulerUtils.shutdownScheduler(
        getLockStateExecutor(false),
        conf.getSchedulerShutdownTimeoutMs(),
        TimeUnit.MILLISECONDS);
}
/** Returns the log segment metadata store backing this stream metadata store. */
@Override
public LogSegmentMetadataStore getLogSegmentMetadataStore() {
    return logSegmentStore;
}
/** Returns the permit manager used to limit concurrent log segment rolling. */
@Override
public PermitManager getPermitManager() {
    return this.permitManager;
}
/** Starts a new zookeeper-backed transaction for batching metadata operations. */
@Override
public Transaction<Object> newTransaction() {
    return new ZKTransaction(zooKeeperClient);
}
/**
 * Asynchronously checks whether the given log exists under the namespace.
 *
 * <p>The returned future completes successfully if the log's segments path
 * exists, fails with {@link LogNotFoundException} if it does not, and fails
 * with {@link ZKException} on any other zookeeper error.
 *
 * @param uri namespace uri
 * @param logName name of the log to check
 * @return future completing when the existence check finishes
 */
@Override
public CompletableFuture<Void> logExists(URI uri, final String logName) {
    final String logSegmentsPath = LogMetadata.getLogSegmentsPath(
        uri, logName, conf.getUnpartitionedStreamName());
    final CompletableFuture<Void> promise = new CompletableFuture<Void>();
    try {
        final ZooKeeper zk = zooKeeperClient.get();
        // sync() first so the following exists() observes the latest state
        // committed to the ensemble rather than a stale server-local view.
        zk.sync(logSegmentsPath, new AsyncCallback.VoidCallback() {
            @Override
            public void processResult(int syncRc, String path, Object syncCtx) {
                if (KeeperException.Code.NONODE.intValue() == syncRc) {
                    promise.completeExceptionally(new LogNotFoundException(
                        String.format("Log %s does not exist or has been deleted", logName)));
                    return;
                } else if (KeeperException.Code.OK.intValue() != syncRc){
                    promise.completeExceptionally(new ZKException("Error on checking log existence for " + logName,
                        KeeperException.create(KeeperException.Code.get(syncRc))));
                    return;
                }
                // sync succeeded -- now perform the actual existence check
                zk.exists(logSegmentsPath, false, new AsyncCallback.StatCallback() {
                    @Override
                    public void processResult(int rc, String path, Object ctx, Stat stat) {
                        if (KeeperException.Code.OK.intValue() == rc) {
                            promise.complete(null);
                        } else if (KeeperException.Code.NONODE.intValue() == rc) {
                            promise.completeExceptionally(new LogNotFoundException(
                                String.format("Log %s does not exist or has been deleted", logName)));
                        } else {
                            promise.completeExceptionally(
                                new ZKException("Error on checking log existence for "
                                    + logName, KeeperException.create(KeeperException.Code.get(rc))));
                        }
                    }
                }, null);
            }
        }, null);
    } catch (InterruptedException ie) {
        // restore the interrupt flag before surfacing the failure
        Thread.currentThread().interrupt();
        LOG.error("Interrupted while reading {}", logSegmentsPath, ie);
        promise.completeExceptionally(new DLInterruptedException("Interrupted while checking "
            + logSegmentsPath, ie));
    } catch (ZooKeeperClient.ZooKeeperConnectionException e) {
        promise.completeExceptionally(e);
    }
    return promise;
}
//
// Create Write Lock
//
/**
 * Creates the zookeeper-backed distributed write lock for the given log.
 *
 * @param metadata metadata of the log to lock, providing the lock path
 * @return a new distributed lock over the log's lock path
 */
@Override
public DistributedLock createWriteLock(LogMetadataForWriter metadata) {
    return new ZKDistributedLock(
        getLockStateExecutor(true),
        getLockFactory(true),
        metadata.getLockPath(),
        conf.getLockTimeoutMilliSeconds(),
        statsLogger);
}
//
// Create Read Lock
//
/**
 * Ensures that the persistent znode backing a reader's lock exists, creating
 * it (but never the log root above it) if necessary.
 *
 * <p>Cancellation of the returned future is translated into a
 * {@link LockCancelledException} so callers can distinguish an aborted lock
 * acquisition from other failures.
 *
 * @param logMetadata metadata of the log the reader locks
 * @param readLockPath full zookeeper path of the read lock node
 * @return future completing once the path exists (or already existed)
 */
private CompletableFuture<Void> ensureReadLockPathExist(final LogMetadata logMetadata,
                                                        final String readLockPath) {
    final CompletableFuture<Void> promise = new CompletableFuture<Void>();
    promise.whenComplete((value, cause) -> {
        if (cause instanceof CancellationException) {
            FutureUtils.completeExceptionally(promise, new LockCancelledException(readLockPath,
                "Could not ensure read lock path", cause));
        }
    });
    // the log root must not be created implicitly: if it is missing, the log is gone
    Optional<String> parentPathShouldNotCreate = Optional.of(logMetadata.getLogRootPath());
    Utils.zkAsyncCreateFullPathOptimisticRecursive(zooKeeperClient, readLockPath, parentPathShouldNotCreate,
        new byte[0], zooKeeperClient.getDefaultACL(), CreateMode.PERSISTENT,
        new org.apache.zookeeper.AsyncCallback.StringCallback() {
            @Override
            public void processResult(final int rc, final String path, Object ctx, String name) {
                if (KeeperException.Code.NONODE.intValue() == rc) {
                    // protected parent missing -- the log does not exist (anymore)
                    FutureUtils.completeExceptionally(promise, new LogNotFoundException(
                        String.format("Log %s does not exist or has been deleted",
                            logMetadata.getFullyQualifiedName())));
                } else if (KeeperException.Code.OK.intValue() == rc) {
                    FutureUtils.complete(promise, null);
                    LOG.trace("Created path {}.", path);
                } else if (KeeperException.Code.NODEEXISTS.intValue() == rc) {
                    // created concurrently by another reader -- success as well
                    FutureUtils.complete(promise, null);
                    LOG.trace("Path {} is already existed.", path);
                } else if (DistributedLogConstants.ZK_CONNECTION_EXCEPTION_RESULT_CODE == rc) {
                    FutureUtils.completeExceptionally(promise,
                        new ZooKeeperClient.ZooKeeperConnectionException(path));
                } else if (DistributedLogConstants.DL_INTERRUPTED_EXCEPTION_RESULT_CODE == rc) {
                    FutureUtils.completeExceptionally(promise, new DLInterruptedException(path));
                } else {
                    FutureUtils.completeExceptionally(promise,
                        KeeperException.create(KeeperException.Code.get(rc)));
                }
            }
        }, null);
    return promise;
}
/**
 * Creates the distributed read lock for the given log, first ensuring that
 * the lock's znode exists.
 *
 * @param metadata log metadata for the reader
 * @param readerId optional reader id used to derive the lock path
 * @return future holding the read lock once its path is ready
 */
@Override
public CompletableFuture<DistributedLock> createReadLock(final LogMetadataForReader metadata,
                                                         Optional<String> readerId) {
    final String readLockPath = metadata.getReadLockPath(readerId);
    return ensureReadLockPathExist(metadata, readLockPath)
        .thenApplyAsync(
            (value) -> new ZKDistributedLock(
                getLockStateExecutor(true),
                getLockFactory(true),
                readLockPath,
                conf.getLockTimeoutMilliSeconds(),
                statsLogger.scope("read_lock")),
            // run the construction on the thread assigned to this lock path
            scheduler.chooseThread(readLockPath));
}
//
// Create Log
//
/**
 * Indices of the metadata znodes in the list produced by
 * {@code checkLogMetadataPaths}. The constants must stay in sync with the
 * order in which that method adds its check futures.
 */
static class MetadataIndex {
    static final int LOG_ROOT_PARENT = 0;
    static final int LOG_ROOT = 1;
    static final int MAX_TXID = 2;
    static final int VERSION = 3;
    static final int LOCK = 4;
    static final int READ_LOCK = 5;
    static final int LOGSEGMENTS = 6;
    // only present when the store owns the allocator
    static final int ALLOCATION = 7;
}
/**
 * Decodes a big-endian 32-bit integer from the first four bytes of the
 * given buffer.
 *
 * <p>Fix: each byte is masked with {@code 0xff} before shifting. Bytes are
 * signed in Java, so without the mask any byte &gt;= 0x80 was sign-extended
 * and corrupted the higher bits of the result (e.g. a trailing 0xff byte
 * forced the whole result to -1).
 *
 * @param b buffer holding at least four bytes
 * @return the decoded integer
 */
static int bytesToInt(byte[] b) {
    assert b.length >= 4;
    return (b[0] & 0xff) << 24
        | (b[1] & 0xff) << 16
        | (b[2] & 0xff) << 8
        | (b[3] & 0xff);
}
/**
 * Encodes an integer as four big-endian bytes.
 *
 * @param i the value to encode
 * @return a new four-byte array, most significant byte first
 */
static byte[] intToBytes(int i) {
    byte[] encoded = new byte[4];
    encoded[0] = (byte) (i >> 24);
    encoded[1] = (byte) (i >> 16);
    encoded[2] = (byte) (i >> 8);
    encoded[3] = (byte) i;
    return encoded;
}
/**
 * Fetches, in parallel, the metadata znodes that make up a log's layout.
 *
 * <p>The check futures are added in a fixed order that must line up with the
 * constants in {@link MetadataIndex}; consumers index the returned list with
 * those constants. A missing path yields an entry whose value/version are
 * null (see {@code pathExists}).
 *
 * @param zk zookeeper handle
 * @param logRootPath root path of the log
 * @param ownAllocator whether the allocation path is owned (and thus checked) too
 * @return future holding one versioned entry per checked path
 */
static CompletableFuture<List<Versioned<byte[]>>> checkLogMetadataPaths(ZooKeeper zk,
                                                                        String logRootPath,
                                                                        boolean ownAllocator) {
    // Note re. persistent lock state initialization: the read lock persistent state (path) is
    // initialized here but only used in the read handler. The reason is its more convenient and
    // less error prone to manage all stream structure in one place.
    final String logRootParentPath = Utils.getParent(logRootPath);
    final String logSegmentsPath = logRootPath + LOGSEGMENTS_PATH;
    final String maxTxIdPath = logRootPath + MAX_TXID_PATH;
    final String lockPath = logRootPath + LOCK_PATH;
    final String readLockPath = logRootPath + READ_LOCK_PATH;
    final String versionPath = logRootPath + VERSION_PATH;
    final String allocationPath = logRootPath + ALLOCATION_PATH;
    int numPaths = ownAllocator ? MetadataIndex.ALLOCATION + 1 : MetadataIndex.LOGSEGMENTS + 1;
    List<CompletableFuture<Versioned<byte[]>>> checkFutures = Lists.newArrayListWithExpectedSize(numPaths);
    // keep this order in sync with MetadataIndex
    checkFutures.add(Utils.zkGetData(zk, logRootParentPath, false));
    checkFutures.add(Utils.zkGetData(zk, logRootPath, false));
    checkFutures.add(Utils.zkGetData(zk, maxTxIdPath, false));
    checkFutures.add(Utils.zkGetData(zk, versionPath, false));
    checkFutures.add(Utils.zkGetData(zk, lockPath, false));
    checkFutures.add(Utils.zkGetData(zk, readLockPath, false));
    checkFutures.add(Utils.zkGetData(zk, logSegmentsPath, false));
    if (ownAllocator) {
        checkFutures.add(Utils.zkGetData(zk, allocationPath, false));
    }
    return FutureUtils.collect(checkFutures);
}
/**
 * Returns whether the metadata entry was actually found in zookeeper, i.e.
 * whether it carries both a value and a version.
 */
static boolean pathExists(Versioned<byte[]> metadata) {
    boolean hasValue = metadata.getValue() != null;
    boolean hasVersion = metadata.getVersion() != null;
    return hasValue && hasVersion;
}
/**
 * Asserts that the given metadata entry exists (has both a value and a
 * version), throwing a {@link NullPointerException} otherwise.
 */
static void ensureMetadataExist(Versioned<byte[]> metadata) {
    checkNotNull(metadata.getValue());
    checkNotNull(metadata.getVersion());
}
/**
 * Creates, in a single ZooKeeper transaction, every metadata znode that the
 * preceding existence check found missing, then completes {@code promise}
 * with the full metadata list.
 *
 * <p>{@code pathsToCreate} is built positionally aligned with
 * {@code metadatas}: a {@code null} entry marks a znode that already exists,
 * a non-null entry is the payload queued for creation. That alignment is
 * consumed by {@code executeCreateMissingPathTxn} when assembling the result.
 *
 * @param zk zookeeper client
 * @param basePath namespace base path (used to discover missing ancestors)
 * @param logRootPath root path of this log's metadata
 * @param metadatas existence-check results indexed by {@code MetadataIndex}
 * @param acl ACLs applied to any created znodes
 * @param ownAllocator whether the allocation znode is managed by this store
 * @param createIfNotExists when false, any missing znode fails the promise
 *                          with {@code LogNotFoundException} instead
 * @param promise completed with the final metadata, or exceptionally on error
 */
static void createMissingMetadata(final ZooKeeper zk,
                                  final String basePath,
                                  final String logRootPath,
                                  final List<Versioned<byte[]>> metadatas,
                                  final List<ACL> acl,
                                  final boolean ownAllocator,
                                  final boolean createIfNotExists,
                                  final CompletableFuture<List<Versioned<byte[]>>> promise) {
    final List<byte[]> pathsToCreate = Lists.newArrayListWithExpectedSize(metadatas.size());
    final List<Op> zkOps = Lists.newArrayListWithExpectedSize(metadatas.size());
    CreateMode createMode = CreateMode.PERSISTENT;
    // log root parent path
    String logRootParentPath = Utils.getParent(logRootPath);
    if (pathExists(metadatas.get(MetadataIndex.LOG_ROOT_PARENT))) {
        pathsToCreate.add(null);
    } else {
        pathsToCreate.add(EMPTY_BYTES);
        zkOps.add(Op.create(logRootParentPath, EMPTY_BYTES, acl, createMode));
    }
    // log root path
    if (pathExists(metadatas.get(MetadataIndex.LOG_ROOT))) {
        pathsToCreate.add(null);
    } else {
        pathsToCreate.add(EMPTY_BYTES);
        zkOps.add(Op.create(logRootPath, EMPTY_BYTES, acl, createMode));
    }
    // max id: seeded with transaction id 0
    if (pathExists(metadatas.get(MetadataIndex.MAX_TXID))) {
        pathsToCreate.add(null);
    } else {
        byte[] zeroTxnIdData = DLUtils.serializeTransactionId(0L);
        pathsToCreate.add(zeroTxnIdData);
        zkOps.add(Op.create(logRootPath + MAX_TXID_PATH, zeroTxnIdData, acl, createMode));
    }
    // version: stamped with the current layout version
    if (pathExists(metadatas.get(MetadataIndex.VERSION))) {
        pathsToCreate.add(null);
    } else {
        byte[] versionData = intToBytes(LAYOUT_VERSION);
        pathsToCreate.add(versionData);
        zkOps.add(Op.create(logRootPath + VERSION_PATH, versionData, acl, createMode));
    }
    // lock path
    if (pathExists(metadatas.get(MetadataIndex.LOCK))) {
        pathsToCreate.add(null);
    } else {
        pathsToCreate.add(EMPTY_BYTES);
        zkOps.add(Op.create(logRootPath + LOCK_PATH, EMPTY_BYTES, acl, createMode));
    }
    // read lock path
    if (pathExists(metadatas.get(MetadataIndex.READ_LOCK))) {
        pathsToCreate.add(null);
    } else {
        pathsToCreate.add(EMPTY_BYTES);
        zkOps.add(Op.create(logRootPath + READ_LOCK_PATH, EMPTY_BYTES, acl, createMode));
    }
    // log segments path: seeded with the unassigned sequence number
    if (pathExists(metadatas.get(MetadataIndex.LOGSEGMENTS))) {
        pathsToCreate.add(null);
    } else {
        byte[] logSegmentsData = DLUtils.serializeLogSegmentSequenceNumber(
            DistributedLogConstants.UNASSIGNED_LOGSEGMENT_SEQNO);
        pathsToCreate.add(logSegmentsData);
        zkOps.add(Op.create(logRootPath + LOGSEGMENTS_PATH, logSegmentsData, acl, createMode));
    }
    // allocation path (only when this store owns the ledger allocator)
    if (ownAllocator) {
        if (pathExists(metadatas.get(MetadataIndex.ALLOCATION))) {
            pathsToCreate.add(null);
        } else {
            pathsToCreate.add(EMPTY_BYTES);
            zkOps.add(Op.create(logRootPath + ALLOCATION_PATH,
                EMPTY_BYTES, acl, createMode));
        }
    }
    if (zkOps.isEmpty()) {
        // nothing missed
        promise.complete(metadatas);
        return;
    }
    if (!createIfNotExists) {
        promise.completeExceptionally(new LogNotFoundException("Log " + logRootPath + " not found"));
        return;
    }
    // Discover any missing ancestors above the log root parent, prepend their
    // create ops, then run the whole batch as one transaction.
    getMissingPaths(zk, basePath, Utils.getParent(logRootParentPath))
        .whenComplete(new FutureEventListener<List<String>>() {
            @Override
            public void onSuccess(List<String> paths) {
                for (String path : paths) {
                    pathsToCreate.add(EMPTY_BYTES);
                    // Insert at the front so parents are created before children
                    // (getMissingPaths yields deepest path first).
                    zkOps.add(
                        0, Op.create(path, EMPTY_BYTES, acl, createMode));
                }
                executeCreateMissingPathTxn(
                    zk,
                    zkOps,
                    pathsToCreate,
                    metadatas,
                    logRootPath,
                    promise
                );
            }
            @Override
            public void onFailure(Throwable cause) {
                promise.completeExceptionally(cause);
                return;
            }
        });
}
/**
 * Executes the queued create ops as one ZooKeeper multi transaction and
 * completes {@code promise} with the merged metadata list.
 *
 * <p>{@code pathsToCreate} is positionally aligned with {@code metadatas}:
 * a {@code null} entry means the znode pre-existed (keep the fetched
 * metadata), a non-null entry is the payload that was just created and is
 * wrapped at version 0.
 *
 * @param zk zookeeper client
 * @param zkOps create ops to run atomically
 * @param pathsToCreate created payloads, aligned with {@code metadatas}
 * @param metadatas metadata fetched by the existence check
 * @param logRootPath log root path, used in error messages
 * @param promise completed with the merged metadata, or exceptionally with
 *                {@code LogExistsException} on a create race, or
 *                {@code ZKException} on any other failure
 */
private static void executeCreateMissingPathTxn(ZooKeeper zk,
                                                List<Op> zkOps,
                                                List<byte[]> pathsToCreate,
                                                List<Versioned<byte[]>> metadatas,
                                                String logRootPath,
                                                CompletableFuture<List<Versioned<byte[]>>> promise) {
    zk.multi(zkOps, new AsyncCallback.MultiCallback() {
        @Override
        public void processResult(int rc, String path, Object ctx, List<OpResult> resultList) {
            if (KeeperException.Code.OK.intValue() == rc) {
                // Merge: keep pre-existing metadata, wrap fresh payloads at version 0.
                List<Versioned<byte[]>> finalMetadatas =
                    Lists.newArrayListWithExpectedSize(metadatas.size());
                for (int i = 0; i < pathsToCreate.size(); i++) {
                    byte[] dataCreated = pathsToCreate.get(i);
                    if (null == dataCreated) {
                        finalMetadatas.add(metadatas.get(i));
                    } else {
                        finalMetadatas.add(new Versioned<byte[]>(dataCreated, new LongVersion(0)));
                    }
                }
                promise.complete(finalMetadatas);
            } else if (KeeperException.Code.NODEEXISTS.intValue() == rc) {
                // Another client created the same log concurrently.
                promise.completeExceptionally(new LogExistsException("Someone just created log "
                    + logRootPath));
            } else {
                if (LOG.isDebugEnabled()) {
                    // Build a comma-separated list of per-op result codes for debugging.
                    StringBuilder builder = new StringBuilder();
                    for (OpResult result : resultList) {
                        if (result instanceof OpResult.ErrorResult) {
                            OpResult.ErrorResult errorResult = (OpResult.ErrorResult) result;
                            builder.append(errorResult.getErr()).append(",");
                        } else {
                            builder.append(0).append(",");
                        }
                    }
                    // Drop the trailing comma.
                    String resultCodeList = builder.substring(0, builder.length() - 1);
                    LOG.debug("Failed to create log, full rc list = {}", resultCodeList);
                }
                promise.completeExceptionally(new ZKException("Failed to create log " + logRootPath,
                    KeeperException.Code.get(rc)));
            }
        }
    }, null);
}
/**
 * Validates the fetched metadata znodes and assembles the
 * {@code LogMetadataForWriter} handed to writers.
 *
 * @param uri namespace uri
 * @param logName log stream name (used in error messages)
 * @param logIdentifier log identifier
 * @param metadatas metadata list indexed by {@code MetadataIndex}
 * @param ownAllocator whether the allocation znode must be present
 * @return the assembled writer metadata
 * @throws UnexpectedException if a required znode is missing or carries an
 *         unparsable/unsupported payload
 */
static LogMetadataForWriter processLogMetadatas(URI uri,
                                                String logName,
                                                String logIdentifier,
                                                List<Versioned<byte[]>> metadatas,
                                                boolean ownAllocator)
        throws UnexpectedException {
    try {
        // max id
        Versioned<byte[]> maxTxnIdData = metadatas.get(MetadataIndex.MAX_TXID);
        ensureMetadataExist(maxTxnIdData);
        // version
        Versioned<byte[]> versionData = metadatas.get(MetadataIndex.VERSION);
        // BUGFIX: previously this line re-checked maxTxnIdData (copy-paste),
        // so a missing VERSION znode slipped past validation and only failed
        // later in bytesToInt with a less precise NPE.
        ensureMetadataExist(versionData);
        checkArgument(LAYOUT_VERSION == bytesToInt(versionData.getValue()));
        // lock path
        ensureMetadataExist(metadatas.get(MetadataIndex.LOCK));
        // read lock path
        ensureMetadataExist(metadatas.get(MetadataIndex.READ_LOCK));
        // max lssn
        Versioned<byte[]> maxLSSNData = metadatas.get(MetadataIndex.LOGSEGMENTS);
        ensureMetadataExist(maxLSSNData);
        try {
            // Parse only to validate; the raw versioned bytes are passed on.
            DLUtils.deserializeLogSegmentSequenceNumber(maxLSSNData.getValue());
        } catch (NumberFormatException nfe) {
            throw new UnexpectedException("Invalid max sequence number found in log " + logName, nfe);
        }
        // allocation path
        Versioned<byte[]> allocationData;
        if (ownAllocator) {
            allocationData = metadatas.get(MetadataIndex.ALLOCATION);
            ensureMetadataExist(allocationData);
        } else {
            // No allocator owned here: placeholder with neither value nor version.
            allocationData = new Versioned<byte[]>(null, null);
        }
        return new LogMetadataForWriter(uri, logName, logIdentifier,
            maxLSSNData, maxTxnIdData, allocationData);
    } catch (IllegalArgumentException iae) {
        throw new UnexpectedException("Invalid log " + logName, iae);
    } catch (NullPointerException npe) {
        throw new UnexpectedException("Invalid log " + logName, npe);
    }
}
/**
 * Loads the writer metadata for the given log, creating any missing znodes
 * when {@code createIfNotExists} is set.
 *
 * <p>Pipeline: validate the root path, check which metadata znodes exist,
 * create the missing ones in one transaction, then validate and assemble the
 * result via {@code processLogMetadatas}.
 *
 * @param uri namespace uri
 * @param logName log stream name
 * @param logIdentifier log identifier
 * @param zooKeeperClient zookeeper client wrapper
 * @param ownAllocator whether the allocation znode is managed by this store
 * @param createIfNotExists create missing metadata znodes when true
 * @return future with the writer metadata
 */
static CompletableFuture<LogMetadataForWriter> getLog(final URI uri,
                                                      final String logName,
                                                      final String logIdentifier,
                                                      final ZooKeeperClient zooKeeperClient,
                                                      final boolean ownAllocator,
                                                      final boolean createIfNotExists) {
    final String logRootPath = LogMetadata.getLogRootPath(uri, logName, logIdentifier);
    try {
        PathUtils.validatePath(logRootPath);
    } catch (IllegalArgumentException e) {
        LOG.error("Illegal path value {} for stream {}", new Object[]{logRootPath, logName, e});
        return FutureUtils.exception(new InvalidStreamNameException(logName, "Log name is invalid"));
    }
    try {
        final ZooKeeper zk = zooKeeperClient.get();
        return checkLogMetadataPaths(zk, logRootPath, ownAllocator)
            .thenCompose(existingMetadatas -> {
                // Bridge the callback-style create API into the compose chain.
                CompletableFuture<List<Versioned<byte[]>>> createPromise =
                    new CompletableFuture<List<Versioned<byte[]>>>();
                createMissingMetadata(
                    zk,
                    uri.getPath(),
                    logRootPath,
                    existingMetadatas,
                    zooKeeperClient.getDefaultACL(),
                    ownAllocator,
                    createIfNotExists,
                    createPromise);
                return createPromise;
            })
            .thenCompose(allMetadatas -> {
                try {
                    return FutureUtils.value(processLogMetadatas(
                        uri, logName, logIdentifier, allMetadatas, ownAllocator));
                } catch (UnexpectedException ue) {
                    return FutureUtils.exception(ue);
                }
            });
    } catch (ZooKeeperClient.ZooKeeperConnectionException e) {
        return FutureUtils.exception(
            new ZKException("Encountered zookeeper connection issue on creating log "
                + logName, KeeperException.Code.CONNECTIONLOSS));
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        return FutureUtils.exception(new DLInterruptedException("Interrupted on creating log " + logName, e));
    }
}
/**
 * Fetches the writer metadata for the unpartitioned stream of
 * {@code logName} under {@code uri}, delegating to the static
 * {@code getLog} with this store's zookeeper client.
 *
 * @param uri namespace uri
 * @param logName log stream name
 * @param ownAllocator whether the allocation znode is managed by this store
 * @param createIfNotExists create missing metadata znodes when true
 * @return future with the writer metadata
 */
@Override
public CompletableFuture<LogMetadataForWriter> getLog(final URI uri,
                                                      final String logName,
                                                      final boolean ownAllocator,
                                                      final boolean createIfNotExists) {
    return getLog(
        uri,
        logName,
        conf.getUnpartitionedStreamName(),
        zooKeeperClient,
        ownAllocator,
        createIfNotExists);
}
//
// Delete Log
//
/**
 * Deletes the whole znode tree of log stream {@code logName} under
 * {@code uri}.
 *
 * @param uri namespace uri
 * @param logName name of the log stream to delete
 * @return future completed when the recursive delete finishes; failed with
 *         {@code ZKException} on zookeeper errors or
 *         {@code DLInterruptedException} if the calling thread is interrupted
 */
@Override
public CompletableFuture<Void> deleteLog(URI uri, final String logName) {
    final CompletableFuture<Void> promise = new CompletableFuture<Void>();
    try {
        String streamPath = LogMetadata.getLogStreamPath(uri, logName);
        // Asynchronously delete the entire subtree rooted at the stream path.
        ZKUtil.deleteRecursive(zooKeeperClient.get(), streamPath, new AsyncCallback.VoidCallback() {
            @Override
            public void processResult(int rc, String path, Object ctx) {
                if (KeeperException.Code.OK.intValue() != rc) {
                    FutureUtils.completeExceptionally(promise,
                        new ZKException("Encountered zookeeper issue on deleting log stream "
                            + logName, KeeperException.Code.get(rc)));
                    return;
                }
                FutureUtils.complete(promise, null);
            }
        }, null);
    } catch (ZooKeeperClient.ZooKeeperConnectionException e) {
        FutureUtils.completeExceptionally(promise,
            new ZKException("Encountered zookeeper issue on deleting log stream "
                + logName, KeeperException.Code.CONNECTIONLOSS));
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        // FIX: propagate the original InterruptedException as the cause, for
        // consistency with getLog() which passes it along for diagnostics.
        FutureUtils.completeExceptionally(promise,
            new DLInterruptedException("Interrupted while deleting log stream " + logName, e));
    } catch (KeeperException e) {
        FutureUtils.completeExceptionally(promise,
            new ZKException("Encountered zookeeper issue on deleting log stream "
                + logName, e));
    }
    return promise;
}
//
// Rename Log
//
/**
 * Renames a log stream by loading the old stream's writer metadata
 * (ownAllocator = true, createIfNotExists = false, so a missing log fails)
 * and transactionally copying it under the new stream name.
 *
 * @param uri namespace uri
 * @param oldStreamName stream to rename
 * @param newStreamName target stream name
 * @return future completed when the rename transaction finishes
 */
@Override
public CompletableFuture<Void> renameLog(URI uri, String oldStreamName, String newStreamName) {
    return getLog(
        uri,
        oldStreamName,
        true,
        false
    ).thenCompose(metadata -> renameLogMetadata(uri, metadata, newStreamName));
}
/**
 * Builds and executes the ZooKeeper transaction that moves all metadata of
 * {@code oldMetadata} to the root path derived from {@code newStreamName}.
 *
 * <p>Create ops are appended (parent before child) while delete ops are
 * prepended (child before parent), so the combined transaction creates the
 * new tree top-down and deletes the old tree bottom-up.
 *
 * @param uri namespace uri
 * @param oldMetadata writer metadata of the stream being renamed
 * @param newStreamName target stream name
 * @return future completed when the rename transaction finishes
 */
private CompletableFuture<Void> renameLogMetadata(URI uri,
                                                  LogMetadataForWriter oldMetadata,
                                                  String newStreamName) {
    final LinkedList<Op> createOps = Lists.newLinkedList();
    final LinkedList<Op> deleteOps = Lists.newLinkedList();
    List<ACL> acls = zooKeeperClient.getDefaultACL();
    // get the root path
    String oldRootPath = oldMetadata.getLogRootPath();
    String newRootPath = LogMetadata.getLogRootPath(
        uri, newStreamName, conf.getUnpartitionedStreamName());
    // 0. the log path
    deleteOps.addFirst(Op.delete(
        LogMetadata.getLogStreamPath(uri, oldMetadata.getLogName()), -1));
    // 1. the root path
    createOps.addLast(Op.create(
        newRootPath, EMPTY_BYTES, acls, CreateMode.PERSISTENT));
    deleteOps.addFirst(Op.delete(
        oldRootPath, -1));
    // 2. max id (copied when present, otherwise seeded with txn id 0)
    Versioned<byte[]> maxTxIdData = oldMetadata.getMaxTxIdData();
    deleteOldPathAndCreateNewPath(
        oldRootPath, MAX_TXID_PATH, maxTxIdData,
        newRootPath, DLUtils.serializeTransactionId(0L), acls,
        createOps, deleteOps
    );
    // 3. version (always re-stamped with the current layout version)
    createOps.addLast(Op.create(
        newRootPath + VERSION_PATH, intToBytes(LAYOUT_VERSION), acls, CreateMode.PERSISTENT));
    deleteOps.addFirst(Op.delete(
        oldRootPath + VERSION_PATH, -1));
    // 4. lock path (NOTE: if the stream is locked by a writer, then the delete will fail as you can not
    // delete the lock path if children is not empty.
    createOps.addLast(Op.create(
        newRootPath + LOCK_PATH, EMPTY_BYTES, acls, CreateMode.PERSISTENT));
    deleteOps.addFirst(Op.delete(
        oldRootPath + LOCK_PATH, -1));
    // 5. read lock path (NOTE: same reason as the write lock)
    createOps.addLast(Op.create(
        newRootPath + READ_LOCK_PATH, EMPTY_BYTES, acls, CreateMode.PERSISTENT));
    deleteOps.addFirst(Op.delete(
        oldRootPath + READ_LOCK_PATH, -1));
    // 6. allocation path
    Versioned<byte[]> allocationData = oldMetadata.getAllocationData();
    deleteOldPathAndCreateNewPath(
        oldRootPath, ALLOCATION_PATH, allocationData,
        newRootPath, EMPTY_BYTES, acls,
        createOps, deleteOps);
    // 7. log segments
    Versioned<byte[]> maxLSSNData = oldMetadata.getMaxLSSNData();
    deleteOldPathAndCreateNewPath(
        oldRootPath, LOGSEGMENTS_PATH, maxLSSNData,
        newRootPath, DLUtils.serializeLogSegmentSequenceNumber(UNASSIGNED_LOGSEGMENT_SEQNO), acls,
        createOps, deleteOps);
    // 8. copy the log segments (only if the old segments znode exists)
    CompletableFuture<List<LogSegmentMetadata>> segmentsFuture;
    if (pathExists(maxLSSNData)) {
        segmentsFuture = getLogSegments(zooKeeperClient, oldRootPath + LOGSEGMENTS_PATH);
    } else {
        segmentsFuture = FutureUtils.value(Collections.emptyList());
    }
    return segmentsFuture
        // copy the segments
        .thenApply(segments -> {
            for (LogSegmentMetadata segment : segments) {
                deleteOldSegmentAndCreateNewSegment(
                    segment,
                    newRootPath + LOGSEGMENTS_PATH,
                    acls,
                    createOps,
                    deleteOps);
            }
            return null;
        })
        // get the missing paths
        .thenCompose(ignored ->
            getMissingPaths(zooKeeperClient, uri, newStreamName)
        )
        // create the missing paths and execute the rename transaction
        .thenCompose(paths -> {
            for (String path : paths) {
                // Prepend so ancestors are created before the new root path.
                createOps.addFirst(Op.create(
                    path, EMPTY_BYTES, acls, CreateMode.PERSISTENT));
            }
            return executeRenameTxn(oldRootPath, newRootPath, createOps, deleteOps);
        });
}
/**
 * Computes the ancestors of {@code logName}'s stream path that do not exist
 * yet under the namespace {@code uri}.
 *
 * @param zkc zookeeper client wrapper
 * @param uri namespace uri; its path is where the upward walk stops
 * @param logName log stream name
 * @return future with the missing paths (deepest path first)
 */
@VisibleForTesting
static CompletableFuture<List<String>> getMissingPaths(ZooKeeperClient zkc, URI uri, String logName) {
    ZooKeeper zk;
    try {
        zk = zkc.get();
    } catch (ZooKeeperConnectionException | InterruptedException e) {
        // FIX: restore the interrupt status before failing the future, for
        // consistency with every other entry point in this class.
        if (e instanceof InterruptedException) {
            Thread.currentThread().interrupt();
        }
        return FutureUtils.exception(e);
    }
    String basePath = uri.getPath();
    String logStreamPath = LogMetadata.getLogStreamPath(uri, logName);
    return getMissingPaths(zk, basePath, logStreamPath);
}
/**
 * Collects the znodes missing between {@code logStreamPath} (inclusive) and
 * {@code basePath} (exclusive), walking upwards one parent at a time.
 *
 * @param zk zookeeper client
 * @param basePath namespace base path where the walk stops
 * @param logStreamPath deepest path to start checking from
 * @return future with the missing paths, deepest path first
 */
@VisibleForTesting
static CompletableFuture<List<String>> getMissingPaths(ZooKeeper zk, String basePath, String logStreamPath) {
    CompletableFuture<List<String>> result = FutureUtils.createFuture();
    LinkedList<String> accumulator = Lists.newLinkedList();
    existPath(zk, logStreamPath, basePath, accumulator, result);
    return result;
}
/**
 * Asynchronously walks from {@code path} up towards {@code basePath},
 * appending every znode that does not exist to {@code missingPaths}, and
 * completes {@code future} with the accumulated list once an existing
 * ancestor (or the base path itself) is reached.
 *
 * @param zk zookeeper client
 * @param path current znode being checked
 * @param basePath stopping point of the walk (assumed to exist)
 * @param missingPaths accumulator, filled deepest path first
 * @param future completed with {@code missingPaths} on success, or
 *               exceptionally with {@code ZKException} on zk errors
 */
private static void existPath(ZooKeeper zk,
                              String path,
                              String basePath,
                              LinkedList<String> missingPaths,
                              CompletableFuture<List<String>> future) {
    if (basePath.equals(path)) {
        // Reached the namespace base: everything below it has been recorded.
        future.complete(missingPaths);
        return;
    }
    zk.exists(path, false, (rc, path1, ctx, stat) -> {
        if (Code.OK.intValue() != rc && Code.NONODE.intValue() != rc) {
            future.completeExceptionally(new ZKException("Failed to check existence of path " + path1,
                Code.get(rc)));
            return;
        }
        if (Code.OK.intValue() == rc) {
            // Found an existing ancestor; nothing above it needs creating.
            future.complete(missingPaths);
            return;
        }
        // Path is missing: record it and continue with its parent.
        missingPaths.addLast(path);
        String parentPath = Utils.getParent(path);
        existPath(zk, parentPath, basePath, missingPaths, future);
    }, null);
}
/**
 * Runs the rename as a single ZooKeeper multi transaction: all create ops
 * first (parent-before-child order), then all delete ops
 * (child-before-parent order).
 *
 * @param oldLogPath root path being renamed away from (used in errors)
 * @param newLogPath root path being renamed to (used in errors)
 * @param createOps queued create ops
 * @param deleteOps queued delete ops
 * @return future completed when the transaction commits; failed with
 *         {@code LogExistsException} if the target already exists,
 *         {@code LockingException} if the old log's lock path is non-empty,
 *         {@code LogNotFoundException} or {@code ZKException} otherwise
 */
private CompletableFuture<Void> executeRenameTxn(String oldLogPath,
                                                 String newLogPath,
                                                 LinkedList<Op> createOps,
                                                 LinkedList<Op> deleteOps) {
    CompletableFuture<Void> future = FutureUtils.createFuture();
    List<Op> zkOps = Lists.newArrayListWithExpectedSize(createOps.size() + deleteOps.size());
    zkOps.addAll(createOps);
    zkOps.addAll(deleteOps);
    if (LOG.isDebugEnabled()) {
        // Trace every op in the transaction for troubleshooting.
        for (Op op : zkOps) {
            if (op instanceof Create) {
                Create create = (Create) op;
                LOG.debug("op : create {}", create.getPath());
            } else if (op instanceof Delete) {
                Delete delete = (Delete) op;
                LOG.debug("op : delete {}, record = {}", delete.getPath(), op.toRequestRecord());
            } else {
                LOG.debug("op : {}", op);
            }
        }
    }
    try {
        zooKeeperClient.get().multi(zkOps, (rc, path, ctx, opResults) -> {
            if (Code.OK.intValue() == rc) {
                future.complete(null);
            } else if (Code.NODEEXISTS.intValue() == rc) {
                // The target log was created concurrently by someone else.
                future.completeExceptionally(new LogExistsException("Someone just created new log " + newLogPath));
            } else if (Code.NOTEMPTY.intValue() == rc) {
                // A lock znode still has children, i.e. a writer holds the lock.
                future.completeExceptionally(new LockingException(oldLogPath + LOCK_PATH,
                    "Someone is holding a lock on log " + oldLogPath));
            } else if (Code.NONODE.intValue() == rc) {
                future.completeExceptionally(new LogNotFoundException("Log " + newLogPath + " is not found"));
            } else {
                future.completeExceptionally(new ZKException("Failed to rename log "
                    + oldLogPath + " to " + newLogPath + " at path " + path, Code.get(rc)));
            }
        }, null);
    } catch (ZooKeeperConnectionException e) {
        future.completeExceptionally(e);
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        future.completeExceptionally(e);
    }
    return future;
}
/**
 * Queues the ops that move a single log segment znode: re-create it (with its
 * finalised payload) under the new segments path and delete the original.
 *
 * @param oldMetadata segment metadata being moved
 * @param newSegmentsPath segments znode of the renamed log
 * @param acls ACLs for the created znode
 * @param createOps create queue, appended to
 * @param deleteOps delete queue, prepended to
 */
private static void deleteOldSegmentAndCreateNewSegment(LogSegmentMetadata oldMetadata,
                                                        String newSegmentsPath,
                                                        List<ACL> acls,
                                                        LinkedList<Op> createOps,
                                                        LinkedList<Op> deleteOps) {
    String newSegmentPath = newSegmentsPath + "/" + oldMetadata.getZNodeName();
    byte[] segmentPayload = oldMetadata.getFinalisedData().getBytes(UTF_8);
    createOps.addLast(Op.create(newSegmentPath, segmentPayload, acls, CreateMode.PERSISTENT));
    // Delete the original segment znode unconditionally (version -1).
    deleteOps.addFirst(Op.delete(oldMetadata.getZkPath(), -1));
}
/**
 * Queues the ops that move one metadata znode from the old log root to the
 * new one. When the old znode exists its payload is carried over and its
 * deletion is version-guarded; otherwise only a create with {@code initData}
 * is queued.
 *
 * @param oldRootPath old log root path
 * @param nodePath relative znode path (e.g. the max-txid suffix)
 * @param pathData versioned payload read from the old znode
 * @param newRootPath new log root path
 * @param initData payload to seed the new znode with when the old one is absent
 * @param acls ACLs for the created znode
 * @param createOps create queue, appended to
 * @param deleteOps delete queue, prepended to
 */
private static void deleteOldPathAndCreateNewPath(String oldRootPath,
                                                  String nodePath,
                                                  Versioned<byte[]> pathData,
                                                  String newRootPath,
                                                  byte[] initData,
                                                  List<ACL> acls,
                                                  LinkedList<Op> createOps,
                                                  LinkedList<Op> deleteOps) {
    final boolean oldNodeExists = pathExists(pathData);
    // Carry the old payload across when present; otherwise seed with initData.
    byte[] newPayload = oldNodeExists ? pathData.getValue() : initData;
    createOps.addLast(Op.create(newRootPath + nodePath, newPayload, acls, CreateMode.PERSISTENT));
    if (oldNodeExists) {
        // Guard the delete with the exact version we read, so a concurrent
        // writer aborts the whole rename transaction.
        int expectedVersion = (int) ((LongVersion) pathData.getVersion()).getLongVersion();
        deleteOps.addFirst(Op.delete(oldRootPath + nodePath, expectedVersion));
    }
}
/**
 * Reads all log segment metadata stored as children of
 * {@code logSegmentsPath}.
 *
 * @param zk zookeeper client wrapper
 * @param logSegmentsPath znode whose children are the log segments
 * @return future with the parsed segment metadata; failed with
 *         {@code LogNotFoundException} when the path does not exist, or
 *         {@code ZKException} on other zookeeper errors
 */
@VisibleForTesting
static CompletableFuture<List<LogSegmentMetadata>> getLogSegments(ZooKeeperClient zk,
                                                                  String logSegmentsPath) {
    CompletableFuture<List<LogSegmentMetadata>> future = FutureUtils.createFuture();
    try {
        zk.get().getChildren(logSegmentsPath, false, (rc, path, ctx, children, stat) -> {
            if (Code.OK.intValue() != rc) {
                if (Code.NONODE.intValue() == rc) {
                    future.completeExceptionally(new LogNotFoundException("Log " + path + " not found"));
                } else {
                    future.completeExceptionally(new ZKException("Failed to get log segments from " + path,
                        Code.get(rc)));
                }
                return;
            }
            // get all the segments: read each child znode concurrently and
            // forward the collected result to the returned future.
            List<CompletableFuture<LogSegmentMetadata>> futures =
                Lists.newArrayListWithExpectedSize(children.size());
            for (String child : children) {
                futures.add(LogSegmentMetadata.read(zk, logSegmentsPath + "/" + child));
            }
            FutureUtils.proxyTo(
                FutureUtils.collect(futures),
                future);
        }, null);
    } catch (ZooKeeperConnectionException e) {
        future.completeExceptionally(e);
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        future.completeExceptionally(e);
    }
    return future;
}
}
| |
package org.cloudfoundry.autoscaler.api;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import org.apache.log4j.Logger;
import org.cloudfoundry.autoscaler.api.util.ConfigManager;
public final class Constants {
private static final String CLASS_NAME = Constants.class.getName();
private static final Logger logger = Logger.getLogger(CLASS_NAME);
public static final int DASHBORAD_TIME_RANGE = 30;
public static final String USERNAME = "cfClientId";
public static final String PASSWORD = "cfClientSecret";
public static final String CFURL = "cfUrl";
public static final String APP_TYPE_JAVA = "java";
public static final String APP_TYPE_RUBY = "ruby";
public static final String APP_TYPE_RUBY_SINATRA = "ruby_sinatra";
public static final String APP_TYPE_RUBY_ON_RAILS = "ruby_on_rails";
public static final String APP_TYPE_NODEJS = "nodejs";
public static final String APP_TYPE_GO = "go";
public static final String APP_TYPE_PHP = "php";
public static final String APP_TYPE_PYTHON = "python";
public static final String APP_TYPE_DOTNET = "dotnet";
public static final String APP_TYPE_UNKNOWN = "unknown";
public static final String APP_TYPE_DEFAULT = "default";
public static final String DISPLAY_APP_TYPE_JAVA = "LIBERTY FOR JAVA";
public static final String DISPLAY_APP_TYPE_RUBY = "Ruby";
public static final String DISPLAY_APP_TYPE_RUBY_SINATRA = "Ruby_sinatra";
public static final String DISPLAY_APP_TYPE_RUBY_ON_RAILS = "Ruby_on_rails";
public static final String DISPLAY_APP_TYPE_NODEJS = "SDK FOR NODE.JS";
public static final String DISPLAY_APP_TYPE_GO = "Go";
public static final String DISPLAY_APP_TYPE_PHP = "PHP";
public static final String DISPLAY_APP_TYPE_PYTHON = "Python";
public static final String DISPLAY_APP_TYPE_DOTNET = ".NET";
public static final String DISPLAY_APP_TYPE_DEFAULT = "default";
//metric.* related entries defined in config.properties
public static final String REPORT_INTERVAL = "reportInterval";
//Policy data range
public static final String[] timezones = new String[] {
"(GMT -12:00) Etc/GMT+12",
"(GMT -11:00) Etc/GMT+11",
"(GMT -11:00) Pacific/Midway",
"(GMT -11:00) Pacific/Niue",
"(GMT -11:00) Pacific/Pago_Pago",
"(GMT -11:00) Pacific/Samoa",
"(GMT -11:00) US/Samoa",
"(GMT -10:00) Etc/GMT+10",
"(GMT -10:00) HST",
"(GMT -10:00) Pacific/Honolulu",
"(GMT -10:00) Pacific/Johnston",
"(GMT -10:00) Pacific/Rarotonga",
"(GMT -10:00) Pacific/Tahiti",
"(GMT -10:00) US/Hawaii",
"(GMT -09:30) Pacific/Marquesas",
"(GMT -09:00) America/Adak",
"(GMT -09:00) America/Atka",
"(GMT -09:00) Etc/GMT+9",
"(GMT -09:00) Pacific/Gambier",
"(GMT -09:00) US/Aleutian",
"(GMT -08:00) America/Anchorage",
"(GMT -08:00) America/Juneau",
"(GMT -08:00) America/Metlakatla",
"(GMT -08:00) America/Nome",
"(GMT -08:00) America/Sitka",
"(GMT -08:00) America/Yakutat",
"(GMT -08:00) Etc/GMT+8",
"(GMT -08:00) Pacific/Pitcairn",
"(GMT -08:00) US/Alaska",
"(GMT -07:00) America/Creston",
"(GMT -07:00) America/Dawson",
"(GMT -07:00) America/Dawson_Creek",
"(GMT -07:00) America/Ensenada",
"(GMT -07:00) America/Hermosillo",
"(GMT -07:00) America/Los_Angeles",
"(GMT -07:00) America/Phoenix",
"(GMT -07:00) America/Santa_Isabel",
"(GMT -07:00) America/Tijuana",
"(GMT -07:00) America/Vancouver",
"(GMT -07:00) America/Whitehorse",
"(GMT -07:00) Canada/Pacific",
"(GMT -07:00) Canada/Yukon",
"(GMT -07:00) Etc/GMT+7",
"(GMT -07:00) MST",
"(GMT -07:00) Mexico/BajaNorte",
"(GMT -07:00) PST8PDT",
"(GMT -07:00) US/Arizona",
"(GMT -07:00) US/Pacific",
"(GMT -07:00) US/Pacific-New",
"(GMT -06:00) America/Belize",
"(GMT -06:00) America/Boise",
"(GMT -06:00) America/Cambridge_Bay",
"(GMT -06:00) America/Chihuahua",
"(GMT -06:00) America/Costa_Rica",
"(GMT -06:00) America/Denver",
"(GMT -06:00) America/Edmonton",
"(GMT -06:00) America/El_Salvador",
"(GMT -06:00) America/Guatemala",
"(GMT -06:00) America/Inuvik",
"(GMT -06:00) America/Managua",
"(GMT -06:00) America/Mazatlan",
"(GMT -06:00) America/Ojinaga",
"(GMT -06:00) America/Regina",
"(GMT -06:00) America/Shiprock",
"(GMT -06:00) America/Swift_Current",
"(GMT -06:00) America/Tegucigalpa",
"(GMT -06:00) America/Yellowknife",
"(GMT -06:00) Canada/East-Saskatchewan",
"(GMT -06:00) Canada/Mountain",
"(GMT -06:00) Canada/Saskatchewan",
"(GMT -06:00) Etc/GMT+6",
"(GMT -06:00) MST7MDT",
"(GMT -06:00) Mexico/BajaSur",
"(GMT -06:00) Navajo",
"(GMT -06:00) Pacific/Galapagos",
"(GMT -06:00) US/Mountain",
"(GMT -05:00) America/Atikokan",
"(GMT -05:00) America/Bahia_Banderas",
"(GMT -05:00) America/Bogota",
"(GMT -05:00) America/Cancun",
"(GMT -05:00) America/Cayman",
"(GMT -05:00) America/Chicago",
"(GMT -05:00) America/Coral_Harbour",
"(GMT -05:00) America/Eirunepe",
"(GMT -05:00) America/Guayaquil",
"(GMT -05:00) America/Indiana/Knox",
"(GMT -05:00) America/Indiana/Tell_City",
"(GMT -05:00) America/Jamaica",
"(GMT -05:00) America/Knox_IN",
"(GMT -05:00) America/Lima",
"(GMT -05:00) America/Matamoros",
"(GMT -05:00) America/Menominee",
"(GMT -05:00) America/Merida",
"(GMT -05:00) America/Mexico_City",
"(GMT -05:00) America/Monterrey",
"(GMT -05:00) America/North_Dakota/Beulah",
"(GMT -05:00) America/North_Dakota/Center",
"(GMT -05:00) America/North_Dakota/New_Salem",
"(GMT -05:00) America/Panama",
"(GMT -05:00) America/Porto_Acre",
"(GMT -05:00) America/Rainy_River",
"(GMT -05:00) America/Rankin_Inlet",
"(GMT -05:00) America/Resolute",
"(GMT -05:00) America/Rio_Branco",
"(GMT -05:00) America/Winnipeg",
"(GMT -05:00) Brazil/Acre",
"(GMT -05:00) CST6CDT",
"(GMT -05:00) Canada/Central",
"(GMT -05:00) Chile/EasterIsland",
"(GMT -05:00) EST",
"(GMT -05:00) Etc/GMT+5",
"(GMT -05:00) Jamaica",
"(GMT -05:00) Mexico/General",
"(GMT -05:00) Pacific/Easter",
"(GMT -05:00) US/Central",
"(GMT -05:00) US/Indiana-Starke",
"(GMT -04:30) America/Caracas",
"(GMT -04:00) America/Anguilla",
"(GMT -04:00) America/Antigua",
"(GMT -04:00) America/Aruba",
"(GMT -04:00) America/Asuncion",
"(GMT -04:00) America/Barbados",
"(GMT -04:00) America/Blanc-Sablon",
"(GMT -04:00) America/Boa_Vista",
"(GMT -04:00) America/Campo_Grande",
"(GMT -04:00) America/Cuiaba",
"(GMT -04:00) America/Curacao",
"(GMT -04:00) America/Detroit",
"(GMT -04:00) America/Dominica",
"(GMT -04:00) America/Fort_Wayne",
"(GMT -04:00) America/Grand_Turk",
"(GMT -04:00) America/Grenada",
"(GMT -04:00) America/Guadeloupe",
"(GMT -04:00) America/Guyana",
"(GMT -04:00) America/Havana",
"(GMT -04:00) America/Indiana/Indianapolis",
"(GMT -04:00) America/Indiana/Marengo",
"(GMT -04:00) America/Indiana/Petersburg",
"(GMT -04:00) America/Indiana/Vevay",
"(GMT -04:00) America/Indiana/Vincennes",
"(GMT -04:00) America/Indiana/Winamac",
"(GMT -04:00) America/Indianapolis",
"(GMT -04:00) America/Iqaluit ",
"(GMT -04:00) America/Kentucky/Louisville ",
"(GMT -04:00) America/Kentucky/Monticello",
"(GMT -04:00) America/Kralendijk",
"(GMT -04:00) America/La_Paz",
"(GMT -04:00) America/Louisville ",
"(GMT -04:00) America/Lower_Princes",
"(GMT -04:00) America/Manaus",
"(GMT -04:00) America/Marigot",
"(GMT -04:00) America/Martinique",
"(GMT -04:00) America/Montreal",
"(GMT -04:00) America/Montserrat",
"(GMT -04:00) America/Nassau",
"(GMT -04:00) America/New_York",
"(GMT -04:00) America/Nipigon",
"(GMT -04:00) America/Pangnirtung ",
"(GMT -04:00) America/Port-au-Prince ",
"(GMT -04:00) America/Port_of_Spain",
"(GMT -04:00) America/Porto_Velho",
"(GMT -04:00) America/Puerto_Rico ",
"(GMT -04:00) America/Santo_Domingo ",
"(GMT -04:00) America/St_Barthelemy",
"(GMT -04:00) America/St_Kitts",
"(GMT -04:00) America/St_Lucia",
"(GMT -04:00) America/St_Thomas",
"(GMT -04:00) America/St_Vincent",
"(GMT -04:00) America/Thunder_Bay",
"(GMT -04:00) America/Toronto",
"(GMT -04:00) America/Tortola",
"(GMT -04:00) America/Virgin",
"(GMT -04:00) Brazil/West",
"(GMT -04:00) Canada/Eastern",
"(GMT -04:00) Cuba",
"(GMT -04:00) EST5EDT",
"(GMT -04:00) Etc/GMT+4",
"(GMT -04:00) US/East-Indiana",
"(GMT -04:00) US/Eastern",
"(GMT -04:00) US/Michigan",
"(GMT -03:00) America/Araguaina ",
"(GMT -03:00) America/Argentina/Buenos_Aires ",
"(GMT -03:00) America/Argentina/Catamarca ",
"(GMT -03:00) America/Argentina/ComodRivadavia ",
"(GMT -03:00) America/Argentina/Cordoba ",
"(GMT -03:00) America/Argentina/Jujuy ",
"(GMT -03:00) America/Argentina/La_Rioja ",
"(GMT -03:00) America/Argentina/Mendoza ",
"(GMT -03:00) America/Argentina/Rio_Gallegos ",
"(GMT -03:00) America/Argentina/Salta ",
"(GMT -03:00) America/Argentina/San_Juan ",
"(GMT -03:00) America/Argentina/San_Luis ",
"(GMT -03:00) America/Argentina/Tucuman ",
"(GMT -03:00) America/Argentina/Ushuaia",
"(GMT -03:00) America/Bahia",
"(GMT -03:00) America/Belem",
"(GMT -03:00) America/Buenos_Aires",
"(GMT -03:00) America/Catamarca",
"(GMT -03:00) America/Cayenne",
"(GMT -03:00) America/Cordoba",
"(GMT -03:00) America/Fortaleza",
"(GMT -03:00) America/Glace_Bay",
"(GMT -03:00) America/Goose_Bay",
"(GMT -03:00) America/Halifax",
"(GMT -03:00) America/Jujuy",
"(GMT -03:00) America/Maceio",
"(GMT -03:00) America/Mendoza",
"(GMT -03:00) America/Moncton",
"(GMT -03:00) America/Montevideo",
"(GMT -03:00) America/Paramaribo",
"(GMT -03:00) America/Recife",
"(GMT -03:00) America/Rosario",
"(GMT -03:00) America/Santarem",
"(GMT -03:00) America/Santiago",
"(GMT -03:00) America/Sao_Paulo",
"(GMT -03:00) America/Thule",
"(GMT -03:00) Antarctica/Palmer",
"(GMT -03:00) Antarctica/Rothera",
"(GMT -03:00) Atlantic/Bermuda",
"(GMT -03:00) Atlantic/Stanley",
"(GMT -03:00) Brazil/East",
"(GMT -03:00) Canada/Atlantic",
"(GMT -03:00) Chile/Continental",
"(GMT -03:00) Etc/GMT+3",
"(GMT -02:30) America/St_Johns",
"(GMT -02:30) Canada/Newfoundland",
"(GMT -02:00) America/Godthab",
"(GMT -02:00) America/Miquelon",
"(GMT -02:00) America/Noronha ",
"(GMT -02:00) Atlantic/South_Georgia",
"(GMT -02:00) Brazil/DeNoronha",
"(GMT -02:00) Etc/GMT+2",
"(GMT -01:00) Atlantic/Cape_Verde",
"(GMT -01:00) Etc/GMT+1",
"(GMT +00:00) Africa/Abidjan",
"(GMT +00:00) Africa/Accra",
"(GMT +00:00) Africa/Bamako",
"(GMT +00:00) Africa/Banjul",
"(GMT +00:00) Africa/Bissau",
"(GMT +00:00) Africa/Conakry",
"(GMT +00:00) Africa/Dakar",
"(GMT +00:00) Africa/Freetown",
"(GMT +00:00) Africa/Lome",
"(GMT +00:00) Africa/Monrovia",
"(GMT +00:00) Africa/Nouakchott",
"(GMT +00:00) Africa/Ouagadougou",
"(GMT +00:00) Africa/Sao_Tome",
"(GMT +00:00) Africa/Timbuktu",
"(GMT +00:00) America/Danmarkshavn",
"(GMT +00:00) America/Scoresbysund",
"(GMT +00:00) Atlantic/Azores",
"(GMT +00:00) Atlantic/Reykjavik",
"(GMT +00:00) Atlantic/St_Helena",
"(GMT +00:00) Etc/GMT",
"(GMT +00:00) Etc/GMT+0",
"(GMT +00:00) Etc/GMT-0",
"(GMT +00:00) Etc/GMT0",
"(GMT +00:00) Etc/Greenwich",
"(GMT +00:00) Etc/UCT",
"(GMT +00:00) Etc/UTC",
"(GMT +00:00) Etc/Universal",
"(GMT +00:00) Etc/Zulu",
"(GMT +00:00) GMT",
"(GMT +00:00) GMT+0",
"(GMT +00:00) GMT-0",
"(GMT +00:00) GMT0",
"(GMT +00:00) Greenwich",
"(GMT +00:00) Iceland",
"(GMT +00:00) UCT",
"(GMT +00:00) UTC",
"(GMT +00:00) Universal",
"(GMT +00:00) Zulu",
"(GMT +01:00) Africa/Algiers",
"(GMT +01:00) Africa/Bangui",
"(GMT +01:00) Africa/Brazzaville",
"(GMT +01:00) Africa/Casablanca",
"(GMT +01:00) Africa/Douala",
"(GMT +01:00) Africa/El_Aaiun",
"(GMT +01:00) Africa/Kinshasa",
"(GMT +01:00) Africa/Lagos",
"(GMT +01:00) Africa/Libreville",
"(GMT +01:00) Africa/Luanda",
"(GMT +01:00) Africa/Malabo",
"(GMT +01:00) Africa/Ndjamena",
"(GMT +01:00) Africa/Niamey",
"(GMT +01:00) Africa/Porto-Novo",
"(GMT +01:00) Africa/Tunis",
"(GMT +01:00) Africa/Windhoek",
"(GMT +01:00) Atlantic/Canary",
"(GMT +01:00) Atlantic/Faeroe",
"(GMT +01:00) Atlantic/Faroe",
"(GMT +01:00) Atlantic/Madeira",
"(GMT +01:00) Eire",
"(GMT +01:00) Etc/GMT-1",
"(GMT +01:00) Europe/Belfast",
"(GMT +01:00) Europe/Dublin",
"(GMT +01:00) Europe/Guernsey",
"(GMT +01:00) Europe/Isle_of_Man",
"(GMT +01:00) Europe/Jersey",
"(GMT +01:00) Europe/Lisbon",
"(GMT +01:00) Europe/London",
"(GMT +01:00) GB",
"(GMT +01:00) GB-Eire",
"(GMT +01:00) Portugal",
"(GMT +01:00) WET",
"(GMT +02:00) Africa/Blantyre",
"(GMT +02:00) Africa/Bujumbura",
"(GMT +02:00) Africa/Cairo",
"(GMT +02:00) Africa/Ceuta",
"(GMT +02:00) Africa/Gaborone",
"(GMT +02:00) Africa/Harare",
"(GMT +02:00) Africa/Johannesburg",
"(GMT +02:00) Africa/Kigali",
"(GMT +02:00) Africa/Lubumbashi",
"(GMT +02:00) Africa/Lusaka",
"(GMT +02:00) Africa/Maputo",
"(GMT +02:00) Africa/Maseru",
"(GMT +02:00) Africa/Mbabane",
"(GMT +02:00) Africa/Tripoli",
"(GMT +02:00) Antarctica/Troll",
"(GMT +02:00) Arctic/Longyearbyen",
"(GMT +02:00) Atlantic/Jan_Mayen",
"(GMT +02:00) CET",
"(GMT +02:00) Egypt",
"(GMT +02:00) Etc/GMT-2",
"(GMT +02:00) Europe/Amsterdam",
"(GMT +02:00) Europe/Andorra",
"(GMT +02:00) Europe/Belgrade",
"(GMT +02:00) Europe/Berlin",
"(GMT +02:00) Europe/Bratislava",
"(GMT +02:00) Europe/Brussels",
"(GMT +02:00) Europe/Budapest",
"(GMT +02:00) Europe/Busingen",
"(GMT +02:00) Europe/Copenhagen",
"(GMT +02:00) Europe/Gibraltar",
"(GMT +02:00) Europe/Kaliningrad",
"(GMT +02:00) Europe/Ljubljana",
"(GMT +02:00) Europe/Luxembourg",
"(GMT +02:00) Europe/Madrid",
"(GMT +02:00) Europe/Malta",
"(GMT +02:00) Europe/Monaco",
"(GMT +02:00) Europe/Oslo",
"(GMT +02:00) Europe/Paris",
"(GMT +02:00) Europe/Podgorica",
"(GMT +02:00) Europe/Prague",
"(GMT +02:00) Europe/Rome",
"(GMT +02:00) Europe/San_Marino",
"(GMT +02:00) Europe/Sarajevo",
"(GMT +02:00) Europe/Skopje",
"(GMT +02:00) Europe/Stockholm",
"(GMT +02:00) Europe/Tirane",
"(GMT +02:00) Europe/Vaduz",
"(GMT +02:00) Europe/Vatican",
"(GMT +02:00) Europe/Vienna",
"(GMT +02:00) Europe/Warsaw",
"(GMT +02:00) Europe/Zagreb",
"(GMT +02:00) Europe/Zurich",
"(GMT +02:00) Libya",
"(GMT +02:00) MET",
"(GMT +02:00) Poland",
"(GMT +03:00) Africa/Addis_Ababa",
"(GMT +03:00) Africa/Asmara",
"(GMT +03:00) Africa/Asmera",
"(GMT +03:00) Africa/Dar_es_Salaam",
"(GMT +03:00) Africa/Djibouti",
"(GMT +03:00) Africa/Juba",
"(GMT +03:00) Africa/Kampala",
"(GMT +03:00) Africa/Khartoum",
"(GMT +03:00) Africa/Mogadishu",
"(GMT +03:00) Africa/Nairobi",
"(GMT +03:00) Antarctica/Syowa",
"(GMT +03:00) Asia/Aden",
"(GMT +03:00) Asia/Amman",
"(GMT +03:00) Asia/Baghdad",
"(GMT +03:00) Asia/Bahrain",
"(GMT +03:00) Asia/Beirut",
"(GMT +03:00) Asia/Damascus",
"(GMT +03:00) Asia/Gaza",
"(GMT +03:00) Asia/Hebron",
"(GMT +03:00) Asia/Istanbul",
"(GMT +03:00) Asia/Jerusalem",
"(GMT +03:00) Asia/Kuwait",
"(GMT +03:00) Asia/Nicosia",
"(GMT +03:00) Asia/Qatar",
"(GMT +03:00) Asia/Riyadh",
"(GMT +03:00) Asia/Tel_Aviv",
"(GMT +03:00) EET",
"(GMT +03:00) Etc/GMT-3",
"(GMT +03:00) Europe/Athens",
"(GMT +03:00) Europe/Bucharest",
"(GMT +03:00) Europe/Chisinau",
"(GMT +03:00) Europe/Helsinki",
"(GMT +03:00) Europe/Istanbul",
"(GMT +03:00) Europe/Kiev",
"(GMT +03:00) Europe/Mariehamn",
"(GMT +03:00) Europe/Minsk",
"(GMT +03:00) Europe/Moscow",
"(GMT +03:00) Europe/Nicosia",
"(GMT +03:00) Europe/Riga",
"(GMT +03:00) Europe/Simferopol",
"(GMT +03:00) Europe/Sofia",
"(GMT +03:00) Europe/Tallinn",
"(GMT +03:00) Europe/Tiraspol",
"(GMT +03:00) Europe/Uzhgorod",
"(GMT +03:00) Europe/Vilnius",
"(GMT +03:00) Europe/Volgograd",
"(GMT +03:00) Europe/Zaporozhye",
"(GMT +03:00) Indian/Antananarivo",
"(GMT +03:00) Indian/Comoro",
"(GMT +03:00) Indian/Mayotte",
"(GMT +03:00) Israel",
"(GMT +03:00) Turkey",
"(GMT +03:00) W-SU",
"(GMT +04:00) Asia/Dubai",
"(GMT +04:00) Asia/Muscat",
"(GMT +04:00) Asia/Tbilisi",
"(GMT +04:00) Asia/Yerevan",
"(GMT +04:00) Etc/GMT-4",
"(GMT +04:00) Europe/Samara",
"(GMT +04:00) Indian/Mahe",
"(GMT +04:00) Indian/Mauritius",
"(GMT +04:00) Indian/Reunion",
"(GMT +04:30) Asia/Kabul",
"(GMT +04:30) Asia/Tehran",
"(GMT +04:30) Iran",
"(GMT +05:00) Antarctica/Mawson",
"(GMT +05:00) Asia/Aqtau",
"(GMT +05:00) Asia/Aqtobe",
"(GMT +05:00) Asia/Ashgabat",
"(GMT +05:00) Asia/Ashkhabad",
"(GMT +05:00) Asia/Baku",
"(GMT +05:00) Asia/Dushanbe",
"(GMT +05:00) Asia/Karachi",
"(GMT +05:00) Asia/Oral",
"(GMT +05:00) Asia/Samarkand",
"(GMT +05:00) Asia/Tashkent",
"(GMT +05:00) Asia/Yekaterinburg",
"(GMT +05:00) Etc/GMT-5",
"(GMT +05:00) Indian/Kerguelen",
"(GMT +05:00) Indian/Maldives",
"(GMT +05:30) Asia/Calcutta",
"(GMT +05:30) Asia/Colombo",
"(GMT +05:30) Asia/Kolkata",
"(GMT +05:45) Asia/Kathmandu",
"(GMT +05:45) Asia/Katmandu",
"(GMT +06:00) Antarctica/Vostok",
"(GMT +06:00) Asia/Almaty",
"(GMT +06:00) Asia/Bishkek",
"(GMT +06:00) Asia/Dacca",
"(GMT +06:00) Asia/Dhaka",
"(GMT +06:00) Asia/Kashgar",
"(GMT +06:00) Asia/Novosibirsk",
"(GMT +06:00) Asia/Omsk",
"(GMT +06:00) Asia/Qyzylorda",
"(GMT +06:00) Asia/Thimbu",
"(GMT +06:00) Asia/Thimphu",
"(GMT +06:00) Asia/Urumqi",
"(GMT +06:00) Etc/GMT-6",
"(GMT +06:00) Indian/Chagos",
"(GMT +06:30) Asia/Rangoon",
"(GMT +06:30) Indian/Cocos",
"(GMT +07:00) Antarctica/Davis",
"(GMT +07:00) Asia/Bangkok",
"(GMT +07:00) Asia/Ho_Chi_Minh",
"(GMT +07:00) Asia/Hovd",
"(GMT +07:00) Asia/Jakarta",
"(GMT +07:00) Asia/Krasnoyarsk",
"(GMT +07:00) Asia/Novokuznetsk",
"(GMT +07:00) Asia/Phnom_Penh",
"(GMT +07:00) Asia/Pontianak",
"(GMT +07:00) Asia/Saigon",
"(GMT +07:00) Asia/Vientiane",
"(GMT +07:00) Etc/GMT-7",
"(GMT +07:00) Indian/Christmas",
"(GMT +08:00) Antarctica/Casey",
"(GMT +08:00) Asia/Brunei",
"(GMT +08:00) Asia/Chita",
"(GMT +08:00) Asia/Choibalsan",
"(GMT +08:00) Asia/Chongqing",
"(GMT +08:00) Asia/Chungking",
"(GMT +08:00) Asia/Harbin",
"(GMT +08:00) Asia/Hong_Kong",
"(GMT +08:00) Asia/Irkutsk",
"(GMT +08:00) Asia/Kuala_Lumpur",
"(GMT +08:00) Asia/Kuching",
"(GMT +08:00) Asia/Macao",
"(GMT +08:00) Asia/Macau",
"(GMT +08:00) Asia/Makassar",
"(GMT +08:00) Asia/Manila",
"(GMT +08:00) Asia/Shanghai",
"(GMT +08:00) Asia/Singapore",
"(GMT +08:00) Asia/Taipei",
"(GMT +08:00) Asia/Ujung_Pandang",
"(GMT +08:00) Asia/Ulaanbaatar",
"(GMT +08:00) Asia/Ulan_Bator",
"(GMT +08:00) Australia/Perth",
"(GMT +08:00) Australia/West",
"(GMT +08:00) Etc/GMT-8",
"(GMT +08:00) Hongkong",
"(GMT +08:00) PRC",
"(GMT +08:00) ROC",
"(GMT +08:00) Singapore",
"(GMT +08:45) Australia/Eucla",
"(GMT +09:00) Asia/Dili",
"(GMT +09:00) Asia/Jayapura",
"(GMT +09:00) Asia/Khandyga",
"(GMT +09:00) Asia/Pyongyang",
"(GMT +09:00) Asia/Seoul",
"(GMT +09:00) Asia/Tokyo",
"(GMT +09:00) Asia/Yakutsk",
"(GMT +09:00) Etc/GMT-9",
"(GMT +09:00) Japan",
"(GMT +09:00) Pacific/Palau",
"(GMT +09:00) ROK",
"(GMT +09:30) Australia/Adelaide ",
"(GMT +09:30) Australia/Broken_Hill",
"(GMT +09:30) Australia/Darwin",
"(GMT +09:30) Australia/North",
"(GMT +09:30) Australia/South",
"(GMT +09:30) Australia/Yancowinna ",
"(GMT +10:00) Antarctica/DumontDUrville",
"(GMT +10:00) Asia/Magadan",
"(GMT +10:00) Asia/Sakhalin",
"(GMT +10:00) Asia/Ust-Nera",
"(GMT +10:00) Asia/Vladivostok",
"(GMT +10:00) Australia/ACT",
"(GMT +10:00) Australia/Brisbane",
"(GMT +10:00) Australia/Canberra",
"(GMT +10:00) Australia/Currie",
"(GMT +10:00) Australia/Hobart",
"(GMT +10:00) Australia/Lindeman",
"(GMT +10:00) Australia/Melbourne",
"(GMT +10:00) Australia/NSW",
"(GMT +10:00) Australia/Queensland",
"(GMT +10:00) Australia/Sydney",
"(GMT +10:00) Australia/Tasmania",
"(GMT +10:00) Australia/Victoria",
"(GMT +10:00) Etc/GMT-10",
"(GMT +10:00) Pacific/Chuuk",
"(GMT +10:00) Pacific/Guam",
"(GMT +10:00) Pacific/Port_Moresby",
"(GMT +10:00) Pacific/Saipan",
"(GMT +10:00) Pacific/Truk",
"(GMT +10:00) Pacific/Yap",
"(GMT +10:30) Australia/LHI",
"(GMT +10:30) Australia/Lord_Howe",
"(GMT +11:00) Antarctica/Macquarie",
"(GMT +11:00) Asia/Srednekolymsk",
"(GMT +11:00) Etc/GMT-11",
"(GMT +11:00) Pacific/Bougainville",
"(GMT +11:00) Pacific/Efate",
"(GMT +11:00) Pacific/Guadalcanal",
"(GMT +11:00) Pacific/Kosrae",
"(GMT +11:00) Pacific/Noumea",
"(GMT +11:00) Pacific/Pohnpei",
"(GMT +11:00) Pacific/Ponape",
"(GMT +11:30) Pacific/Norfolk",
"(GMT +12:00) Antarctica/McMurdo",
"(GMT +12:00) Antarctica/South_Pole",
"(GMT +12:00) Asia/Anadyr",
"(GMT +12:00) Asia/Kamchatka",
"(GMT +12:00) Etc/GMT-12",
"(GMT +12:00) Kwajalein",
"(GMT +12:00) NZ",
"(GMT +12:00) Pacific/Auckland",
"(GMT +12:00) Pacific/Fiji",
"(GMT +12:00) Pacific/Funafuti",
"(GMT +12:00) Pacific/Kwajalein",
"(GMT +12:00) Pacific/Majuro",
"(GMT +12:00) Pacific/Nauru",
"(GMT +12:00) Pacific/Tarawa",
"(GMT +12:00) Pacific/Wake",
"(GMT +12:00) Pacific/Wallis",
"(GMT +12:45) NZ-CHAT",
"(GMT +12:45) Pacific/Chatham",
"(GMT +13:00) Etc/GMT-13",
"(GMT +13:00) Pacific/Apia",
"(GMT +13:00) Pacific/Enderbury",
"(GMT +13:00) Pacific/Fakaofo",
"(GMT +13:00) Pacific/Tongatapu",
"(GMT +14:00) Etc/GMT-14",
"(GMT +14:00) Pacific/Kiritimati"};
// Internal identifier for the memory-usage metric.
public static String INTERNAL_METRIC_TYPE_MEMORY = "Memory";
// Externally visible metric identifier; currently identical to the internal one.
public static String METRIC_TYPE_MEMORY = "Memory";
// All metric types currently supported (memory only).
public static String[] metrictype = {METRIC_TYPE_MEMORY};
// Every application type recognized by this service (APP_TYPE_* constants are
// declared earlier in this class).
public static String[] APPTYPE={
APP_TYPE_JAVA, APP_TYPE_RUBY, APP_TYPE_RUBY_SINATRA, APP_TYPE_RUBY_ON_RAILS, APP_TYPE_NODEJS,
APP_TYPE_GO, APP_TYPE_PHP, APP_TYPE_PYTHON, APP_TYPE_DOTNET
};
// NOTE(review): presumably the app types eligible for some update path —
// confirm against the call sites; usage is not visible in this file section.
public static String[] APP_TOBE_UPDATE={APP_TYPE_JAVA, APP_TYPE_NODEJS};
// Per-app-type metric sets; all currently expose only the Memory metric.
public static String[] JAVA_METRICS = {METRIC_TYPE_MEMORY};
public static String[] NODEJS_METRICS = {METRIC_TYPE_MEMORY};
// Fallback metric set for app types without a dedicated entry
// (see getMetricTypeByAppType).
public static String[] DEFAULT_METRICS = {METRIC_TYPE_MEMORY};
// Lookup table from application type to the metric types it reports.
// Built by a private factory instead of the previous anonymous-subclass
// ("double brace") initializer, which generated a needless extra class and
// produced a HashMap subtype rather than a plain HashMap.
public static Map<String,String[]> appType_metric_mapping = buildAppTypeMetricMapping();

// Assembles the app-type -> metric-set mapping used by getMetricTypeByAppType.
private static Map<String, String[]> buildAppTypeMetricMapping() {
    Map<String, String[]> mapping = new HashMap<String, String[]>();
    mapping.put(APP_TYPE_JAVA, JAVA_METRICS);
    mapping.put(APP_TYPE_NODEJS, NODEJS_METRICS);
    mapping.put(APP_TYPE_DEFAULT, DEFAULT_METRICS);
    return mapping;
}
// Metric reporting interval read from configuration; defaults to 60.
// (Per the original comment on metricTimeRange below, the unit is seconds.)
public static int REPORTINTERVAL = ConfigManager.getInt(Constants.REPORT_INTERVAL, 60);
// REPORTINTERVAL converted to milliseconds.
public static int value_metric_refresh_interval = REPORTINTERVAL * 1000;
// Lower bounds for time-based trigger settings: never shorter than one
// reporting interval (refreshed at runtime by updateTriggerRange).
public static int value_min_statwindow = REPORTINTERVAL;
public static int value_min_breachDuration = REPORTINTERVAL;
public static int value_min_stepDownCoolDownSecs = REPORTINTERVAL;
public static int value_min_stepUpCoolDownSecs = REPORTINTERVAL;
// Lower bounds for the memory threshold and instance step counts.
public static int value_min_lowerThreshold_Memory = 1;
public static int value_min_instanceStepCountDown = 1;
public static int value_min_instanceStepCountUp = 1;
// Chart time range in minutes: reportInterval is in seconds, so dividing by 60
// converts to minutes; multiplying by 60 keeps 60 data points in the chart.
public static int metricTimeRange = REPORTINTERVAL * 60 /60;
// The same chart range expressed in milliseconds.
public static int metricTimeRangeByMilliseconds = metricTimeRange * 60 * 1000;
// Sentinel string meaning "no limit configured".
public static String value_nolimit = "nolimit";
// Defaults applied when a policy omits a trigger setting (see trigger_default).
public static int value_default_statWindow = 300;
public static int value_default_breachDuration = 600;
public static int value_default_stepDownCoolDownSecs = 600;
public static int value_default_stepUpCoolDownSecs = 600;
public static int value_default_lowerThreshold_Memory = 30;
public static int value_default_upperThreshold_Memory= 80;
public static int value_default_instanceStepCountDown = 1;
public static int value_default_instanceStepCountUp = 1;
// Upper bounds for the trigger settings (seconds for the time-based ones).
public static int value_max_statwindow = 3600;
public static int value_max_breachDuration = 3600;
public static int value_max_stepDownCoolDownSecs = 3600;
public static int value_max_stepUpCoolDownSecs = 3600;
public static int value_max_lowerThreshold_Memory = 100;
public static int value_max_upperThreshold_Memory= 100;
// Keys of the per-policy trigger settings as persisted/serialized.
public static String TRIGGER_STATWINDOW = "statWindow";
public static String TRIGGER_BREACHDURATION = "breachDuration";
public static String TRIGGER_LOWERTHRESHOLD = "lowerThreshold";
public static String TRIGGER_UPPERTHRESHOLD = "upperThreshold";
public static String TRIGGER_INSTANCESTEPCOUNTDOWN = "instanceStepCountDown";
public static String TRIGGER_INSTANCESTEPCOUNTUP = "instanceStepCountUp";
public static String TRIGGER_STEPDOWNCOOLDOWNSECS = "stepDownCoolDownSecs";
public static String TRIGGER_STEPUPCOOLDOWN = "stepUpCoolDownSecs";

// Default value per trigger key, consumed by getTriggerDefaultInt.
// Built via a private factory instead of the previous anonymous-subclass
// ("double brace") initializer, which generated a needless extra class.
public static Map<String, Integer> trigger_default = buildTriggerDefaults();

// Assembles the trigger-key -> default-value table. The value_default_* fields
// are declared (and therefore initialized) earlier in this class, so reading
// them here during static initialization is safe.
private static Map<String, Integer> buildTriggerDefaults() {
    Map<String, Integer> defaults = new HashMap<String, Integer>();
    defaults.put(TRIGGER_STATWINDOW, value_default_statWindow);
    defaults.put(TRIGGER_BREACHDURATION, value_default_breachDuration);
    defaults.put(TRIGGER_LOWERTHRESHOLD, value_default_lowerThreshold_Memory);
    defaults.put(TRIGGER_UPPERTHRESHOLD, value_default_upperThreshold_Memory);
    defaults.put(TRIGGER_INSTANCESTEPCOUNTDOWN, value_default_instanceStepCountDown);
    defaults.put(TRIGGER_INSTANCESTEPCOUNTUP, value_default_instanceStepCountUp);
    defaults.put(TRIGGER_STEPDOWNCOOLDOWNSECS, value_default_stepDownCoolDownSecs);
    defaults.put(TRIGGER_STEPUPCOOLDOWN, value_default_stepUpCoolDownSecs);
    return defaults;
}
/**
 * Builds the validation-range table for trigger settings common to every
 * metric type: statWindow, breachDuration, instance step counts and the
 * step up/down cool-down periods.
 *
 * Returned structure: settingName -> limitName ("<setting>_Min"/"<setting>_Max")
 * -> {"value": numeric string, "message": i18n message key}.
 * Step-count settings expose only a minimum; the rest expose both bounds.
 */
public static Map<String, Map<String, Map<String, String>>> getTriggerRange()
{
    Map<String, Map<String, Map<String, String>>> range = new HashMap<String, Map<String, Map<String, String>>>();

    Map<String, Map<String, String>> statWindow = new HashMap<String, Map<String, String>>();
    statWindow.put("statWindow_Min", limitEntry(value_min_statwindow, "{PolicyTrigger.statWindow.Min}"));
    statWindow.put("statWindow_Max", limitEntry(value_max_statwindow, "{PolicyTrigger.statWindow.Max}"));
    range.put("statWindow", statWindow);

    Map<String, Map<String, String>> breachDuration = new HashMap<String, Map<String, String>>();
    breachDuration.put("breachDuration_Min", limitEntry(value_min_breachDuration, "{PolicyTrigger.breachDuration.Min}"));
    breachDuration.put("breachDuration_Max", limitEntry(value_max_breachDuration, "{PolicyTrigger.breachDuration.Max}"));
    range.put("breachDuration", breachDuration);

    // Step counts only have a configured minimum (no maximum), as before.
    Map<String, Map<String, String>> stepCountDown = new HashMap<String, Map<String, String>>();
    stepCountDown.put("instanceStepCountDown_Min", limitEntry(value_min_instanceStepCountDown, "{PolicyTrigger.instanceStepCountDown.Min}"));
    range.put("instanceStepCountDown", stepCountDown);

    Map<String, Map<String, String>> stepCountUp = new HashMap<String, Map<String, String>>();
    stepCountUp.put("instanceStepCountUp_Min", limitEntry(value_min_instanceStepCountUp, "{PolicyTrigger.instanceStepCountUp.Min}"));
    range.put("instanceStepCountUp", stepCountUp);

    Map<String, Map<String, String>> stepDownCoolDown = new HashMap<String, Map<String, String>>();
    stepDownCoolDown.put("stepDownCoolDownSecs_Min", limitEntry(value_min_stepDownCoolDownSecs, "{PolicyTrigger.stepDownCoolDownSecs.Min}"));
    stepDownCoolDown.put("stepDownCoolDownSecs_Max", limitEntry(value_max_stepDownCoolDownSecs, "{PolicyTrigger.stepDownCoolDownSecs.Max}"));
    range.put("stepDownCoolDownSecs", stepDownCoolDown);

    Map<String, Map<String, String>> stepUpCoolDown = new HashMap<String, Map<String, String>>();
    stepUpCoolDown.put("stepUpCoolDownSecs_Min", limitEntry(value_min_stepUpCoolDownSecs, "{PolicyTrigger.stepUpCoolDownSecs.Min}"));
    stepUpCoolDown.put("stepUpCoolDownSecs_Max", limitEntry(value_max_stepUpCoolDownSecs, "{PolicyTrigger.stepUpCoolDownSecs.Max}"));
    range.put("stepUpCoolDownSecs", stepUpCoolDown);

    return range;
}

// Builds one {"value": <number as string>, "message": <i18n key>} leaf map;
// replaces the copy-pasted two-put blocks of the original implementation.
private static Map<String, String> limitEntry(int value, String message) {
    Map<String, String> entry = new HashMap<String, String>();
    entry.put("value", String.valueOf(value));
    entry.put("message", message);
    return entry;
}
/**
 * Builds the validation-range table for the metric threshold settings,
 * keyed by metric type (currently only "Memory").
 *
 * Returned structure: metricType -> limitName
 * ("<metric>_lowerThreshold_Min" etc.) ->
 * {"value": numeric string, "message": i18n message key}.
 */
public static Map<String, Map<String, Map<String, String>>> getThresholdRange()
{
    Map<String, Map<String, Map<String, String>>> range = new HashMap<String, Map<String, Map<String, String>>>();

    Map<String, Map<String, String>> memory = new HashMap<String, Map<String, String>>();
    memory.put("Memory_lowerThreshold_Min", thresholdEntry(value_min_lowerThreshold_Memory, "{PolicyTrigger.lowerThreshold.Min}"));
    memory.put("Memory_lowerThreshold_Max", thresholdEntry(value_max_lowerThreshold_Memory, "{PolicyTrigger.lowerThreshold.Max}"));
    // The upper-threshold minimum is intentionally the literal 1: there is no
    // value_min_upperThreshold_Memory field (behavior kept from the original).
    memory.put("Memory_upperThreshold_Min", thresholdEntry(1, "{PolicyTrigger.upperThreshold.Min}"));
    memory.put("Memory_upperThreshold_Max", thresholdEntry(value_max_upperThreshold_Memory, "{PolicyTrigger.upperThreshold.Max}"));
    range.put("Memory", memory);

    return range;
}

// Builds one {"value": <number as string>, "message": <i18n key>} leaf map.
private static Map<String, String> thresholdEntry(int value, String message) {
    Map<String, String> entry = new HashMap<String, String>();
    entry.put("value", String.valueOf(value));
    entry.put("message", message);
    return entry;
}
/**
 * Builds the full per-trigger-type validation-range table: the common ranges
 * from getTriggerRange() extended with the memory lower/upper threshold
 * limits, published under the single key "trigger_Memory".
 */
public static Map<String, Map<String, Map<String, Map<String, String>>>> getTriggerRangeByTriggerType()
{
    // Start from the limits shared by every trigger type, then extend in place.
    Map<String, Map<String, Map<String, String>>> range = getTriggerRange();

    Map<String, String> lowerMin = new HashMap<String, String>();
    lowerMin.put("value", String.valueOf(value_min_lowerThreshold_Memory));
    lowerMin.put("message", "{PolicyTrigger.lowerThreshold.Min}");
    Map<String, String> lowerMax = new HashMap<String, String>();
    lowerMax.put("value", String.valueOf(value_max_lowerThreshold_Memory));
    lowerMax.put("message", "{PolicyTrigger.lowerThreshold.Max}");
    Map<String, Map<String, String>> lowerThreshold = new HashMap<String, Map<String, String>>();
    lowerThreshold.put("lowerThreshold_Min", lowerMin);
    lowerThreshold.put("lowerThreshold_Max", lowerMax);
    range.put("lowerThreshold", lowerThreshold);

    Map<String, String> upperMin = new HashMap<String, String>();
    upperMin.put("value", String.valueOf(1));
    upperMin.put("message", "{PolicyTrigger.upperThreshold.Min}");
    Map<String, String> upperMax = new HashMap<String, String>();
    upperMax.put("value", String.valueOf(value_max_upperThreshold_Memory));
    upperMax.put("message", "{PolicyTrigger.upperThreshold.Max}");
    Map<String, Map<String, String>> upperThreshold = new HashMap<String, Map<String, String>>();
    upperThreshold.put("upperThreshold_Min", upperMin);
    upperThreshold.put("upperThreshold_Max", upperMax);
    range.put("upperThreshold", upperThreshold);

    Map<String, Map<String, Map<String, Map<String, String>>>> byTriggerType = new HashMap<String, Map<String, Map<String, Map<String, String>>>>();
    byTriggerType.put("trigger_Memory", range);
    return byTriggerType;
}
/**
 * Runtime update of the trigger bounds after the reporting interval changes.
 * Refreshes every value derived from REPORTINTERVAL.
 *
 * @param interval the new reporting interval (seconds, per the field comments)
 */
public static void updateTriggerRange(int interval) //runtime update trigger value
{
    REPORTINTERVAL = interval;
    value_metric_refresh_interval = REPORTINTERVAL * 1000;
    value_min_statwindow = REPORTINTERVAL;
    value_min_breachDuration = REPORTINTERVAL;
    value_min_stepUpCoolDownSecs = REPORTINTERVAL;
    value_min_stepDownCoolDownSecs = REPORTINTERVAL;
    metricTimeRange = REPORTINTERVAL * 60 / 60;
    // Fix: keep the millisecond chart range in sync with metricTimeRange.
    // Previously it kept the value computed from the startup interval, so the
    // two fields disagreed after a runtime interval change.
    metricTimeRangeByMilliseconds = metricTimeRange * 60 * 1000;
}
/**
 * Returns the metric types supported by the given application type.
 *
 * @param appType one of the APPTYPE constants
 * @return the app-type-specific metric set, the DEFAULT set when the type is
 *         recognized but has no dedicated entry, or null for unknown types
 */
public static String [] getMetricTypeByAppType(String appType) {
    logger.debug("getMetricTypeByAppType for appType: " + appType);
    if (Arrays.asList(APPTYPE).contains(appType)) {
        // The map is typed Map<String, String[]>, so no cast is needed
        // (the original had a redundant (String[]) cast here).
        String[] specified_metric = appType_metric_mapping.get(appType);
        if (specified_metric != null) {
            logger.debug("found metric array for appType: " + appType);
            return specified_metric;
        }
        return appType_metric_mapping.get(APP_TYPE_DEFAULT);
    }
    // Unknown app type: no metrics.
    return null;
}
// Returns the built-in default for the given trigger setting key.
// Note: unboxing throws NullPointerException if the key is absent from
// trigger_default, matching the original behavior.
public static int getTriggerDefaultInt(String key) {
    Integer defaultValue = trigger_default.get(key);
    return defaultValue;
}
}
| |
/*
* Copyright 2021 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.config;
import com.thoughtworks.go.CurrentGoCDVersion;
import com.thoughtworks.go.config.commands.EntityConfigUpdateCommand;
import com.thoughtworks.go.config.exceptions.*;
import com.thoughtworks.go.config.registry.ConfigElementImplementationRegistry;
import com.thoughtworks.go.config.remote.PartialConfig;
import com.thoughtworks.go.config.update.FullConfigUpdateCommand;
import com.thoughtworks.go.domain.GoConfigRevision;
import com.thoughtworks.go.server.domain.Username;
import com.thoughtworks.go.service.ConfigRepository;
import com.thoughtworks.go.util.CachedDigestUtils;
import com.thoughtworks.go.util.SystemEnvironment;
import com.thoughtworks.go.util.TimeProvider;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.output.ByteArrayOutputStream;
import org.jetbrains.annotations.TestOnly;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import static com.thoughtworks.go.util.ExceptionUtils.bomb;
/**
* This class find the location of cruise-config.xml and turn that into stream
* and passing it into MagicLoader or MagicWriter.
*/
@Component
public class GoFileConfigDataSource {
// Source identifier passed to the full-config save flows when a change
// originates from the local cruise-config.xml file.
public static final String FILESYSTEM = "Filesystem";
private static final Logger LOGGER = LoggerFactory.getLogger(GoFileConfigDataSource.class.getName());
// Charset used when reading the config file in forceLoad(File).
private final Charset UTF_8 = StandardCharsets.UTF_8;
// Cache of config-repo partials (last known vs. last valid) used for merge fallbacks.
private final CachedGoPartials cachedGoPartials;
// XML serializer/deserializer pair for cruise-config.xml.
private final MagicalGoConfigXmlWriter magicalGoConfigXmlWriter;
private final MagicalGoConfigXmlLoader magicalGoConfigXmlLoader;
// Versioned repository of config revisions.
private final ConfigRepository configRepository;
private final TimeProvider timeProvider;
// Save pipelines: merge flow for concurrent edits, normal flow otherwise.
private final FullConfigSaveMergeFlow fullConfigSaveMergeFlow;
private final FullConfigSaveNormalFlow fullConfigSaveNormalFlow;
private final SystemEnvironment systemEnvironment;
// Migrates older config-file schema versions (see write()).
private final GoConfigMigration upgrader;
private final GoConfigCloner cloner = new GoConfigCloner();
private final GoConfigFileReader goConfigFileReader;
private final GoConfigFileWriter goConfigFileWriter;
private final PartialConfigHelper partials;
// Decides whether the file must be re-read; by default only when both its
// attributes and its content have changed. Replaceable via reloadEveryTime().
private ReloadStrategy reloadStrategy = new ReloadIfModified();
/* Will only upgrade cruise config file on application startup. */
// Spring injection entry point: constructs the XML loader/writer and the
// config-file reader/writer, then delegates to the full constructor below.
@Autowired
public GoFileConfigDataSource(GoConfigMigration upgrader, ConfigRepository configRepository, SystemEnvironment systemEnvironment,
                              TimeProvider timeProvider, ConfigCache configCache,
                              ConfigElementImplementationRegistry configElementImplementationRegistry,
                              CachedGoPartials cachedGoPartials,
                              FullConfigSaveMergeFlow fullConfigSaveMergeFlow, FullConfigSaveNormalFlow fullConfigSaveNormalFlow, PartialConfigHelper partials) {
    this(upgrader, configRepository, systemEnvironment, timeProvider,
            new MagicalGoConfigXmlLoader(configCache, configElementImplementationRegistry),
            new MagicalGoConfigXmlWriter(configCache, configElementImplementationRegistry),
            cachedGoPartials, fullConfigSaveMergeFlow, fullConfigSaveNormalFlow,
            new GoConfigFileReader(systemEnvironment), new GoConfigFileWriter(systemEnvironment), partials);
}
// Full-injection constructor (package-private): simply stores the supplied
// collaborators; no I/O or validation happens here.
GoFileConfigDataSource(GoConfigMigration upgrader, ConfigRepository configRepository, SystemEnvironment systemEnvironment,
                       TimeProvider timeProvider, MagicalGoConfigXmlLoader magicalGoConfigXmlLoader,
                       MagicalGoConfigXmlWriter magicalGoConfigXmlWriter,
                       CachedGoPartials cachedGoPartials, FullConfigSaveMergeFlow fullConfigSaveMergeFlow,
                       FullConfigSaveNormalFlow fullConfigSaveNormalFlow, GoConfigFileReader goConfigFileReader,
                       GoConfigFileWriter goConfigFileWriter, PartialConfigHelper partials) {
    this.configRepository = configRepository;
    this.systemEnvironment = systemEnvironment;
    this.upgrader = upgrader;
    this.timeProvider = timeProvider;
    this.magicalGoConfigXmlLoader = magicalGoConfigXmlLoader;
    this.magicalGoConfigXmlWriter = magicalGoConfigXmlWriter;
    this.cachedGoPartials = cachedGoPartials;
    this.fullConfigSaveMergeFlow = fullConfigSaveMergeFlow;
    this.fullConfigSaveNormalFlow = fullConfigSaveNormalFlow;
    this.goConfigFileReader = goConfigFileReader;
    this.goConfigFileWriter = goConfigFileWriter;
    this.partials = partials;
}
// Strategy deciding whether cruise-config.xml must be re-read from disk.
// Implementations: AlwaysReload (unconditional) and ReloadIfModified
// (attribute + content change detection).
private interface ReloadStrategy {
    // Immutable result of one reload check: the decision plus the file
    // attributes (size, mtime) observed while making it.
    class ReloadTestResult {
        final boolean requiresReload;
        final long fileSize;
        final long modifiedTime;

        public ReloadTestResult(boolean requiresReload, long fileSize, long modifiedTime) {
            this.requiresReload = requiresReload;
            this.fileSize = fileSize;
            this.modifiedTime = modifiedTime;
        }
    }

    // Decides whether the given config file should be reloaded.
    ReloadTestResult requiresReload(File configFile);

    // Records state from the most recently loaded config (e.g. its md5).
    void latestState(CruiseConfig config);

    // Invoked when the check concluded that no reload is needed.
    void hasLatest(ReloadTestResult reloadTestResult);

    // Invoked immediately before a reload is performed.
    void performingReload(ReloadTestResult reloadTestResult);
}
// Value object pairing the validated config holder with the way the save was
// applied (ConfigSaveState.MERGED vs. UPDATED — see writeWithLock).
static class GoConfigSaveResult {
    private final GoConfigHolder configHolder;
    private final ConfigSaveState configSaveState;

    GoConfigSaveResult(GoConfigHolder configHolder, ConfigSaveState configSaveState) {
        this.configHolder = configHolder;
        this.configSaveState = configSaveState;
    }

    public GoConfigHolder getConfigHolder() {
        return configHolder;
    }

    public ConfigSaveState getConfigSaveState() {
        return configSaveState;
    }
}
// Strategy that always reports the config file as requiring a reload and
// keeps no state about previously observed file attributes.
private static class AlwaysReload implements ReloadStrategy {
    @Override
    public ReloadTestResult requiresReload(File configFile) {
        // Size/mtime are irrelevant for an unconditional reload; report zeros.
        return new ReloadTestResult(true, 0, 0);
    }

    @Override
    public void latestState(CruiseConfig config) {
    }

    @Override
    public void hasLatest(ReloadTestResult result) {
    }

    @Override
    public void performingReload(ReloadTestResult result) {
    }
}
// Reloads only when the file's attributes (mtime or size) differ from the last
// seen values AND its content md5 differs from the last loaded config, so a
// mere "touch" without a content change does not trigger a reload.
static class ReloadIfModified implements ReloadStrategy {
    private long lastModified;
    private long prevSize;
    // md5 of the last successfully loaded config; volatile because it is
    // written via latestState() and read by doesFileContentDiffer(), possibly
    // from different threads.
    private volatile String md5 = "";

    @Override
    public ReloadTestResult requiresReload(File configFile) {
        long lastModified = lastModified(configFile);
        long length = length(configFile);
        boolean requiresReload = requiresReload(configFile, lastModified, length);
        return new ReloadTestResult(requiresReload, length, lastModified);
    }

    @Override
    public void latestState(CruiseConfig config) {
        md5 = config.getMd5();
    }

    @Override
    public void hasLatest(ReloadTestResult result) {
        rememberLatestFileAttributes(result);
    }

    @Override
    public void performingReload(ReloadTestResult result) {
        rememberLatestFileAttributes(result);
    }

    boolean doesFileContentDiffer(File configFile) {
        return !md5.equals(getConfigFileMd5(configFile));
    }

    boolean doFileAttributesDiffer(long currentLastModified, long currentSize) {
        return currentLastModified != lastModified ||
                prevSize != currentSize;
    }

    private long length(File configFile) {
        return configFile.length();
    }

    private long lastModified(File configFile) {
        return configFile.lastModified();
    }

    private boolean requiresReload(File configFile, long currentLastModified, long currentSize) {
        // The cheap attribute check short-circuits the md5 computation: the
        // file content is only hashed when mtime or size actually changed.
        return doFileAttributesDiffer(currentLastModified, currentSize) && doesFileContentDiffer(configFile);
    }

    // Hashes the file content; an IOException is wrapped (cause preserved).
    private String getConfigFileMd5(File configFile) {
        String newMd5;
        try (FileInputStream inputStream = new FileInputStream(configFile)) {
            newMd5 = CachedDigestUtils.md5Hex(inputStream);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        return newMd5;
    }

    // Updates both attribute fields atomically with respect to each other.
    private void rememberLatestFileAttributes(ReloadTestResult result) {
        synchronized (this) {
            lastModified = result.modifiedTime;
            prevSize = result.fileSize;
        }
    }
}
// Switches the reload strategy so every load() call re-reads the file,
// bypassing attribute/md5 caching. Returns this for chaining.
public GoFileConfigDataSource reloadEveryTime() {
    this.reloadStrategy = new AlwaysReload();
    return this;
}
// Restores the default strategy that reloads only on a detected attribute and
// content change. Returns this for chaining.
public GoFileConfigDataSource reloadIfModified() {
    this.reloadStrategy = new ReloadIfModified();
    return this;
}
// Location of cruise-config.xml as configured in the system environment.
public File fileLocation() {
    String configFilePath = systemEnvironment.getCruiseConfigFile();
    return new File(configFilePath);
}
/**
 * Loads the config from disk if it changed since the last load.
 *
 * @return the freshly loaded holder, or null when the on-disk file is
 *         unchanged (callers keep their current config)
 * @throws Exception when the file cannot be loaded (propagated from forceLoad)
 */
public GoConfigHolder load() throws Exception {
    File configFile = fileLocation();
    // Cheap unsynchronized fast path: skip the lock entirely when the file
    // has not changed.
    ReloadStrategy.ReloadTestResult result = reloadStrategy.requiresReload(configFile);
    if (!result.requiresReload) {
        reloadStrategy.hasLatest(result);
        return null;
    }
    synchronized (this) {
        // Re-check under the lock so concurrent callers do not reload the
        // same change twice (double-checked reload).
        result = reloadStrategy.requiresReload(configFile);
        if (!result.requiresReload) {
            reloadStrategy.hasLatest(result);
            return null;
        }
        reloadStrategy.performingReload(result);

        LOGGER.info("Config file changed at {}", result.modifiedTime);
        LOGGER.info("Reloading config file: {}", configFile);

        LOGGER.debug("Starting config reload using the optimized flow.");
        return forceLoad();
    }
}
/**
 * Test-only helper: optionally migrates the given raw config XML to the
 * current schema, loads/validates it, then writes the serialized result back
 * to cruise-config.xml.
 */
@Deprecated
@TestOnly
public synchronized GoConfigHolder write(String configFileContent, boolean shouldMigrate) throws Exception {
    File configFile = fileLocation();
    try {
        String content = shouldMigrate ? upgrader.upgradeIfNecessary(configFileContent) : configFileContent;
        GoConfigHolder configHolder = internalLoad(content, new ConfigModifyingUser(), new ArrayList<>());
        String xml = configAsXml(configHolder.configForEdit, false);
        LOGGER.trace("Writing config file: {}", configFile.getAbsolutePath());
        writeToConfigXmlFile(xml);
        return configHolder;
    } catch (Exception e) {
        LOGGER.error("Unable to write config file: {}\n{}", configFile.getAbsolutePath(), e.getMessage(), e);
        throw e;
    }
}
/**
 * Applies an entity-level config change under the data-source lock.
 *
 * The command mutates a deep clone of the editable config, so the caller's
 * holder is untouched until a save succeeds. Save strategy: try with the
 * LAST VALID config-repo partials first; when the merged result is invalid
 * and the known partials differ from the valid ones, fall back to a full XML
 * round-trip using the LAST KNOWN partials.
 *
 * @throws GoConfigInvalidException when both attempts fail; its message
 *         aggregates the errors from both attempts
 */
public synchronized EntityConfigSaveResult writeEntityWithLock(EntityConfigUpdateCommand updatingCommand, GoConfigHolder configHolder, Username currentUser) {
    CruiseConfig modifiedConfig = cloner.deepClone(configHolder.configForEdit);
    try {
        updatingCommand.update(modifiedConfig);
    } catch (Exception e) {
        // bomb wraps and rethrows; nothing below runs on failure.
        bomb(e);
    }
    List<PartialConfig> lastValidPartials = cachedGoPartials.lastValidPartials();
    List<PartialConfig> lastKnownPartials = cachedGoPartials.lastKnownPartials();
    // Fast path: no fallback possible/needed when there are no partials or
    // the known partials already equal the valid ones.
    if (lastKnownPartials.isEmpty() || areKnownPartialsSameAsValidPartials(lastKnownPartials, lastValidPartials)) {
        return trySavingEntity(updatingCommand, currentUser, modifiedConfig, lastValidPartials);
    }
    try {
        return trySavingEntity(updatingCommand, currentUser, modifiedConfig, lastValidPartials);
    } catch (GoConfigInvalidException e) {
        // First attempt (valid partials) failed: retry via a full XML
        // round-trip with the last KNOWN partials.
        StringBuilder errorMessageBuilder = new StringBuilder();
        try {
            String message = String.format(
                    "Merged update operation failed on VALID %s partials. Falling back to using LAST KNOWN %s partials. Exception message was: [%s %s]",
                    lastValidPartials.size(), lastKnownPartials.size(), e.getMessage(), e.getAllErrorMessages());
            errorMessageBuilder.append(message);
            LOGGER.warn(message, e);
            updatingCommand.clearErrors();
            modifiedConfig.setPartials(lastKnownPartials);
            String configAsXml = configAsXml(modifiedConfig, false);
            GoConfigHolder holder = internalLoad(configAsXml, new ConfigModifyingUser(currentUser.getUsername().toString()), lastKnownPartials);
            LOGGER.info("Update operation on merged configuration succeeded with {} KNOWN partials. Now there are {} LAST KNOWN partials",
                    lastKnownPartials.size(), cachedGoPartials.lastKnownPartials().size());
            return new EntityConfigSaveResult(holder.config, holder);
        } catch (Exception exceptionDuringFallbackValidation) {
            // Both attempts failed: surface an aggregate of both error messages.
            String message = String.format(
                    "Merged config update operation failed using fallback LAST KNOWN %s partials. Exception message was: %s",
                    lastKnownPartials.size(), exceptionDuringFallbackValidation.getMessage());
            LOGGER.warn(message, exceptionDuringFallbackValidation);
            errorMessageBuilder.append(System.lineSeparator());
            errorMessageBuilder.append(message);
            throw new GoConfigInvalidException(e.getCruiseConfig(), errorMessageBuilder.toString());
        }
    }
}
// This method should be removed once we have API's for all entities which should use writeEntityWithLock and full config save should use writeFullConfigWithLock
/**
 * Applies a whole-config mutation under the data-source lock.
 *
 * Save strategy: try with the LATEST known config-repo partials; if that
 * fails and the known partials differ from the last valid ones, retry with
 * the LAST VALID partials before giving up.
 */
@Deprecated
public synchronized GoConfigSaveResult writeWithLock(UpdateConfigCommand updatingCommand, GoConfigHolder configHolder) {
    try {
        // Need to convert to xml before we try to write it to the config file.
        // If our cruiseConfig fails XSD validation, we don't want to write it incorrectly.
        GoConfigHolder validatedConfigHolder;

        List<PartialConfig> lastKnownPartials = cachedGoPartials.lastKnownPartials();
        List<PartialConfig> lastValidPartials = cachedGoPartials.lastValidPartials();
        try {
            validatedConfigHolder = trySavingConfig(updatingCommand, configHolder, lastKnownPartials);
            updateMergedConfigForEdit(validatedConfigHolder, lastKnownPartials);
        } catch (Exception e) {
            // A fallback only makes sense when the valid partials actually
            // differ from the ones that just failed.
            if (lastKnownPartials.isEmpty() || areKnownPartialsSameAsValidPartials(lastKnownPartials, lastValidPartials)) {
                throw e;
            } else {
                LOGGER.warn("Merged config update operation failed on LATEST {} partials. Falling back to using LAST VALID {} partials. Exception message was: {}", lastKnownPartials.size(), lastValidPartials.size(), e.getMessage(), e);
                try {
                    validatedConfigHolder = trySavingConfig(updatingCommand, configHolder, lastValidPartials);
                    updateMergedConfigForEdit(validatedConfigHolder, lastValidPartials);
                    LOGGER.info("Update operation on merged configuration succeeded with old {} LAST VALID partials.", lastValidPartials.size());
                } catch (GoConfigInvalidException fallbackFailed) {
                    LOGGER.warn("Merged config update operation failed using fallback LAST VALID {} partials. Exception message was: {}", lastValidPartials.size(), fallbackFailed.getMessage(), fallbackFailed);
                    throw new GoConfigInvalidMergeException(lastValidPartials, fallbackFailed);
                }
            }
        }
        // MERGED vs UPDATED is decided by shouldMergeConfig — NOTE(review):
        // presumably concurrent-edit detection; confirm in that method.
        ConfigSaveState configSaveState = shouldMergeConfig(updatingCommand, configHolder) ? ConfigSaveState.MERGED : ConfigSaveState.UPDATED;
        return new GoConfigSaveResult(validatedConfigHolder, configSaveState);
    } catch (ConfigFileHasChangedException e) {
        LOGGER.warn("Configuration file could not be merged successfully after a concurrent edit: {}", e.getMessage(), e);
        throw e;
    } catch (GoConfigInvalidException e) {
        LOGGER.warn("Configuration file is invalid: {}", e.getMessage(), e);
        throw bomb(e.getMessage(), e);
    } catch (Exception e) {
        LOGGER.error("Configuration file is not valid: {}", e.getMessage(), e);
        throw bomb(e.getMessage(), e);
    } finally {
        LOGGER.debug("[Config Save] Done writing with lock");
    }
}
/**
 * Saves a full config replacement under the data-source lock.
 *
 * Save strategy mirrors writeWithLock: try with the LAST KNOWN config-repo
 * partials; on failure, retry with the LAST VALID partials when
 * canUpdateConfigWithLastValidPartials() allows it.
 */
public synchronized GoConfigSaveResult writeFullConfigWithLock(FullConfigUpdateCommand updatingCommand, GoConfigHolder configHolder) {
    try {
        GoConfigHolder validatedConfigHolder;
        try {
            validatedConfigHolder = trySavingConfigWithLastKnownPartials(updatingCommand, configHolder);
        } catch (Exception e) {
            if (!canUpdateConfigWithLastValidPartials())
                throw e;
            LOGGER.warn("Merged config update operation failed on LATEST {} partials. Falling back to using LAST VALID {} partials. Exception message was: {}", cachedGoPartials.lastKnownPartials().size(), cachedGoPartials.lastValidPartials().size(), e.getMessage(), e);

            validatedConfigHolder = trySavingConfigWithLastValidPartials(updatingCommand, configHolder);
        }
        // MERGED vs UPDATED is decided by shouldMergeConfig (see writeWithLock).
        ConfigSaveState configSaveState = shouldMergeConfig(updatingCommand, configHolder) ? ConfigSaveState.MERGED : ConfigSaveState.UPDATED;
        return new GoConfigSaveResult(validatedConfigHolder, configSaveState);
    } catch (ConfigFileHasChangedException e) {
        LOGGER.warn("Configuration file could not be merged successfully after a concurrent edit: {}", e.getMessage(), e);
        throw e;
    } catch (GoConfigInvalidException e) {
        LOGGER.warn("Configuration file is invalid: {}", e.getMessage(), e);
        throw bomb(e.getMessage(), e);
    } catch (Exception e) {
        LOGGER.error("Configuration file is not valid: {}", e.getMessage(), e);
        throw bomb(e.getMessage(), e);
    } finally {
        LOGGER.debug("[Config Save] Done writing with lock");
    }
}
/**
 * Serializes the given config to its XML representation.
 *
 * @param config                             the config to serialize
 * @param skipPreprocessingAndValidation     when true, writes the config as-is
 *                                           without template expansion or validation
 * @return the config XML as a string
 * @throws Exception if serialization (or validation, when enabled) fails
 */
public String configAsXml(CruiseConfig config, boolean skipPreprocessingAndValidation) throws Exception {
    LOGGER.debug("[Config Save] === Converting config to XML");
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    magicalGoConfigXmlWriter.write(config, outputStream, skipPreprocessingAndValidation);
    LOGGER.debug("[Config Save] === Done converting config to XML");
    // Decode explicitly as UTF-8: the rest of this class reads/writes the config
    // file as UTF-8, and the no-arg toString() would use the platform default
    // charset, corrupting non-ASCII content on non-UTF-8 platforms.
    return outputStream.toString(StandardCharsets.UTF_8.name());
}
/** @return the absolute filesystem path of the main cruise config XML file. */
public String getFileLocation() {
    File configFile = fileLocation();
    return configFile.getAbsolutePath();
}
/**
 * Reloads the configuration from the XML file on disk and rebuilds the
 * {@link GoConfigHolder}, merging in config-repo partials. Tries the last
 * KNOWN partials first and falls back to the last VALID partials when the
 * first attempt fails validation (and the two sets differ). On success the
 * reload strategy is updated with the freshly loaded state.
 *
 * @throws Exception if the file cannot be read, parsed or validated; the raw
 *         file content is logged to aid debugging
 */
synchronized GoConfigHolder forceLoad() throws Exception {
    File configFile = goConfigFileReader.fileLocation();
    CruiseConfig cruiseConfig = this.magicalGoConfigXmlLoader.deserializeConfig(goConfigFileReader.configXml());
    LOGGER.debug("Reloading config file: {}", configFile.getAbsolutePath());
    GoConfigHolder goConfigHolder;
    try {
        try {
            goConfigHolder = fullConfigSaveNormalFlow.execute(new FullConfigUpdateCommand(cruiseConfig, null), cachedGoPartials.lastKnownPartials(), FILESYSTEM);
        } catch (GoConfigInvalidException e) {
            // Retry with the known-good partials only if they differ from the failed set.
            if (!canUpdateConfigWithLastValidPartials())
                throw e;
            goConfigHolder = fullConfigSaveNormalFlow.execute(new FullConfigUpdateCommand(cruiseConfig, null), cachedGoPartials.lastValidPartials(), FILESYSTEM);
        }
        reloadStrategy.latestState(goConfigHolder.config);
        return goConfigHolder;
    } catch (Exception e) {
        LOGGER.error("Unable to load config file: {} {}", configFile.getAbsolutePath(), e.getMessage(), e);
        if (configFile.exists()) {
            // Dump the raw file so a malformed config can be inspected from the logs.
            LOGGER.warn("--- {} ---", configFile.getAbsolutePath());
            LOGGER.warn(FileUtils.readFileToString(configFile, StandardCharsets.UTF_8));
            LOGGER.warn("------");
        }
        LOGGER.debug("", e);
        throw e;
    }
}
@TestOnly
/**
 * Test-only variant of {@link #forceLoad()} that loads from an explicit file
 * instead of the configured location. Uses the same known-partials-first,
 * valid-partials-fallback strategy, but goes through {@code internalLoad}
 * (which also checks the result into the config git repository).
 *
 * @param configFile the config XML file to load
 * @throws Exception if the file cannot be read, parsed or validated
 */
synchronized GoConfigHolder forceLoad(File configFile) throws Exception {
    LOGGER.debug("Reloading config file: {}", configFile.getAbsolutePath());
    GoConfigHolder holder;
    try {
        try {
            // Deep-clone the cached partials so the load cannot mutate the shared cache.
            List<PartialConfig> lastKnownPartials = cloner.deepClone(cachedGoPartials.lastKnownPartials());
            holder = internalLoad(FileUtils.readFileToString(configFile, UTF_8), new ConfigModifyingUser(FILESYSTEM), lastKnownPartials);
        } catch (GoConfigInvalidException e) {
            if (!canUpdateConfigWithLastValidPartials()) {
                throw e;
            } else {
                List<PartialConfig> lastValidPartials = cloner.deepClone(cachedGoPartials.lastValidPartials());
                holder = internalLoad(FileUtils.readFileToString(configFile, UTF_8), new ConfigModifyingUser(FILESYSTEM), lastValidPartials);
            }
        }
        return holder;
    } catch (Exception e) {
        LOGGER.error("Unable to load config file: {} {}", configFile.getAbsolutePath(), e.getMessage(), e);
        if (configFile.exists()) {
            // Dump the raw file so a malformed config can be inspected from the logs.
            LOGGER.warn("--- {} ---", configFile.getAbsolutePath());
            LOGGER.warn(FileUtils.readFileToString(configFile, StandardCharsets.UTF_8));
            LOGGER.warn("------");
        }
        LOGGER.debug("", e);
        throw e;
    }
}
/**
 * Reports whether the last-known and last-valid config-repo partial sets are
 * equivalent, i.e. a fallback save with the valid set would be a no-op retry.
 */
protected boolean areKnownPartialsSameAsValidPartials(List<PartialConfig> lastKnownPartials, List<PartialConfig> lastValidPartials) {
    final boolean equivalent = partials.isEquivalent(lastKnownPartials, lastValidPartials);
    return equivalent;
}
/** Persists the given XML to the main config file (delegates to the file writer). */
private void writeToConfigXmlFile(String content) {
    this.goConfigFileWriter.writeToConfigXmlFile(content);
}
/**
 * Validates and persists a single-entity config change: validates against a
 * preprocessed deep copy (so the copy written to disk stays unexpanded), writes
 * the XML, checks it into the config git repository, and rebuilds the holder.
 *
 * @param updatingCommand the entity update to apply (already applied to {@code modifiedConfig})
 * @param currentUser     user recorded in the config revision
 * @param modifiedConfig  the editable config with the entity change applied
 * @param partials        config-repo partials to attach and mark valid on success
 * @return the saved entity plus the new {@link GoConfigHolder}
 * @throws GoConfigInvalidException when the preprocessed config fails validation
 * @throws RuntimeException         when persisting the valid config fails
 */
private EntityConfigSaveResult trySavingEntity(EntityConfigUpdateCommand updatingCommand, Username currentUser, CruiseConfig modifiedConfig, List<PartialConfig> partials) {
    modifiedConfig.setPartials(partials);
    // Validate against a preprocessed (templates/params expanded) deep copy so the
    // config that gets serialized to disk remains in its unexpanded form.
    CruiseConfig preprocessedConfig = cloner.deepClone(modifiedConfig);
    MagicalGoConfigXmlLoader.preprocess(preprocessedConfig);
    updatingCommand.encrypt(preprocessedConfig);
    if (!updatingCommand.isValid(preprocessedConfig)) {
        throw new GoConfigInvalidException(preprocessedConfig, "Validation failed.");
    }
    try {
        LOGGER.info("[Configuration Changed] Saving updated configuration.");
        String configAsXml = configAsXml(modifiedConfig, true);
        // Both copies carry the md5 of the XML actually written to disk.
        String md5 = CachedDigestUtils.md5Hex(configAsXml);
        MagicalGoConfigXmlLoader.setMd5(modifiedConfig, md5);
        MagicalGoConfigXmlLoader.setMd5(preprocessedConfig, md5);
        writeToConfigXmlFile(configAsXml);
        checkinConfigToGitRepo(partials, preprocessedConfig, configAsXml, md5, currentUser.getUsername().toString());
        LOGGER.debug("[Config Save] Done writing with lock");
        CruiseConfig mergedCruiseConfigForEdit = modifiedConfig;
        if (!partials.isEmpty()) {
            // The merged-for-edit view additionally folds the partials into the config.
            LOGGER.debug("[Config Save] Updating GoConfigHolder with mergedCruiseConfigForEdit: Starting.");
            mergedCruiseConfigForEdit = cloner.deepClone(modifiedConfig);
            mergedCruiseConfigForEdit.merge(partials, true);
            LOGGER.debug("[Config Save] Updating GoConfigHolder with mergedCruiseConfigForEdit: Done.");
        }
        return new EntityConfigSaveResult(updatingCommand.getPreprocessedEntityConfig(), new GoConfigHolder(preprocessedConfig, modifiedConfig, mergedCruiseConfigForEdit));
    } catch (Exception e) {
        // Chain the original exception so the stack trace is not discarded.
        throw new RuntimeException("failed to save : " + e.getMessage(), e);
    }
}
/**
 * Primary save path: attempts the full-config save using the LAST KNOWN
 * (most recently fetched, possibly not-yet-validated) config-repo partials.
 */
private GoConfigHolder trySavingConfigWithLastKnownPartials(FullConfigUpdateCommand updateCommand, GoConfigHolder configHolder) throws Exception {
    LOGGER.debug("[Config Save] Trying to save config with Last Known Partials");
    return trySavingFullConfig(updateCommand, configHolder, cachedGoPartials.lastKnownPartials());
}
/**
 * Fallback save path: retries the full-config save using the LAST VALID set of
 * config-repo partials (the set that most recently passed validation).
 *
 * @throws GoConfigInvalidMergeException when even the valid partials fail, so the
 *         caller can distinguish a fallback failure from a first-attempt failure
 */
private GoConfigHolder trySavingConfigWithLastValidPartials(FullConfigUpdateCommand updateCommand, GoConfigHolder configHolder) throws Exception {
    List<PartialConfig> lastValidPartials = cachedGoPartials.lastValidPartials();
    GoConfigHolder goConfigHolder;
    try {
        // Use the snapshot taken above so the partials we save are guaranteed to be
        // the same set we log and report on failure (the cache may change concurrently).
        goConfigHolder = trySavingFullConfig(updateCommand, configHolder, lastValidPartials);
        LOGGER.debug("Update operation on merged configuration succeeded with old {} LAST VALID partials.", lastValidPartials.size());
    } catch (GoConfigInvalidException fallbackFailed) {
        LOGGER.warn("Merged config update operation failed using fallback LAST VALID {} partials. Exception message was: {}", lastValidPartials.size(), fallbackFailed.getMessage(), fallbackFailed);
        throw new GoConfigInvalidMergeException(lastValidPartials, fallbackFailed);
    }
    return goConfigHolder;
}
/**
 * A fallback to the last VALID partials is worthwhile only when there are known
 * partials at all and the valid set actually differs from the known set.
 */
private boolean canUpdateConfigWithLastValidPartials() {
    List<PartialConfig> known = cachedGoPartials.lastKnownPartials();
    if (known.isEmpty()) {
        return false;
    }
    List<PartialConfig> valid = cachedGoPartials.lastValidPartials();
    return !areKnownPartialsSameAsValidPartials(known, valid);
}
/**
 * Refreshes the holder's merged-for-edit view: a deep copy of the editable
 * config with the given partials folded in. Does nothing when there are no
 * partials (the plain configForEdit is then sufficient).
 */
private void updateMergedConfigForEdit(GoConfigHolder validatedConfigHolder, List<PartialConfig> partialConfigs) {
    if (!partialConfigs.isEmpty()) {
        CruiseConfig merged = cloner.deepClone(validatedConfigHolder.configForEdit);
        merged.merge(partialConfigs, true);
        validatedConfigHolder.mergedConfigForEdit = merged;
    }
}
/**
 * Executes a full-config save, choosing the merge flow when the command's
 * baseline md5 is stale (a concurrent edit happened) and the normal flow
 * otherwise. On success the reload strategy is told about the new state.
 *
 * @throws ConfigMergeException when a merge would be required but the config
 *         merge feature is disabled
 */
private GoConfigHolder trySavingFullConfig(FullConfigUpdateCommand updatingCommand, GoConfigHolder configHolder, List<PartialConfig> partials) throws Exception {
    String userName = getConfigUpdatingUser(updatingCommand).getUserName();
    GoConfigHolder goConfigHolder;
    LOGGER.debug("[Config Save] ==-- Getting modified config");
    if (shouldMergeConfig(updatingCommand, configHolder)) {
        // Merging is opt-in; refuse rather than silently overwrite a concurrent edit.
        if (!systemEnvironment.get(SystemEnvironment.ENABLE_CONFIG_MERGE_FEATURE)) {
            throw new ConfigMergeException(ConfigFileHasChangedException.CONFIG_CHANGED_PLEASE_REFRESH);
        }
        goConfigHolder = this.fullConfigSaveMergeFlow.execute(updatingCommand, partials, userName);
    } else {
        goConfigHolder = this.fullConfigSaveNormalFlow.execute(updatingCommand, partials, userName);
    }
    reloadStrategy.latestState(goConfigHolder.config);
    return goConfigHolder;
}
/**
 * Legacy (XML-command based) save: produces the updated XML — via a git-backed
 * three-way merge when the caller's baseline md5 is stale, or a plain update
 * otherwise — validates it with {@code internalLoad}, and writes it to disk.
 *
 * @throws ConfigMergeException               when a merge is needed but disabled
 * @throws ConfigMergePostValidationException when the merged XML fails validation
 */
private GoConfigHolder trySavingConfig(UpdateConfigCommand updatingCommand, GoConfigHolder configHolder, List<PartialConfig> partials) throws Exception {
    String configAsXml;
    GoConfigHolder validatedConfigHolder;
    LOGGER.debug("[Config Save] ==-- Getting modified config");
    if (shouldMergeConfig(updatingCommand, configHolder)) {
        // Merging is opt-in; refuse rather than silently overwrite a concurrent edit.
        if (!systemEnvironment.get(SystemEnvironment.ENABLE_CONFIG_MERGE_FEATURE)) {
            throw new ConfigMergeException(ConfigFileHasChangedException.CONFIG_CHANGED_PLEASE_REFRESH);
        }
        configAsXml = getMergedConfig((NoOverwriteUpdateConfigCommand) updatingCommand, configHolder.configForEdit.getMd5(), partials);
        try {
            validatedConfigHolder = internalLoad(configAsXml, getConfigUpdatingUser(updatingCommand), partials);
        } catch (Exception e) {
            LOGGER.info("[CONFIG_MERGE] Post merge validation failed, latest-md5: {}", configHolder.configForEdit.getMd5());
            throw new ConfigMergePostValidationException(e.getMessage(), e);
        }
    } else {
        configAsXml = getUnmergedConfig(updatingCommand, configHolder, partials);
        validatedConfigHolder = internalLoad(configAsXml, getConfigUpdatingUser(updatingCommand), partials);
    }
    // Only write to disk after the XML has passed validation above.
    LOGGER.info("[Configuration Changed] Saving updated configuration.");
    writeToConfigXmlFile(configAsXml);
    return validatedConfigHolder;
}
/**
 * Resolves who is making this change: the command's own user when it is
 * {@link UserAware}, otherwise an anonymous {@link ConfigModifyingUser}.
 */
private ConfigModifyingUser getConfigUpdatingUser(UpdateConfigCommand updatingCommand) {
    if (updatingCommand instanceof UserAware) {
        return ((UserAware) updatingCommand).user();
    }
    return new ConfigModifyingUser();
}
/**
 * Applies the update command to a deep copy of the editable config and
 * serializes the result to XML (no concurrent-edit merge involved).
 */
private String getUnmergedConfig(UpdateConfigCommand updatingCommand, GoConfigHolder configHolder, List<PartialConfig> partials) throws Exception {
    CruiseConfig deepCloneForEdit = cloner.deepClone(configHolder.configForEdit);
    deepCloneForEdit.setPartials(partials);
    CruiseConfig config = updatingCommand.update(deepCloneForEdit);
    String configAsXml = configAsXml(config, false);
    // Sanity check: the command/serialization must not have dropped partials that
    // were attached above. NOTE(review): the message suggests this is believed
    // unreachable — confirm the intent before relying on it.
    if (deepCloneForEdit.getPartials().size() < partials.size())
        throw new RuntimeException("should never be called");
    return configAsXml;
}
/**
 * A merge is required only for commands that carry a baseline md5
 * ({@link NoOverwriteUpdateConfigCommand}) whose baseline no longer matches the
 * current editable config — i.e. someone else edited the config in between.
 */
private boolean shouldMergeConfig(UpdateConfigCommand updatingCommand, GoConfigHolder configHolder) {
    LOGGER.debug("[Config Save] Checking whether config should be merged");
    if (!(updatingCommand instanceof NoOverwriteUpdateConfigCommand)) {
        return false;
    }
    String baselineMd5 = ((NoOverwriteUpdateConfigCommand) updatingCommand).unmodifiedMd5();
    return !configHolder.configForEdit.getMd5().equals(baselineMd5);
}
/**
 * Produces XML for a three-way merge: replays the command on the config as it
 * was at the caller's baseline md5, then asks the config git repository to
 * merge that result with the latest revision.
 *
 * @throws ConfigMergePreValidationException if the mutated baseline config
 *         cannot even be serialized (pre-merge failure)
 */
private String getMergedConfig(NoOverwriteUpdateConfigCommand noOverwriteCommand, String latestMd5, List<PartialConfig> partials) throws Exception {
    LOGGER.debug("[Config Save] Getting merged config");
    String oldMd5 = noOverwriteCommand.unmodifiedMd5();
    CruiseConfig modifiedConfig = getOldConfigAndMutateWithChanges(noOverwriteCommand, oldMd5);
    modifiedConfig.setPartials(partials);
    String modifiedConfigAsXml = convertMutatedConfigToXml(modifiedConfig, latestMd5);
    // Commit the mutated baseline as a revision (placeholder md5) so git can compute the merge.
    GoConfigRevision configRevision = new GoConfigRevision(modifiedConfigAsXml, "temporary-md5-for-branch", getConfigUpdatingUser(noOverwriteCommand).getUserName(),
        CurrentGoCDVersion.getInstance().formatted(), timeProvider);
    String mergedConfigXml = configRepository.getConfigMergedWithLatestRevision(configRevision, oldMd5);
    LOGGER.debug("[Config Save] -=- Done converting merged config to XML");
    return mergedConfigXml;
}
/**
 * Serializes the mutated baseline config to XML, translating any failure into a
 * {@link ConfigMergePreValidationException} so callers can distinguish a
 * pre-merge problem from a post-merge one.
 */
private String convertMutatedConfigToXml(CruiseConfig modifiedConfig, String latestMd5) {
    String xml;
    try {
        xml = configAsXml(modifiedConfig, false);
    } catch (Exception e) {
        LOGGER.info("[CONFIG_MERGE] Pre merge validation failed, latest-md5: {}", latestMd5);
        throw new ConfigMergePreValidationException(e.getMessage(), e);
    }
    return xml;
}
/**
 * Rebuilds the config as it was at the caller's baseline md5 (from the config
 * git repository) and replays the command's change on top of it.
 */
private CruiseConfig getOldConfigAndMutateWithChanges(NoOverwriteUpdateConfigCommand noOverwriteCommand, String oldMd5) throws Exception {
    LOGGER.debug("[Config Save] --- Mutating old config");
    String baselineXml = configRepository.getRevision(oldMd5).getContent();
    CruiseConfig baselineConfig = magicalGoConfigXmlLoader.fromXmlPartial(baselineXml, BasicCruiseConfig.class);
    CruiseConfig mutated = noOverwriteCommand.update(baselineConfig);
    LOGGER.debug("[Config Save] --- Done mutating old config");
    return mutated;
}
/**
 * Parses and validates the given config XML (with the partials attached before
 * validation) and, on success, checks the result into the config git repository.
 *
 * @return the loaded and validated holder
 * @throws Exception when parsing/validation or the git check-in fails
 */
private GoConfigHolder internalLoad(final String content, final ConfigModifyingUser configModifyingUser, final List<PartialConfig> partials) throws Exception {
    GoConfigHolder configHolder = magicalGoConfigXmlLoader.loadConfigHolder(content, cruiseConfig -> cruiseConfig.setPartials(partials));
    CruiseConfig config = configHolder.config;
    checkinConfigToGitRepo(partials, config, content, configHolder.configForEdit.getMd5(), configModifyingUser.getUserName());
    return configHolder;
}
/**
 * Records a successfully validated config: updates the reload strategy, commits
 * the XML as a new revision in config.git, and marks the partials used as valid.
 */
private void checkinConfigToGitRepo(List<PartialConfig> partials, CruiseConfig config, String configAsXml, String md5, String currentUser) throws Exception {
    reloadStrategy.latestState(config);
    LOGGER.debug("[Config Save] === Checking in the valid XML to config.git");
    configRepository.checkin(new GoConfigRevision(configAsXml, md5, currentUser, CurrentGoCDVersion.getInstance().formatted(), timeProvider));
    LOGGER.debug("[Config Save] === Done checking in to config.git");
    // Only partials that made it into a committed, valid config are promoted to "valid".
    cachedGoPartials.markAsValid(partials);
}
}
| |
/*
* Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH
* under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright
* ownership. Camunda licenses this file to you under the Apache License,
* Version 2.0; you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.test.api.runtime.migration;
import org.camunda.bpm.engine.migration.MigrationPlan;
import org.camunda.bpm.engine.repository.ProcessDefinition;
import org.camunda.bpm.engine.runtime.ProcessInstance;
import org.camunda.bpm.engine.runtime.VariableInstance;
import org.camunda.bpm.engine.test.ProcessEngineRule;
import org.camunda.bpm.engine.test.api.runtime.migration.models.ProcessModels;
import org.camunda.bpm.engine.test.api.runtime.migration.models.SignalCatchModels;
import org.camunda.bpm.engine.test.util.ProvidedProcessEngineRule;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.RuleChain;
import java.util.HashMap;
import static org.camunda.bpm.engine.test.api.runtime.migration.ModifiableBpmnModelInstance.modify;
import static org.camunda.bpm.engine.test.util.ActivityInstanceAssert.describeActivityInstanceTree;
import static org.camunda.bpm.engine.test.util.ExecutionAssert.describeExecutionTree;
/**
 * Tests migration of process instances waiting at an intermediate signal catch
 * event: the underlying signal event subscription must survive migration with
 * the same activity id, a changed activity id, a preserved or updated signal
 * name, an added parent scope, and expression-based signal names resolved from
 * process variables.
 *
 * @author Thorben Lindhauer
 */
public class MigrationSignalCatchEventTest {

  // Engine bootstrap plus migration helper; chained so the engine rule wraps the helper.
  protected ProcessEngineRule rule = new ProvidedProcessEngineRule();
  protected MigrationTestRule testHelper = new MigrationTestRule(rule);

  @Rule
  public RuleChain ruleChain = RuleChain.outerRule(rule).around(testHelper);

  @Test
  public void testMigrateEventSubscription() {
    // given
    ProcessDefinition sourceProcessDefinition = testHelper.deployAndGetDefinition(SignalCatchModels.ONE_SIGNAL_CATCH_PROCESS);
    ProcessDefinition targetProcessDefinition = testHelper.deployAndGetDefinition(SignalCatchModels.ONE_SIGNAL_CATCH_PROCESS);
    MigrationPlan migrationPlan = rule.getRuntimeService()
      .createMigrationPlan(sourceProcessDefinition.getId(), targetProcessDefinition.getId())
      .mapActivities("signalCatch", "signalCatch")
      .build();

    // when
    ProcessInstance processInstance = testHelper.createProcessInstanceAndMigrate(migrationPlan);

    // then the subscription, execution tree and activity tree are carried over
    testHelper.assertEventSubscriptionMigrated("signalCatch", "signalCatch", SignalCatchModels.SIGNAL_NAME);
    testHelper.assertExecutionTreeAfterMigration()
      .hasProcessDefinitionId(targetProcessDefinition.getId())
      .matches(
        describeExecutionTree(null).scope().id(testHelper.snapshotBeforeMigration.getProcessInstanceId())
          .child("signalCatch").scope().id(testHelper.getSingleExecutionIdForActivityBeforeMigration("signalCatch"))
          .done());
    testHelper.assertActivityTreeAfterMigration().hasStructure(
      describeActivityInstanceTree(targetProcessDefinition.getId())
        .activity("signalCatch", testHelper.getSingleActivityInstanceBeforeMigration("signalCatch").getId())
        .done());

    // and it is possible to trigger the event
    rule.getRuntimeService().signalEventReceived(SignalCatchModels.SIGNAL_NAME);
    testHelper.completeTask("userTask");
    testHelper.assertProcessEnded(processInstance.getId());
  }

  @Test
  public void testMigrateEventSubscriptionChangeActivityId() {
    // given a target process where the catch event has a different activity id
    ProcessDefinition sourceProcessDefinition = testHelper.deployAndGetDefinition(SignalCatchModels.ONE_SIGNAL_CATCH_PROCESS);
    ProcessDefinition targetProcessDefinition = testHelper.deployAndGetDefinition(modify(SignalCatchModels.ONE_SIGNAL_CATCH_PROCESS)
      .changeElementId("signalCatch", "newSignalCatch"));
    MigrationPlan migrationPlan = rule.getRuntimeService()
      .createMigrationPlan(sourceProcessDefinition.getId(), targetProcessDefinition.getId())
      .mapActivities("signalCatch", "newSignalCatch")
      .build();

    // when
    ProcessInstance processInstance = testHelper.createProcessInstanceAndMigrate(migrationPlan);

    // then the subscription is attached to the renamed activity
    testHelper.assertEventSubscriptionMigrated("signalCatch", "newSignalCatch", SignalCatchModels.SIGNAL_NAME);

    // and it is possible to trigger the event
    rule.getRuntimeService().signalEventReceived(SignalCatchModels.SIGNAL_NAME);
    testHelper.completeTask("userTask");
    testHelper.assertProcessEnded(processInstance.getId());
  }

  @Test
  public void testMigrateEventSubscriptionPreserveSignalName() {
    // given a target process declaring a DIFFERENT signal name, but no updateEventTrigger()
    ProcessDefinition sourceProcessDefinition = testHelper.deployAndGetDefinition(SignalCatchModels.ONE_SIGNAL_CATCH_PROCESS);
    ProcessDefinition targetProcessDefinition = testHelper.deployAndGetDefinition(ProcessModels.newModel()
      .startEvent()
      .intermediateCatchEvent("signalCatch")
        .signal("new" + SignalCatchModels.SIGNAL_NAME)
      .userTask("userTask")
      .endEvent()
      .done());
    MigrationPlan migrationPlan = rule.getRuntimeService()
      .createMigrationPlan(sourceProcessDefinition.getId(), targetProcessDefinition.getId())
      .mapActivities("signalCatch", "signalCatch")
      .build();

    // when
    ProcessInstance processInstance = testHelper.createProcessInstanceAndMigrate(migrationPlan);

    // then the signal name of the event subscription has not changed
    testHelper.assertEventSubscriptionMigrated("signalCatch", "signalCatch", SignalCatchModels.SIGNAL_NAME);

    // and it is possible to trigger the event (still under the OLD signal name)
    rule.getRuntimeService().signalEventReceived(SignalCatchModels.SIGNAL_NAME);
    testHelper.completeTask("userTask");
    testHelper.assertProcessEnded(processInstance.getId());
  }

  @Test
  public void testMigrateEventSubscriptionUpdateSignalName() {
    // given a target process declaring a different signal name, WITH updateEventTrigger()
    ProcessDefinition sourceProcessDefinition = testHelper.deployAndGetDefinition(SignalCatchModels.ONE_SIGNAL_CATCH_PROCESS);
    ProcessDefinition targetProcessDefinition = testHelper.deployAndGetDefinition(ProcessModels.newModel()
      .startEvent()
      .intermediateCatchEvent("signalCatch")
        .signal("new" + SignalCatchModels.SIGNAL_NAME)
      .userTask("userTask")
      .endEvent()
      .done());
    MigrationPlan migrationPlan = rule.getRuntimeService()
      .createMigrationPlan(sourceProcessDefinition.getId(), targetProcessDefinition.getId())
      .mapActivities("signalCatch", "signalCatch")
      .updateEventTrigger()
      .build();

    // when
    ProcessInstance processInstance = testHelper.createProcessInstanceAndMigrate(migrationPlan);

    // then the signal event subscription's event name has been updated to the target's signal name
    testHelper.assertEventSubscriptionMigrated(
      "signalCatch", SignalCatchModels.SIGNAL_NAME,
      "signalCatch", "new" + SignalCatchModels.SIGNAL_NAME);

    // and it is possible to trigger the event under the NEW signal name
    rule.getRuntimeService().signalEventReceived("new" + SignalCatchModels.SIGNAL_NAME);
    testHelper.completeTask("userTask");
    testHelper.assertProcessEnded(processInstance.getId());
  }

  // NOTE(review): despite the name, this test migrates a signal event subscription
  // (not a job) into a new parent scope — consider renaming to
  // testMigrateEventSubscriptionAddParentScope.
  @Test
  public void testMigrateJobAddParentScope() {
    // given a target process where the catch event sits inside an embedded subprocess
    ProcessDefinition sourceProcessDefinition = testHelper.deployAndGetDefinition(SignalCatchModels.ONE_SIGNAL_CATCH_PROCESS);
    ProcessDefinition targetProcessDefinition = testHelper.deployAndGetDefinition(SignalCatchModels.SUBPROCESS_SIGNAL_CATCH_PROCESS);
    MigrationPlan migrationPlan = rule.getRuntimeService()
      .createMigrationPlan(sourceProcessDefinition.getId(), targetProcessDefinition.getId())
      .mapActivities("signalCatch", "signalCatch")
      .build();

    // when
    ProcessInstance processInstance = testHelper.createProcessInstanceAndMigrate(migrationPlan);

    // then the subscription survives and a new subprocess scope appears in both trees
    testHelper.assertEventSubscriptionMigrated("signalCatch", "signalCatch", SignalCatchModels.SIGNAL_NAME);
    testHelper.assertExecutionTreeAfterMigration()
      .hasProcessDefinitionId(targetProcessDefinition.getId())
      .matches(
        describeExecutionTree(null).scope().id(testHelper.snapshotBeforeMigration.getProcessInstanceId())
          .child(null).scope()
          .child("signalCatch").scope().id(testHelper.getSingleExecutionIdForActivityBeforeMigration("signalCatch"))
          .done());
    testHelper.assertActivityTreeAfterMigration().hasStructure(
      describeActivityInstanceTree(targetProcessDefinition.getId())
        .beginScope("subProcess")
        .activity("signalCatch", testHelper.getSingleActivityInstanceBeforeMigration("signalCatch").getId())
        .done());

    // and it is possible to trigger the event
    rule.getRuntimeService().signalEventReceived(SignalCatchModels.SIGNAL_NAME);
    testHelper.completeTask("userTask");
    testHelper.assertProcessEnded(processInstance.getId());
  }

  @Test
  public void testMigrateEventSubscriptionUpdateSignalExpressionNameWithVariables() {
    // given a target signal name containing an expression resolved from a process variable
    String newSignalName = "new" + SignalCatchModels.SIGNAL_NAME + "-${var}";
    ProcessDefinition sourceProcessDefinition = testHelper.deployAndGetDefinition(SignalCatchModels.ONE_SIGNAL_CATCH_PROCESS);
    ProcessDefinition targetProcessDefinition = testHelper.deployAndGetDefinition(ProcessModels.newModel()
      .startEvent()
      .intermediateCatchEvent("signalCatch")
        .signal(newSignalName)
      .userTask("userTask")
      .endEvent()
      .done());
    MigrationPlan migrationPlan = rule.getRuntimeService()
      .createMigrationPlan(sourceProcessDefinition.getId(), targetProcessDefinition.getId())
      .mapActivities("signalCatch", "signalCatch")
      .updateEventTrigger()
      .build();
    HashMap<String, Object> variables = new HashMap<String, Object>();
    variables.put("var", "foo");

    // when
    ProcessInstance processInstance = testHelper.createProcessInstanceAndMigrate(migrationPlan, variables);

    // then there should be a variable
    VariableInstance beforeMigration = testHelper.snapshotBeforeMigration.getSingleVariable("var");
    Assert.assertEquals(1, testHelper.snapshotAfterMigration.getVariables().size());
    testHelper.assertVariableMigratedToExecution(beforeMigration, beforeMigration.getExecutionId());

    // and the signal event subscription's event name has changed (expression resolved with "foo")
    String resolvedSignalName = "new" + SignalCatchModels.SIGNAL_NAME + "-foo";
    testHelper.assertEventSubscriptionMigrated(
      "signalCatch", SignalCatchModels.SIGNAL_NAME,
      "signalCatch", resolvedSignalName);

    // and it is possible to trigger the event and complete the task afterwards
    rule.getRuntimeService().signalEventReceived(resolvedSignalName);
    testHelper.completeTask("userTask");
    testHelper.assertProcessEnded(processInstance.getId());
  }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.spatial.prefix.PrefixTreeStrategy;
import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy;
import org.apache.lucene.spatial.query.SpatialArgs;
import org.apache.lucene.spatial.query.SpatialOperation;
import org.apache.lucene.util.SetOnce;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.geo.ShapeRelation;
import org.elasticsearch.common.geo.ShapesAvailability;
import org.elasticsearch.common.geo.SpatialStrategy;
import org.elasticsearch.common.geo.builders.ShapeBuilder;
import org.elasticsearch.common.geo.parsers.ShapeParser;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.GeoShapeFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import java.io.IOException;
import java.util.Objects;
import java.util.function.Supplier;
/**
* {@link QueryBuilder} that builds a GeoShape Query
*/
public class GeoShapeQueryBuilder extends AbstractQueryBuilder<GeoShapeQueryBuilder> {
public static final String NAME = "geo_shape";
// Defaults used when an indexed (pre-registered) shape is referenced without
// an explicit index or path.
public static final String DEFAULT_SHAPE_INDEX_NAME = "shapes";
public static final String DEFAULT_SHAPE_FIELD_NAME = "shape";
public static final ShapeRelation DEFAULT_SHAPE_RELATION = ShapeRelation.INTERSECTS;
/**
 * The default value for ignore_unmapped.
 */
public static final boolean DEFAULT_IGNORE_UNMAPPED = false;
// Parse-field names for the query DSL.
private static final ParseField SHAPE_FIELD = new ParseField("shape");
private static final ParseField STRATEGY_FIELD = new ParseField("strategy");
private static final ParseField RELATION_FIELD = new ParseField("relation");
private static final ParseField INDEXED_SHAPE_FIELD = new ParseField("indexed_shape");
private static final ParseField SHAPE_ID_FIELD = new ParseField("id");
private static final ParseField SHAPE_TYPE_FIELD = new ParseField("type");
private static final ParseField SHAPE_INDEX_FIELD = new ParseField("index");
private static final ParseField SHAPE_PATH_FIELD = new ParseField("path");
private static final ParseField IGNORE_UNMAPPED_FIELD = new ParseField("ignore_unmapped");
private final String fieldName;
// Exactly one of `shape`, `supplier`, or (`indexedShapeId`, `indexedShapeType`)
// is populated, depending on which constructor was used.
private final ShapeBuilder shape;
private final Supplier<ShapeBuilder> supplier;
private SpatialStrategy strategy;
private final String indexedShapeId;
private final String indexedShapeType;
private String indexedShapeIndex = DEFAULT_SHAPE_INDEX_NAME;
private String indexedShapePath = DEFAULT_SHAPE_FIELD_NAME;
private ShapeRelation relation = DEFAULT_SHAPE_RELATION;
private boolean ignoreUnmapped = DEFAULT_IGNORE_UNMAPPED;
/**
 * Creates a new GeoShapeQueryBuilder whose Query will be against the given
 * field name using the given Shape (provided inline, as opposed to an
 * indexed shape looked up by id).
 *
 * @param fieldName
 *            Name of the field that will be queried
 * @param shape
 *            Shape used in the Query
 */
public GeoShapeQueryBuilder(String fieldName, ShapeBuilder shape) {
    this(fieldName, shape, null, null);
}
/**
 * Creates a new GeoShapeQueryBuilder whose Query will be against the given
 * field name and will use the Shape found with the given ID in the given
 * type (the shape document is fetched at rewrite time).
 *
 * @param fieldName
 *            Name of the field that will be filtered
 * @param indexedShapeId
 *            ID of the indexed Shape that will be used in the Query
 * @param indexedShapeType
 *            Index type of the indexed Shapes
 */
public GeoShapeQueryBuilder(String fieldName, String indexedShapeId, String indexedShapeType) {
    this(fieldName, (ShapeBuilder) null, indexedShapeId, indexedShapeType);
}
/**
 * Internal constructor backing the public ones. Exactly one of {@code shape}
 * or the ({@code indexedShapeId}, {@code indexedShapeType}) pair must be
 * provided.
 *
 * @throws IllegalArgumentException if {@code fieldName} is null, if neither a
 *         shape nor an indexed-shape id is given, or if an id is given
 *         without its type
 */
private GeoShapeQueryBuilder(String fieldName, ShapeBuilder shape, String indexedShapeId, String indexedShapeType) {
    if (fieldName == null) {
        throw new IllegalArgumentException("fieldName is required");
    }
    if (shape == null && indexedShapeId == null) {
        // Message fixed: it previously referred to a non-existent "shapeBytes"
        // parameter; the actual parameter is "shape".
        throw new IllegalArgumentException("either shape or indexedShapeId and indexedShapeType are required");
    }
    if (indexedShapeId != null && indexedShapeType == null) {
        throw new IllegalArgumentException("indexedShapeType is required if indexedShapeId is specified");
    }
    this.fieldName = fieldName;
    this.shape = shape;
    this.indexedShapeId = indexedShapeId;
    this.indexedShapeType = indexedShapeType;
    this.supplier = null;
}
// Rewrite-phase constructor: holds a supplier that yields the shape once the
// async indexed-shape fetch completes. Intentionally skips the argument
// validation of the main constructor, since the inputs come from an already
// validated builder.
private GeoShapeQueryBuilder(String fieldName, Supplier<ShapeBuilder> supplier, String indexedShapeId, String indexedShapeType) {
    this.fieldName = fieldName;
    this.shape = null;
    this.supplier = supplier;
    this.indexedShapeId = indexedShapeId;
    this.indexedShapeType = indexedShapeType;
}
/**
 * Read from a stream.
 * <p>
 * Must mirror {@link #doWriteTo(StreamOutput)} exactly: a leading boolean
 * selects between an inline shape and the indexed-shape fields; relation,
 * strategy and ignore_unmapped always follow.
 */
public GeoShapeQueryBuilder(StreamInput in) throws IOException {
    super(in);
    fieldName = in.readString();
    if (in.readBoolean()) {
        shape = in.readNamedWriteable(ShapeBuilder.class);
        indexedShapeId = null;
        indexedShapeType = null;
    } else {
        shape = null;
        indexedShapeId = in.readOptionalString();
        indexedShapeType = in.readOptionalString();
        indexedShapeIndex = in.readOptionalString();
        indexedShapePath = in.readOptionalString();
    }
    relation = ShapeRelation.readFromStream(in);
    strategy = in.readOptionalWriteable(SpatialStrategy::readFromStream);
    ignoreUnmapped = in.readBoolean();
    // Suppliers exist only transiently during query rewrite and are never serialized.
    supplier = null;
}
@Override
// Serializes this builder; the field order must mirror the StreamInput
// constructor exactly. A builder still holding a rewrite-phase supplier is not
// serializable and indicates a missing rewriteAndFetch step.
protected void doWriteTo(StreamOutput out) throws IOException {
    if (supplier != null) {
        throw new IllegalStateException("supplier must be null, can't serialize suppliers, missing a rewriteAndFetch?");
    }
    out.writeString(fieldName);
    // A boolean discriminator selects between the inline-shape and indexed-shape forms.
    boolean hasShape = shape != null;
    out.writeBoolean(hasShape);
    if (hasShape) {
        out.writeNamedWriteable(shape);
    } else {
        out.writeOptionalString(indexedShapeId);
        out.writeOptionalString(indexedShapeType);
        out.writeOptionalString(indexedShapeIndex);
        out.writeOptionalString(indexedShapePath);
    }
    relation.writeTo(out);
    out.writeOptionalWriteable(strategy);
    out.writeBoolean(ignoreUnmapped);
}
/**
 * @return the name of the field that will be queried
 */
public String fieldName() {
    return fieldName;
}
/**
 * @return the shape used in the Query, or {@code null} when an indexed shape
 *         is referenced instead
 */
public ShapeBuilder shape() {
    return shape;
}
/**
 * @return the ID of the indexed Shape that will be used in the Query, or
 *         {@code null} when an inline shape was provided
 */
public String indexedShapeId() {
    return indexedShapeId;
}
/**
 * @return the document type of the indexed Shape that will be used in the
 *         Query, or {@code null} when an inline shape was provided
 */
public String indexedShapeType() {
    return indexedShapeType;
}
/**
 * Defines which spatial strategy will be used for building the geo shape
 * Query. When not set, the strategy that will be used will be the one that
 * is associated with the geo shape field in the mappings.
 *
 * @param strategy
 *            The spatial strategy to use for building the geo shape Query
 * @return this
 * @throws IllegalArgumentException if the TERM strategy is combined with a
 *         relation other than INTERSECTS
 */
public GeoShapeQueryBuilder strategy(SpatialStrategy strategy) {
    // TERM only supports INTERSECTS. Comparing against the enum constant with ==
    // already excludes null, so the previous explicit null check was redundant.
    if (strategy == SpatialStrategy.TERM && relation != ShapeRelation.INTERSECTS) {
        throw new IllegalArgumentException("strategy [" + strategy.getStrategyName() + "] only supports relation ["
            + ShapeRelation.INTERSECTS.getRelationName() + "] found relation [" + relation.getRelationName() + "]");
    }
    this.strategy = strategy;
    return this;
}
/**
 * @return The spatial strategy to use for building the geo shape Query, or
 *         {@code null} to use the strategy from the field mapping
 */
public SpatialStrategy strategy() {
    return strategy;
}
/**
 * Sets the name of the index where the indexed Shape can be found
 * (defaults to {@link #DEFAULT_SHAPE_INDEX_NAME}).
 *
 * @param indexedShapeIndex Name of the index where the indexed Shape is
 * @return this
 */
public GeoShapeQueryBuilder indexedShapeIndex(String indexedShapeIndex) {
    this.indexedShapeIndex = indexedShapeIndex;
    return this;
}
/**
 * @return the index name for the indexed Shape that will be used in the
 *         Query
 */
public String indexedShapeIndex() {
    return indexedShapeIndex;
}
/**
 * Sets the (dot-separated) path of the field, inside the pre-indexed shape
 * document, that contains the shape itself.
 *
 * @param indexedShapePath path of the field where the shape is defined
 * @return this builder, for chaining
 */
public GeoShapeQueryBuilder indexedShapePath(String indexedShapePath) {
    this.indexedShapePath = indexedShapePath;
    return this;
}
/** @return the field path to the shape inside the pre-indexed shape document, or {@code null} */
public String indexedShapePath() {
    return indexedShapePath;
}
/**
 * Sets the relation that must hold between the query shape and the indexed
 * shape for a document to match.
 *
 * @param relation relation of the shapes; must not be {@code null}
 * @return this builder, for chaining
 * @throws IllegalArgumentException if {@code relation} is {@code null}, or if the
 *         current strategy is {@link SpatialStrategy#TERM} and the relation is
 *         anything other than {@link ShapeRelation#INTERSECTS}
 */
public GeoShapeQueryBuilder relation(ShapeRelation relation) {
    if (relation == null) {
        throw new IllegalArgumentException("No Shape Relation defined");
    }
    // Mirror the validation in strategy(): TERM only supports INTERSECTS.
    // (The former "strategy != null &&" guard was redundant: a reference
    // that equals SpatialStrategy.TERM is necessarily non-null.)
    if (strategy == SpatialStrategy.TERM && relation != ShapeRelation.INTERSECTS) {
        throw new IllegalArgumentException("current strategy [" + strategy.getStrategyName() + "] only supports relation ["
            + ShapeRelation.INTERSECTS.getRelationName() + "] found relation [" + relation.getRelationName() + "]");
    }
    this.relation = relation;
    return this;
}
/** @return the relation that must hold between the query shape and the indexed shape */
public ShapeRelation relation() {
    return relation;
}
/**
 * Sets whether an unmapped field should be silently ignored (the query then
 * degrades to a {@link MatchNoDocsQuery}) instead of raising an exception.
 *
 * @param ignoreUnmapped true to ignore unmapped fields, false to fail on them
 * @return this builder, for chaining
 */
public GeoShapeQueryBuilder ignoreUnmapped(boolean ignoreUnmapped) {
    this.ignoreUnmapped = ignoreUnmapped;
    return this;
}
/**
 * @return true if an unmapped field is silently ignored (running a
 *         {@link MatchNoDocsQuery} instead), false if it raises an exception
 */
public boolean ignoreUnmapped() {
    return ignoreUnmapped;
}
@Override
// Builds the Lucene query for this geo_shape query. An indexed-shape builder
// must have been resolved by doRewrite() first: a non-null supplier (or a
// missing shape) means the async fetch has not completed/been applied yet.
protected Query doToQuery(QueryShardContext context) {
if (shape == null || supplier != null) {
throw new UnsupportedOperationException("query must be rewritten first");
}
final ShapeBuilder shapeToQuery = shape;
final MappedFieldType fieldType = context.fieldMapper(fieldName);
// Unmapped field: either match nothing (ignoreUnmapped) or fail the shard request.
if (fieldType == null) {
if (ignoreUnmapped) {
return new MatchNoDocsQuery();
} else {
throw new QueryShardException(context, "failed to find geo_shape field [" + fieldName + "]");
}
}
// TODO: This isn't the nicest way to check this
if (!(fieldType instanceof GeoShapeFieldMapper.GeoShapeFieldType)) {
throw new QueryShardException(context, "Field [" + fieldName + "] is not a geo_shape");
}
final GeoShapeFieldMapper.GeoShapeFieldType shapeFieldType = (GeoShapeFieldMapper.GeoShapeFieldType) fieldType;
// Use the strategy from the mapping unless one was set explicitly on the builder.
PrefixTreeStrategy strategy = shapeFieldType.defaultStrategy();
if (this.strategy != null) {
strategy = shapeFieldType.resolveStrategy(this.strategy);
}
Query query;
if (strategy instanceof RecursivePrefixTreeStrategy && relation == ShapeRelation.DISJOINT) {
// this strategy doesn't support disjoint anymore: but it did
// before, including creating lucene fieldcache (!)
// in this case, execute disjoint as exists && !intersects
BooleanQuery.Builder bool = new BooleanQuery.Builder();
Query exists = ExistsQueryBuilder.newFilter(context, fieldName);
Query intersects = strategy.makeQuery(getArgs(shapeToQuery, ShapeRelation.INTERSECTS));
bool.add(exists, BooleanClause.Occur.MUST);
bool.add(intersects, BooleanClause.Occur.MUST_NOT);
query = new ConstantScoreQuery(bool.build());
} else {
// Wrap in ConstantScoreQuery: geo_shape matching is a filter, scores are not meaningful.
query = new ConstantScoreQuery(strategy.makeQuery(getArgs(shapeToQuery, relation)));
}
return query;
}
/**
 * Asynchronously fetches a pre-indexed shape via a GET request and hands the
 * parsed {@link ShapeBuilder} to the listener.
 *
 * @param client     client used to issue the GET request
 * @param getRequest GetRequest containing index, type and id of the shape document
 * @param path       dot-separated name or path of the field in the shape
 *                   document where the shape itself is located
 * @param listener   notified exactly once with the parsed shape or a failure
 * @throws IllegalStateException if JTS is not available on the classpath
 */
private void fetch(Client client, GetRequest getRequest, String path, ActionListener<ShapeBuilder> listener) {
    if (ShapesAvailability.JTS_AVAILABLE == false) {
        throw new IllegalStateException("JTS not available");
    }
    getRequest.preference("_local");
    getRequest.operationThreaded(false);
    client.get(getRequest, new ActionListener<GetResponse>() {
        @Override
        public void onResponse(GetResponse response) {
            try {
                if (!response.isExists()) {
                    throw new IllegalArgumentException("Shape with ID [" + getRequest.id() + "] in type [" + getRequest.type()
                        + "] not found");
                }
                if (response.isSourceEmpty()) {
                    throw new IllegalArgumentException("Shape with ID [" + getRequest.id() + "] in type [" + getRequest.type() +
                        "] source disabled");
                }
                // Walk the source, descending one path element at a time until the
                // field holding the shape is reached.
                String[] pathElements = path.split("\\.");
                int currentPathSlot = 0;
                // It is safe to use EMPTY here because this never uses namedObject
                try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY, response.getSourceAsBytesRef())) {
                    XContentParser.Token currentToken;
                    while ((currentToken = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                        if (currentToken == XContentParser.Token.FIELD_NAME) {
                            if (pathElements[currentPathSlot].equals(parser.currentName())) {
                                parser.nextToken();
                                if (++currentPathSlot == pathElements.length) {
                                    listener.onResponse(ShapeParser.parse(parser));
                                    // BUG FIX: without this return, parsing continued past the
                                    // delivered shape and fell through to the IllegalStateException
                                    // below, notifying the listener a second time via onFailure.
                                    return;
                                }
                            } else {
                                parser.nextToken();
                                parser.skipChildren();
                            }
                        }
                    }
                    throw new IllegalStateException("Shape with name [" + getRequest.id() + "] found but missing " + path + " field");
                }
            } catch (Exception e) {
                onFailure(e);
            }
        }

        @Override
        public void onFailure(Exception e) {
            listener.onFailure(e);
        }
    });
}
/**
 * Translates a {@link ShapeRelation} plus a query shape into the Lucene
 * {@link SpatialArgs} consumed by the prefix-tree strategies.
 *
 * @param shape    the query shape
 * @param relation desired spatial relation between query shape and indexed shapes
 * @return spatial arguments pairing the matching {@link SpatialOperation} with the built shape
 * @throws IllegalArgumentException if the relation is not a recognised value
 */
public static SpatialArgs getArgs(ShapeBuilder shape, ShapeRelation relation) {
    switch (relation) {
        case INTERSECTS:
            return new SpatialArgs(SpatialOperation.Intersects, shape.build());
        case DISJOINT:
            return new SpatialArgs(SpatialOperation.IsDisjointTo, shape.build());
        case CONTAINS:
            return new SpatialArgs(SpatialOperation.Contains, shape.build());
        case WITHIN:
            return new SpatialArgs(SpatialOperation.IsWithin, shape.build());
        default:
            throw new IllegalArgumentException("invalid relation [" + relation + "]");
    }
}
@Override
// Serializes this query to XContent. The emitted structure mirrors what
// fromXContent() parses: { NAME: { <fieldName>: { strategy?, shape | indexed_shape,
// relation? }, ignore_unmapped, boost?, _name? } }. Field order must stay stable.
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAME);
builder.startObject(fieldName);
if (strategy != null) {
builder.field(STRATEGY_FIELD.getPreferredName(), strategy.getStrategyName());
}
// Inline shape and indexed shape are mutually exclusive: the constructor
// accepts one or the other, so a null shape implies the indexed-shape form.
if (shape != null) {
builder.field(SHAPE_FIELD.getPreferredName());
shape.toXContent(builder, params);
} else {
builder.startObject(INDEXED_SHAPE_FIELD.getPreferredName())
.field(SHAPE_ID_FIELD.getPreferredName(), indexedShapeId)
.field(SHAPE_TYPE_FIELD.getPreferredName(), indexedShapeType);
if (indexedShapeIndex != null) {
builder.field(SHAPE_INDEX_FIELD.getPreferredName(), indexedShapeIndex);
}
if (indexedShapePath != null) {
builder.field(SHAPE_PATH_FIELD.getPreferredName(), indexedShapePath);
}
builder.endObject();
}
if(relation != null) {
builder.field(RELATION_FIELD.getPreferredName(), relation.getRelationName());
}
builder.endObject();
builder.field(IGNORE_UNMAPPED_FIELD.getPreferredName(), ignoreUnmapped);
printBoostAndQueryName(builder);
builder.endObject();
}
// Parses a geo_shape query from XContent — the inverse of doXContent().
// Accepts either an inline "shape" or an "indexed_shape" reference, plus the
// optional strategy/relation/ignore_unmapped/boost/_name parameters.
public static GeoShapeQueryBuilder fromXContent(XContentParser parser) throws IOException {
String fieldName = null;
ShapeRelation shapeRelation = null;
SpatialStrategy strategy = null;
ShapeBuilder shape = null;
// indexed_shape sub-object fields
String id = null;
String type = null;
String index = null;
String shapePath = null;
XContentParser.Token token;
String currentFieldName = null;
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
String queryName = null;
boolean ignoreUnmapped = DEFAULT_IGNORE_UNMAPPED;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
// The single nested object is keyed by the target field name; two such
// objects would mean the field was specified twice.
if (fieldName != null) {
throw new ParsingException(parser.getTokenLocation(), "[" +
GeoShapeQueryBuilder.NAME + "] point specified twice. [" + currentFieldName + "]");
}
fieldName = currentFieldName;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
token = parser.nextToken();
if (SHAPE_FIELD.match(currentFieldName)) {
shape = ShapeParser.parse(parser);
} else if (STRATEGY_FIELD.match(currentFieldName)) {
String strategyName = parser.text();
strategy = SpatialStrategy.fromString(strategyName);
if (strategy == null) {
throw new ParsingException(parser.getTokenLocation(), "Unknown strategy [" + strategyName + " ]");
}
} else if (RELATION_FIELD.match(currentFieldName)) {
shapeRelation = ShapeRelation.getRelationByName(parser.text());
if (shapeRelation == null) {
throw new ParsingException(parser.getTokenLocation(), "Unknown shape operation [" + parser.text() + " ]");
}
} else if (INDEXED_SHAPE_FIELD.match(currentFieldName)) {
// indexed_shape: { id, type, index?, path? }
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if (SHAPE_ID_FIELD.match(currentFieldName)) {
id = parser.text();
} else if (SHAPE_TYPE_FIELD.match(currentFieldName)) {
type = parser.text();
} else if (SHAPE_INDEX_FIELD.match(currentFieldName)) {
index = parser.text();
} else if (SHAPE_PATH_FIELD.match(currentFieldName)) {
shapePath = parser.text();
}
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + GeoShapeQueryBuilder.NAME +
"] unknown token [" + token + "] after [" + currentFieldName + "]");
}
}
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + GeoShapeQueryBuilder.NAME +
"] query does not support [" + currentFieldName + "]");
}
}
}
} else if (token.isValue()) {
// Top-level scalar parameters shared by all queries.
if (AbstractQueryBuilder.BOOST_FIELD.match(currentFieldName)) {
boost = parser.floatValue();
} else if (AbstractQueryBuilder.NAME_FIELD.match(currentFieldName)) {
queryName = parser.text();
} else if (IGNORE_UNMAPPED_FIELD.match(currentFieldName)) {
ignoreUnmapped = parser.booleanValue();
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + GeoShapeQueryBuilder.NAME +
"] query does not support [" + currentFieldName + "]");
}
}
}
// Inline shape wins when present; otherwise build the indexed-shape variant.
GeoShapeQueryBuilder builder;
if (shape != null) {
builder = new GeoShapeQueryBuilder(fieldName, shape);
} else {
builder = new GeoShapeQueryBuilder(fieldName, id, type);
}
if (index != null) {
builder.indexedShapeIndex(index);
}
if (shapePath != null) {
builder.indexedShapePath(shapePath);
}
if (shapeRelation != null) {
builder.relation(shapeRelation);
}
if (strategy != null) {
builder.strategy(strategy);
}
if (queryName != null) {
builder.queryName(queryName);
}
builder.boost(boost);
builder.ignoreUnmapped(ignoreUnmapped);
return builder;
}
@Override
// Null-safe field-by-field comparison; covers exactly the fields hashed in doHashCode().
protected boolean doEquals(GeoShapeQueryBuilder other) {
    return Objects.equals(fieldName, other.fieldName)
        && Objects.equals(shape, other.shape)
        && Objects.equals(strategy, other.strategy)
        && Objects.equals(relation, other.relation)
        && Objects.equals(indexedShapeId, other.indexedShapeId)
        && Objects.equals(indexedShapeType, other.indexedShapeType)
        && Objects.equals(indexedShapeIndex, other.indexedShapeIndex)
        && Objects.equals(indexedShapePath, other.indexedShapePath)
        && Objects.equals(ignoreUnmapped, other.ignoreUnmapped)
        && Objects.equals(supplier, other.supplier);
}
@Override
// Hashes the same fields compared by doEquals(). The argument order feeds
// directly into the hash value, so keep it stable.
protected int doHashCode() {
return Objects.hash(fieldName, indexedShapeId, indexedShapeIndex,
indexedShapePath, indexedShapeType, relation, shape, strategy, ignoreUnmapped, supplier);
}
@Override
// Name under which this query type is registered for serialization/parsing.
public String getWriteableName() {
return NAME;
}
@Override
// Two-phase rewrite for the indexed-shape variant:
// 1) shape == null and no supplier yet: register an async fetch of the indexed
//    shape and return a new builder whose supplier will observe the result.
// 2) supplier present: once the fetch has completed (supplier.get() != null),
//    rewrite into a plain inline-shape builder; until then return this.
// NOTE(review): the rewritten builders only carry over relation and strategy —
// ignoreUnmapped (and any boost/queryName handling, if not restored by the
// caller of doRewrite) appear to be dropped here; confirm against
// AbstractQueryBuilder.rewrite before relying on them post-rewrite.
protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException {
if (supplier != null) {
return supplier.get() == null ? this : new GeoShapeQueryBuilder(this.fieldName, supplier.get()).relation(relation).strategy
(strategy);
} else if (this.shape == null) {
SetOnce<ShapeBuilder> supplier = new SetOnce<>();
queryRewriteContext.registerAsyncAction((client, listener) -> {
GetRequest getRequest = new GetRequest(indexedShapeIndex, indexedShapeType, indexedShapeId);
fetch(client, getRequest, indexedShapePath, ActionListener.wrap(builder-> {
supplier.set(builder);
listener.onResponse(null);
}, listener::onFailure));
});
return new GeoShapeQueryBuilder(this.fieldName, supplier::get, this.indexedShapeId, this.indexedShapeType).relation(relation)
.strategy(strategy);
}
return this;
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.protocolPB;
import java.io.IOException;
import java.util.EnumSet;
import java.util.List;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.BatchedRemoteIterator.BatchedEntries;
import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.CreateFlag;
import org.apache.hadoop.fs.FsServerDefaults;
import org.apache.hadoop.fs.Options.Rename;
import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy;
import org.apache.hadoop.hdfs.protocol.CacheDirectiveEntry;
import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;
import org.apache.hadoop.hdfs.protocol.CachePoolEntry;
import org.apache.hadoop.hdfs.protocol.ClientProtocol;
import org.apache.hadoop.hdfs.protocol.CorruptFileBlocks;
import org.apache.hadoop.hdfs.protocol.DirectoryListing;
import org.apache.hadoop.hdfs.protocol.EncryptionZone;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
import org.apache.hadoop.hdfs.protocol.LastBlockWithStatus;
import org.apache.hadoop.hdfs.protocol.LocatedBlock;
import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
import org.apache.hadoop.hdfs.protocol.RollingUpgradeInfo;
import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport;
import org.apache.hadoop.hdfs.protocol.SnapshottableDirectoryStatus;
import org.apache.hadoop.hdfs.protocol.proto.AclProtos.GetAclStatusRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.AclProtos.GetAclStatusResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.AclProtos.ModifyAclEntriesRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.AclProtos.ModifyAclEntriesResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.AclProtos.RemoveAclEntriesRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.AclProtos.RemoveAclEntriesResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.AclProtos.RemoveAclRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.AclProtos.RemoveAclResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.AclProtos.RemoveDefaultAclRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.AclProtos.RemoveDefaultAclResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.AclProtos.SetAclRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.AclProtos.SetAclResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AbandonBlockRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AbandonBlockResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddBlockRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddBlockResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddCacheDirectiveRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddCacheDirectiveResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddCachePoolRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddCachePoolResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AllowSnapshotRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AllowSnapshotResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AppendRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AppendResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CheckAccessRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CheckAccessResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CompleteRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CompleteResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ConcatRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ConcatResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CreateRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CreateResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CreateSnapshotRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CreateSnapshotResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CreateSymlinkRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CreateSymlinkResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.DatanodeStorageReportProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.DeleteRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.DeleteResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.DeleteSnapshotRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.DeleteSnapshotResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.DisallowSnapshotRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.DisallowSnapshotResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.FinalizeUpgradeRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.FinalizeUpgradeResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.FsyncRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.FsyncResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetAdditionalDatanodeRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetAdditionalDatanodeResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetBlockLocationsRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetBlockLocationsResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetBlockLocationsResponseProto.Builder;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetContentSummaryRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetContentSummaryResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetCurrentEditLogTxidRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetCurrentEditLogTxidResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetDataEncryptionKeyRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetDataEncryptionKeyResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetDatanodeReportRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetDatanodeReportResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetDatanodeStorageReportRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetDatanodeStorageReportResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetEditsFromTxidRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetEditsFromTxidResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetFileInfoRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetFileInfoResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetFileLinkInfoRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetFileLinkInfoResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetFsStatsResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetFsStatusRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetLinkTargetRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetLinkTargetResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetListingRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetListingResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetPreferredBlockSizeRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetPreferredBlockSizeResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetServerDefaultsRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetServerDefaultsResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetSnapshotDiffReportRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetSnapshotDiffReportResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetSnapshottableDirListingRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetSnapshottableDirListingResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetStoragePoliciesRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetStoragePoliciesResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetStoragePolicyRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetStoragePolicyResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.IsFileClosedRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.IsFileClosedResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListCacheDirectivesRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListCacheDirectivesResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListCachePoolsRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListCachePoolsResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListCorruptFileBlocksRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListCorruptFileBlocksResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.MetaSaveRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.MetaSaveResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.MkdirsRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.MkdirsResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ModifyCacheDirectiveRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ModifyCacheDirectiveResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ModifyCachePoolRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ModifyCachePoolResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RecoverLeaseRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RecoverLeaseResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RefreshNodesRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RefreshNodesResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemoveCacheDirectiveRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemoveCacheDirectiveResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemoveCachePoolRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemoveCachePoolResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.Rename2RequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.Rename2ResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RenameRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RenameResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RenameSnapshotRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RenameSnapshotResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RenewLeaseRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RenewLeaseResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ReportBadBlocksRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ReportBadBlocksResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RestoreFailedStorageRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RestoreFailedStorageResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RollEditsRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RollEditsResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RollingUpgradeRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RollingUpgradeResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.SaveNamespaceRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.SaveNamespaceResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.SetBalancerBandwidthRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.SetBalancerBandwidthResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.SetOwnerRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.SetOwnerResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.SetPermissionRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.SetPermissionResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.SetQuotaRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.SetQuotaResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.SetReplicationRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.SetReplicationResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.SetSafeModeRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.SetSafeModeResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.SetStoragePolicyRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.SetStoragePolicyResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.SetTimesRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.SetTimesResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.TruncateRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.TruncateResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.UpdateBlockForPipelineRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.UpdateBlockForPipelineResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.UpdatePipelineRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.UpdatePipelineResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.EncryptionZonesProtos.CreateEncryptionZoneResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.EncryptionZonesProtos.CreateEncryptionZoneRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.EncryptionZonesProtos.GetEZForPathResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.EncryptionZonesProtos.GetEZForPathRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.EncryptionZonesProtos.ListEncryptionZonesResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.EncryptionZonesProtos.ListEncryptionZonesRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockStoragePolicyProto;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto;
import org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrResponseProto;
import org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey;
import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
import org.apache.hadoop.io.EnumSetWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
import org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenRequestProto;
import org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenResponseProto;
import org.apache.hadoop.security.proto.SecurityProtos.GetDelegationTokenRequestProto;
import org.apache.hadoop.security.proto.SecurityProtos.GetDelegationTokenResponseProto;
import org.apache.hadoop.security.proto.SecurityProtos.RenewDelegationTokenRequestProto;
import org.apache.hadoop.security.proto.SecurityProtos.RenewDelegationTokenResponseProto;
import org.apache.hadoop.security.token.Token;
import com.google.protobuf.RpcController;
import com.google.protobuf.ServiceException;
/**
 * This class is used on the server side. Calls come across the wire for the
 * protocol {@link ClientNamenodeProtocolPB}.
* This class translates the PB data types
* to the native data types used inside the NN as specified in the generic
* ClientProtocol.
*/
@InterfaceAudience.Private
@InterfaceStability.Stable
public class ClientNamenodeProtocolServerSideTranslatorPB implements
ClientNamenodeProtocolPB {
  // The NameNode-side ClientProtocol implementation all RPCs delegate to.
  final private ClientProtocol server;
  // Pre-built, shared empty responses for RPCs whose reply carries no
  // payload; reusing one immutable instance avoids allocating a fresh
  // empty proto on every call.
  static final DeleteSnapshotResponseProto VOID_DELETE_SNAPSHOT_RESPONSE =
      DeleteSnapshotResponseProto.newBuilder().build();
  static final RenameSnapshotResponseProto VOID_RENAME_SNAPSHOT_RESPONSE =
      RenameSnapshotResponseProto.newBuilder().build();
  static final AllowSnapshotResponseProto VOID_ALLOW_SNAPSHOT_RESPONSE =
      AllowSnapshotResponseProto.newBuilder().build();
  static final DisallowSnapshotResponseProto VOID_DISALLOW_SNAPSHOT_RESPONSE =
      DisallowSnapshotResponseProto.newBuilder().build();
  static final GetSnapshottableDirListingResponseProto
      NULL_GET_SNAPSHOTTABLE_DIR_LISTING_RESPONSE =
      GetSnapshottableDirListingResponseProto.newBuilder().build();
  static final SetStoragePolicyResponseProto VOID_SET_STORAGE_POLICY_RESPONSE =
      SetStoragePolicyResponseProto.newBuilder().build();
  private static final CreateResponseProto VOID_CREATE_RESPONSE =
      CreateResponseProto.newBuilder().build();
  private static final SetPermissionResponseProto VOID_SET_PERM_RESPONSE =
      SetPermissionResponseProto.newBuilder().build();
  private static final SetOwnerResponseProto VOID_SET_OWNER_RESPONSE =
      SetOwnerResponseProto.newBuilder().build();
  // NOTE(review): despite the ADD_BLOCK name, the type shows this is the
  // abandonBlock() response — consider renaming to VOID_ABANDON_BLOCK_RESPONSE.
  private static final AbandonBlockResponseProto VOID_ADD_BLOCK_RESPONSE =
      AbandonBlockResponseProto.newBuilder().build();
  private static final ReportBadBlocksResponseProto VOID_REP_BAD_BLOCK_RESPONSE =
      ReportBadBlocksResponseProto.newBuilder().build();
  private static final ConcatResponseProto VOID_CONCAT_RESPONSE =
      ConcatResponseProto.newBuilder().build();
  private static final Rename2ResponseProto VOID_RENAME2_RESPONSE =
      Rename2ResponseProto.newBuilder().build();
  private static final GetListingResponseProto VOID_GETLISTING_RESPONSE =
      GetListingResponseProto.newBuilder().build();
  private static final RenewLeaseResponseProto VOID_RENEWLEASE_RESPONSE =
      RenewLeaseResponseProto.newBuilder().build();
  private static final RefreshNodesResponseProto VOID_REFRESHNODES_RESPONSE =
      RefreshNodesResponseProto.newBuilder().build();
  private static final FinalizeUpgradeResponseProto VOID_FINALIZEUPGRADE_RESPONSE =
      FinalizeUpgradeResponseProto.newBuilder().build();
  private static final MetaSaveResponseProto VOID_METASAVE_RESPONSE =
      MetaSaveResponseProto.newBuilder().build();
  private static final GetFileInfoResponseProto VOID_GETFILEINFO_RESPONSE =
      GetFileInfoResponseProto.newBuilder().build();
  private static final GetFileLinkInfoResponseProto VOID_GETFILELINKINFO_RESPONSE =
      GetFileLinkInfoResponseProto.newBuilder().build();
  private static final SetQuotaResponseProto VOID_SETQUOTA_RESPONSE =
      SetQuotaResponseProto.newBuilder().build();
  private static final FsyncResponseProto VOID_FSYNC_RESPONSE =
      FsyncResponseProto.newBuilder().build();
  private static final SetTimesResponseProto VOID_SETTIMES_RESPONSE =
      SetTimesResponseProto.newBuilder().build();
  private static final CreateSymlinkResponseProto VOID_CREATESYMLINK_RESPONSE =
      CreateSymlinkResponseProto.newBuilder().build();
  private static final UpdatePipelineResponseProto
      VOID_UPDATEPIPELINE_RESPONSE =
      UpdatePipelineResponseProto.newBuilder().build();
  private static final CancelDelegationTokenResponseProto
      VOID_CANCELDELEGATIONTOKEN_RESPONSE =
      CancelDelegationTokenResponseProto.newBuilder().build();
  private static final SetBalancerBandwidthResponseProto
      VOID_SETBALANCERBANDWIDTH_RESPONSE =
      SetBalancerBandwidthResponseProto.newBuilder().build();
  // The constants below use getDefaultInstance(), which yields the proto's
  // cached empty-message singleton (equivalent to newBuilder().build()).
  private static final SetAclResponseProto
      VOID_SETACL_RESPONSE = SetAclResponseProto.getDefaultInstance();
  private static final ModifyAclEntriesResponseProto
      VOID_MODIFYACLENTRIES_RESPONSE = ModifyAclEntriesResponseProto
      .getDefaultInstance();
  private static final RemoveAclEntriesResponseProto
      VOID_REMOVEACLENTRIES_RESPONSE = RemoveAclEntriesResponseProto
      .getDefaultInstance();
  private static final RemoveDefaultAclResponseProto
      VOID_REMOVEDEFAULTACL_RESPONSE = RemoveDefaultAclResponseProto
      .getDefaultInstance();
  private static final RemoveAclResponseProto
      VOID_REMOVEACL_RESPONSE = RemoveAclResponseProto.getDefaultInstance();
  private static final SetXAttrResponseProto
      VOID_SETXATTR_RESPONSE = SetXAttrResponseProto.getDefaultInstance();
  private static final RemoveXAttrResponseProto
      VOID_REMOVEXATTR_RESPONSE = RemoveXAttrResponseProto.getDefaultInstance();
  private static final CheckAccessResponseProto
      VOID_CHECKACCESS_RESPONSE = CheckAccessResponseProto.getDefaultInstance();
  /**
   * Constructor.
   *
   * @param server the NameNode-side {@link ClientProtocol} implementation
   *     that every translated RPC is delegated to
   * @throws IOException declared on the signature; NOTE(review): this
   *     implementation never actually throws it
   */
  public ClientNamenodeProtocolServerSideTranslatorPB(ClientProtocol server)
      throws IOException {
    this.server = server;
  }
@Override
public GetBlockLocationsResponseProto getBlockLocations(
RpcController controller, GetBlockLocationsRequestProto req)
throws ServiceException {
try {
LocatedBlocks b = server.getBlockLocations(req.getSrc(), req.getOffset(),
req.getLength());
Builder builder = GetBlockLocationsResponseProto
.newBuilder();
if (b != null) {
builder.setLocations(PBHelperClient.convert(b)).build();
}
return builder.build();
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public GetServerDefaultsResponseProto getServerDefaults(
RpcController controller, GetServerDefaultsRequestProto req)
throws ServiceException {
try {
FsServerDefaults result = server.getServerDefaults();
return GetServerDefaultsResponseProto.newBuilder()
.setServerDefaults(PBHelperClient.convert(result))
.build();
} catch (IOException e) {
throw new ServiceException(e);
}
}
  /**
   * Translates a create RPC: creates the file on the NN with the requested
   * permissions, flags, replication and block size, returning the new file's
   * status, or the shared empty response when the server returns null.
   */
  @Override
  public CreateResponseProto create(RpcController controller,
      CreateRequestProto req) throws ServiceException {
    try {
      HdfsFileStatus result = server.create(req.getSrc(),
          PBHelperClient.convert(req.getMasked()), req.getClientName(),
          PBHelperClient.convertCreateFlag(req.getCreateFlag()), req.getCreateParent(),
          (short) req.getReplication(), req.getBlockSize(),
          PBHelperClient.convertCryptoProtocolVersions(
              req.getCryptoProtocolVersionList()));
      if (result != null) {
        return CreateResponseProto.newBuilder().setFs(PBHelperClient.convert(result))
            .build();
      }
      return VOID_CREATE_RESPONSE;
    } catch (IOException e) {
      throw new ServiceException(e);
    }
  }
  /**
   * Translates an append RPC. When no flag is supplied the call defaults to
   * plain APPEND. The optional last block and file status of the result are
   * copied into the response only when present.
   */
  @Override
  public AppendResponseProto append(RpcController controller,
      AppendRequestProto req) throws ServiceException {
    try {
      // Default to APPEND when the (optional) flag field is absent.
      EnumSetWritable<CreateFlag> flags = req.hasFlag() ?
          PBHelperClient.convertCreateFlag(req.getFlag()) :
          new EnumSetWritable<>(EnumSet.of(CreateFlag.APPEND));
      LastBlockWithStatus result = server.append(req.getSrc(),
          req.getClientName(), flags);
      AppendResponseProto.Builder builder = AppendResponseProto.newBuilder();
      if (result.getLastBlock() != null) {
        builder.setBlock(PBHelperClient.convertLocatedBlock(
            result.getLastBlock()));
      }
      if (result.getFileStatus() != null) {
        builder.setStat(PBHelperClient.convert(result.getFileStatus()));
      }
      return builder.build();
    } catch (IOException e) {
      throw new ServiceException(e);
    }
  }
@Override
public SetReplicationResponseProto setReplication(RpcController controller,
SetReplicationRequestProto req) throws ServiceException {
try {
boolean result =
server.setReplication(req.getSrc(), (short) req.getReplication());
return SetReplicationResponseProto.newBuilder().setResult(result).build();
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public SetPermissionResponseProto setPermission(RpcController controller,
SetPermissionRequestProto req) throws ServiceException {
try {
server.setPermission(req.getSrc(), PBHelperClient.convert(req.getPermission()));
} catch (IOException e) {
throw new ServiceException(e);
}
return VOID_SET_PERM_RESPONSE;
}
@Override
public SetOwnerResponseProto setOwner(RpcController controller,
SetOwnerRequestProto req) throws ServiceException {
try {
server.setOwner(req.getSrc(),
req.hasUsername() ? req.getUsername() : null,
req.hasGroupname() ? req.getGroupname() : null);
} catch (IOException e) {
throw new ServiceException(e);
}
return VOID_SET_OWNER_RESPONSE;
}
@Override
public AbandonBlockResponseProto abandonBlock(RpcController controller,
AbandonBlockRequestProto req) throws ServiceException {
try {
server.abandonBlock(PBHelperClient.convert(req.getB()), req.getFileId(),
req.getSrc(), req.getHolder());
} catch (IOException e) {
throw new ServiceException(e);
}
return VOID_ADD_BLOCK_RESPONSE;
}
  /**
   * Translates an addBlock RPC: allocates the next block for a file. Empty
   * exclude/favored lists are normalized to null before hitting the server
   * API; the previous block is optional.
   */
  @Override
  public AddBlockResponseProto addBlock(RpcController controller,
      AddBlockRequestProto req) throws ServiceException {
    try {
      List<DatanodeInfoProto> excl = req.getExcludeNodesList();
      List<String> favor = req.getFavoredNodesList();
      LocatedBlock result = server.addBlock(
          req.getSrc(),
          req.getClientName(),
          req.hasPrevious() ? PBHelperClient.convert(req.getPrevious()) : null,
          // Empty lists mean "no constraint" and are converted to null arrays.
          (excl == null || excl.size() == 0) ? null : PBHelperClient.convert(excl
              .toArray(new DatanodeInfoProto[excl.size()])), req.getFileId(),
          (favor == null || favor.size() == 0) ? null : favor
              .toArray(new String[favor.size()]));
      return AddBlockResponseProto.newBuilder()
          .setBlock(PBHelperClient.convertLocatedBlock(result)).build();
    } catch (IOException e) {
      throw new ServiceException(e);
    }
  }
  /**
   * Translates a getAdditionalDatanode RPC: asks the NN for extra datanodes
   * for a pipeline, converting the existing/exclude node lists and the
   * parallel storage-uuid list to the server's array forms.
   */
  @Override
  public GetAdditionalDatanodeResponseProto getAdditionalDatanode(
      RpcController controller, GetAdditionalDatanodeRequestProto req)
      throws ServiceException {
    try {
      List<DatanodeInfoProto> existingList = req.getExistingsList();
      // Parallel to existingList: one storage uuid per existing node.
      List<String> existingStorageIDsList = req.getExistingStorageUuidsList();
      List<DatanodeInfoProto> excludesList = req.getExcludesList();
      LocatedBlock result = server.getAdditionalDatanode(req.getSrc(),
          req.getFileId(), PBHelperClient.convert(req.getBlk()),
          PBHelperClient.convert(existingList.toArray(
              new DatanodeInfoProto[existingList.size()])),
          existingStorageIDsList.toArray(
              new String[existingStorageIDsList.size()]),
          PBHelperClient.convert(excludesList.toArray(
              new DatanodeInfoProto[excludesList.size()])),
          req.getNumAdditionalNodes(), req.getClientName());
      return GetAdditionalDatanodeResponseProto.newBuilder().setBlock(
          PBHelperClient.convertLocatedBlock(result))
          .build();
    } catch (IOException e) {
      throw new ServiceException(e);
    }
  }
@Override
public CompleteResponseProto complete(RpcController controller,
CompleteRequestProto req) throws ServiceException {
try {
boolean result =
server.complete(req.getSrc(), req.getClientName(),
req.hasLast() ? PBHelperClient.convert(req.getLast()) : null,
req.hasFileId() ? req.getFileId() : HdfsConstants.GRANDFATHER_INODE_ID);
return CompleteResponseProto.newBuilder().setResult(result).build();
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public ReportBadBlocksResponseProto reportBadBlocks(RpcController controller,
ReportBadBlocksRequestProto req) throws ServiceException {
try {
List<LocatedBlockProto> bl = req.getBlocksList();
server.reportBadBlocks(PBHelperClient.convertLocatedBlocks(
bl.toArray(new LocatedBlockProto[bl.size()])));
} catch (IOException e) {
throw new ServiceException(e);
}
return VOID_REP_BAD_BLOCK_RESPONSE;
}
@Override
public ConcatResponseProto concat(RpcController controller,
ConcatRequestProto req) throws ServiceException {
try {
List<String> srcs = req.getSrcsList();
server.concat(req.getTrg(), srcs.toArray(new String[srcs.size()]));
} catch (IOException e) {
throw new ServiceException(e);
}
return VOID_CONCAT_RESPONSE;
}
@Override
public RenameResponseProto rename(RpcController controller,
RenameRequestProto req) throws ServiceException {
try {
boolean result = server.rename(req.getSrc(), req.getDst());
return RenameResponseProto.newBuilder().setResult(result).build();
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public Rename2ResponseProto rename2(RpcController controller,
Rename2RequestProto req) throws ServiceException {
try {
server.rename2(req.getSrc(), req.getDst(),
req.getOverwriteDest() ? Rename.OVERWRITE : Rename.NONE);
} catch (IOException e) {
throw new ServiceException(e);
}
return VOID_RENAME2_RESPONSE;
}
@Override
public TruncateResponseProto truncate(RpcController controller,
TruncateRequestProto req) throws ServiceException {
try {
boolean result = server.truncate(req.getSrc(), req.getNewLength(),
req.getClientName());
return TruncateResponseProto.newBuilder().setResult(result).build();
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public DeleteResponseProto delete(RpcController controller,
DeleteRequestProto req) throws ServiceException {
try {
boolean result = server.delete(req.getSrc(), req.getRecursive());
return DeleteResponseProto.newBuilder().setResult(result).build();
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public MkdirsResponseProto mkdirs(RpcController controller,
MkdirsRequestProto req) throws ServiceException {
try {
boolean result = server.mkdirs(req.getSrc(),
PBHelperClient.convert(req.getMasked()), req.getCreateParent());
return MkdirsResponseProto.newBuilder().setResult(result).build();
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public GetListingResponseProto getListing(RpcController controller,
GetListingRequestProto req) throws ServiceException {
try {
DirectoryListing result = server.getListing(
req.getSrc(), req.getStartAfter().toByteArray(),
req.getNeedLocation());
if (result !=null) {
return GetListingResponseProto.newBuilder().setDirList(
PBHelperClient.convert(result)).build();
} else {
return VOID_GETLISTING_RESPONSE;
}
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public RenewLeaseResponseProto renewLease(RpcController controller,
RenewLeaseRequestProto req) throws ServiceException {
try {
server.renewLease(req.getClientName());
return VOID_RENEWLEASE_RESPONSE;
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public RecoverLeaseResponseProto recoverLease(RpcController controller,
RecoverLeaseRequestProto req) throws ServiceException {
try {
boolean result = server.recoverLease(req.getSrc(), req.getClientName());
return RecoverLeaseResponseProto.newBuilder().setResult(result).build();
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public RestoreFailedStorageResponseProto restoreFailedStorage(
RpcController controller, RestoreFailedStorageRequestProto req)
throws ServiceException {
try {
boolean result = server.restoreFailedStorage(req.getArg());
return RestoreFailedStorageResponseProto.newBuilder().setResult(result)
.build();
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public GetFsStatsResponseProto getFsStats(RpcController controller,
GetFsStatusRequestProto req) throws ServiceException {
try {
return PBHelperClient.convert(server.getStats());
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public GetDatanodeReportResponseProto getDatanodeReport(
RpcController controller, GetDatanodeReportRequestProto req)
throws ServiceException {
try {
List<? extends DatanodeInfoProto> result = PBHelperClient.convert(server
.getDatanodeReport(PBHelperClient.convert(req.getType())));
return GetDatanodeReportResponseProto.newBuilder()
.addAllDi(result).build();
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public GetDatanodeStorageReportResponseProto getDatanodeStorageReport(
RpcController controller, GetDatanodeStorageReportRequestProto req)
throws ServiceException {
try {
List<DatanodeStorageReportProto> reports = PBHelperClient.convertDatanodeStorageReports(
server.getDatanodeStorageReport(PBHelperClient.convert(req.getType())));
return GetDatanodeStorageReportResponseProto.newBuilder()
.addAllDatanodeStorageReports(reports)
.build();
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public GetPreferredBlockSizeResponseProto getPreferredBlockSize(
RpcController controller, GetPreferredBlockSizeRequestProto req)
throws ServiceException {
try {
long result = server.getPreferredBlockSize(req.getFilename());
return GetPreferredBlockSizeResponseProto.newBuilder().setBsize(result)
.build();
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public SetSafeModeResponseProto setSafeMode(RpcController controller,
SetSafeModeRequestProto req) throws ServiceException {
try {
boolean result = server.setSafeMode(PBHelperClient.convert(req.getAction()),
req.getChecked());
return SetSafeModeResponseProto.newBuilder().setResult(result).build();
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public SaveNamespaceResponseProto saveNamespace(RpcController controller,
SaveNamespaceRequestProto req) throws ServiceException {
try {
final long timeWindow = req.hasTimeWindow() ? req.getTimeWindow() : 0;
final long txGap = req.hasTxGap() ? req.getTxGap() : 0;
boolean saved = server.saveNamespace(timeWindow, txGap);
return SaveNamespaceResponseProto.newBuilder().setSaved(saved).build();
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public RollEditsResponseProto rollEdits(RpcController controller,
RollEditsRequestProto request) throws ServiceException {
try {
long txid = server.rollEdits();
return RollEditsResponseProto.newBuilder()
.setNewSegmentTxId(txid)
.build();
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public RefreshNodesResponseProto refreshNodes(RpcController controller,
RefreshNodesRequestProto req) throws ServiceException {
try {
server.refreshNodes();
return VOID_REFRESHNODES_RESPONSE;
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public FinalizeUpgradeResponseProto finalizeUpgrade(RpcController controller,
FinalizeUpgradeRequestProto req) throws ServiceException {
try {
server.finalizeUpgrade();
return VOID_FINALIZEUPGRADE_RESPONSE;
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public RollingUpgradeResponseProto rollingUpgrade(RpcController controller,
RollingUpgradeRequestProto req) throws ServiceException {
try {
final RollingUpgradeInfo info = server.rollingUpgrade(
PBHelperClient.convert(req.getAction()));
final RollingUpgradeResponseProto.Builder b = RollingUpgradeResponseProto.newBuilder();
if (info != null) {
b.setRollingUpgradeInfo(PBHelperClient.convert(info));
}
return b.build();
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public ListCorruptFileBlocksResponseProto listCorruptFileBlocks(
RpcController controller, ListCorruptFileBlocksRequestProto req)
throws ServiceException {
try {
CorruptFileBlocks result = server.listCorruptFileBlocks(
req.getPath(), req.hasCookie() ? req.getCookie(): null);
return ListCorruptFileBlocksResponseProto.newBuilder()
.setCorrupt(PBHelperClient.convert(result))
.build();
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public MetaSaveResponseProto metaSave(RpcController controller,
MetaSaveRequestProto req) throws ServiceException {
try {
server.metaSave(req.getFilename());
return VOID_METASAVE_RESPONSE;
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public GetFileInfoResponseProto getFileInfo(RpcController controller,
GetFileInfoRequestProto req) throws ServiceException {
try {
HdfsFileStatus result = server.getFileInfo(req.getSrc());
if (result != null) {
return GetFileInfoResponseProto.newBuilder().setFs(
PBHelperClient.convert(result)).build();
}
return VOID_GETFILEINFO_RESPONSE;
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public GetFileLinkInfoResponseProto getFileLinkInfo(RpcController controller,
GetFileLinkInfoRequestProto req) throws ServiceException {
try {
HdfsFileStatus result = server.getFileLinkInfo(req.getSrc());
if (result != null) {
return GetFileLinkInfoResponseProto.newBuilder().setFs(
PBHelperClient.convert(result)).build();
} else {
return VOID_GETFILELINKINFO_RESPONSE;
}
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public GetContentSummaryResponseProto getContentSummary(
RpcController controller, GetContentSummaryRequestProto req)
throws ServiceException {
try {
ContentSummary result = server.getContentSummary(req.getPath());
return GetContentSummaryResponseProto.newBuilder()
.setSummary(PBHelperClient.convert(result)).build();
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public SetQuotaResponseProto setQuota(RpcController controller,
SetQuotaRequestProto req) throws ServiceException {
try {
server.setQuota(req.getPath(), req.getNamespaceQuota(),
req.getStoragespaceQuota(),
req.hasStorageType() ?
PBHelperClient.convertStorageType(req.getStorageType()): null);
return VOID_SETQUOTA_RESPONSE;
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public FsyncResponseProto fsync(RpcController controller,
FsyncRequestProto req) throws ServiceException {
try {
server.fsync(req.getSrc(), req.getFileId(),
req.getClient(), req.getLastBlockLength());
return VOID_FSYNC_RESPONSE;
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public SetTimesResponseProto setTimes(RpcController controller,
SetTimesRequestProto req) throws ServiceException {
try {
server.setTimes(req.getSrc(), req.getMtime(), req.getAtime());
return VOID_SETTIMES_RESPONSE;
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public CreateSymlinkResponseProto createSymlink(RpcController controller,
CreateSymlinkRequestProto req) throws ServiceException {
try {
server.createSymlink(req.getTarget(), req.getLink(),
PBHelperClient.convert(req.getDirPerm()), req.getCreateParent());
return VOID_CREATESYMLINK_RESPONSE;
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public GetLinkTargetResponseProto getLinkTarget(RpcController controller,
GetLinkTargetRequestProto req) throws ServiceException {
try {
String result = server.getLinkTarget(req.getPath());
GetLinkTargetResponseProto.Builder builder = GetLinkTargetResponseProto
.newBuilder();
if (result != null) {
builder.setTargetPath(result);
}
return builder.build();
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public UpdateBlockForPipelineResponseProto updateBlockForPipeline(
RpcController controller, UpdateBlockForPipelineRequestProto req)
throws ServiceException {
try {
LocatedBlockProto result = PBHelperClient.convertLocatedBlock(
server.updateBlockForPipeline(PBHelperClient.convert(req.getBlock()),
req.getClientName()));
return UpdateBlockForPipelineResponseProto.newBuilder().setBlock(result)
.build();
} catch (IOException e) {
throw new ServiceException(e);
}
}
  /**
   * Translates an updatePipeline RPC: moves a client's pipeline from the old
   * block to the new block on the given nodes. newNodes and newStorageIDs
   * are parallel lists converted to the server's array forms.
   */
  @Override
  public UpdatePipelineResponseProto updatePipeline(RpcController controller,
      UpdatePipelineRequestProto req) throws ServiceException {
    try {
      List<DatanodeIDProto> newNodes = req.getNewNodesList();
      List<String> newStorageIDs = req.getStorageIDsList();
      server.updatePipeline(req.getClientName(),
          PBHelperClient.convert(req.getOldBlock()),
          PBHelperClient.convert(req.getNewBlock()),
          PBHelperClient.convert(newNodes.toArray(new DatanodeIDProto[newNodes.size()])),
          newStorageIDs.toArray(new String[newStorageIDs.size()]));
      return VOID_UPDATEPIPELINE_RESPONSE;
    } catch (IOException e) {
      throw new ServiceException(e);
    }
  }
@Override
public GetDelegationTokenResponseProto getDelegationToken(
RpcController controller, GetDelegationTokenRequestProto req)
throws ServiceException {
try {
Token<DelegationTokenIdentifier> token = server
.getDelegationToken(new Text(req.getRenewer()));
GetDelegationTokenResponseProto.Builder rspBuilder =
GetDelegationTokenResponseProto.newBuilder();
if (token != null) {
rspBuilder.setToken(PBHelperClient.convert(token));
}
return rspBuilder.build();
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public RenewDelegationTokenResponseProto renewDelegationToken(
RpcController controller, RenewDelegationTokenRequestProto req)
throws ServiceException {
try {
long result = server.renewDelegationToken(PBHelperClient
.convertDelegationToken(req.getToken()));
return RenewDelegationTokenResponseProto.newBuilder()
.setNewExpiryTime(result).build();
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public CancelDelegationTokenResponseProto cancelDelegationToken(
RpcController controller, CancelDelegationTokenRequestProto req)
throws ServiceException {
try {
server.cancelDelegationToken(PBHelperClient.convertDelegationToken(req
.getToken()));
return VOID_CANCELDELEGATIONTOKEN_RESPONSE;
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public SetBalancerBandwidthResponseProto setBalancerBandwidth(
RpcController controller, SetBalancerBandwidthRequestProto req)
throws ServiceException {
try {
server.setBalancerBandwidth(req.getBandwidth());
return VOID_SETBALANCERBANDWIDTH_RESPONSE;
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public GetDataEncryptionKeyResponseProto getDataEncryptionKey(
RpcController controller, GetDataEncryptionKeyRequestProto request)
throws ServiceException {
try {
GetDataEncryptionKeyResponseProto.Builder builder =
GetDataEncryptionKeyResponseProto.newBuilder();
DataEncryptionKey encryptionKey = server.getDataEncryptionKey();
if (encryptionKey != null) {
builder.setDataEncryptionKey(PBHelperClient.convert(encryptionKey));
}
return builder.build();
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public CreateSnapshotResponseProto createSnapshot(RpcController controller,
CreateSnapshotRequestProto req) throws ServiceException {
try {
final CreateSnapshotResponseProto.Builder builder
= CreateSnapshotResponseProto.newBuilder();
final String snapshotPath = server.createSnapshot(req.getSnapshotRoot(),
req.hasSnapshotName()? req.getSnapshotName(): null);
if (snapshotPath != null) {
builder.setSnapshotPath(snapshotPath);
}
return builder.build();
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public DeleteSnapshotResponseProto deleteSnapshot(RpcController controller,
DeleteSnapshotRequestProto req) throws ServiceException {
try {
server.deleteSnapshot(req.getSnapshotRoot(), req.getSnapshotName());
return VOID_DELETE_SNAPSHOT_RESPONSE;
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public AllowSnapshotResponseProto allowSnapshot(RpcController controller,
AllowSnapshotRequestProto req) throws ServiceException {
try {
server.allowSnapshot(req.getSnapshotRoot());
return VOID_ALLOW_SNAPSHOT_RESPONSE;
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public DisallowSnapshotResponseProto disallowSnapshot(RpcController controller,
DisallowSnapshotRequestProto req) throws ServiceException {
try {
server.disallowSnapshot(req.getSnapshotRoot());
return VOID_DISALLOW_SNAPSHOT_RESPONSE;
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public RenameSnapshotResponseProto renameSnapshot(RpcController controller,
RenameSnapshotRequestProto request) throws ServiceException {
try {
server.renameSnapshot(request.getSnapshotRoot(),
request.getSnapshotOldName(), request.getSnapshotNewName());
return VOID_RENAME_SNAPSHOT_RESPONSE;
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public GetSnapshottableDirListingResponseProto getSnapshottableDirListing(
RpcController controller, GetSnapshottableDirListingRequestProto request)
throws ServiceException {
try {
SnapshottableDirectoryStatus[] result = server
.getSnapshottableDirListing();
if (result != null) {
return GetSnapshottableDirListingResponseProto.newBuilder().
setSnapshottableDirList(PBHelperClient.convert(result)).build();
} else {
return NULL_GET_SNAPSHOTTABLE_DIR_LISTING_RESPONSE;
}
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public GetSnapshotDiffReportResponseProto getSnapshotDiffReport(
RpcController controller, GetSnapshotDiffReportRequestProto request)
throws ServiceException {
try {
SnapshotDiffReport report = server.getSnapshotDiffReport(
request.getSnapshotRoot(), request.getFromSnapshot(),
request.getToSnapshot());
return GetSnapshotDiffReportResponseProto.newBuilder()
.setDiffReport(PBHelperClient.convert(report)).build();
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public IsFileClosedResponseProto isFileClosed(
RpcController controller, IsFileClosedRequestProto request)
throws ServiceException {
try {
boolean result = server.isFileClosed(request.getSrc());
return IsFileClosedResponseProto.newBuilder().setResult(result).build();
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public AddCacheDirectiveResponseProto addCacheDirective(
RpcController controller, AddCacheDirectiveRequestProto request)
throws ServiceException {
try {
long id = server.addCacheDirective(
PBHelperClient.convert(request.getInfo()),
PBHelperClient.convertCacheFlags(request.getCacheFlags()));
return AddCacheDirectiveResponseProto.newBuilder().
setId(id).build();
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public ModifyCacheDirectiveResponseProto modifyCacheDirective(
RpcController controller, ModifyCacheDirectiveRequestProto request)
throws ServiceException {
try {
server.modifyCacheDirective(
PBHelperClient.convert(request.getInfo()),
PBHelperClient.convertCacheFlags(request.getCacheFlags()));
return ModifyCacheDirectiveResponseProto.newBuilder().build();
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public RemoveCacheDirectiveResponseProto
removeCacheDirective(RpcController controller,
RemoveCacheDirectiveRequestProto request)
throws ServiceException {
try {
server.removeCacheDirective(request.getId());
return RemoveCacheDirectiveResponseProto.
newBuilder().build();
} catch (IOException e) {
throw new ServiceException(e);
}
}
  /**
   * Translates a listCacheDirectives RPC: returns one batch of directives
   * matching the filter, starting after prevId, plus a hasMore flag for
   * paging.
   */
  @Override
  public ListCacheDirectivesResponseProto listCacheDirectives(
      RpcController controller, ListCacheDirectivesRequestProto request)
      throws ServiceException {
    try {
      CacheDirectiveInfo filter =
          PBHelperClient.convert(request.getFilter());
      BatchedEntries<CacheDirectiveEntry> entries =
          server.listCacheDirectives(request.getPrevId(), filter);
      ListCacheDirectivesResponseProto.Builder builder =
          ListCacheDirectivesResponseProto.newBuilder();
      builder.setHasMore(entries.hasMore());
      // Copy each batched entry into the response in order.
      for (int i=0, n=entries.size(); i<n; i++) {
        builder.addElements(PBHelperClient.convert(entries.get(i)));
      }
      return builder.build();
    } catch (IOException e) {
      throw new ServiceException(e);
    }
  }
@Override
public AddCachePoolResponseProto addCachePool(RpcController controller,
AddCachePoolRequestProto request) throws ServiceException {
try {
server.addCachePool(PBHelperClient.convert(request.getInfo()));
return AddCachePoolResponseProto.newBuilder().build();
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public ModifyCachePoolResponseProto modifyCachePool(RpcController controller,
ModifyCachePoolRequestProto request) throws ServiceException {
try {
server.modifyCachePool(PBHelperClient.convert(request.getInfo()));
return ModifyCachePoolResponseProto.newBuilder().build();
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public RemoveCachePoolResponseProto removeCachePool(RpcController controller,
RemoveCachePoolRequestProto request) throws ServiceException {
try {
server.removeCachePool(request.getPoolName());
return RemoveCachePoolResponseProto.newBuilder().build();
} catch (IOException e) {
throw new ServiceException(e);
}
}
  @Override
  public ListCachePoolsResponseProto listCachePools(RpcController controller,
      ListCachePoolsRequestProto request) throws ServiceException {
    try {
      // Fetch one batch of pools starting after the previous pool name.
      BatchedEntries<CachePoolEntry> entries =
        server.listCachePools(request.getPrevPoolName());
      ListCachePoolsResponseProto.Builder responseBuilder =
        ListCachePoolsResponseProto.newBuilder();
      // hasMore tells the client whether another batched call is needed.
      responseBuilder.setHasMore(entries.hasMore());
      for (int i=0, n=entries.size(); i<n; i++) {
        responseBuilder.addEntries(PBHelperClient.convert(entries.get(i)));
      }
      return responseBuilder.build();
    } catch (IOException e) {
      // Surface namesystem failures to the RPC layer.
      throw new ServiceException(e);
    }
  }
  @Override
  public ModifyAclEntriesResponseProto modifyAclEntries(
      RpcController controller, ModifyAclEntriesRequestProto req)
      throws ServiceException {
    try {
      // Decode the ACL spec list and apply the modifications to the path.
      server.modifyAclEntries(req.getSrc(), PBHelperClient.convertAclEntry(req.getAclSpecList()));
    } catch (IOException e) {
      throw new ServiceException(e);
    }
    // ACL mutators share preallocated empty responses.
    return VOID_MODIFYACLENTRIES_RESPONSE;
  }

  @Override
  public RemoveAclEntriesResponseProto removeAclEntries(
      RpcController controller, RemoveAclEntriesRequestProto req)
      throws ServiceException {
    try {
      // Remove the listed ACL entries from the path.
      server.removeAclEntries(req.getSrc(),
          PBHelperClient.convertAclEntry(req.getAclSpecList()));
    } catch (IOException e) {
      throw new ServiceException(e);
    }
    return VOID_REMOVEACLENTRIES_RESPONSE;
  }

  @Override
  public RemoveDefaultAclResponseProto removeDefaultAcl(
      RpcController controller, RemoveDefaultAclRequestProto req)
      throws ServiceException {
    try {
      // Strip only the default ACL of the path.
      server.removeDefaultAcl(req.getSrc());
    } catch (IOException e) {
      throw new ServiceException(e);
    }
    return VOID_REMOVEDEFAULTACL_RESPONSE;
  }

  @Override
  public RemoveAclResponseProto removeAcl(RpcController controller,
      RemoveAclRequestProto req) throws ServiceException {
    try {
      // Remove the full ACL of the path.
      server.removeAcl(req.getSrc());
    } catch (IOException e) {
      throw new ServiceException(e);
    }
    return VOID_REMOVEACL_RESPONSE;
  }

  @Override
  public SetAclResponseProto setAcl(RpcController controller,
      SetAclRequestProto req) throws ServiceException {
    try {
      // Replace the entire ACL of the path with the supplied spec.
      server.setAcl(req.getSrc(), PBHelperClient.convertAclEntry(req.getAclSpecList()));
    } catch (IOException e) {
      throw new ServiceException(e);
    }
    return VOID_SETACL_RESPONSE;
  }
@Override
public GetAclStatusResponseProto getAclStatus(RpcController controller,
GetAclStatusRequestProto req) throws ServiceException {
try {
return PBHelperClient.convert(server.getAclStatus(req.getSrc()));
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public CreateEncryptionZoneResponseProto createEncryptionZone(
RpcController controller, CreateEncryptionZoneRequestProto req)
throws ServiceException {
try {
server.createEncryptionZone(req.getSrc(), req.getKeyName());
return CreateEncryptionZoneResponseProto.newBuilder().build();
} catch (IOException e) {
throw new ServiceException(e);
}
}
  @Override
  public GetEZForPathResponseProto getEZForPath(
      RpcController controller, GetEZForPathRequestProto req)
      throws ServiceException {
    try {
      GetEZForPathResponseProto.Builder builder =
          GetEZForPathResponseProto.newBuilder();
      final EncryptionZone ret = server.getEZForPath(req.getSrc());
      // The zone is optional in the response: omitted when the path is not
      // inside any encryption zone (server returned null).
      if (ret != null) {
        builder.setZone(PBHelperClient.convert(ret));
      }
      return builder.build();
    } catch (IOException e) {
      throw new ServiceException(e);
    }
  }
@Override
public ListEncryptionZonesResponseProto listEncryptionZones(
RpcController controller, ListEncryptionZonesRequestProto req)
throws ServiceException {
try {
BatchedEntries<EncryptionZone> entries = server
.listEncryptionZones(req.getId());
ListEncryptionZonesResponseProto.Builder builder =
ListEncryptionZonesResponseProto.newBuilder();
builder.setHasMore(entries.hasMore());
for (int i=0; i<entries.size(); i++) {
builder.addZones(PBHelperClient.convert(entries.get(i)));
}
return builder.build();
} catch (IOException e) {
throw new ServiceException(e);
}
}
  @Override
  public SetErasureCodingPolicyResponseProto setErasureCodingPolicy(
      RpcController controller, SetErasureCodingPolicyRequestProto req)
      throws ServiceException {
    try {
      // The EC policy is an optional field; a null policy is passed through
      // to the server (request semantics decided server-side).
      ErasureCodingPolicy ecPolicy = req.hasEcPolicy() ?
          PBHelperClient.convertErasureCodingPolicy(req.getEcPolicy()) : null;
      server.setErasureCodingPolicy(req.getSrc(), ecPolicy);
      return SetErasureCodingPolicyResponseProto.newBuilder().build();
    } catch (IOException e) {
      throw new ServiceException(e);
    }
  }
  @Override
  public SetXAttrResponseProto setXAttr(RpcController controller,
      SetXAttrRequestProto req) throws ServiceException {
    try {
      // Decode the single xattr plus the create/replace flags and apply them.
      server.setXAttr(req.getSrc(), PBHelperClient.convertXAttr(req.getXAttr()),
          PBHelperClient.convert(req.getFlag()));
    } catch (IOException e) {
      throw new ServiceException(e);
    }
    // Shared preallocated empty response.
    return VOID_SETXATTR_RESPONSE;
  }

  @Override
  public GetXAttrsResponseProto getXAttrs(RpcController controller,
      GetXAttrsRequestProto req) throws ServiceException {
    try {
      // Fetch only the requested xattrs for the path.
      return PBHelperClient.convertXAttrsResponse(server.getXAttrs(req.getSrc(),
          PBHelperClient.convertXAttrs(req.getXAttrsList())));
    } catch (IOException e) {
      throw new ServiceException(e);
    }
  }

  @Override
  public ListXAttrsResponseProto listXAttrs(RpcController controller,
      ListXAttrsRequestProto req) throws ServiceException {
    try {
      // List all xattr names visible to the caller for the path.
      return PBHelperClient.convertListXAttrsResponse(server.listXAttrs(req.getSrc()));
    } catch (IOException e) {
      throw new ServiceException(e);
    }
  }

  @Override
  public RemoveXAttrResponseProto removeXAttr(RpcController controller,
      RemoveXAttrRequestProto req) throws ServiceException {
    try {
      server.removeXAttr(req.getSrc(), PBHelperClient.convertXAttr(req.getXAttr()));
    } catch (IOException e) {
      throw new ServiceException(e);
    }
    return VOID_REMOVEXATTR_RESPONSE;
  }

  @Override
  public CheckAccessResponseProto checkAccess(RpcController controller,
      CheckAccessRequestProto req) throws ServiceException {
    try {
      // Throws (mapped to ServiceException) when access is denied;
      // a normal return means access is permitted.
      server.checkAccess(req.getPath(), PBHelperClient.convert(req.getMode()));
    } catch (IOException e) {
      throw new ServiceException(e);
    }
    return VOID_CHECKACCESS_RESPONSE;
  }
@Override
public SetStoragePolicyResponseProto setStoragePolicy(
RpcController controller, SetStoragePolicyRequestProto request)
throws ServiceException {
try {
server.setStoragePolicy(request.getSrc(), request.getPolicyName());
} catch (IOException e) {
throw new ServiceException(e);
}
return VOID_SET_STORAGE_POLICY_RESPONSE;
}
@Override
public GetStoragePolicyResponseProto getStoragePolicy(
RpcController controller, GetStoragePolicyRequestProto request)
throws ServiceException {
try {
BlockStoragePolicyProto policy = PBHelperClient.convert(server
.getStoragePolicy(request.getPath()));
return GetStoragePolicyResponseProto.newBuilder()
.setStoragePolicy(policy).build();
} catch (IOException e) {
throw new ServiceException(e);
}
}
  @Override
  public GetStoragePoliciesResponseProto getStoragePolicies(
      RpcController controller, GetStoragePoliciesRequestProto request)
      throws ServiceException {
    try {
      BlockStoragePolicy[] policies = server.getStoragePolicies();
      GetStoragePoliciesResponseProto.Builder builder =
          GetStoragePoliciesResponseProto.newBuilder();
      // A null array is treated as "no policies": return an empty response
      // rather than failing.
      if (policies == null) {
        return builder.build();
      }
      for (BlockStoragePolicy policy : policies) {
        builder.addPolicies(PBHelperClient.convert(policy));
      }
      return builder.build();
    } catch (IOException e) {
      throw new ServiceException(e);
    }
  }
public GetCurrentEditLogTxidResponseProto getCurrentEditLogTxid(RpcController controller,
GetCurrentEditLogTxidRequestProto req) throws ServiceException {
try {
return GetCurrentEditLogTxidResponseProto.newBuilder().setTxid(
server.getCurrentEditLogTxid()).build();
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public GetEditsFromTxidResponseProto getEditsFromTxid(RpcController controller,
GetEditsFromTxidRequestProto req) throws ServiceException {
try {
return PBHelperClient.convertEditsResponse(server.getEditsFromTxid(
req.getTxid()));
} catch (IOException e) {
throw new ServiceException(e);
}
}
  @Override
  public GetErasureCodingPoliciesResponseProto getErasureCodingPolicies(RpcController controller,
      GetErasureCodingPoliciesRequestProto request) throws ServiceException {
    try {
      // Translate every available EC policy into its PB form.
      ErasureCodingPolicy[] ecPolicies = server.getErasureCodingPolicies();
      GetErasureCodingPoliciesResponseProto.Builder resBuilder = GetErasureCodingPoliciesResponseProto
          .newBuilder();
      for (ErasureCodingPolicy ecPolicy : ecPolicies) {
        resBuilder.addEcPolicies(PBHelperClient.convertErasureCodingPolicy(ecPolicy));
      }
      return resBuilder.build();
    } catch (IOException e) {
      throw new ServiceException(e);
    }
  }

  @Override
  public GetErasureCodingPolicyResponseProto getErasureCodingPolicy(RpcController controller,
      GetErasureCodingPolicyRequestProto request) throws ServiceException {
    try {
      ErasureCodingPolicy ecPolicy = server.getErasureCodingPolicy(request.getSrc());
      GetErasureCodingPolicyResponseProto.Builder builder = GetErasureCodingPolicyResponseProto.newBuilder();
      // The policy field is optional: omitted when the path has no EC policy
      // (server returned null).
      if (ecPolicy != null) {
        builder.setEcPolicy(PBHelperClient.convertErasureCodingPolicy(ecPolicy));
      }
      return builder.build();
    } catch (IOException e) {
      throw new ServiceException(e);
    }
  }
}
| |
/*
* Copyright (c) 2014. Massachusetts Institute of Technology
* Released under the BSD 2-Clause License
* http://opensource.org/licenses/BSD-2-Clause
*/
package bits.glui;
import java.awt.Font;
import java.awt.event.*;
import java.util.*;
import bits.draw3d.DrawEnv;
import bits.draw3d.Rect;
import bits.glui.event.*;
import bits.math3d.Vec;
import bits.math3d.Vec4;
/**
* TODO: fix potential threading issue with adding/removing children.
* Even mSafeChildren will give up a concurrent modification exception
* if iterated during add/remove event.
*
* @author decamp
*/
public class GPanel implements GComponent {

    private static final Font DEFAULT_FONT = new Font( "Verdana", Font.PLAIN, 12 );

    // Event dispatcher of the tree this component is attached to; null while detached.
    private GDispatcher mDispatcher = null;
    // Parent component; null for a root or detached panel.
    private GComponent mParent = null;

    // Mutable child list, plus the unmodifiable view handed out by children().
    private final List<GComponent> mChildren;
    private final List<GComponent> mSafeChildren;

    // Layout manager; may be null.
    private GLayout mLayout = null;

    // Bounds relative to the parent component.
    private int mX = 0;
    private int mY = 0;
    private int mW = 1;
    private int mH = 1;

    // Cached absolute bounds; valid only while mHasAbsoluteBounds is true.
    private final Rect mAbsoluteBounds = Rect.fromBounds( 0, 0, 1, 1 );
    private boolean mHasAbsoluteBounds = false;

    // Foreground/background colors; the mHas* flags mean "a color is set".
    private boolean mHasForeground = true;
    private final Vec4 mForeground = new Vec4( 1, 1, 1, 1 );
    private boolean mHasBackground = false;
    private final Vec4 mBackground = new Vec4( 0, 0, 0, 0 );

    private Font mFont = DEFAULT_FONT;

    // Display/interaction state. mDisplayed means: attached to a dispatcher,
    // visible, and parent (if any) is itself displayed (see updateDisplayed).
    private boolean mDisplayed = false;
    private boolean mVisible = true;
    private boolean mEnabled = true;
    private boolean mHasFocus = false;
    // True when this component or any descendant is mouse-focusable
    // (maintained by treeValidateHasMouseFocusable).
    private boolean mTreeIsMouseFocusable = false;

    // Pending-work flags, set while a paint/layout pass is scheduled.
    private boolean mNeedsPaint = false;
    private boolean mNeedsLayout = false;

    // Listener multicasters; null when no listener of that kind is registered.
    private GComponentListener mComponentCaster = null;
    private GAncestorListener mAncestorCaster = null;
    private GFocusListener mFocusCaster = null;
    private GPaintListener mPaintCaster = null;
    private GMouseListener mMouseCaster = null;
    private GMouseMotionListener mMouseMotionCaster = null;
    private GMouseWheelListener mMouseWheelCaster = null;
    private GKeyListener mKeyCaster = null;
    /** Creates a panel with an empty, internally managed child list. */
    public GPanel() {
        this( new ArrayList<GComponent>() );
    }

    /**
     * Creates a panel backed by the provided child list. The list is used
     * directly (not copied); children() exposes an unmodifiable view of it.
     */
    protected GPanel( List<GComponent> children ) {
        mChildren = children;
        mSafeChildren = Collections.unmodifiableList( mChildren );
    }
    @Override
    public synchronized void addChild( GComponent child ) {
        // No-op when already a child; duplicates are not allowed.
        if( mChildren.contains( child ) ) {
            return;
        }
        mChildren.add( child );
        childAdded( child );
    }

    @Override
    public synchronized void removeChild( GComponent child ) {
        // Only notify if the child was actually present.
        if( mChildren.remove( child ) ) {
            childRemoved( child );
        }
    }

    @Override
    public synchronized void clearChildren() {
        if( mChildren.isEmpty() ) {
            return;
        }
        // Notify each child of detachment before clearing the list.
        for( GComponent p : mChildren ) {
            childRemoved( p );
        }
        mChildren.clear();
    }

    @Override
    public List<GComponent> children() {
        // Unmodifiable view. See the class TODO: iterating this while
        // children are added/removed can still throw
        // ConcurrentModificationException.
        return mSafeChildren;
    }
@Override
public void setLayout( GLayout layout ) {
mLayout = layout;
}
@Override
public GComponent parent() {
return mParent;
}
@Override
public synchronized void getBounds( Rect out ) {
out.x0 = mX;
out.y0 = mY;
out.x1 = mX + mW;
out.y1 = mY + mH;
}
@Override
public synchronized Rect getBounds() {
return new Rect( mX, mY, mX + mW, mY + mH );
}
@Override
public void getAbsoluteBounds( Rect out ) {
synchronized( this ) {
if( mHasAbsoluteBounds ) {
out.set( mAbsoluteBounds );
return;
}
}
Rect rect = null;
GComponent parent = mParent;
if( parent != null ) {
rect = new Rect();
parent.getAbsoluteBounds( rect );
}
synchronized( this ) {
Rect abs = mAbsoluteBounds;
if( rect != null ) {
abs.x0 = rect.x0 + mX;
abs.x1 = abs.x0 + mW;
abs.y0 = rect.y0 + mY;
abs.y1 = abs.y0 + mH;
} else {
abs.x0 = mX;
abs.y0 = mY;
abs.x1 = mX + mW;
abs.y1 = mY + mH;
}
out.set( abs );
}
}
@Override
public Rect getAbsoluteBounds() {
Rect rect = new Rect();
getAbsoluteBounds( rect );
return rect;
}
@Override
public int x() {
return mX;
}
@Override
public int y() {
return mY;
}
@Override
public int width() {
return mW;
}
@Override
public int height() {
return mH;
}
@Override
public boolean contains( int x, int y ) {
return x >= 0 && y >= 0 && x < mW && y < mH;
}
@Override
public GComponent setPosition( int x, int y ) {
return setBounds( x, y, width(), height() );
}
@Override
public GComponent setSize( int w, int h ) {
return setBounds( x(), y(), w, h );
}
    @Override
    public synchronized GComponent setBounds( int x, int y, int w, int h ) {
        boolean moved = x != mX || y != mY;
        boolean resized = w != mW || h != mH;
        if( moved || resized ) {
            mX = x;
            mY = y;
            mW = w;
            mH = h;
            // Cached absolute bounds are stale after any move or resize.
            mHasAbsoluteBounds = false;
            if( moved ) {
                treeProcessAncestorMoved( this );
            }
            if( resized ) {
                treeProcessAncestorResized( this );
                // A size change may require re-laying-out children.
                applyLayout();
            }
        }
        return this;
    }
@Override
public synchronized void setVisible( boolean visible ) {
if( visible == mVisible ) {
return;
}
mVisible = visible;
if( visible ) {
treeProcessParentShown();
} else {
treeProcessParentHidden();
}
}
@Override
public boolean isVisible() {
return mVisible;
}
@Override
public synchronized void setEnabled( boolean enable ) {
if( enable == mEnabled ) {
return;
}
mEnabled = enable;
if( mDispatcher != null ) {
mDispatcher.firePropertyChange( this, PROP_ENABLED, !enable, enable );
}
}
@Override
public boolean isEnabled() {
return mEnabled;
}
@Override
public boolean isDisplayed() {
return mDisplayed;
}
@Override
public GPanel setFont( Font font ) {
mFont = font == null ? DEFAULT_FONT : font;
return this;
}
@Override
public Font getFont() {
return mFont;
}
@Override
public synchronized GPanel setForeground( Vec4 color ) {
if( color != null ) {
Vec.put( color, mForeground );
mHasForeground = true;
} else {
mHasForeground = false;
}
return this;
}
@Override
public synchronized GPanel setForeground( float r, float g, float b, float a ) {
mHasForeground = true;
mForeground.x = r;
mForeground.y = g;
mForeground.z = b;
mForeground.w = a;
return this;
}
@Override
public synchronized boolean getForeground( Vec4 out ) {
if( mHasForeground ) {
if( out != null ) {
Vec.put( mForeground, out );
}
return true;
}
return false;
}
@Override
public synchronized GPanel setBackground( Vec4 background ) {
if( background != null ) {
Vec.put( background, mBackground );
mHasBackground = true;
} else {
mHasBackground = false;
}
return this;
}
@Override
public synchronized GPanel setBackground( float r, float g, float b, float a ) {
mHasBackground = true;
mBackground.x = r;
mBackground.y = g;
mBackground.z = b;
mBackground.w = a;
return this;
}
@Override
public synchronized boolean getBackground( Vec4 out ) {
if( mHasBackground ) {
if( out != null ) {
Vec.put( mBackground, out );
}
return true;
}
return false;
}
@Override
public synchronized void addComponentListener( GComponentListener listener ) {
mComponentCaster = GluiMulticaster.add( mComponentCaster, listener );
}
@Override
public synchronized void removeComponentListener( GComponentListener listener ) {
mComponentCaster = GluiMulticaster.remove( mComponentCaster, listener );
}
@Override
public synchronized void addAncestorListener( GAncestorListener listener ) {
mAncestorCaster = GluiMulticaster.add( mAncestorCaster, listener );
}
@Override
public synchronized void removeAncestorListener( GAncestorListener listener ) {
mAncestorCaster = GluiMulticaster.remove( mAncestorCaster, listener );
}
@Override
public synchronized void addFocusListener( GFocusListener listener ) {
mFocusCaster = GluiMulticaster.add( mFocusCaster, listener );
}
@Override
public synchronized void removeFocusListener( GFocusListener listener ) {
mFocusCaster = GluiMulticaster.remove( mFocusCaster, listener );
}
@Override
public synchronized void addMouseListener( GMouseListener listener ) {
boolean prev = hasMouseListener();
mMouseCaster = GluiMulticaster.add( mMouseCaster, listener );
if( mDispatcher != null && prev != hasMouseListener() ) {
mDispatcher.firePropertyChange( this, PROP_HAS_MOUSE_LISTENER, false, true );
}
}
@Override
public synchronized void removeMouseListener( GMouseListener listener ) {
boolean prev = hasMouseListener();
mMouseCaster = GluiMulticaster.remove( mMouseCaster, listener );
if( mDispatcher != null && prev != hasMouseListener() ) {
mDispatcher.firePropertyChange( this, PROP_HAS_MOUSE_LISTENER, true, false );
}
}
@Override
public synchronized void addMouseMotionListener( GMouseMotionListener listener ) {
boolean prev = hasMouseListener();
mMouseMotionCaster = GluiMulticaster.add( mMouseMotionCaster, listener );
if( mDispatcher != null && prev != hasMouseListener() ) {
mDispatcher.firePropertyChange( this, PROP_HAS_MOUSE_LISTENER, false, true );
}
}
@Override
public synchronized void removeMouseMotionListener( GMouseMotionListener listener ) {
boolean prev = hasMouseListener();
mMouseMotionCaster = GluiMulticaster.remove( mMouseMotionCaster, listener );
if( mDispatcher != null && prev != hasMouseListener() ) {
mDispatcher.firePropertyChange( this, PROP_HAS_MOUSE_LISTENER, true, false );
}
}
@Override
public synchronized void addMouseWheelListener( GMouseWheelListener listener ) {
boolean prev = hasMouseListener();
mMouseWheelCaster = GluiMulticaster.add( mMouseWheelCaster, listener );
if( mDispatcher != null && prev != hasMouseListener() ) {
mDispatcher.firePropertyChange( this, PROP_HAS_MOUSE_LISTENER, false, true );
}
}
@Override
public synchronized void removeMouseWheelListener( GMouseWheelListener listener ) {
boolean prev = hasMouseListener();
mMouseWheelCaster = GluiMulticaster.remove( mMouseWheelCaster, listener );
if( mDispatcher != null && prev != hasMouseListener() ) {
mDispatcher.firePropertyChange( this, PROP_HAS_MOUSE_LISTENER, true, false );
}
}
@Override
public synchronized void addKeyListener( GKeyListener listener ) {
boolean prev = hasKeyListener();
mKeyCaster = GluiMulticaster.add( mKeyCaster, listener );
if( mDispatcher != null && prev != hasKeyListener() ) {
mDispatcher.firePropertyChange( this, PROP_HAS_KEY_LISTENER, false, true );
}
}
@Override
public synchronized void removeKeyListener( GKeyListener listener ) {
boolean prev = hasKeyListener();
mKeyCaster = GluiMulticaster.remove( mKeyCaster, listener );
if( mDispatcher != null && prev != hasKeyListener() ) {
mDispatcher.firePropertyChange( this, PROP_HAS_KEY_LISTENER, true, false );
}
}
@Override
public synchronized void addPaintListener( GPaintListener listener ) {
mPaintCaster = GluiMulticaster.add( mPaintCaster, listener );
}
@Override
public synchronized void removePaintListener( GPaintListener listener ) {
mPaintCaster = GluiMulticaster.remove( mPaintCaster, listener );
}
@Override
public synchronized boolean hasKeyListener() {
return mKeyCaster != null;
}
@Override
public synchronized boolean hasMouseListener() {
return mMouseCaster != null ||
mMouseMotionCaster != null ||
mMouseWheelCaster != null;
}
@Override
public boolean hasFocus() {
return mHasFocus;
}
@Override
public synchronized boolean requestFocus() {
if( mDispatcher == null || !GToolkit.isKeyboardFocusable( this ) ) {
return false;
}
mDispatcher.fireRequestFocus( this );
return true;
}
@Override
public void transferFocusBackward() {
GDispatcher d = mDispatcher;
if( d != null ) {
d.fireTransferFocusBackward( this );
}
}
@Override
public void transferFocusForward() {
GDispatcher d = mDispatcher;
if( d != null ) {
d.fireTransferFocusForward( this );
}
}
@Override
public void startModal() {
GDispatcher d = mDispatcher;
if( d != null ) {
d.firePushInputRoot( this );
}
}
@Override
public void stopModal() {
GDispatcher d = mDispatcher;
if( d != null ) {
d.firePopInputRoot( this );
}
}
@Override
public synchronized GComponent componentAt( int x, int y ) {
if( !contains( x, y ) ) {
return null;
}
int size = mChildren.size();
while( size-- > 0 ) {
GComponent child = mChildren.get( size );
GComponent ret = child.componentAt( x - child.x(), y - child.y() );
if( ret != null ) {
return ret;
}
}
return this;
}
@Override
public synchronized GComponent displayedComponentAt( int x, int y ) {
if( !mDisplayed || !contains( x, y ) ) {
return null;
}
int size = mChildren.size();
while( size-- > 0 ) {
GComponent child = mChildren.get( size );
GComponent ret = child.displayedComponentAt( x - child.x(), y - child.y() );
if( ret != null ) {
return ret;
}
}
return this;
}
@Override
public synchronized GComponent mouseFocusableComponentAt( int x, int y ) {
if( !mTreeIsMouseFocusable || !contains( x, y ) ) {
return null;
}
int size = mChildren.size();
while( size-- > 0 ) {
GComponent child = mChildren.get( size );
GComponent ret = child.mouseFocusableComponentAt( x - child.x(), y - child.y() );
if( ret != null ) {
return ret;
}
}
return GToolkit.isMouseFocusable( this ) ? this : null;
}
@Override
public boolean hasMouseFocusableComponent() {
return mTreeIsMouseFocusable;
}
    @Override
    public synchronized void applyLayout() {
        // Skip when a layout pass is already pending, or when there is nothing
        // to lay out (no layout manager AND no children). Note the precedence:
        // this reads as mNeedsLayout || (mLayout == null && mChildren.isEmpty()).
        if( mNeedsLayout || mLayout == null && mChildren.isEmpty() ) {
            return;
        }
        mNeedsLayout = true;
        // Schedule the actual layout pass on the dispatcher, if attached.
        if( mDispatcher != null ) {
            mDispatcher.fireLayout( this );
        }
    }
@Override
public boolean needsLayout() {
return mNeedsLayout;
}
@Override
public synchronized void repaint() {
if( mNeedsPaint || mDispatcher == null ) {
return;
}
mNeedsPaint = true;
mDispatcher.firePaint( this );
}
@Override
public boolean needsRepaint() {
return mNeedsPaint;
}
@Override
public GDispatcher dispatcher() {
return mDispatcher;
}
@Override
public synchronized void treeProcessParentChanged( GDispatcher dispatcher, GComponent parent ) {
if( dispatcher == mDispatcher && parent == mParent ) {
return;
}
GDispatcher out = dispatcher != null ? dispatcher : mDispatcher;
mDispatcher = dispatcher;
mParent = parent;
mHasAbsoluteBounds = false;
// Notify ancestor has changed.
if( out != null && mAncestorCaster != null ) {
GAncestorEvent e = new GAncestorEvent( this, GAncestorEvent.ANCESTOR_CHANGED, parent );
out.fireAncestorEvent( e );
}
updateDisplayed( out );
mNeedsLayout = false;
mNeedsPaint = false;
applyLayout();
if( mDisplayed ) {
repaint();
}
mDispatcher = dispatcher;
if( !mChildren.isEmpty() ) {
for( GComponent c: mChildren ) {
c.treeProcessParentChanged( dispatcher, this );
}
}
}
@Override
public synchronized void treeProcessAncestorMoved( GComponent source ) {
mHasAbsoluteBounds = false;
if( mDispatcher != null ) {
if( source == this ) {
if( mComponentCaster != null ) {
GComponentEvent e = new GComponentEvent( this, GComponentEvent.COMPONENT_MOVED );
mDispatcher.fireComponentEvent( e );
}
} else if( mAncestorCaster != null ) {
GAncestorEvent e = new GAncestorEvent( this, GAncestorEvent.ANCESTOR_MOVED, source );
mDispatcher.fireAncestorEvent( e );
}
}
if( !mChildren.isEmpty() ) {
for( GComponent c: mChildren ) {
c.treeProcessAncestorMoved( source );
}
}
}
@Override
public synchronized void treeProcessAncestorResized( GComponent source ) {
mHasAbsoluteBounds = false;
if( mDispatcher != null ) {
if( source == this ) {
if( mComponentCaster != null ) {
GComponentEvent e = new GComponentEvent( this, GComponentEvent.COMPONENT_RESIZED );
mDispatcher.fireComponentEvent( e );
}
} else if( mAncestorCaster != null ) {
GAncestorEvent e = new GAncestorEvent( this, GAncestorEvent.ANCESTOR_RESIZED, source );
mDispatcher.fireAncestorEvent( e );
}
}
if( !mChildren.isEmpty() ) {
for( GComponent c: mChildren ) {
c.treeProcessAncestorResized( source );
}
}
}
@Override
public synchronized void treeProcessParentShown() {
if( updateDisplayed() && !mChildren.isEmpty() ) {
for( GComponent c: mChildren ) {
c.treeProcessParentShown();
}
}
}
@Override
public synchronized void treeProcessParentHidden() {
if( updateDisplayed() && !mChildren.isEmpty() ) {
for( GComponent c: mChildren ) {
c.treeProcessParentHidden();
}
}
}
    @Override
    public synchronized void treeValidateHasMouseFocusable() {
        // Recompute the cached "subtree contains a mouse-focusable component"
        // flag for this node and, recursively, all descendants.
        mTreeIsMouseFocusable = GToolkit.isMouseFocusable( this );
        for( GComponent c: mChildren ) {
            c.treeValidateHasMouseFocusable();
            mTreeIsMouseFocusable = mTreeIsMouseFocusable || c.hasMouseFocusableComponent();
        }
    }
    @Override
    public void treeProcessLayout() {
        GLayout m;
        // Snapshot the layout manager and clear the pending flag under the
        // lock, but invoke the layout callback itself without holding the
        // lock (it may call back into this component).
        synchronized( this ) {
            m = mLayout;
            mNeedsLayout = false;
        }
        if( m != null ) {
            m.layoutPane( this );
        }
        // Recurse into children; re-acquire the lock for the iteration.
        synchronized( this ) {
            for( GComponent p: mChildren ) {
                p.treeProcessLayout();
            }
        }
    }
@Override
public synchronized void processPaint( DrawEnv g ) {
if( !mDisplayed ) {
return;
}
mNeedsPaint = false;
paintComponent( g );
paintChildren( g );
}
@Override
public void processComponentEvent( GComponentEvent e ) {
GComponentListener c = mComponentCaster;
if( c == null ) {
return;
}
switch( e.id() ) {
case GComponentEvent.COMPONENT_MOVED:
c.componentMoved( e );
break;
case GComponentEvent.COMPONENT_RESIZED:
c.componentResized( e );
break;
case GComponentEvent.COMPONENT_SHOWN:
c.componentShown( e );
break;
case GComponentEvent.COMPONENT_HIDDEN:
c.componentHidden( e );
break;
}
}
@Override
public void processAncestorEvent( GAncestorEvent e ) {
GAncestorListener c = mAncestorCaster;
if( c == null ) {
return;
}
switch( e.id() ) {
case GAncestorEvent.ANCESTOR_CHANGED:
c.ancestorChanged( e );
break;
case GAncestorEvent.ANCESTOR_MOVED:
c.ancestorMoved( e );
break;
case GAncestorEvent.ANCESTOR_RESIZED:
c.ancestorResized( e );
break;
}
}
@Override
public void processFocusEvent( GFocusEvent e ) {
GFocusListener f = mFocusCaster;
switch( e.id() ) {
case GFocusEvent.FOCUS_GAINED:
mHasFocus = true;
if( f != null ) {
f.focusGained( e );
}
break;
case GFocusEvent.FOCUS_LOST:
mHasFocus = false;
if( f != null ) {
f.focusLost( e );
}
break;
}
}
@Override
public void processMouseEvent( GMouseEvent e ) {
GMouseListener m = mMouseCaster;
if( m == null ) {
return;
}
switch( e.id() ) {
case GMouseEvent.MOUSE_PRESSED:
m.mousePressed( e );
break;
case GMouseEvent.MOUSE_RELEASED:
m.mouseReleased( e );
break;
case GMouseEvent.MOUSE_CLICKED:
m.mouseClicked( e );
break;
case GMouseEvent.MOUSE_ENTERED:
m.mouseEntered( e );
break;
case GMouseEvent.MOUSE_EXITED:
m.mouseExited( e );
break;
}
e.consume();
}
@Override
public void processMouseMotionEvent( GMouseEvent e ) {
GMouseMotionListener m = mMouseMotionCaster;
if( m == null ) {
return;
}
if( e.id() == MouseEvent.MOUSE_MOVED ) {
m.mouseMoved( e );
} else {
m.mouseDragged( e );
}
e.consume();
}
@Override
public void processMouseWheelEvent( GMouseWheelEvent e ) {
GMouseWheelListener m = mMouseWheelCaster;
if( m == null ) {
return;
}
m.mouseWheelMoved( e );
e.consume();
}
@Override
public void processKeyEvent( GKeyEvent e ) {
GKeyListener m = mKeyCaster;
if( m == null ) {
return;
}
switch( e.id() ) {
case GKeyEvent.KEY_PRESSED:
m.keyPressed( e );
break;
case GKeyEvent.KEY_RELEASED:
m.keyReleased( e );
break;
case GKeyEvent.KEY_TYPED:
m.keyTyped( e );
break;
}
e.consume();
}
protected void paintComponent( DrawEnv g ) {
GPaintListener c = mPaintCaster;
if( c != null ) {
c.paint( g );
}
}
protected void paintChildren( DrawEnv g ) {
if( mChildren.isEmpty() ) {
return;
}
for( GComponent p : mChildren ) {
if( p.isDisplayed() ) {
prepareView( g, p );
p.processPaint( g );
}
}
}
protected void prepareView( DrawEnv g, GComponent p ) {
Rect b = g.mWorkRect;
p.getAbsoluteBounds( b );
Rect viewport = g.mContextViewport;
int x = b.x0 - viewport.x0;
int y = b.y0 - viewport.y0;
int w = b.width();
int h = b.height();
g.mProj.setOrtho( 0, w, 0, h, -1, 1 );
g.mView.identity();
g.mViewport.apply( x, y, w, h );
g.mScissorTest.apply( true, x, y, w, h );
}
protected void childAdded( GComponent child ) {
child.treeProcessParentChanged( mDispatcher, this );
applyLayout();
}
protected void childRemoved( GComponent child ) {
child.treeProcessParentChanged( null, null );
applyLayout();
}
    private synchronized boolean updateDisplayed() {
        return updateDisplayed( mDispatcher );
    }

    /**
     * Recomputes mDisplayed (attached + visible + parent displayed) and fires
     * shown/hidden plus property-change notifications on {@code out} when the
     * value changes.
     *
     * @return true iff the displayed state changed.
     */
    private boolean updateDisplayed( GDispatcher out ) {
        boolean displayed = mDispatcher!= null &&
                            mVisible &&
                            ( mParent == null || mParent.isDisplayed() );
        if( mDisplayed == displayed ) {
            return false;
        }
        mDisplayed = displayed;
        // A hidden subtree cannot hold mouse focus.
        mTreeIsMouseFocusable &= displayed;
        if( out != null ) {
            if( mComponentCaster != null ) {
                int id = displayed ? GComponentEvent.COMPONENT_SHOWN : GComponentEvent.COMPONENT_HIDDEN;
                out.fireComponentEvent( new GComponentEvent( this, id ) );
            }
            out.firePropertyChange( this, PROP_DISPLAYED, !displayed, displayed );
        }
        return true;
    }
//**********************
// Deprecation Ghetto
//**********************
@Override
@Deprecated public void bounds( Rect out ) {
getBounds( out );
}
@Override
@Deprecated public void absoluteBounds( Rect out ) {
getAbsoluteBounds( out );
}
@Override
@Deprecated public Rect absoluteBounds() {
return getAbsoluteBounds();
}
@Override
@Deprecated public GComponent position( int x, int y ) {
return setPosition( x, y );
}
@Override
@Deprecated public GComponent bounds( int x, int y, int w, int h ) {
return setBounds( x, y, w, h );
}
@Override
@Deprecated public GComponent size( int w, int y ) {
return setSize( w, y );
}
@Override
@Deprecated public boolean background( Vec4 out ) {
return getBackground( out );
}
@Override
@Deprecated public boolean foreground( Vec4 out ) {
return getForeground( out );
}
@Override
@Deprecated public GComponent font( Font font ) {
return setFont( font );
}
@Override
@Deprecated public Font font() {
return getFont();
}
}
| |
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_class.java
// Do not modify
package org.projectfloodlight.openflow.protocol.ver14;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import org.jboss.netty.buffer.ChannelBuffer;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;
// Generated, immutable OpenFlow 1.4 FLOW_MOD_FAILED error message.
// NOTE(review): LoxiGen-generated file ("Do not modify") — comments only.
class OFFlowModFailedErrorMsgVer14 implements OFFlowModFailedErrorMsg {
    private static final Logger logger = LoggerFactory.getLogger(OFFlowModFailedErrorMsgVer14.class);
    // version: 1.4
    final static byte WIRE_VERSION = 5;
    final static int MINIMUM_LENGTH = 12;

    // Defaults used by the plain builder when a field is not set.
    private final static long DEFAULT_XID = 0x0L;
    private final static OFErrorCauseData DEFAULT_DATA = OFErrorCauseData.NONE;

    // OF message fields (all final: instances are immutable).
    private final long xid;
    private final OFFlowModFailedCode code;
    private final OFErrorCauseData data;
//
    // package private constructor - used by readers, builders, and factory
    OFFlowModFailedErrorMsgVer14(long xid, OFFlowModFailedCode code, OFErrorCauseData data) {
        // Generated null-checks: code and data are mandatory properties.
        if(code == null) {
            throw new NullPointerException("OFFlowModFailedErrorMsgVer14: property code cannot be null");
        }
        if(data == null) {
            throw new NullPointerException("OFFlowModFailedErrorMsgVer14: property data cannot be null");
        }
        this.xid = xid;
        this.code = code;
        this.data = data;
    }
// Accessors for OF message fields
@Override
public OFVersion getVersion() {
return OFVersion.OF_14;
}
@Override
public OFType getType() {
return OFType.ERROR;
}
@Override
public long getXid() {
return xid;
}
@Override
public OFErrorType getErrType() {
return OFErrorType.FLOW_MOD_FAILED;
}
@Override
public OFFlowModFailedCode getCode() {
return code;
}
@Override
public OFErrorCauseData getData() {
return data;
}
public OFFlowModFailedErrorMsg.Builder createBuilder() {
return new BuilderWithParent(this);
}
static class BuilderWithParent implements OFFlowModFailedErrorMsg.Builder {
final OFFlowModFailedErrorMsgVer14 parentMessage;
// OF message fields
private boolean xidSet;
private long xid;
private boolean codeSet;
private OFFlowModFailedCode code;
private boolean dataSet;
private OFErrorCauseData data;
BuilderWithParent(OFFlowModFailedErrorMsgVer14 parentMessage) {
this.parentMessage = parentMessage;
}
@Override
public OFVersion getVersion() {
return OFVersion.OF_14;
}
@Override
public OFType getType() {
return OFType.ERROR;
}
@Override
public long getXid() {
return xid;
}
@Override
public OFFlowModFailedErrorMsg.Builder setXid(long xid) {
this.xid = xid;
this.xidSet = true;
return this;
}
@Override
public OFErrorType getErrType() {
return OFErrorType.FLOW_MOD_FAILED;
}
@Override
public OFFlowModFailedCode getCode() {
return code;
}
@Override
public OFFlowModFailedErrorMsg.Builder setCode(OFFlowModFailedCode code) {
this.code = code;
this.codeSet = true;
return this;
}
@Override
public OFErrorCauseData getData() {
return data;
}
@Override
public OFFlowModFailedErrorMsg.Builder setData(OFErrorCauseData data) {
this.data = data;
this.dataSet = true;
return this;
}
@Override
public OFFlowModFailedErrorMsg build() {
long xid = this.xidSet ? this.xid : parentMessage.xid;
OFFlowModFailedCode code = this.codeSet ? this.code : parentMessage.code;
if(code == null)
throw new NullPointerException("Property code must not be null");
OFErrorCauseData data = this.dataSet ? this.data : parentMessage.data;
if(data == null)
throw new NullPointerException("Property data must not be null");
//
return new OFFlowModFailedErrorMsgVer14(
xid,
code,
data
);
}
}
static class Builder implements OFFlowModFailedErrorMsg.Builder {
// OF message fields
private boolean xidSet;
private long xid;
private boolean codeSet;
private OFFlowModFailedCode code;
private boolean dataSet;
private OFErrorCauseData data;
@Override
public OFVersion getVersion() {
return OFVersion.OF_14;
}
@Override
public OFType getType() {
return OFType.ERROR;
}
@Override
public long getXid() {
return xid;
}
@Override
public OFFlowModFailedErrorMsg.Builder setXid(long xid) {
this.xid = xid;
this.xidSet = true;
return this;
}
@Override
public OFErrorType getErrType() {
return OFErrorType.FLOW_MOD_FAILED;
}
@Override
public OFFlowModFailedCode getCode() {
return code;
}
@Override
public OFFlowModFailedErrorMsg.Builder setCode(OFFlowModFailedCode code) {
this.code = code;
this.codeSet = true;
return this;
}
@Override
public OFErrorCauseData getData() {
return data;
}
@Override
public OFFlowModFailedErrorMsg.Builder setData(OFErrorCauseData data) {
this.data = data;
this.dataSet = true;
return this;
}
//
@Override
public OFFlowModFailedErrorMsg build() {
long xid = this.xidSet ? this.xid : DEFAULT_XID;
if(!this.codeSet)
throw new IllegalStateException("Property code doesn't have default value -- must be set");
if(code == null)
throw new NullPointerException("Property code must not be null");
OFErrorCauseData data = this.dataSet ? this.data : DEFAULT_DATA;
if(data == null)
throw new NullPointerException("Property data must not be null");
return new OFFlowModFailedErrorMsgVer14(
xid,
code,
data
);
}
}
final static Reader READER = new Reader();
static class Reader implements OFMessageReader<OFFlowModFailedErrorMsg> {
@Override
public OFFlowModFailedErrorMsg readFrom(ChannelBuffer bb) throws OFParseError {
int start = bb.readerIndex();
// fixed value property version == 5
byte version = bb.readByte();
if(version != (byte) 0x5)
throw new OFParseError("Wrong version: Expected=OFVersion.OF_14(5), got="+version);
// fixed value property type == 1
byte type = bb.readByte();
if(type != (byte) 0x1)
throw new OFParseError("Wrong type: Expected=OFType.ERROR(1), got="+type);
int length = U16.f(bb.readShort());
if(length < MINIMUM_LENGTH)
throw new OFParseError("Wrong length: Expected to be >= " + MINIMUM_LENGTH + ", was: " + length);
if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
// Buffer does not have all data yet
bb.readerIndex(start);
return null;
}
if(logger.isTraceEnabled())
logger.trace("readFrom - length={}", length);
long xid = U32.f(bb.readInt());
// fixed value property errType == 5
short errType = bb.readShort();
if(errType != (short) 0x5)
throw new OFParseError("Wrong errType: Expected=OFErrorType.FLOW_MOD_FAILED(5), got="+errType);
OFFlowModFailedCode code = OFFlowModFailedCodeSerializerVer14.readFrom(bb);
OFErrorCauseData data = OFErrorCauseData.read(bb, length - (bb.readerIndex() - start), OFVersion.OF_14);
OFFlowModFailedErrorMsgVer14 flowModFailedErrorMsgVer14 = new OFFlowModFailedErrorMsgVer14(
xid,
code,
data
);
if(logger.isTraceEnabled())
logger.trace("readFrom - read={}", flowModFailedErrorMsgVer14);
return flowModFailedErrorMsgVer14;
}
}
public void putTo(PrimitiveSink sink) {
FUNNEL.funnel(this, sink);
}
final static OFFlowModFailedErrorMsgVer14Funnel FUNNEL = new OFFlowModFailedErrorMsgVer14Funnel();
static class OFFlowModFailedErrorMsgVer14Funnel implements Funnel<OFFlowModFailedErrorMsgVer14> {
private static final long serialVersionUID = 1L;
@Override
public void funnel(OFFlowModFailedErrorMsgVer14 message, PrimitiveSink sink) {
// fixed value property version = 5
sink.putByte((byte) 0x5);
// fixed value property type = 1
sink.putByte((byte) 0x1);
// FIXME: skip funnel of length
sink.putLong(message.xid);
// fixed value property errType = 5
sink.putShort((short) 0x5);
OFFlowModFailedCodeSerializerVer14.putTo(message.code, sink);
message.data.putTo(sink);
}
}
public void writeTo(ChannelBuffer bb) {
WRITER.write(bb, this);
}
final static Writer WRITER = new Writer();
static class Writer implements OFMessageWriter<OFFlowModFailedErrorMsgVer14> {
@Override
public void write(ChannelBuffer bb, OFFlowModFailedErrorMsgVer14 message) {
int startIndex = bb.writerIndex();
// fixed value property version = 5
bb.writeByte((byte) 0x5);
// fixed value property type = 1
bb.writeByte((byte) 0x1);
// length is length of variable message, will be updated at the end
int lengthIndex = bb.writerIndex();
bb.writeShort(U16.t(0));
bb.writeInt(U32.t(message.xid));
// fixed value property errType = 5
bb.writeShort((short) 0x5);
OFFlowModFailedCodeSerializerVer14.writeTo(bb, message.code);
message.data.writeTo(bb);
// update length field
int length = bb.writerIndex() - startIndex;
bb.setShort(lengthIndex, length);
}
}
@Override
public String toString() {
StringBuilder b = new StringBuilder("OFFlowModFailedErrorMsgVer14(");
b.append("xid=").append(xid);
b.append(", ");
b.append("code=").append(code);
b.append(", ");
b.append("data=").append(data);
b.append(")");
return b.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
OFFlowModFailedErrorMsgVer14 other = (OFFlowModFailedErrorMsgVer14) obj;
if( xid != other.xid)
return false;
if (code == null) {
if (other.code != null)
return false;
} else if (!code.equals(other.code))
return false;
if (data == null) {
if (other.data != null)
return false;
} else if (!data.equals(other.data))
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * (int) (xid ^ (xid >>> 32));
result = prime * result + ((code == null) ? 0 : code.hashCode());
result = prime * result + ((data == null) ? 0 : data.hashCode());
return result;
}
}
| |
/*
* Copyright (C) 2016 R&D Solutions Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.hawkcd.scheduler;
import io.hawkcd.model.*;
import io.hawkcd.model.enums.*;
import io.hawkcd.services.AgentService;
import io.hawkcd.services.PipelineService;
import io.hawkcd.services.interfaces.IAgentService;
import io.hawkcd.services.interfaces.IPipelineService;
import io.hawkcd.utilities.constants.LoggerMessages;
import org.apache.log4j.Logger;
import java.time.LocalDateTime;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Queue;
/**
 * Reconciles persisted agent and pipeline statuses: disconnects silent agents,
 * cancels/pauses pipelines that request it, and rolls job statuses up through
 * stages to the owning pipeline.
 */
public class StatusUpdaterService {
    private static final Logger LOGGER = Logger.getLogger(StatusUpdaterService.class.getName());
    // Seconds an agent may go without reporting before being marked disconnected.
    private static final long AGENT_REPORT_TIMEOUT_SECONDS = 12;

    private IAgentService agentService;
    private IPipelineService pipelineService;

    /** Wires the production service implementations. */
    public StatusUpdaterService() {
        this.agentService = new AgentService();
        this.pipelineService = new PipelineService();
    }

    /** Injection constructor, primarily for tests. */
    public StatusUpdaterService(IAgentService agentService, IPipelineService pipelineService) {
        this.agentService = agentService;
        this.pipelineService = pipelineService;
    }

    /**
     * Runs one full reconciliation pass: refreshes every agent's connection
     * state, then cancels, pauses, or rolls up the status of every pipeline
     * currently in progress, persisting each pipeline afterwards.
     */
    public void updateStatuses() {
        List<Agent> agents = (List<Agent>) this.agentService.getAll().getEntity();
        for (Agent agent : agents) {
            this.updateAgentStatus(agent);
        }
        List<Pipeline> pipelinesInProgress = (List<Pipeline>) this.pipelineService.getAllPreparedPipelinesInProgress().getEntity();
        for (Pipeline pipeline : pipelinesInProgress) {
            if (pipeline.shouldBeCanceled()) {
                this.cancelPipeline(pipeline);
                LOGGER.info(String.format(LoggerMessages.PIPELINE_CANCELED, pipeline.getExecutionId(), pipeline.getPipelineDefinitionName()));
                // NOTE(review): this result is constructed but never published anywhere —
                // presumably it was meant to be broadcast as a notification; confirm intent.
                ServiceResult result = new ServiceResult(null, NotificationType.WARNING, "Pipeline " + pipeline.getPipelineDefinitionName() + " was successfully canceled");
            } else if (pipeline.getStatus() == PipelineStatus.PAUSED) {
                this.pausePipeline(pipeline);
            } else {
                this.updateAllStatuses(pipeline);
            }
            this.pipelineService.update(pipeline);
        }
    }

    /**
     * Marks a connected agent as disconnected when it has not reported within
     * {@link #AGENT_REPORT_TIMEOUT_SECONDS}, persisting the change.
     *
     * @param agent the agent to check; may be null (returned unchanged)
     * @return the same agent instance, possibly with its connected flag cleared
     */
    public Agent updateAgentStatus(Agent agent) {
        if (agent != null) {
            LocalDateTime lastReportedTime = agent.getLastReportedTime();
            LocalDateTime currentTime = ZonedDateTime.now(ZoneOffset.UTC).toLocalDateTime();
            long timeBetweenReports = ChronoUnit.SECONDS.between(lastReportedTime, currentTime);
            if (agent.isConnected() && (timeBetweenReports > AGENT_REPORT_TIMEOUT_SECONDS)) {
                agent.setConnected(false);
                this.agentService.update(agent);
            }
        }
        return agent;
    }

    /**
     * Breadth-first walk from the given node (Pipeline, Stage, or Job): updates
     * stage statuses for every pipeline encountered, and once any Job is reached
     * rolls the status up to the root pipeline.
     *
     * @param node the root of the walk; expected to be a Pipeline when a Job is reached
     * @return true when a Job was reached and the pipeline status was rolled up
     */
    public boolean updateAllStatuses(Object node) {
        Pipeline pipelineToUpdate = null;
        Queue<Object> queue = new LinkedList<>();
        queue.add(node);
        while (!queue.isEmpty()) {
            Object queueNode = queue.poll();
            if (queueNode.getClass() == Job.class) {
                // NOTE(review): casts the original argument, not the queue node — this
                // assumes the walk always started from a Pipeline; confirm with callers.
                pipelineToUpdate = (Pipeline) node;
                this.updatePipelineStatus(pipelineToUpdate);
                return true;
            }
            if (queueNode.getClass() == Pipeline.class) {
                pipelineToUpdate = (Pipeline) queueNode;
                queue.addAll(pipelineToUpdate.getStages());
                this.updateStageStatusesInSequence(pipelineToUpdate.getStages());
            } else {
                Stage stageNode = (Stage) queueNode;
                queue.addAll(stageNode.getJobs());
            }
        }
        return false;
    }

    /**
     * Advances stages strictly in order: resumes a paused stage, progresses the
     * first in-progress stage, starts the first not-run stage, and stops at the
     * first stage that is neither passed nor startable.
     */
    public void updateStageStatusesInSequence(List<Stage> stages) {
        for (Stage currentStage : stages) {
            if (currentStage.getStatus() == StageStatus.PAUSED) {
                currentStage.setStatus(StageStatus.IN_PROGRESS);
                currentStage.setTriggeredManually(false);
            }
            if (currentStage.getStatus() == StageStatus.IN_PROGRESS) {
                this.updateStageStatus(currentStage);
                if (currentStage.getStatus() == StageStatus.PASSED) {
                    continue; // stage just finished; the next stage may proceed
                } else {
                    break; // still running or failed; later stages must wait
                }
            } else if ((currentStage.getStatus() == StageStatus.NOT_RUN)) {
                currentStage.setStartTime(ZonedDateTime.now(ZoneOffset.UTC).toLocalDateTime());
                currentStage.setStatus(StageStatus.IN_PROGRESS);
                break;
            } else if (currentStage.getStatus() == StageStatus.PASSED) {
                continue;
            } else {
                break;
            }
        }
    }

    /**
     * Derives a stage's status from its jobs: any FAILED job fails the stage;
     * all jobs PASSED passes it; otherwise the stage is left untouched.
     */
    public void updateStageStatus(Stage stage) {
        List<JobStatus> jobStatuses = new ArrayList<>();
        List<Job> jobs = stage.getJobs();
        for (Job job : jobs) {
            jobStatuses.add(job.getStatus());
        }
        if (jobStatuses.contains(JobStatus.FAILED)) {
            stage.setEndTime(ZonedDateTime.now(ZoneOffset.UTC).toLocalDateTime());
            stage.setStatus(StageStatus.FAILED);
            LOGGER.info(String.format("Stage %s set to %s", stage.getStageDefinitionName(), JobStatus.FAILED));
        } else if (this.areAllPassed(jobStatuses)) {
            stage.setEndTime(ZonedDateTime.now(ZoneOffset.UTC).toLocalDateTime());
            stage.setStatus(StageStatus.PASSED);
            LOGGER.info(String.format("Stage %s set to %s", stage.getStageDefinitionName(), JobStatus.PASSED));
        }
    }

    /**
     * Derives a pipeline's status from its stages, pausing it when a manually
     * triggered stage is reached; any FAILED stage fails the pipeline and all
     * stages PASSED passes it.
     */
    public void updatePipelineStatus(Pipeline pipeline) {
        List<Stage> stages = pipeline.getStages();
        List<StageStatus> stageStatuses = new ArrayList<>();
        for (Stage stage : stages) {
            stageStatuses.add(stage.getStatus());
            if (stage.isTriggeredManually() && (stage.getStatus() == StageStatus.IN_PROGRESS)) {
                pipeline.setStatus(PipelineStatus.PAUSED);
                stage.setStatus(StageStatus.PAUSED);
                String pipelinePaused = String.format("Pipeline %s set to %s", pipeline.getPipelineDefinitionName(), PipelineStatus.PAUSED);
                LOGGER.info(pipelinePaused);
                String stagePaused = String.format("Stage %s must be triggered manually", stage.getStageDefinitionName());
                LOGGER.info(stagePaused);
                String notificationMessage = pipelinePaused + System.lineSeparator() + stagePaused;
                // NOTE(review): constructed but never published — presumably meant to be
                // broadcast; confirm intent before removing.
                ServiceResult notification = new ServiceResult(null, NotificationType.WARNING, notificationMessage);
            }
        }
        if (stageStatuses.contains(StageStatus.FAILED)) {
            pipeline.setStatus(PipelineStatus.FAILED);
            pipeline.setEndTime(ZonedDateTime.now(ZoneOffset.UTC).toLocalDateTime());
            LOGGER.info(String.format("Pipeline %s set to %s", pipeline.getPipelineDefinitionName(), PipelineStatus.FAILED));
        } else if (this.areAllPassed(stageStatuses)) {
            pipeline.setStatus(PipelineStatus.PASSED);
            pipeline.setEndTime(ZonedDateTime.now(ZoneOffset.UTC).toLocalDateTime());
            LOGGER.info(String.format("Pipeline %s set to %s", pipeline.getPipelineDefinitionName(), PipelineStatus.PASSED));
        }
    }

    /**
     * Returns true when the list is non-empty and every element's string form is
     * exactly "PASSED". (Rewritten without the intermediate String[]: the old
     * version allocated an array and only then checked for emptiness.)
     *
     * @param statuses job or stage statuses; compared via toString()
     */
    public boolean areAllPassed(List<?> statuses) {
        if (statuses.isEmpty()) {
            return false;
        }
        for (Object status : statuses) {
            if (!"PASSED".equals(status.toString())) {
                return false;
            }
        }
        return true;
    }

    /**
     * Cancels a pipeline and cascades CANCELED down to every in-progress or
     * awaiting stage, its jobs, and their tasks.
     */
    private void cancelPipeline(Pipeline pipeline) {
        pipeline.setShouldBeCanceled(false);
        pipeline.setStatus(PipelineStatus.CANCELED);
        pipeline.setEndTime(ZonedDateTime.now(ZoneOffset.UTC).toLocalDateTime());
        for (Stage stage : pipeline.getStages()) {
            if (stage.getStatus() == StageStatus.IN_PROGRESS || stage.getStatus() == StageStatus.AWAITING) {
                stage.setStatus(StageStatus.CANCELED);
                for (Job job : stage.getJobs()) {
                    job.setStatus(JobStatus.CANCELED);
                    for (Task task : job.getTasks()) {
                        task.setStatus(TaskStatus.CANCELED);
                    }
                }
            }
        }
    }

    /** Pauses the first in-progress stage of the pipeline. */
    private void pausePipeline(Pipeline pipeline) {
        List<Stage> stages = pipeline.getStages();
        for (Stage stage : stages) {
            if (stage.getStatus() == StageStatus.IN_PROGRESS) {
                stage.setStatus(StageStatus.PAUSED);
                break;
            }
        }
    }
}
| |
/**
* Copyright 2011 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.dogecoin.core;
import com.google.dogecoin.params.MainNetParams;
import java.io.IOException;
import java.io.OutputStream;
import java.math.BigInteger;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.UnknownHostException;
import static com.google.dogecoin.core.Utils.uint32ToByteStreamLE;
import static com.google.dogecoin.core.Utils.uint64ToByteStreamLE;
import static com.google.common.base.Preconditions.checkNotNull;
/**
* A PeerAddress holds an IP address and port number representing the network location of
* a peer in the Bitcoin P2P network. It exists primarily for serialization purposes.
*/
/**
 * A PeerAddress holds an IP address and port number representing the network location of
 * a peer in the Bitcoin P2P network. It exists primarily for serialization purposes.
 *
 * <p>Instances participate in the base {@code Message} lazy-parse machinery: getters call
 * {@code maybeParse()} before reading fields, and setters call {@code unCache()} to
 * invalidate any retained serialized bytes.
 */
public class PeerAddress extends ChildMessage {
    private static final long serialVersionUID = 7501293709324197411L;

    // Serialized size in bytes when the uint32 timestamp is present (protocol >= 31402);
    // 4 bytes smaller otherwise (see parseLite/getMessageSize).
    static final int MESSAGE_SIZE = 30;

    private InetAddress addr;
    private int port;
    private BigInteger services;  // service flag bits advertised by the peer (uint64 on the wire)
    private long time;            // last-seen timestamp (seconds); -1 when the protocol omits it

    /**
     * Construct a peer address from a serialized payload.
     */
    public PeerAddress(NetworkParameters params, byte[] payload, int offset, int protocolVersion) throws ProtocolException {
        super(params, payload, offset, protocolVersion);
    }

    /**
     * Construct a peer address from a serialized payload.
     * @param params NetworkParameters object.
     * @param msg Bitcoin protocol formatted byte array containing message content.
     * @param offset The location of the first msg byte within the array.
     * @param protocolVersion Bitcoin protocol version.
     * @param parseLazy Whether to perform a full parse immediately or delay until a read is requested.
     * @param parseRetain Whether to retain the backing byte array for quick reserialization.
     * If true and the backing byte array is invalidated due to modification of a field then
     * the cached bytes may be repopulated and retained if the message is serialized again in the future.
     * @throws ProtocolException
     */
    public PeerAddress(NetworkParameters params, byte[] msg, int offset, int protocolVersion, Message parent, boolean parseLazy,
                       boolean parseRetain) throws ProtocolException {
        super(params, msg, offset, protocolVersion, parent, parseLazy, parseRetain, UNKNOWN_LENGTH);
        // Message length is calculated in parseLite which is guaranteed to be called before it is ever read.
        // Even though message length is static for a PeerAddress it is safer to leave it there
        // as it will be set regardless of which constructor was used.
    }

    /**
     * Construct a peer address from a memorized or hardcoded address.
     */
    public PeerAddress(InetAddress addr, int port, int protocolVersion) {
        this.addr = checkNotNull(addr);
        this.port = port;
        this.protocolVersion = protocolVersion;
        this.services = BigInteger.ZERO;
        // Pre-31402 addresses have no timestamp field, hence 4 fewer bytes.
        length = protocolVersion > 31402 ? MESSAGE_SIZE : MESSAGE_SIZE - 4;
    }

    /**
     * Constructs a peer address from the given IP address and port. Protocol version is the default.
     */
    public PeerAddress(InetAddress addr, int port) {
        this(addr, port, NetworkParameters.PROTOCOL_VERSION);
    }

    /**
     * Constructs a peer address from the given IP address. Port and protocol version are default for the prodnet.
     */
    public PeerAddress(InetAddress addr) {
        this(addr, MainNetParams.get().getPort());
    }

    /** Constructs a peer address from the address/port pair of the given socket address. */
    public PeerAddress(InetSocketAddress addr) {
        this(addr.getAddress(), addr.getPort());
    }

    /** Returns a PeerAddress for this machine's local host on the network's default port. */
    public static PeerAddress localhost(NetworkParameters params) {
        try {
            return new PeerAddress(InetAddress.getLocalHost(), params.getPort());
        } catch (UnknownHostException e) {
            throw new RuntimeException(e); // Broken system.
        }
    }

    // Writes: [uint32 time (>= 31402 only)] [uint64 services] [16-byte IPv6-mapped address] [uint16 port, big-endian].
    @Override
    protected void bitcoinSerializeToStream(OutputStream stream) throws IOException {
        if (protocolVersion >= 31402) {
            //TODO this appears to be dynamic because the client only ever sends out it's own address
            //so assumes itself to be up. For a fuller implementation this needs to be dynamic only if
            //the address refers to this client.
            int secs = (int) (Utils.now().getTime() / 1000);
            uint32ToByteStreamLE(secs, stream);
        }
        uint64ToByteStreamLE(services, stream); // nServices.
        // Java does not provide any utility to map an IPv4 address into IPv6 space, so we have to do it by hand.
        byte[] ipBytes = addr.getAddress();
        if (ipBytes.length == 4) {
            // ::FFFF:a.b.c.d — the standard IPv4-mapped IPv6 form.
            byte[] v6addr = new byte[16];
            System.arraycopy(ipBytes, 0, v6addr, 12, 4);
            v6addr[10] = (byte) 0xFF;
            v6addr[11] = (byte) 0xFF;
            ipBytes = v6addr;
        }
        stream.write(ipBytes);
        // And write out the port. Unlike the rest of the protocol, address and port is in big endian byte order.
        stream.write((byte) (0xFF & port >> 8));
        stream.write((byte) (0xFF & port));
    }

    // Cheap pre-parse: only establishes the serialized length (full field parse happens in parse()).
    // NOTE(review): serialization uses ">= 31402" but this uses "> 31402" — presumably equivalent in
    // practice because exactly-31402 is never negotiated, but confirm before relying on it.
    protected void parseLite() {
        length = protocolVersion > 31402 ? MESSAGE_SIZE : MESSAGE_SIZE - 4;
    }

    @Override
    protected void parse() throws ProtocolException {
        // Format of a serialized address:
        //   uint32 timestamp
        //   uint64 services (flags determining what the node can do)
        //   16 bytes ip address
        //   2 bytes port num
        if (protocolVersion > 31402)
            time = readUint32();
        else
            time = -1;  // sentinel: no timestamp on the wire for old protocol versions
        services = readUint64();
        byte[] addrBytes = readBytes(16);
        try {
            addr = InetAddress.getByAddress(addrBytes);
        } catch (UnknownHostException e) {
            throw new RuntimeException(e); // Cannot happen.
        }
        // Port is big-endian, unlike the little-endian fields above.
        port = ((0xFF & bytes[cursor++]) << 8) | (0xFF & bytes[cursor++]);
    }

    /**
     * Returns the serialized size in bytes, which depends on whether the protocol
     * version includes the uint32 timestamp.
     * @see Message#getMessageSize()
     */
    @Override
    public int getMessageSize() {
        // The 4 byte difference is the uint32 timestamp that was introduced in version 31402
        length = protocolVersion > 31402 ? MESSAGE_SIZE : MESSAGE_SIZE - 4;
        return length;
    }

    /**
     * @return the addr
     */
    public InetAddress getAddr() {
        maybeParse();
        return addr;
    }

    /**
     * @param addr the addr to set
     */
    public void setAddr(InetAddress addr) {
        unCache();  // invalidate any cached serialized form
        this.addr = addr;
    }

    /**
     * @return the port
     */
    public int getPort() {
        maybeParse();
        return port;
    }

    /**
     * @param port the port to set
     */
    public void setPort(int port) {
        unCache();
        this.port = port;
    }

    /**
     * @return the services
     */
    public BigInteger getServices() {
        maybeParse();
        return services;
    }

    /**
     * @param services the services to set
     */
    public void setServices(BigInteger services) {
        unCache();
        this.services = services;
    }

    /**
     * @return the time
     */
    public long getTime() {
        maybeParse();
        return time;
    }

    /**
     * @param time the time to set
     */
    public void setTime(long time) {
        unCache();
        this.time = time;
    }

    // NOTE(review): toString/equals/hashCode read fields directly without maybeParse(),
    // unlike the getters above — on a lazily parsed, not-yet-parsed instance the fields
    // could still be unset. Confirm whether callers only use these after parsing.
    @Override
    public String toString() {
        return "[" + addr.getHostAddress() + "]:" + port;
    }

    @Override
    public boolean equals(Object o) {
        if (!(o instanceof PeerAddress)) return false;
        PeerAddress other = (PeerAddress) o;
        return other.addr.equals(addr) &&
               other.port == port &&
               other.services.equals(services) &&
               other.time == time;
    }

    @Override
    public int hashCode() {
        return addr.hashCode() ^ port ^ (int) time ^ services.hashCode();
    }

    /** Converts this peer address to a standard InetSocketAddress (drops time/services). */
    public InetSocketAddress toSocketAddress() {
        return new InetSocketAddress(addr, port);
    }
}
| |
package openfoodfacts.github.scrachx.openfood.views;
import android.Manifest;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.content.pm.ActivityInfo;
import android.content.pm.PackageManager;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.provider.Settings;
import android.text.TextUtils;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.TextView;
import android.widget.Toast;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.core.app.ActivityCompat;
import androidx.core.app.NavUtils;
import androidx.core.app.NotificationCompat;
import androidx.core.content.ContextCompat;
import androidx.recyclerview.widget.ItemTouchHelper;
import androidx.recyclerview.widget.LinearLayoutManager;
import androidx.recyclerview.widget.RecyclerView;
import com.afollestad.materialdialogs.MaterialDialog;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import java.util.Objects;
import io.reactivex.Completable;
import io.reactivex.CompletableSource;
import io.reactivex.Single;
import io.reactivex.android.schedulers.AndroidSchedulers;
import io.reactivex.disposables.Disposable;
import io.reactivex.functions.Function;
import io.reactivex.schedulers.Schedulers;
import openfoodfacts.github.scrachx.openfood.BuildConfig;
import openfoodfacts.github.scrachx.openfood.R;
import openfoodfacts.github.scrachx.openfood.databinding.ActivityHistoryScanBinding;
import openfoodfacts.github.scrachx.openfood.models.HistoryItem;
import openfoodfacts.github.scrachx.openfood.models.HistoryProduct;
import openfoodfacts.github.scrachx.openfood.models.HistoryProductDao;
import openfoodfacts.github.scrachx.openfood.utils.FileUtils;
import openfoodfacts.github.scrachx.openfood.utils.SwipeController;
import openfoodfacts.github.scrachx.openfood.utils.SwipeControllerActions;
import openfoodfacts.github.scrachx.openfood.utils.Utils;
import openfoodfacts.github.scrachx.openfood.views.adapters.HistoryListAdapter;
import openfoodfacts.github.scrachx.openfood.views.listeners.CommonBottomListenerInstaller;
import openfoodfacts.github.scrachx.openfood.views.scan.ContinuousScanActivity;
public class HistoryScanActivity extends BaseActivity implements SwipeControllerActions {
private ActivityHistoryScanBinding binding;
private List<HistoryItem> productItems;
private boolean emptyHistory;
private HistoryProductDao mHistoryProductDao;
private HistoryListAdapter adapter;
private Disposable disposable;
private List<HistoryProduct> listHistoryProducts;
//boolean to determine if image should be loaded or not
private boolean isLowBatteryMode = false;
private static String SORT_TYPE = "none";
/**
 * Convenience launcher for the scan-history screen.
 *
 * @param context any context capable of starting an activity
 */
public static void start(Context context) {
    context.startActivity(new Intent(context, HistoryScanActivity.class));
}
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    // Some devices (phones) are locked to portrait via resources.
    if (getResources().getBoolean(R.bool.portrait_only)) {
        setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
    }
    binding = ActivityHistoryScanBinding.inflate(getLayoutInflater());
    setContentView(binding.getRoot());
    binding.scanFirst.setOnClickListener(v -> onScanFirst());
    setTitle(getString(R.string.scan_history_drawer));
    Objects.requireNonNull(getSupportActionBar()).setDisplayHomeAsUpEnabled(true);
    // If Battery Level is low and the user has checked the Disable Image in Preferences , then set isLowBatteryMode to true
    if (Utils.isDisableImageLoad(this) && Utils.isBatteryLevelLow(this)) {
        isLowBatteryMode = true;
    }
    mHistoryProductDao = Utils.getDaoSession().getHistoryProductDao();
    productItems = new ArrayList<>();
    setInfo(binding.emptyHistoryInfo);
    // Pull-to-refresh re-reads the DAO and rebuilds the list from scratch.
    binding.srRefreshHistoryScanList.setOnRefreshListener(() -> {
        mHistoryProductDao = Utils.getDaoSession().getHistoryProductDao();
        productItems = new ArrayList<>();
        setInfo(binding.emptyHistoryInfo);
        fillView();
        binding.srRefreshHistoryScanList.setRefreshing(false);
    });
    CommonBottomListenerInstaller.install(this, binding.navigationBottom.bottomNavigation);
}
@Override
protected void onStart() {
    super.onStart();
    // to fill the view in any case even if the user scans products from History screen
    // (returning from the scanner re-enters onStart, so the list stays current)
    fillView();
}
/**
 * Swipe-to-delete callback: removes the history entry at {@code position} from
 * both the database and the adapter, showing the empty-state views when the
 * list becomes empty.
 *
 * @param position adapter position of the swiped row
 */
@Override
public void onRightClicked(int position) {
    // Guard the DB delete with a bounds check as well as the emptiness check:
    // the DB-backed list and the adapter items can diverge (e.g. after a refresh),
    // and listHistoryProducts.get(position) would then throw IndexOutOfBoundsException.
    if (CollectionUtils.isNotEmpty(listHistoryProducts)
            && position >= 0 && position < listHistoryProducts.size()) {
        mHistoryProductDao.delete(listHistoryProducts.get(position));
    }
    adapter.remove(productItems.get(position));
    adapter.notifyItemRemoved(position);
    adapter.notifyItemRangeChanged(position, adapter.getItemCount());
    if (adapter.getItemCount() == 0) {
        binding.emptyHistoryInfo.setVisibility(View.VISIBLE);
        binding.scanFirst.setVisibility(View.VISIBLE);
    }
}
/**
 * Exports the entire scan history (barcode, title, brands) to a CSV file in
 * external storage and, on success, posts a notification that opens the file.
 * Caller must ensure WRITE_EXTERNAL_STORAGE has been granted.
 */
public void exportCSV() {
    String folderMain = FileUtils.getCsvFolderName();
    Toast.makeText(this, R.string.txt_exporting_history, Toast.LENGTH_LONG).show();
    File baseDir = new File(Environment.getExternalStorageDirectory(), folderMain);
    // FIX: the mkdirs() result used to be ignored; a silent failure here surfaced
    // later as an opaque FileWriter IOException. Log it at the point of failure.
    if (!baseDir.exists() && !baseDir.mkdirs()) {
        Log.w(HistoryScanActivity.class.getSimpleName(), "could not create export directory " + baseDir);
    }
    Log.d("dir", String.valueOf(baseDir));
    String fileName = BuildConfig.FLAVOR.toUpperCase() + "-" + new SimpleDateFormat("yyyy-MM-dd", Locale.getDefault()).format(new Date()) + ".csv";
    File csvFile = new File(baseDir, fileName);
    boolean isDownload = false;
    // try-with-resources closes (and flushes) the CSVPrinter on every path.
    try (CSVPrinter writer = new CSVPrinter(new FileWriter(csvFile), CSVFormat.DEFAULT.withHeader(getResources().getStringArray(R.array.headers)))) {
        for (HistoryProduct hp : mHistoryProductDao.loadAll()) {
            writer.printRecord(hp.getBarcode(), hp.getTitle(), hp.getBrands());
        }
        Toast.makeText(this, R.string.txt_history_exported, Toast.LENGTH_LONG).show();
        isDownload = true;
    } catch (IOException e) {
        Log.e(HistoryScanActivity.class.getSimpleName(), "can export to " + csvFile, e);
    }
    Intent downloadIntent = new Intent(Intent.ACTION_VIEW);
    NotificationManager notificationManager = YourListedProductsActivity.createNotification(csvFile, downloadIntent, this);
    if (isDownload) {
        // NOTE(review): PendingIntent flag 0 (mutable) crashes on API 31+ where a
        // mutability flag is mandatory — confirm targetSdk before release.
        NotificationCompat.Builder builder = new NotificationCompat.Builder(this, "export_channel")
            .setContentTitle(getString(R.string.notify_title))
            .setContentText(getString(R.string.notify_content))
            .setContentIntent(PendingIntent.getActivity(this, 4, downloadIntent, 0))
            .setSmallIcon(R.mipmap.ic_launcher);
        notificationManager.notify(7, builder.build());
    }
}
/**
 * Inflates the history menu. The export and clear actions are hidden when
 * there is no history to act on.
 */
@Override
public boolean onCreateOptionsMenu(Menu menu) {
    getMenuInflater().inflate(R.menu.menu_history, menu);
    boolean hasHistory = !emptyHistory;
    menu.findItem(R.id.action_export_all_history).setVisible(hasHistory);
    menu.findItem(R.id.action_remove_all_history).setVisible(hasHistory);
    return true;
}
/**
 * Releases the Rx subscription and the view binding.
 */
@Override
protected void onDestroy() {
    super.onDestroy();
    // FIX: guard against NPE — `disposable` is only assigned once an async load has
    // started, so the activity can be destroyed before it is ever set.
    if (disposable != null && !disposable.isDisposed()) {
        disposable.dispose();
    }
    binding = null;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
    switch (item.getItemId()) {
        // Respond to the action bar's Up/Home button
        case android.R.id.home:
            NavUtils.navigateUpFromSameTask(this);
            return true;
        case R.id.action_remove_all_history:
            // Confirm, then wipe the DAO and the in-memory list, and show the empty state.
            new MaterialDialog.Builder(this)
                .title(R.string.title_clear_history_dialog)
                .content(R.string.text_clear_history_dialog)
                .onPositive((dialog, which) -> {
                    mHistoryProductDao.deleteAll();
                    productItems.clear();
                    final RecyclerView.Adapter adapter = binding.listHistoryScan.getAdapter();
                    if (adapter != null) {
                        adapter.notifyDataSetChanged();
                    }
                    binding.emptyHistoryInfo.setVisibility(View.VISIBLE);
                    binding.scanFirst.setVisibility(View.VISIBLE);
                })
                .positiveText(R.string.txtYes)
                .negativeText(R.string.txtNo)
                .show();
            return true;
        case R.id.action_export_all_history:
            // Export requires storage permission: explain, request, or run directly.
            if (ContextCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) {
                if (ActivityCompat.shouldShowRequestPermissionRationale(this, Manifest.permission.WRITE_EXTERNAL_STORAGE)) {
                    new MaterialDialog.Builder(this)
                        .title(R.string.action_about)
                        .content(R.string.permision_write_external_storage)
                        .neutralText(R.string.txtOk)
                        .show();
                } else {
                    // Result handled in onRequestPermissionsResult.
                    ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE}, Utils
                        .MY_PERMISSIONS_REQUEST_STORAGE);
                }
            } else {
                exportCSV();
            }
            return true;
        case R.id.sort_history:
            // Sort options differ per flavor: "off" offers nutrition grade, others offer time.
            MaterialDialog.Builder builder = new MaterialDialog.Builder(this);
            builder.title(R.string.sort_by);
            String[] sortTypes;
            if (BuildConfig.FLAVOR.equals("off")) {
                sortTypes = new String[]{getString(R.string.by_title), getString(R.string.by_brand), getString(R.string.by_nutrition_grade), getString(
                    R.string.by_barcode), getString(R.string.by_time)};
            } else {
                sortTypes = new String[]{getString(R.string.by_title), getString(R.string.by_brand), getString(R.string.by_time), getString(R.string.by_barcode)};
            }
            builder.items(sortTypes);
            // Positions map onto the flavor-specific arrays above; SORT_TYPE is read by fillView().
            builder.itemsCallback((dialog, itemView, position, text) -> {
                switch (position) {
                    case 0:
                        SORT_TYPE = "title";
                        fillView();
                        break;
                    case 1:
                        SORT_TYPE = "brand";
                        fillView();
                        break;
                    case 2:
                        // Third slot is "grade" only for the "off" flavor, "time" otherwise.
                        if (BuildConfig.FLAVOR.equals("off")) {
                            SORT_TYPE = "grade";
                        } else {
                            SORT_TYPE = "time";
                        }
                        fillView();
                        break;
                    case 3:
                        SORT_TYPE = "barcode";
                        fillView();
                        break;
                    default:
                        SORT_TYPE = "time";
                        fillView();
                        break;
                }
            });
            builder.show();
            return true;
        default:
            return super.onOptionsItemSelected(item);
    }
}
/**
 * Handles the result of a runtime permission request.
 *
 * <p>Storage: on grant, runs the CSV export; on denial, offers a dialog that
 * deep-links into the app's system settings page so the user can grant it
 * manually. Camera: on grant, launches the continuous scanner.
 *
 * @param requestCode one of {@code Utils.MY_PERMISSIONS_REQUEST_*}
 * @param permissions the requested permissions (unused beyond the super call)
 * @param grantResults grant/deny result per permission; may be empty if the
 *     request was cancelled
 */
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
    super.onRequestPermissionsResult(requestCode, permissions, grantResults);
    // An empty grantResults array means the request was interrupted/cancelled.
    final boolean granted = grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED;
    if (requestCode == Utils.MY_PERMISSIONS_REQUEST_STORAGE) {
        if (granted) {
            exportCSV();
            return;
        }
        // Denied: explain and offer to open this app's details screen in system settings.
        new MaterialDialog.Builder(this)
            .title(R.string.permission_title)
            .content(R.string.permission_denied)
            .negativeText(R.string.txtNo)
            .positiveText(R.string.txtYes)
            .onPositive((dialog, which) -> {
                Intent settingsIntent = new Intent();
                settingsIntent.setAction(Settings.ACTION_APPLICATION_DETAILS_SETTINGS);
                settingsIntent.setData(Uri.fromParts("package", getPackageName(), null));
                startActivity(settingsIntent);
            })
            .show();
    } else if (requestCode == Utils.MY_PERMISSIONS_REQUEST_CAMERA && granted) {
        Intent scanIntent = new Intent(HistoryScanActivity.this, ContinuousScanActivity.class);
        scanIntent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
        startActivity(scanIntent);
    }
}
@Override
public void onResume() {
    super.onResume();
    // Re-highlight the "history" tab in the bottom navigation bar; without this,
    // returning from another screen could leave the wrong tab selected.
    CommonBottomListenerInstaller.selectNavigationItem(binding.navigationBottom.bottomNavigation, R.id.history_bottom_nav);
}
/**
 * Entry point for the "scan your first product" action. Launches the
 * continuous scanner if camera hardware exists and the CAMERA permission is
 * held; otherwise requests the permission, showing a rationale dialog first
 * when the platform recommends one. Does nothing on devices without a camera.
 */
protected void onScanFirst() {
    if (!Utils.isHardwareCameraInstalled(getBaseContext())) {
        // No camera hardware — scanning is impossible, so bail out silently.
        return;
    }
    if (ContextCompat.checkSelfPermission(getBaseContext(), Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED) {
        // Permission already granted: go straight to the scanner.
        Intent scanIntent = new Intent(HistoryScanActivity.this, ContinuousScanActivity.class);
        scanIntent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
        startActivity(scanIntent);
        return;
    }
    if (ActivityCompat.shouldShowRequestPermissionRationale(HistoryScanActivity.this, Manifest.permission.CAMERA)) {
        // The user denied the permission before: explain why it is needed,
        // then re-request it from the dialog's neutral button.
        new MaterialDialog.Builder(this)
            .title(R.string.action_about)
            .content(R.string.permission_camera)
            .neutralText(R.string.txtOk)
            .onNeutral((dialog, which) -> ActivityCompat.requestPermissions(HistoryScanActivity.this,
                new String[]{Manifest.permission.CAMERA}, Utils.MY_PERMISSIONS_REQUEST_CAMERA))
            .show();
    } else {
        ActivityCompat.requestPermissions(HistoryScanActivity.this,
            new String[]{Manifest.permission.CAMERA}, Utils.MY_PERMISSIONS_REQUEST_CAMERA);
    }
}
/**
 * Shows the "scan your first product" hint in the given text view.
 *
 * @param view the view displaying the empty-history hint
 */
public void setInfo(TextView view) {
    view.setText(getString(R.string.scan_first_string));
}
/**
 * Sorts the history items in place according to the requested criterion.
 *
 * <p>Fix over the previous version: the "title" and "brand" comparators used
 * to mutate items (filling in placeholder text) from inside the comparator.
 * Side-effectful comparators are fragile — the mutation re-ran on every
 * comparison and was skipped entirely for items that were never compared
 * (e.g. a single-element list). The placeholder fill is now done once, in a
 * separate pass, before sorting.
 *
 * @param sortType one of "title", "brand", "barcode", "grade"; anything else
 *     (including "time") keeps the existing order, which the database query
 *     already produced sorted by last-seen descending
 * @param productItems the history items to sort; modified in place (missing
 *     titles/brands are replaced with placeholder strings when sorting by them)
 */
private void sort(String sortType, List<HistoryItem> productItems) {
    switch (sortType) {
        case "title":
            for (HistoryItem item : productItems) {
                if (TextUtils.isEmpty(item.getTitle())) {
                    item.setTitle(getResources().getString(R.string.no_title));
                }
            }
            Collections.sort(productItems, (a, b) -> a.getTitle().compareToIgnoreCase(b.getTitle()));
            break;
        case "brand":
            for (HistoryItem item : productItems) {
                if (TextUtils.isEmpty(item.getBrands())) {
                    item.setBrands(getResources().getString(R.string.no_brand));
                }
            }
            Collections.sort(productItems, (a, b) -> a.getBrands().compareToIgnoreCase(b.getBrands()));
            break;
        case "barcode":
            Collections.sort(productItems, (a, b) -> a.getBarcode().compareTo(b.getBarcode()));
            break;
        case "grade":
            // Products with no nutrition grade sort as the worst grade ("E").
            Collections.sort(productItems, (a, b) -> {
                String gradeA = a.getNutritionGrade() == null ? "E" : a.getNutritionGrade();
                String gradeB = b.getNutritionGrade() == null ? "E" : b.getNutritionGrade();
                return gradeA.compareToIgnoreCase(gradeB);
            });
            break;
        default:
            // "time" and unknown types: keep the incoming order. The previous
            // code called Collections.sort with a constant-0 comparator, which
            // is a no-op for a stable sort — dropped for clarity.
            break;
    }
}
/**
 * Reloads the scan-history list asynchronously, cancelling any reload that
 * is still in flight so two refreshes never race each other.
 */
private void fillView() {
    if (disposable != null) {
        // Cancel the previous refresh before starting a new one.
        disposable.dispose();
    }
    final String logTag = HistoryScanActivity.class.getSimpleName();
    Log.i(logTag, "task fillview started...");
    disposable = getFillViewCompletable()
        .subscribe(() -> Log.i(logTag, "task fillview ended"));
}
// NOTE(review): this override adds nothing beyond the super call — presumably
// kept as a lifecycle placeholder; confirm whether it can be removed.
@Override
public void onPause() {
    super.onPause();
}
/**
 * Builds the Rx pipeline that refreshes the scan-history screen:
 * (1) on the main thread, show/hide the progress bar; (2) on an IO thread,
 * query all history products from the local DB (newest first) and rebuild
 * {@code productItems}; (3) back on the main thread, either show the
 * empty-history UI or sort the items and (re)install the list adapter.
 *
 * <p>NOTE(review): the exact subscribeOn/observeOn ordering below is what
 * guarantees each step runs on the right thread — do not reorder.
 *
 * @return a {@link Completable} that completes once the UI has been updated
 */
private Completable getFillViewCompletable() {
    // Step 1 (main thread): if this refresh came from pull-to-refresh, the
    // swipe spinner is already visible, so hide the standalone progress bar;
    // otherwise show it.
    final Completable refreshAct = Completable.fromAction(() -> {
        if (binding.srRefreshHistoryScanList.isRefreshing()) {
            binding.historyProgressbar.setVisibility(View.GONE);
        } else {
            binding.historyProgressbar.setVisibility(View.VISIBLE);
        }
    });
    // Step 2 (IO thread): reload from the DB, newest first, and mirror the
    // rows into the productItems list used by the adapter.
    final Single<List<HistoryProduct>> dbSingle = Single.fromCallable(() -> {
        productItems.clear();
        List<HistoryProduct> historyProducts = mHistoryProductDao.queryBuilder().orderDesc(HistoryProductDao.Properties.LastSeen).list();
        for (HistoryProduct historyProduct : historyProducts) {
            productItems.add(new HistoryItem(historyProduct.getTitle(), historyProduct.getBrands(), historyProduct.getUrl(), historyProduct
                .getBarcode(), historyProduct.getLastSeen(), historyProduct.getQuantity(), historyProduct.getNutritionGrade()));
        }
        return historyProducts;
    });
    // Step 3 (main thread): render. Empty history shows the placeholder UI;
    // otherwise sort per the current SORT_TYPE and install a fresh adapter
    // plus swipe-to-delete handling.
    final Function<List<HistoryProduct>, CompletableSource> updateUiFunc = historyProducts -> {
        if (historyProducts.isEmpty()) {
            emptyHistory = true;
            binding.historyProgressbar.setVisibility(View.GONE);
            binding.emptyHistoryInfo.setVisibility(View.VISIBLE);
            binding.scanFirst.setVisibility(View.VISIBLE);
            // Menu items depend on whether history is empty.
            invalidateOptionsMenu();
            return Completable.complete();
        }
        sort(SORT_TYPE, productItems);
        adapter = new HistoryListAdapter(productItems, HistoryScanActivity.this, isLowBatteryMode);
        binding.listHistoryScan.setAdapter(adapter);
        binding.listHistoryScan.setLayoutManager(new LinearLayoutManager(HistoryScanActivity.this));
        binding.historyProgressbar.setVisibility(View.GONE);
        SwipeController swipeController = new SwipeController(HistoryScanActivity.this, HistoryScanActivity.this);
        ItemTouchHelper itemTouchhelper = new ItemTouchHelper(swipeController);
        itemTouchhelper.attachToRecyclerView(binding.listHistoryScan);
        return Completable.complete();
    };
    return refreshAct.subscribeOn(AndroidSchedulers.mainThread()) // Change ui on main thread
        .observeOn(Schedulers.io()) // Switch for db operations
        .andThen(dbSingle)
        .observeOn(AndroidSchedulers.mainThread()) // Change ui on main thread
        .flatMapCompletable(updateUiFunc);
}
}
// NOTE(review): removed stray non-Java text ("Subsets and Splits" / dataset-viewer
// page residue) that followed the class's closing brace and broke compilation.