// NOTE(review): the three lines originally here ("gt stringclasses 1",
// "value | context stringlengths 2.05k 161k |", "|---|---|") were dataset-export
// residue (a markdown table header), not Java source. They are not compilable and
// must be stripped when restoring this generated file.
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/pubsub/v1/pubsub.proto
package com.google.pubsub.v1;
/**
* <pre>
* Request for the ModifyPushConfig method.
* </pre>
*
* Protobuf type {@code google.pubsub.v1.ModifyPushConfigRequest}
*/
public final class ModifyPushConfigRequest extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.pubsub.v1.ModifyPushConfigRequest)
ModifyPushConfigRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ModifyPushConfigRequest.newBuilder() to construct.
private ModifyPushConfigRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// No-arg constructor used for the default instance: proto3 string fields default to "".
// (pushConfig_ stays null, which is proto3's "field absent" representation for messages.)
private ModifyPushConfigRequest() {
subscription_ = "";
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
// Fields that were present on the wire but are not part of this generated schema;
// they are preserved so re-serialization round-trips them.
return this.unknownFields;
}
// Wire-format parsing constructor (legacy protobuf-java 3.x generated style: parsing
// happens in the constructor rather than via a shared schema). Reads tag/value pairs
// until EOF (tag 0) or an unparseable unknown field, accumulating unrecognized fields
// into `unknownFields`.
private ModifyPushConfigRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
// NOTE(review): the `default` arm appears before the value cases; unusual ordering
// but behaviorally irrelevant in Java since every arm ends in `break`.
switch (tag) {
case 0:
// Tag 0 signals end of input (or end of a length-delimited sub-message).
done = true;
break;
default: {
if (!parseUnknownFieldProto3(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
// Tag 10 = (field 1 << 3) | WIRETYPE_LENGTH_DELIMITED: `subscription`.
// readStringRequireUtf8 rejects invalid UTF-8 (proto3 string contract).
java.lang.String s = input.readStringRequireUtf8();
subscription_ = s;
break;
}
case 18: {
// Tag 18 = (field 2 << 3) | WIRETYPE_LENGTH_DELIMITED: `push_config`.
// If the field appears more than once on the wire, merge per proto semantics.
com.google.pubsub.v1.PushConfig.Builder subBuilder = null;
if (pushConfig_ != null) {
subBuilder = pushConfig_.toBuilder();
}
pushConfig_ = input.readMessage(com.google.pubsub.v1.PushConfig.parser(), extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(pushConfig_);
pushConfig_ = subBuilder.buildPartial();
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
// Attach the partially-parsed message so callers can inspect what was read.
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
// Always freeze unknown fields, even on error, so the instance is consistent.
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Reflection support: descriptor and field-accessor table generated in PubsubProto.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.pubsub.v1.PubsubProto.internal_static_google_pubsub_v1_ModifyPushConfigRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.pubsub.v1.PubsubProto.internal_static_google_pubsub_v1_ModifyPushConfigRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.pubsub.v1.ModifyPushConfigRequest.class, com.google.pubsub.v1.ModifyPushConfigRequest.Builder.class);
}
public static final int SUBSCRIPTION_FIELD_NUMBER = 1;
// Stores either a String or a ByteString; lazily converted (and cached) in the
// accessor that is called first. `volatile` makes the cached swap safely visible
// across threads (benign race: both representations decode to the same value).
private volatile java.lang.Object subscription_;
/**
 * <pre>
 * The name of the subscription.
 * Format is `projects/{project}/subscriptions/{sub}`.
 * </pre>
 *
 * <code>string subscription = 1;</code>
 */
public java.lang.String getSubscription() {
java.lang.Object ref = subscription_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String so subsequent calls skip UTF-8 decoding.
subscription_ = s;
return s;
}
}
/**
 * <pre>
 * The name of the subscription.
 * Format is `projects/{project}/subscriptions/{sub}`.
 * </pre>
 *
 * <code>string subscription = 1;</code>
 */
public com.google.protobuf.ByteString
getSubscriptionBytes() {
java.lang.Object ref = subscription_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
// Cache the encoded ByteString for subsequent serialization calls.
subscription_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PUSH_CONFIG_FIELD_NUMBER = 2;
// null means "field not set" (proto3 message-field presence).
private com.google.pubsub.v1.PushConfig pushConfig_;
/**
 * <pre>
 * The push configuration for future deliveries.
 * An empty `pushConfig` indicates that the Pub/Sub system should
 * stop pushing messages from the given subscription and allow
 * messages to be pulled and acknowledged - effectively pausing
 * the subscription if `Pull` is not called.
 * </pre>
 *
 * <code>.google.pubsub.v1.PushConfig push_config = 2;</code>
 */
public boolean hasPushConfig() {
return pushConfig_ != null;
}
/**
 * <pre>
 * The push configuration for future deliveries.
 * An empty `pushConfig` indicates that the Pub/Sub system should
 * stop pushing messages from the given subscription and allow
 * messages to be pulled and acknowledged - effectively pausing
 * the subscription if `Pull` is not called.
 * </pre>
 *
 * <code>.google.pubsub.v1.PushConfig push_config = 2;</code>
 */
public com.google.pubsub.v1.PushConfig getPushConfig() {
// Never returns null: substitutes the default instance when unset.
return pushConfig_ == null ? com.google.pubsub.v1.PushConfig.getDefaultInstance() : pushConfig_;
}
/**
 * <pre>
 * The push configuration for future deliveries.
 * An empty `pushConfig` indicates that the Pub/Sub system should
 * stop pushing messages from the given subscription and allow
 * messages to be pulled and acknowledged - effectively pausing
 * the subscription if `Pull` is not called.
 * </pre>
 *
 * <code>.google.pubsub.v1.PushConfig push_config = 2;</code>
 */
public com.google.pubsub.v1.PushConfigOrBuilder getPushConfigOrBuilder() {
return getPushConfig();
}
// Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
// This message has no required fields, so it is always initialized.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
// Serializes set fields in field-number order, then any unknown fields.
// Proto3 semantics: a default-valued (empty) string is not written.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (!getSubscriptionBytes().isEmpty()) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, subscription_);
}
if (pushConfig_ != null) {
output.writeMessage(2, getPushConfig());
}
unknownFields.writeTo(output);
}
// Computes (and memoizes in `memoizedSize`, inherited from the base class) the
// serialized byte size; must mirror writeTo exactly.
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!getSubscriptionBytes().isEmpty()) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, subscription_);
}
if (pushConfig_ != null) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, getPushConfig());
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.pubsub.v1.ModifyPushConfigRequest)) {
// Delegate to AbstractMessage.equals for cross-implementation comparison.
return super.equals(obj);
}
com.google.pubsub.v1.ModifyPushConfigRequest other = (com.google.pubsub.v1.ModifyPushConfigRequest) obj;
boolean result = true;
result = result && getSubscription()
.equals(other.getSubscription());
// Message field: presence must match, and values compared only when present.
result = result && (hasPushConfig() == other.hasPushConfig());
if (hasPushConfig()) {
result = result && getPushConfig()
.equals(other.getPushConfig());
}
result = result && unknownFields.equals(other.unknownFields);
return result;
}
@java.lang.Override
public int hashCode() {
// Memoized (0 means "not yet computed"; inherited field from AbstractMessage).
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + SUBSCRIPTION_FIELD_NUMBER;
hash = (53 * hash) + getSubscription().hashCode();
if (hasPushConfig()) {
hash = (37 * hash) + PUSH_CONFIG_FIELD_NUMBER;
hash = (53 * hash) + getPushConfig().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parse entry points. The byte-array/ByteString/ByteBuffer
// overloads throw InvalidProtocolBufferException on malformed input; the stream
// overloads throw IOException (wrapping parse failures). parseDelimitedFrom reads
// a varint length prefix before the message (companion to writeDelimitedTo).
public static com.google.pubsub.v1.ModifyPushConfigRequest parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.pubsub.v1.ModifyPushConfigRequest parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.pubsub.v1.ModifyPushConfigRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.pubsub.v1.ModifyPushConfigRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.pubsub.v1.ModifyPushConfigRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.pubsub.v1.ModifyPushConfigRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.pubsub.v1.ModifyPushConfigRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.pubsub.v1.ModifyPushConfigRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.pubsub.v1.ModifyPushConfigRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.pubsub.v1.ModifyPushConfigRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.pubsub.v1.ModifyPushConfigRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.pubsub.v1.ModifyPushConfigRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Builder factory plumbing: all builders originate from the default instance's
// toBuilder() so builder state starts from proto3 defaults.
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.pubsub.v1.ModifyPushConfigRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
// Skip the mergeFrom for the default instance — nothing to copy.
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 * <pre>
 * Request for the ModifyPushConfig method.
 * </pre>
 *
 * Protobuf type {@code google.pubsub.v1.ModifyPushConfigRequest}
 */
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.pubsub.v1.ModifyPushConfigRequest)
com.google.pubsub.v1.ModifyPushConfigRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.pubsub.v1.PubsubProto.internal_static_google_pubsub_v1_ModifyPushConfigRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.pubsub.v1.PubsubProto.internal_static_google_pubsub_v1_ModifyPushConfigRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.pubsub.v1.ModifyPushConfigRequest.class, com.google.pubsub.v1.ModifyPushConfigRequest.Builder.class);
}
// Construct using com.google.pubsub.v1.ModifyPushConfigRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates nested field builders when running under the reflection-based
// "alwaysUseFieldBuilders" mode; a no-op here since the only message field's
// builder is created lazily on demand.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
// Resets all fields to proto3 defaults (empty string, absent message).
public Builder clear() {
super.clear();
subscription_ = "";
if (pushConfigBuilder_ == null) {
pushConfig_ = null;
} else {
pushConfig_ = null;
pushConfigBuilder_ = null;
}
return this;
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.pubsub.v1.PubsubProto.internal_static_google_pubsub_v1_ModifyPushConfigRequest_descriptor;
}
public com.google.pubsub.v1.ModifyPushConfigRequest getDefaultInstanceForType() {
return com.google.pubsub.v1.ModifyPushConfigRequest.getDefaultInstance();
}
public com.google.pubsub.v1.ModifyPushConfigRequest build() {
com.google.pubsub.v1.ModifyPushConfigRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies builder state into a new immutable message without the initialization
// check (required-field validation) that build() performs.
public com.google.pubsub.v1.ModifyPushConfigRequest buildPartial() {
com.google.pubsub.v1.ModifyPushConfigRequest result = new com.google.pubsub.v1.ModifyPushConfigRequest(this);
result.subscription_ = subscription_;
if (pushConfigBuilder_ == null) {
result.pushConfig_ = pushConfig_;
} else {
result.pushConfig_ = pushConfigBuilder_.build();
}
onBuilt();
return result;
}
public Builder clone() {
return (Builder) super.clone();
}
// Reflection-based mutators: delegate to the base class, narrowing the return type.
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return (Builder) super.setField(field, value);
}
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return (Builder) super.addRepeatedField(field, value);
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.pubsub.v1.ModifyPushConfigRequest) {
return mergeFrom((com.google.pubsub.v1.ModifyPushConfigRequest)other);
} else {
// Fall back to reflective field-by-field merge for foreign implementations.
super.mergeFrom(other);
return this;
}
}
// Proto merge semantics: non-default scalar fields overwrite, set message fields
// are merged recursively, unknown fields are concatenated.
public Builder mergeFrom(com.google.pubsub.v1.ModifyPushConfigRequest other) {
if (other == com.google.pubsub.v1.ModifyPushConfigRequest.getDefaultInstance()) return this;
if (!other.getSubscription().isEmpty()) {
subscription_ = other.subscription_;
onChanged();
}
if (other.hasPushConfig()) {
mergePushConfig(other.getPushConfig());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
public final boolean isInitialized() {
// No required fields in this message.
return true;
}
// Parses from a stream and merges the result; on parse failure the partially
// parsed message (if any) is still merged in the finally block before rethrowing.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.pubsub.v1.ModifyPushConfigRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.google.pubsub.v1.ModifyPushConfigRequest) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Same String/ByteString dual-representation caching as the message class.
private java.lang.Object subscription_ = "";
/**
 * <pre>
 * The name of the subscription.
 * Format is `projects/{project}/subscriptions/{sub}`.
 * </pre>
 *
 * <code>string subscription = 1;</code>
 */
public java.lang.String getSubscription() {
java.lang.Object ref = subscription_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
subscription_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * <pre>
 * The name of the subscription.
 * Format is `projects/{project}/subscriptions/{sub}`.
 * </pre>
 *
 * <code>string subscription = 1;</code>
 */
public com.google.protobuf.ByteString
getSubscriptionBytes() {
java.lang.Object ref = subscription_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
subscription_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 * <pre>
 * The name of the subscription.
 * Format is `projects/{project}/subscriptions/{sub}`.
 * </pre>
 *
 * <code>string subscription = 1;</code>
 */
public Builder setSubscription(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
subscription_ = value;
onChanged();
return this;
}
/**
 * <pre>
 * The name of the subscription.
 * Format is `projects/{project}/subscriptions/{sub}`.
 * </pre>
 *
 * <code>string subscription = 1;</code>
 */
public Builder clearSubscription() {
subscription_ = getDefaultInstance().getSubscription();
onChanged();
return this;
}
/**
 * <pre>
 * The name of the subscription.
 * Format is `projects/{project}/subscriptions/{sub}`.
 * </pre>
 *
 * <code>string subscription = 1;</code>
 */
public Builder setSubscriptionBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// Proto3 strings must be valid UTF-8; reject invalid bytes up front.
checkByteStringIsUtf8(value);
subscription_ = value;
onChanged();
return this;
}
// push_config is held either directly (pushConfig_) or, once a nested builder has
// been requested, inside pushConfigBuilder_ — never both at once.
private com.google.pubsub.v1.PushConfig pushConfig_ = null;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.pubsub.v1.PushConfig, com.google.pubsub.v1.PushConfig.Builder, com.google.pubsub.v1.PushConfigOrBuilder> pushConfigBuilder_;
/**
 * <pre>
 * The push configuration for future deliveries.
 * An empty `pushConfig` indicates that the Pub/Sub system should
 * stop pushing messages from the given subscription and allow
 * messages to be pulled and acknowledged - effectively pausing
 * the subscription if `Pull` is not called.
 * </pre>
 *
 * <code>.google.pubsub.v1.PushConfig push_config = 2;</code>
 */
public boolean hasPushConfig() {
return pushConfigBuilder_ != null || pushConfig_ != null;
}
/**
 * <pre>
 * The push configuration for future deliveries.
 * An empty `pushConfig` indicates that the Pub/Sub system should
 * stop pushing messages from the given subscription and allow
 * messages to be pulled and acknowledged - effectively pausing
 * the subscription if `Pull` is not called.
 * </pre>
 *
 * <code>.google.pubsub.v1.PushConfig push_config = 2;</code>
 */
public com.google.pubsub.v1.PushConfig getPushConfig() {
if (pushConfigBuilder_ == null) {
return pushConfig_ == null ? com.google.pubsub.v1.PushConfig.getDefaultInstance() : pushConfig_;
} else {
return pushConfigBuilder_.getMessage();
}
}
/**
 * <pre>
 * The push configuration for future deliveries.
 * An empty `pushConfig` indicates that the Pub/Sub system should
 * stop pushing messages from the given subscription and allow
 * messages to be pulled and acknowledged - effectively pausing
 * the subscription if `Pull` is not called.
 * </pre>
 *
 * <code>.google.pubsub.v1.PushConfig push_config = 2;</code>
 */
public Builder setPushConfig(com.google.pubsub.v1.PushConfig value) {
if (pushConfigBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
pushConfig_ = value;
onChanged();
} else {
pushConfigBuilder_.setMessage(value);
}
return this;
}
/**
 * <pre>
 * The push configuration for future deliveries.
 * An empty `pushConfig` indicates that the Pub/Sub system should
 * stop pushing messages from the given subscription and allow
 * messages to be pulled and acknowledged - effectively pausing
 * the subscription if `Pull` is not called.
 * </pre>
 *
 * <code>.google.pubsub.v1.PushConfig push_config = 2;</code>
 */
public Builder setPushConfig(
com.google.pubsub.v1.PushConfig.Builder builderForValue) {
if (pushConfigBuilder_ == null) {
pushConfig_ = builderForValue.build();
onChanged();
} else {
pushConfigBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
 * <pre>
 * The push configuration for future deliveries.
 * An empty `pushConfig` indicates that the Pub/Sub system should
 * stop pushing messages from the given subscription and allow
 * messages to be pulled and acknowledged - effectively pausing
 * the subscription if `Pull` is not called.
 * </pre>
 *
 * <code>.google.pubsub.v1.PushConfig push_config = 2;</code>
 */
public Builder mergePushConfig(com.google.pubsub.v1.PushConfig value) {
if (pushConfigBuilder_ == null) {
if (pushConfig_ != null) {
// Field already set: recursive proto merge rather than replacement.
pushConfig_ =
com.google.pubsub.v1.PushConfig.newBuilder(pushConfig_).mergeFrom(value).buildPartial();
} else {
pushConfig_ = value;
}
onChanged();
} else {
pushConfigBuilder_.mergeFrom(value);
}
return this;
}
/**
 * <pre>
 * The push configuration for future deliveries.
 * An empty `pushConfig` indicates that the Pub/Sub system should
 * stop pushing messages from the given subscription and allow
 * messages to be pulled and acknowledged - effectively pausing
 * the subscription if `Pull` is not called.
 * </pre>
 *
 * <code>.google.pubsub.v1.PushConfig push_config = 2;</code>
 */
public Builder clearPushConfig() {
if (pushConfigBuilder_ == null) {
pushConfig_ = null;
onChanged();
} else {
pushConfig_ = null;
pushConfigBuilder_ = null;
}
return this;
}
/**
 * <pre>
 * The push configuration for future deliveries.
 * An empty `pushConfig` indicates that the Pub/Sub system should
 * stop pushing messages from the given subscription and allow
 * messages to be pulled and acknowledged - effectively pausing
 * the subscription if `Pull` is not called.
 * </pre>
 *
 * <code>.google.pubsub.v1.PushConfig push_config = 2;</code>
 */
public com.google.pubsub.v1.PushConfig.Builder getPushConfigBuilder() {
onChanged();
return getPushConfigFieldBuilder().getBuilder();
}
/**
 * <pre>
 * The push configuration for future deliveries.
 * An empty `pushConfig` indicates that the Pub/Sub system should
 * stop pushing messages from the given subscription and allow
 * messages to be pulled and acknowledged - effectively pausing
 * the subscription if `Pull` is not called.
 * </pre>
 *
 * <code>.google.pubsub.v1.PushConfig push_config = 2;</code>
 */
public com.google.pubsub.v1.PushConfigOrBuilder getPushConfigOrBuilder() {
if (pushConfigBuilder_ != null) {
return pushConfigBuilder_.getMessageOrBuilder();
} else {
return pushConfig_ == null ?
com.google.pubsub.v1.PushConfig.getDefaultInstance() : pushConfig_;
}
}
/**
 * <pre>
 * The push configuration for future deliveries.
 * An empty `pushConfig` indicates that the Pub/Sub system should
 * stop pushing messages from the given subscription and allow
 * messages to be pulled and acknowledged - effectively pausing
 * the subscription if `Pull` is not called.
 * </pre>
 *
 * <code>.google.pubsub.v1.PushConfig push_config = 2;</code>
 */
private com.google.protobuf.SingleFieldBuilderV3<
com.google.pubsub.v1.PushConfig, com.google.pubsub.v1.PushConfig.Builder, com.google.pubsub.v1.PushConfigOrBuilder>
getPushConfigFieldBuilder() {
if (pushConfigBuilder_ == null) {
// Lazily create the nested builder, seeding it with any directly-held value,
// then drop the direct reference — the builder becomes the single owner.
pushConfigBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
com.google.pubsub.v1.PushConfig, com.google.pubsub.v1.PushConfig.Builder, com.google.pubsub.v1.PushConfigOrBuilder>(
getPushConfig(),
getParentForChildren(),
isClean());
pushConfig_ = null;
}
return pushConfigBuilder_;
}
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
// Proto3 variant of setUnknownFields (generated by this protoc version).
return super.setUnknownFieldsProto3(unknownFields);
}
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.pubsub.v1.ModifyPushConfigRequest)
}
// @@protoc_insertion_point(class_scope:google.pubsub.v1.ModifyPushConfigRequest)
// Singleton default instance; also serves as the prototype for newBuilder().
private static final com.google.pubsub.v1.ModifyPushConfigRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.pubsub.v1.ModifyPushConfigRequest();
}
public static com.google.pubsub.v1.ModifyPushConfigRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Parser delegating to the wire-parsing constructor above.
private static final com.google.protobuf.Parser<ModifyPushConfigRequest>
PARSER = new com.google.protobuf.AbstractParser<ModifyPushConfigRequest>() {
public ModifyPushConfigRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new ModifyPushConfigRequest(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<ModifyPushConfigRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ModifyPushConfigRequest> getParserForType() {
return PARSER;
}
public com.google.pubsub.v1.ModifyPushConfigRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// NOTE(review): dataset-export row separator ("| |") removed. Everything below belongs
// to a different file (com.yanglin.model.SurveyInfo); two top-level classes with
// different package declarations cannot share one compilation unit and must be split
// back into separate files.
package com.yanglin.model;
import java.util.Date;
public class SurveyInfo {
/**
 * This field was generated by MyBatis Generator.
 * This field corresponds to the database column surveyinfo.baoanhao
 *
 * NOTE(review): column names are pinyin; presumably "claim report number" (报案号) — confirm with schema.
 *
 * @mbggenerated
 */
private String baoanhao;
/**
 * This field was generated by MyBatis Generator.
 * This field corresponds to the database column surveyinfo.peifufangshi
 *
 * Presumably "compensation method" (赔付方式) — confirm with schema.
 *
 * @mbggenerated
 */
private String peifufangshi;
/**
 * This field was generated by MyBatis Generator.
 * This field corresponds to the database column surveyinfo.zerenxishu
 *
 * Presumably "liability coefficient" (责任系数) — confirm with schema.
 *
 * @mbggenerated
 */
private String zerenxishu;
/**
 * This field was generated by MyBatis Generator.
 * This field corresponds to the database column surveyinfo.xieshangjiakoubili
 *
 * Presumably "negotiated deduction ratio" (协商加扣比例) — confirm with schema.
 *
 * @mbggenerated
 */
private String xieshangjiakoubili;
/**
 * This field was generated by MyBatis Generator.
 * This field corresponds to the database column surveyinfo.lipeileixing
 *
 * Presumably "claim type" (理赔类型) — confirm with schema.
 *
 * @mbggenerated
 */
private String lipeileixing;
/**
 * This field was generated by MyBatis Generator.
 * This field corresponds to the database column surveyinfo.chuxianyuanyin
 *
 * Presumably "cause of loss/incident" (出险原因) — confirm with schema.
 *
 * @mbggenerated
 */
private String chuxianyuanyin;
/**
 * This field was generated by MyBatis Generator.
 * This field corresponds to the database column surveyinfo.shiguleixing
 *
 * Presumably "accident type" (事故类型) — confirm with schema.
 *
 * @mbggenerated
 */
private String shiguleixing;
/**
 * This field was generated by MyBatis Generator.
 * This field corresponds to the database column surveyinfo.disanfang_cheliangshu
 *
 * Presumably "number of third-party vehicles" (第三方车辆数); stored as String even
 * though it looks numeric — confirm with schema before changing.
 *
 * @mbggenerated
 */
private String disanfangCheliangshu;
/**
 * This field was generated by MyBatis Generator.
 * This field corresponds to the database column surveyinfo.chakan_daichakanren
 *
 * Presumably "surveyor / delegated surveyor" (查勘/代查勘人) — confirm with schema.
 *
 * @mbggenerated
 */
private String chakanDaichakanren;
/**
 * This field was generated by MyBatis Generator.
 * This field corresponds to the database column surveyinfo.chakan_jigoumingcheng
 *
 * Presumably "survey organization name" (查勘机构名称) — confirm with schema.
 *
 * @mbggenerated
 */
private String chakanJigoumingcheng;
/**
 * This field was generated by MyBatis Generator.
 * This field corresponds to the database column surveyinfo.chakan_riqi
 *
 * Presumably "survey date" (查勘日期). NOTE(review): legacy java.util.Date — generated
 * mapping; if regenerated on a modern stack, prefer java.time types.
 *
 * @mbggenerated
 */
private Date chakanRiqi;
/**
 * This field was generated by MyBatis Generator.
 * This field corresponds to the database column surveyinfo.chakan_didian
 *
 * Presumably "survey location" (查勘地点) — confirm with schema.
 *
 * @mbggenerated
 */
private String chakanDidian;
/**
 * This field was generated by MyBatis Generator.
 * This field corresponds to the database column surveyinfo.chakan_leixing
 *
 * Presumably "survey type" (查勘类型) — confirm with schema.
 *
 * @mbggenerated
 */
private String chakanLeixing;
/**
 * This field was generated by MyBatis Generator.
 * This field corresponds to the database column surveyinfo.shifou_yidongduanchuli
 *
 * Presumably a yes/no flag: "handled via mobile client?" (是否移动端处理) — confirm
 * the flag's value domain ("0"/"1", "Y"/"N", …) with the schema.
 *
 * @mbggenerated
 */
private String shifouYidongduanchuli;
/**
 * This field was generated by MyBatis Generator.
 * This field corresponds to the database column surveyinfo.chakan_baogao
 *
 * Presumably "survey report" (查勘报告) — confirm with schema.
 *
 * @mbggenerated
 */
private String chakanBaogao;
/**
 * This field was generated by MyBatis Generator.
 * This field corresponds to the database column surveyinfo.chepaihao
 *
 * Presumably "license plate number" (车牌号) — confirm with schema.
 *
 * @mbggenerated
 */
private String chepaihao;
/**
 * This field was generated by MyBatis Generator.
 * This field corresponds to the database column surveyinfo.VIN
 *
 * Vehicle Identification Number.
 *
 * @mbggenerated
 */
private String vin;
/**
 * This field was generated by MyBatis Generator.
 * This field corresponds to the database column surveyinfo.dingsun_chexing
 *
 * Presumably "vehicle model for loss assessment" (定损车型) — confirm with schema.
 *
 * @mbggenerated
 */
private String dingsunChexing;
/**
 * This field was generated by MyBatis Generator.
 * This field corresponds to the database column surveyinfo.chezhu_xingming
 *
 * Presumably "vehicle owner name" (车主姓名) — confirm with schema.
 *
 * @mbggenerated
 */
private String chezhuXingming;
/**
 * This field was generated by MyBatis Generator.
 * This field corresponds to the database column surveyinfo.zhengjian_leixing
 *
 * Presumably "ID document type" (证件类型) — confirm with schema.
 *
 * @mbggenerated
 */
private String zhengjianLeixing;
/**
 * This field was generated by MyBatis Generator.
 * This field corresponds to the database column surveyinfo.chuxian_jiashiyuan
 *
 * Presumably "driver at time of incident" (出险驾驶员) — confirm with schema.
 *
 * @mbggenerated
 */
private String chuxianJiashiyuan;
/**
 * This field was generated by MyBatis Generator.
 * This field corresponds to the database column surveyinfo.jiashizhenghao
 *
 * Presumably "driver's license number" (驾驶证号) — confirm with schema.
 *
 * @mbggenerated
 */
private String jiashizhenghao;
/**
 * This field was generated by MyBatis Generator.
 * This field corresponds to the database column surveyinfo.zhunjiachexing
 *
 * Presumably "permitted vehicle class on license" (准驾车型) — confirm with schema.
 *
 * @mbggenerated
 */
private String zhunjiachexing;
/**
 * This field was generated by MyBatis Generator.
 * This field corresponds to the database column surveyinfo.cheliangleibie
 *
 * Presumably "vehicle category" (车辆类别) — confirm with schema.
 *
 * @mbggenerated
 */
private String cheliangleibie;
/**
 * This field was generated by MyBatis Generator.
 * This field corresponds to the database column surveyinfo.jiashiren_shifou_beibaoren
 *
 * Presumably a yes/no flag: "is the driver the insured?" (驾驶人是否被保人) —
 * confirm value domain with schema.
 *
 * @mbggenerated
 */
private String jiashirenShifouBeibaoren;
/**
 * This field was generated by MyBatis Generator.
 * This field corresponds to the database column surveyinfo.xingshizheng_zhuceriqi
 *
 * Presumably "vehicle registration date" (行驶证注册日期) — confirm with schema.
 *
 * @mbggenerated
 */
private Date xingshizhengZhuceriqi;
/**
 * This field was generated by MyBatis Generator.
 * This field corresponds to the database column surveyinfo.xingshizheng_fazhengriqi
 *
 * Presumably "registration certificate issue date" (行驶证发证日期) — confirm with schema.
 *
 * @mbggenerated
 */
private Date xingshizhengFazhengriqi;
/**
 * This field was generated by MyBatis Generator.
 * This field corresponds to the database column surveyinfo.shifou_baoan_chuxiandidian
 *
 * Presumably a yes/no flag: "does reported location match incident location?"
 * (是否报案出险地点) — confirm value domain with schema.
 *
 * @mbggenerated
 */
private String shifouBaoanChuxiandidian;
/**
 * This field was generated by MyBatis Generator.
 * This field corresponds to the database column surveyinfo.chuxiandidian
 *
 * Presumably "incident location" (出险地点) — confirm with schema.
 *
 * @mbggenerated
 */
private String chuxiandidian;
/**
 * Gets DB column surveyinfo.baoanhao — claim report number (gloss inferred from the
 * pinyin name; verify against the DB schema).
 * @mbggenerated
 */
public String getBaoanhao() {
return baoanhao;
}
/**
 * Sets surveyinfo.baoanhao. Null-safe; trims surrounding whitespace.
 * @mbggenerated
 */
public void setBaoanhao(String baoanhao) {
this.baoanhao = baoanhao == null ? null : baoanhao.trim();
}
/**
 * Gets DB column surveyinfo.peifufangshi — compensation/payment method (pinyin gloss; verify).
 * @mbggenerated
 */
public String getPeifufangshi() {
return peifufangshi;
}
/**
 * Sets surveyinfo.peifufangshi. Null-safe; trims surrounding whitespace.
 * @mbggenerated
 */
public void setPeifufangshi(String peifufangshi) {
this.peifufangshi = peifufangshi == null ? null : peifufangshi.trim();
}
/**
 * Gets DB column surveyinfo.zerenxishu — liability coefficient (pinyin gloss; verify).
 * @mbggenerated
 */
public String getZerenxishu() {
return zerenxishu;
}
/**
 * Sets surveyinfo.zerenxishu. Null-safe; trims surrounding whitespace.
 * @mbggenerated
 */
public void setZerenxishu(String zerenxishu) {
this.zerenxishu = zerenxishu == null ? null : zerenxishu.trim();
}
/**
 * Gets DB column surveyinfo.xieshangjiakoubili — negotiated deduction/markup ratio
 * (pinyin gloss; verify).
 * @mbggenerated
 */
public String getXieshangjiakoubili() {
return xieshangjiakoubili;
}
/**
 * Sets surveyinfo.xieshangjiakoubili. Null-safe; trims surrounding whitespace.
 * @mbggenerated
 */
public void setXieshangjiakoubili(String xieshangjiakoubili) {
this.xieshangjiakoubili = xieshangjiakoubili == null ? null : xieshangjiakoubili.trim();
}
/**
 * Gets DB column surveyinfo.lipeileixing — claim type (pinyin gloss; verify).
 * @mbggenerated
 */
public String getLipeileixing() {
return lipeileixing;
}
/**
 * Sets surveyinfo.lipeileixing. Null-safe; trims surrounding whitespace.
 * @mbggenerated
 */
public void setLipeileixing(String lipeileixing) {
this.lipeileixing = lipeileixing == null ? null : lipeileixing.trim();
}
/**
 * Gets DB column surveyinfo.chuxianyuanyin — accident cause (pinyin gloss; verify).
 * @mbggenerated
 */
public String getChuxianyuanyin() {
return chuxianyuanyin;
}
/**
 * Sets surveyinfo.chuxianyuanyin. Null-safe; trims surrounding whitespace.
 * @mbggenerated
 */
public void setChuxianyuanyin(String chuxianyuanyin) {
this.chuxianyuanyin = chuxianyuanyin == null ? null : chuxianyuanyin.trim();
}
/**
 * Gets DB column surveyinfo.shiguleixing — accident type (pinyin gloss; verify).
 * @mbggenerated
 */
public String getShiguleixing() {
return shiguleixing;
}
/**
 * Sets surveyinfo.shiguleixing. Null-safe; trims surrounding whitespace.
 * @mbggenerated
 */
public void setShiguleixing(String shiguleixing) {
this.shiguleixing = shiguleixing == null ? null : shiguleixing.trim();
}
/**
 * Gets DB column surveyinfo.disanfang_cheliangshu — number of third-party vehicles
 * (pinyin gloss; verify — note the column is a String, not numeric).
 * @mbggenerated
 */
public String getDisanfangCheliangshu() {
return disanfangCheliangshu;
}
/**
 * Sets surveyinfo.disanfang_cheliangshu. Null-safe; trims surrounding whitespace.
 * @mbggenerated
 */
public void setDisanfangCheliangshu(String disanfangCheliangshu) {
this.disanfangCheliangshu = disanfangCheliangshu == null ? null : disanfangCheliangshu.trim();
}
/**
 * Gets DB column surveyinfo.chakan_daichakanren — surveyor assigned to the survey
 * (pinyin gloss; verify).
 * @mbggenerated
 */
public String getChakanDaichakanren() {
return chakanDaichakanren;
}
/**
 * Sets surveyinfo.chakan_daichakanren. Null-safe; trims surrounding whitespace.
 * @mbggenerated
 */
public void setChakanDaichakanren(String chakanDaichakanren) {
this.chakanDaichakanren = chakanDaichakanren == null ? null : chakanDaichakanren.trim();
}
/**
 * Gets DB column surveyinfo.chakan_jigoumingcheng — survey organization name
 * (pinyin gloss; verify).
 * @mbggenerated
 */
public String getChakanJigoumingcheng() {
return chakanJigoumingcheng;
}
/**
 * Sets surveyinfo.chakan_jigoumingcheng. Null-safe; trims surrounding whitespace.
 * @mbggenerated
 */
public void setChakanJigoumingcheng(String chakanJigoumingcheng) {
this.chakanJigoumingcheng = chakanJigoumingcheng == null ? null : chakanJigoumingcheng.trim();
}
/**
 * Gets DB column surveyinfo.chakan_riqi — survey date (pinyin gloss; verify).
 * NOTE(review): returns the internal mutable {@code Date} instance directly, so callers
 * can mutate this entity's state — consider a defensive copy if that ever matters.
 * @mbggenerated
 */
public Date getChakanRiqi() {
return chakanRiqi;
}
/**
 * Sets surveyinfo.chakan_riqi (stores the given reference as-is).
 * @mbggenerated
 */
public void setChakanRiqi(Date chakanRiqi) {
this.chakanRiqi = chakanRiqi;
}
/**
 * Gets DB column surveyinfo.chakan_didian — survey location (pinyin gloss; verify).
 * @mbggenerated
 */
public String getChakanDidian() {
return chakanDidian;
}
/**
 * Sets surveyinfo.chakan_didian. Null-safe; trims surrounding whitespace.
 * @mbggenerated
 */
public void setChakanDidian(String chakanDidian) {
this.chakanDidian = chakanDidian == null ? null : chakanDidian.trim();
}
/**
 * Gets DB column surveyinfo.chakan_leixing — survey type (pinyin gloss; verify).
 * @mbggenerated
 */
public String getChakanLeixing() {
return chakanLeixing;
}
/**
 * Sets surveyinfo.chakan_leixing. Null-safe; trims surrounding whitespace.
 * @mbggenerated
 */
public void setChakanLeixing(String chakanLeixing) {
this.chakanLeixing = chakanLeixing == null ? null : chakanLeixing.trim();
}
/**
 * Gets DB column surveyinfo.shifou_yidongduanchuli — presumably a flag: handled via the
 * mobile client? (pinyin gloss; verify).
 * @mbggenerated
 */
public String getShifouYidongduanchuli() {
return shifouYidongduanchuli;
}
/**
 * Sets surveyinfo.shifou_yidongduanchuli. Null-safe; trims surrounding whitespace.
 * @mbggenerated
 */
public void setShifouYidongduanchuli(String shifouYidongduanchuli) {
this.shifouYidongduanchuli = shifouYidongduanchuli == null ? null : shifouYidongduanchuli.trim();
}
/**
 * Gets DB column surveyinfo.chakan_baogao — survey report (pinyin gloss; verify).
 * @mbggenerated
 */
public String getChakanBaogao() {
return chakanBaogao;
}
/**
 * Sets surveyinfo.chakan_baogao. Null-safe; trims surrounding whitespace.
 * @mbggenerated
 */
public void setChakanBaogao(String chakanBaogao) {
this.chakanBaogao = chakanBaogao == null ? null : chakanBaogao.trim();
}
/**
 * Gets DB column surveyinfo.chepaihao — license plate number (pinyin gloss; verify).
 * @mbggenerated
 */
public String getChepaihao() {
return chepaihao;
}
/**
 * Sets surveyinfo.chepaihao. Null-safe; trims surrounding whitespace.
 * @mbggenerated
 */
public void setChepaihao(String chepaihao) {
this.chepaihao = chepaihao == null ? null : chepaihao.trim();
}
/**
 * Gets DB column surveyinfo.VIN — vehicle identification number.
 * @mbggenerated
 */
public String getVin() {
return vin;
}
/**
 * Sets surveyinfo.VIN. Null-safe; trims surrounding whitespace.
 * @mbggenerated
 */
public void setVin(String vin) {
this.vin = vin == null ? null : vin.trim();
}
/**
 * Gets DB column surveyinfo.dingsun_chexing — vehicle model used for loss assessment
 * (pinyin gloss; verify).
 * @mbggenerated
 */
public String getDingsunChexing() {
return dingsunChexing;
}
/**
 * Sets surveyinfo.dingsun_chexing. Null-safe; trims surrounding whitespace.
 * @mbggenerated
 */
public void setDingsunChexing(String dingsunChexing) {
this.dingsunChexing = dingsunChexing == null ? null : dingsunChexing.trim();
}
/**
 * Gets DB column surveyinfo.chezhu_xingming — vehicle owner's name (pinyin gloss; verify).
 * @mbggenerated
 */
public String getChezhuXingming() {
return chezhuXingming;
}
/**
 * Sets surveyinfo.chezhu_xingming. Null-safe; trims surrounding whitespace.
 * @mbggenerated
 */
public void setChezhuXingming(String chezhuXingming) {
this.chezhuXingming = chezhuXingming == null ? null : chezhuXingming.trim();
}
/**
 * Gets DB column surveyinfo.zhengjian_leixing — ID document type (pinyin gloss; verify).
 * @mbggenerated
 */
public String getZhengjianLeixing() {
return zhengjianLeixing;
}
/**
 * Sets surveyinfo.zhengjian_leixing. Null-safe; trims surrounding whitespace.
 * @mbggenerated
 */
public void setZhengjianLeixing(String zhengjianLeixing) {
this.zhengjianLeixing = zhengjianLeixing == null ? null : zhengjianLeixing.trim();
}
/**
 * Gets DB column surveyinfo.chuxian_jiashiyuan — driver at the time of the accident
 * (pinyin gloss; verify).
 * @mbggenerated
 */
public String getChuxianJiashiyuan() {
return chuxianJiashiyuan;
}
/**
 * Sets surveyinfo.chuxian_jiashiyuan. Null-safe; trims surrounding whitespace.
 * @mbggenerated
 */
public void setChuxianJiashiyuan(String chuxianJiashiyuan) {
this.chuxianJiashiyuan = chuxianJiashiyuan == null ? null : chuxianJiashiyuan.trim();
}
/**
 * Gets DB column surveyinfo.jiashizhenghao — driving-license number (pinyin gloss; verify).
 * @mbggenerated
 */
public String getJiashizhenghao() {
return jiashizhenghao;
}
/**
 * Sets surveyinfo.jiashizhenghao. Null-safe; trims surrounding whitespace.
 * @mbggenerated
 */
public void setJiashizhenghao(String jiashizhenghao) {
this.jiashizhenghao = jiashizhenghao == null ? null : jiashizhenghao.trim();
}
/**
 * Gets DB column surveyinfo.zhunjiachexing — permitted driving class on the license
 * (pinyin gloss; verify).
 * @mbggenerated
 */
public String getZhunjiachexing() {
return zhunjiachexing;
}
/**
 * Sets surveyinfo.zhunjiachexing. Null-safe; trims surrounding whitespace.
 * @mbggenerated
 */
public void setZhunjiachexing(String zhunjiachexing) {
this.zhunjiachexing = zhunjiachexing == null ? null : zhunjiachexing.trim();
}
/**
 * Gets DB column surveyinfo.cheliangleibie — vehicle category (pinyin gloss; verify).
 * @mbggenerated
 */
public String getCheliangleibie() {
return cheliangleibie;
}
/**
 * Sets surveyinfo.cheliangleibie. Null-safe; trims surrounding whitespace.
 * @mbggenerated
 */
public void setCheliangleibie(String cheliangleibie) {
this.cheliangleibie = cheliangleibie == null ? null : cheliangleibie.trim();
}
/**
 * Gets DB column surveyinfo.jiashiren_shifou_beibaoren — presumably a flag: is the driver
 * the insured? (pinyin gloss; verify).
 * @mbggenerated
 */
public String getJiashirenShifouBeibaoren() {
return jiashirenShifouBeibaoren;
}
/**
 * Sets surveyinfo.jiashiren_shifou_beibaoren. Null-safe; trims surrounding whitespace.
 * @mbggenerated
 */
public void setJiashirenShifouBeibaoren(String jiashirenShifouBeibaoren) {
this.jiashirenShifouBeibaoren = jiashirenShifouBeibaoren == null ? null : jiashirenShifouBeibaoren.trim();
}
/**
 * Gets DB column surveyinfo.xingshizheng_zhuceriqi — vehicle registration date from the
 * vehicle license (pinyin gloss; verify).
 * NOTE(review): exposes the internal mutable {@code Date} directly.
 * @mbggenerated
 */
public Date getXingshizhengZhuceriqi() {
return xingshizhengZhuceriqi;
}
/**
 * Sets surveyinfo.xingshizheng_zhuceriqi (stores the given reference as-is).
 * @mbggenerated
 */
public void setXingshizhengZhuceriqi(Date xingshizhengZhuceriqi) {
this.xingshizhengZhuceriqi = xingshizhengZhuceriqi;
}
/**
 * Gets DB column surveyinfo.xingshizheng_fazhengriqi — vehicle license issue date
 * (pinyin gloss; verify).
 * NOTE(review): exposes the internal mutable {@code Date} directly.
 * @mbggenerated
 */
public Date getXingshizhengFazhengriqi() {
return xingshizhengFazhengriqi;
}
/**
 * Sets surveyinfo.xingshizheng_fazhengriqi (stores the given reference as-is).
 * @mbggenerated
 */
public void setXingshizhengFazhengriqi(Date xingshizhengFazhengriqi) {
this.xingshizhengFazhengriqi = xingshizhengFazhengriqi;
}
/**
 * Gets DB column surveyinfo.shifou_baoan_chuxiandidian — presumably a flag: was the
 * accident location given when the claim was reported? (pinyin gloss; verify).
 * @mbggenerated
 */
public String getShifouBaoanChuxiandidian() {
return shifouBaoanChuxiandidian;
}
/**
 * Sets surveyinfo.shifou_baoan_chuxiandidian. Null-safe; trims surrounding whitespace.
 * @mbggenerated
 */
public void setShifouBaoanChuxiandidian(String shifouBaoanChuxiandidian) {
this.shifouBaoanChuxiandidian = shifouBaoanChuxiandidian == null ? null : shifouBaoanChuxiandidian.trim();
}
/**
 * Gets DB column surveyinfo.chuxiandidian — accident location (pinyin gloss; verify).
 * @mbggenerated
 */
public String getChuxiandidian() {
return chuxiandidian;
}
/**
 * Sets surveyinfo.chuxiandidian. Null-safe; trims surrounding whitespace.
 * @mbggenerated
 */
public void setChuxiandidian(String chuxiandidian) {
this.chuxiandidian = chuxiandidian == null ? null : chuxiandidian.trim();
}
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jetbrains.python.psi;
import static com.jetbrains.python.psi.PyUtil.as;
import java.util.Collections;
import java.util.List;
import javax.annotation.Nonnull;
import org.jetbrains.annotations.NonNls;
import javax.annotation.Nullable;
import com.google.common.collect.Iterables;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.text.LineTokenizer;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiWhiteSpace;
import com.intellij.psi.codeStyle.CodeStyleSettings;
import com.intellij.psi.codeStyle.CodeStyleSettingsManager;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.containers.ContainerUtil;
import com.jetbrains.python.PythonFileType;
/**
* Contains various methods for manipulation on indentation found in arbitrary text and individual lines:
* <ul>
* <li>calculating actual and expected indentation</li>
* <li>finding common indentation of several lines</li>
* <li>replacing and removing indentation of multiple lines</li>
* </ul>
* <p>
* It is intended to be used primarily when one needs to modify the content of Python files at the document level and preserve valid block structure.
* Note that in most scenarios accurate indentation consistent with the code style settings is provided by automatic formatting pass
* that is performed each time you modify PSI tree directly.
*
* @author Mikhail Golubev
*/
public class PyIndentUtil
{
@NonNls
public static final String TWO_SPACES = "  ";
@NonNls
public static final String FOUR_SPACES = "    ";
// Static utility class — not instantiable.
private PyIndentUtil()
{
}
/**
 * Returns indentation size as number of characters <tt>' '</tt> and <tt>'\t'</tt> in the beginning of a line.
 * It doesn't perform any expansion of tabs.
 */
public static int getLineIndentSize(@Nonnull CharSequence line)
{
int stop;
for(stop = 0; stop < line.length(); stop++)
{
final char c = line.charAt(stop);
if(!(c == ' ' || c == '\t'))
{
break;
}
}
return stop;
}
/**
 * Returns the leading run of spaces and tabs of the given line (no tab expansion).
 */
@Nonnull
public static String getLineIndent(@Nonnull String line)
{
return line.substring(0, getLineIndentSize(line));
}
/**
 * Useful version of {@link #getLineIndent(String)} for custom character sequences like {@link com.jetbrains.python.toolbox.Substring}.
 */
@Nonnull
public static CharSequence getLineIndent(@Nonnull CharSequence line)
{
return line.subSequence(0, getLineIndentSize(line));
}
/**
 * Returns the indentation of the statement block containing {@code anchor} as actually written
 * in the file. Falls back to the indent expected from the code style settings when the block is
 * empty or not preceded by a newline, and to an empty string for file-level elements.
 */
@Nonnull
public static String getElementIndent(@Nonnull PsiElement anchor)
{
if(anchor instanceof PsiFile)
{
return "";
}
final PyStatementList statementList = getAnchorStatementList(anchor);
if(statementList == null)
{
return "";
}
final PsiElement prevSibling = statementList.getPrevSibling();
final String whitespace = prevSibling instanceof PsiWhiteSpace ? prevSibling.getText() : "";
final int i = whitespace.lastIndexOf("\n");
if(i >= 0 && statementList.getStatements().length != 0)
{
// Non-empty block starting on its own line: use the real indent after the last newline.
return whitespace.substring(i + 1);
}
else
{
return getExpectedBlockIndent(statementList);
}
}
/**
 * Computes the indent this block should have according to the code style settings:
 * the parent block's indent (if any) plus one indentation step.
 */
@Nonnull
private static String getExpectedBlockIndent(@Nonnull PyStatementList anchor)
{
final String indentStep = getIndentFromSettings(anchor.getProject());
final PyStatementList parentBlock = PsiTreeUtil.getParentOfType(anchor, PyStatementList.class, true);
if(parentBlock != null)
{
return getElementIndent(parentBlock) + indentStep;
}
return indentStep;
}
/**
 * Finds the statement list used as indentation anchor for {@code element}: either the
 * statement list immediately following it (when the element is the whitespace right after
 * ":"), or the closest enclosing statement list.
 */
@Nullable
private static PyStatementList getAnchorStatementList(@Nonnull PsiElement element)
{
PyStatementList statementList = null;
// First whitespace right before the statement list (right after ":")
if(element instanceof PsiWhiteSpace)
{
statementList = as(element.getNextSibling(), PyStatementList.class);
}
if(statementList == null)
{
statementList = PsiTreeUtil.getParentOfType(element, PyStatementList.class, false);
}
return statementList;
}
/**
 * Returns the expected indent size (in characters) for {@code anchor}: its block nesting depth
 * multiplied by the configured indent size.
 * NOTE(review): appears unused within this class — confirm there are no external callers before removing.
 */
private static int getExpectedElementIndentSize(@Nonnull PsiElement anchor)
{
int depth = 0;
PyStatementList block = getAnchorStatementList(anchor);
while(block != null)
{
depth += 1;
block = PsiTreeUtil.getParentOfType(block, PyStatementList.class);
}
return depth * getIndentSizeFromSettings(anchor.getProject());
}
/**
 * Returns whether tabs are configured for indentation in the Python code style settings.
 */
public static boolean areTabsUsedForIndentation(@Nonnull Project project)
{
final CodeStyleSettings codeStyleSettings = CodeStyleSettingsManager.getInstance(project).getCurrentSettings();
return codeStyleSettings.useTabCharacter(PythonFileType.INSTANCE);
}
/**
 * Returns the configured indentation character: {@code '\t'} if tabs are used, {@code ' '} otherwise.
 */
public static char getIndentCharacter(@Nonnull Project project)
{
return areTabsUsedForIndentation(project) ? '\t' : ' ';
}
/**
 * Returns indentation size configured in the Python code style settings.
 *
 * @see #getIndentFromSettings(Project)
 */
public static int getIndentSizeFromSettings(@Nonnull Project project)
{
final CodeStyleSettings codeStyleSettings = CodeStyleSettingsManager.getInstance(project).getCurrentSettings();
final CodeStyleSettings.IndentOptions indentOptions = codeStyleSettings.getIndentOptions(PythonFileType.INSTANCE);
return indentOptions.INDENT_SIZE;
}
/**
 * Returns indentation configured in the Python code style settings either as space character repeated number times specified there
 * or a single tab character if tabs are set to use for indentation.
 *
 * @see #getIndentSizeFromSettings(Project)
 * @see #areTabsUsedForIndentation(Project)
 */
@Nonnull
public static String getIndentFromSettings(@Nonnull Project project)
{
final boolean useTabs = areTabsUsedForIndentation(project);
return useTabs ? "\t" : StringUtil.repeatSymbol(' ', getIndentSizeFromSettings(project));
}
/**
 * Strips the common indentation (see {@link #findCommonIndent(Iterable, boolean)}) from every line.
 */
@Nonnull
public static List<String> removeCommonIndent(@Nonnull Iterable<String> lines, boolean ignoreFirstLine)
{
return changeIndent(lines, ignoreFirstLine, "");
}
/**
 * Same as {@link #removeCommonIndent(Iterable, boolean)} but splits the text into lines first
 * and re-joins the result with "\n".
 */
@Nonnull
public static String removeCommonIndent(@Nonnull String s, boolean ignoreFirstLine)
{
final List<String> trimmed = removeCommonIndent(LineTokenizer.tokenizeIntoList(s, false, false), ignoreFirstLine);
return StringUtil.join(trimmed, "\n");
}
/**
 * Same as {@link #changeIndent(Iterable, boolean, String)} but splits the text into lines first
 * and re-joins the result with "\n".
 */
@Nonnull
public static String changeIndent(@Nonnull String s, boolean ignoreFirstLine, String newIndent)
{
final List<String> trimmed = changeIndent(LineTokenizer.tokenizeIntoList(s, false, false), ignoreFirstLine, newIndent);
return StringUtil.join(trimmed, "\n");
}
/**
 * Replaces the common indentation of the given lines with {@code newIndent}.
 * Note that all empty lines will be trimmed regardless of their actual indentation.
 */
@Nonnull
public static List<String> changeIndent(@Nonnull Iterable<String> lines, boolean ignoreFirstLine, final String newIndent)
{
final String oldIndent = findCommonIndent(lines, ignoreFirstLine);
if(Iterables.isEmpty(lines))
{
return Collections.emptyList();
}
final List<String> result = ContainerUtil.map(Iterables.skip(lines, ignoreFirstLine ? 1 : 0), line -> {
if(StringUtil.isEmptyOrSpaces(line))
{
return "";
}
else
{
// Safe: every non-blank considered line starts with oldIndent by construction of findCommonIndent().
return newIndent + line.substring(oldIndent.length());
}
});
if(ignoreFirstLine)
{
// The skipped first line is passed through unchanged.
return ContainerUtil.prepend(result, Iterables.get(lines, 0));
}
return result;
}
/**
 * Finds maximum common indentation of the given lines. Indentation of empty lines and lines containing only whitespaces is ignored unless
 * they're the only lines provided. In the latter case common indentation for such lines is returned. If mix of tabs and spaces was used
 * for indentation and any two of lines taken into account contain incompatible combination of these symbols, i.e. it's impossible to
 * decide which one can be used as prefix for another, empty string is returned.
 *
 * @param ignoreFirstLine whether the first line should be skipped (useful for multiline string literals)
 */
@Nonnull
public static String findCommonIndent(@Nonnull Iterable<String> lines, boolean ignoreFirstLine)
{
String minIndent = null;
boolean allLinesEmpty = true;
if(Iterables.isEmpty(lines))
{
return "";
}
boolean hasBadEmptyLineIndent = false;
for(String line : Iterables.skip(lines, ignoreFirstLine ? 1 : 0))
{
final boolean lineEmpty = StringUtil.isEmptyOrSpaces(line);
if(lineEmpty && !allLinesEmpty)
{
// Blank lines stop mattering once a non-blank line has been seen.
continue;
}
final String indent = getLineIndent(line);
if(minIndent == null || (!lineEmpty && allLinesEmpty) || minIndent.startsWith(indent))
{
// First line seen, first non-blank line (overrides a blank-only prefix), or a shorter compatible indent.
minIndent = indent;
}
else if(!indent.startsWith(minIndent))
{
// Incompatible tab/space mix: fatal for non-blank lines, merely remembered for blank ones.
if(lineEmpty)
{
hasBadEmptyLineIndent = true;
}
else
{
return "";
}
}
allLinesEmpty &= lineEmpty;
}
if(allLinesEmpty && hasBadEmptyLineIndent)
{
return "";
}
return StringUtil.notNullize(minIndent);
}
/**
 * Returns the indentation (see {@link #getLineIndent(String)}) of the given 0-based document line.
 */
@Nonnull
public static String getLineIndent(@Nonnull Document document, int lineNumber)
{
final TextRange lineRange = TextRange.create(document.getLineStartOffset(lineNumber), document.getLineEndOffset(lineNumber));
final String line = document.getText(lineRange);
return getLineIndent(line);
}
}
| |
/*
* Copyright 2018 Gabor Varadi
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.zhuinden.simplestack;
import android.app.Activity;
import android.os.Parcel;
import com.zhuinden.simplestack.helpers.HasServices;
import com.zhuinden.simplestack.helpers.ServiceProvider;
import com.zhuinden.simplestack.helpers.TestKey;
import com.zhuinden.simplestack.helpers.TestKeyWithOnlyParentServices;
import com.zhuinden.statebundle.StateBundle;
import org.junit.Assert;
import org.junit.Test;
import org.mockito.Mockito;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.atomic.AtomicReference;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import static org.assertj.core.api.Assertions.assertThat;
public class ScopingTest {
// Shared service registry used by the scoped keys below, keyed by service tag.
private static final Map<String, Object> services = new LinkedHashMap<>();
private static final String SERVICE_TAG = "service";
// A single Service instance is registered up-front so tests can assert identity.
static {
services.put(SERVICE_TAG, new Service());
}
// Test service: records the Registered lifecycle callbacks and persists a single
// int through Bundleable so that state save/restore can be observed.
private static class Service
implements Bundleable, ScopedServices.Registered {
int blah = 2;
boolean didServiceRegister = false;
boolean didServiceUnregister = false;
@Nonnull
@Override
public StateBundle toBundle() {
StateBundle stateBundle = new StateBundle();
// Deliberately saves the constant 5 (not the field) so a restore visibly changes `blah`.
stateBundle.putInt("blah", 5);
return stateBundle;
}
@Override
public void fromBundle(@Nullable StateBundle bundle) {
if(bundle != null) {
blah = bundle.getInt("blah");
}
}
@Override
public void onServiceRegistered() {
didServiceRegister = true;
}
@Override
public void onServiceUnregistered() {
didServiceUnregister = true;
}
}
// Minimal StateChanger that immediately completes every state change.
StateChanger stateChanger = new StateChanger() {
@Override
public void handleStateChange(@Nonnull StateChange stateChange, @Nonnull Callback completionCallback) {
completionCallback.stateChangeComplete();
}
};
// TestKey that also declares a scope (tagged with the key's own name) and binds the
// shared Service instance into it.
private static class TestKeyWithScope
extends TestKey
implements ScopeKey, HasServices {
TestKeyWithScope(String name) {
super(name);
}
protected TestKeyWithScope(Parcel in) {
super(in);
}
@Nonnull
@Override
public String getScopeTag() {
return name;
}
@Override
public void bindServices(ServiceBinder serviceBinder) {
serviceBinder.addService(SERVICE_TAG, services.get(SERVICE_TAG));
}
// Parcelable boilerplate.
public static final Creator<TestKeyWithScope> CREATOR = new Creator<TestKeyWithScope>() {
@Override
public TestKeyWithScope createFromParcel(Parcel in) {
return new TestKeyWithScope(in);
}
@Override
public TestKeyWithScope[] newArray(int size) {
return new TestKeyWithScope[size];
}
};
}
// Keys under test; only testKey2 declares a scope (tag "world").
private TestKey testKey1 = new TestKey("hello");
private TestKeyWithScope testKey2 = new TestKeyWithScope("world");
private TestKey testKey3 = new TestKey("!");
// setScopedServices(null) must be rejected eagerly with IllegalArgumentException.
@Test
public void scopedServicesShouldNotBeNull() {
Backstack backstack = new Backstack();
try {
backstack.setScopedServices(null);
Assert.fail();
} catch(IllegalArgumentException e) {
// OK!
}
}
// Scoped services may be configured before the first state change, but re-configuring
// after the StateChanger has been set must throw IllegalStateException.
@Test
public void scopedServicesCanBeSetBeforeInitialStateChange() {
Backstack backstack = new Backstack();
backstack.setup(History.of(testKey1));
backstack.setScopedServices(new ServiceProvider());
backstack.setStateChanger(new StateChanger() {
@Override
public void handleStateChange(@Nonnull StateChange stateChange, @Nonnull Callback completionCallback) {
completionCallback.stateChangeComplete();
}
});
try {
backstack.setScopedServices(new ServiceProvider());
Assert.fail();
} catch(IllegalStateException e) {
// OK!
}
}
// canSetScopeProviders() stays true through setup()/setScopedServices()/setGlobalServices()
// and flips to false only once a StateChanger is set.
@Test
public void serviceProviderCanBeSetWorksCorrectly() {
Backstack backstack = new Backstack();
assertThat(backstack.canSetScopeProviders()).isTrue();
backstack.setup(History.of(testKey1));
assertThat(backstack.canSetScopeProviders()).isTrue();
backstack.setScopedServices(new ServiceProvider());
backstack.setGlobalServices(GlobalServices.builder().build());
assertThat(backstack.canSetScopeProviders()).isTrue();
backstack.setStateChanger(new StateChanger() {
@Override
public void handleStateChange(@Nonnull StateChange stateChange, @Nonnull Callback completionCallback) {
completionCallback.stateChangeComplete();
}
});
assertThat(backstack.canSetScopeProviders()).isFalse();
try {
backstack.setScopedServices(new ServiceProvider());
Assert.fail();
} catch(IllegalStateException e) {
// OK!
}
}
// A ScopeKey in the history without a configured ScopedServices must fail fast.
@Test
public void scopedServicesThrowIfNoScopedServicesAreDefinedAndServicesAreToBeBound() {
Backstack backstack = new Backstack();
backstack.setup(History.of(testKey2));
try {
backstack.setStateChanger(stateChanger);
Assert.fail();
} catch(IllegalStateException e) {
assertThat(e.getMessage()).contains("scoped services");
}
}
// The scope and its bound service are created by the initial state change, not by setup().
@Test
public void scopeIsCreatedForScopeKeys() {
Backstack backstack = new Backstack();
backstack.setScopedServices(new ServiceProvider());
backstack.setup(History.single(testKey2));
assertThat(backstack.hasService(testKey2, SERVICE_TAG)).isFalse();
backstack.setStateChanger(stateChanger);
assertThat(backstack.hasService(testKey2, SERVICE_TAG)).isTrue();
Service service = backstack.getService(testKey2, SERVICE_TAG);
assertThat(service).isSameAs(services.get(SERVICE_TAG));
}
// Looking up a tag that was never bound in an existing scope throws IllegalArgumentException.
@Test
public void gettingNonExistentServiceThrows() {
Backstack backstack = new Backstack();
backstack.setScopedServices(new ServiceProvider());
backstack.setup(History.single(testKey2));
assertThat(backstack.hasService(testKey2, SERVICE_TAG)).isFalse();
backstack.setStateChanger(stateChanger);
assertThat(backstack.hasService(testKey2, SERVICE_TAG)).isTrue();
try {
backstack.getService(testKey2, "d'oh");
Assert.fail();
} catch(IllegalArgumentException e) {
// OK!
}
}
// Binding a service under a null tag is rejected during scope creation.
@Test
public void serviceBinderAddThrowsForNullServiceTag() {
final String nullTag = null;
final Object service = new Service();
TestKeyWithScope testKeyWithScope = new TestKeyWithScope("blah") {
@Override
public void bindServices(ServiceBinder serviceBinder) {
assertThat(serviceBinder.getScopeTag()).isEqualTo(getScopeTag());
serviceBinder.addService(nullTag, service);
}
@Nonnull
@Override
public String getScopeTag() {
return "beep";
}
};
Backstack backstack = new Backstack();
backstack.setScopedServices(new ServiceProvider());
backstack.setup(History.of(testKeyWithScope));
try {
backstack.setStateChanger(stateChanger);
Assert.fail();
} catch(Exception e) {
assertThat(e.getMessage()).isEqualTo("serviceTag cannot be null!");
}
}
// Adding the owning Backstack itself as a *service* must be rejected — per the fail()
// message below, it would cause a save-state loop in toBundle().
@Test
public void serviceBinderThrowsIfRootBackstackIsAService() {
final String serviceTag = "backstack";
final Backstack backstack = new Backstack();
TestKeyWithScope testKeyWithScope = new TestKeyWithScope("blah") {
@Override
public void bindServices(ServiceBinder serviceBinder) {
assertThat(serviceBinder.getScopeTag()).isEqualTo(getScopeTag());
serviceBinder.addService(serviceTag, backstack);
}
@Nonnull
@Override
public String getScopeTag() {
return "beep";
}
};
backstack.setScopedServices(new ServiceProvider());
backstack.setup(History.of(testKeyWithScope));
try {
backstack.setStateChanger(stateChanger);
Assert.fail("This would cause a save-state loop in toBundle()");
} catch(IllegalArgumentException e) {
// OK!
}
}
// ...but registering the Backstack as an *alias* succeeds and is resolvable via
// lookupService (presumably because aliases are not persisted — verify in ServiceBinder docs).
@Test
public void serviceBinderSucceedsIfRootBackstackIsAnAlias() {
final String serviceTag = "backstack";
final Backstack backstack = new Backstack();
TestKeyWithScope testKeyWithScope = new TestKeyWithScope("blah") {
@Override
public void bindServices(ServiceBinder serviceBinder) {
assertThat(serviceBinder.getScopeTag()).isEqualTo(getScopeTag());
serviceBinder.addAlias(serviceTag, backstack);
}
@Nonnull
@Override
public String getScopeTag() {
return "beep";
}
};
backstack.setScopedServices(new ServiceProvider());
backstack.setup(History.of(testKeyWithScope));
backstack.setStateChanger(stateChanger);
assertThat(backstack.lookupService(serviceTag)).isSameAs(backstack);
}
@Test
public void serviceBinderAddThrowsForNullService() {
final String serviceTag = "serviceTag";
final Object nullService = null;
TestKeyWithScope testKeyWithScope = new TestKeyWithScope("blah") {
@Override
public void bindServices(ServiceBinder serviceBinder) {
assertThat(serviceBinder.getScopeTag()).isEqualTo(getScopeTag());
serviceBinder.addService(serviceTag, null);
}
@Nonnull
@Override
public String getScopeTag() {
return "beep";
}
};
Backstack backstack = new Backstack();
backstack.setScopedServices(new ServiceProvider());
backstack.setup(History.of(testKeyWithScope));
try {
backstack.setStateChanger(stateChanger);
Assert.fail();
} catch(Exception e) {
assertThat(e.getMessage()).isEqualTo("service cannot be null!");
}
}
@Test
public void scopeIsDestroyedForClearedScopeKeys() {
Backstack backstack = new Backstack();
backstack.setScopedServices(new ServiceProvider());
backstack.setup(History.single(testKey2));
assertThat(backstack.hasService(testKey2, SERVICE_TAG)).isFalse();
backstack.setStateChanger(stateChanger);
assertThat(backstack.hasService(testKey2, SERVICE_TAG)).isTrue();
backstack.setHistory(History.of(testKey1), StateChange.REPLACE);
assertThat(backstack.hasService(testKey2, SERVICE_TAG)).isFalse();
}
@Test
public void scopeServicesArePersistedToStateBundle() {
final Backstack backstack = new Backstack();
backstack.setScopedServices(new ServiceProvider());
final Service service = new Service();
TestKeyWithScope testKeyWithScope = new TestKeyWithScope("blah") {
@Override
public void bindServices(ServiceBinder serviceBinder) {
assertThat(serviceBinder.getScopeTag()).isEqualTo(getScopeTag());
serviceBinder.addService(SERVICE_TAG, service);
}
@Nonnull
@Override
public String getScopeTag() {
return "beep";
}
};
backstack.setup(History.of(testKeyWithScope));
backstack.setStateChanger(new StateChanger() {
@Override
public void handleStateChange(@Nonnull StateChange stateChange, @Nonnull Callback completionCallback) {
completionCallback.stateChangeComplete();
}
});
assertThat(backstack.hasService(testKeyWithScope.getScopeTag(), SERVICE_TAG)).isTrue();
StateBundle stateBundle = backstack.toBundle();
//noinspection ConstantConditions
assertThat(stateBundle.getBundle(Backstack.getScopesTag()).getBundle(testKeyWithScope.getScopeTag()).getBundle(SERVICE_TAG).getInt("blah")).isEqualTo(5); // backstack.getScopesTag() is internal
}
@Test
public void scopeServicesArePersistedToStateBundleDelayedScopedServicesCall() {
final Backstack backstack = new Backstack();
final Service service = new Service();
TestKeyWithScope testKeyWithScope = new TestKeyWithScope("blah") {
@Override
public void bindServices(ServiceBinder serviceBinder) {
assertThat(serviceBinder.getScopeTag()).isEqualTo(getScopeTag());
serviceBinder.addService(SERVICE_TAG, service);
}
@Nonnull
@Override
public String getScopeTag() {
return "beep";
}
};
backstack.setup(History.of(testKeyWithScope));
backstack.setScopedServices(new ServiceProvider()); // !
backstack.setStateChanger(new StateChanger() {
@Override
public void handleStateChange(@Nonnull StateChange stateChange, @Nonnull Callback completionCallback) {
completionCallback.stateChangeComplete();
}
});
assertThat(backstack.hasService(testKeyWithScope.getScopeTag(), SERVICE_TAG)).isTrue();
StateBundle stateBundle = backstack.toBundle();
//noinspection ConstantConditions
assertThat(stateBundle.getBundle(Backstack.getScopesTag()).getBundle(testKeyWithScope.getScopeTag()).getBundle(SERVICE_TAG).getInt("blah")).isEqualTo(5); // backstack.getScopesTag() is internal
}
@Test
public void persistedStateOfScopedServicesIsRestored() {
Backstack backstack = new Backstack();
backstack.setScopedServices(new ServiceProvider());
backstack.setup(History.of(testKey2));
StateChanger stateChanger = new StateChanger() {
@Override
public void handleStateChange(@Nonnull StateChange stateChange, @Nonnull Callback completionCallback) {
completionCallback.stateChangeComplete();
}
};
backstack.setStateChanger(stateChanger);
assertThat(backstack.hasService(testKey2.getScopeTag(), SERVICE_TAG)).isTrue();
StateBundle stateBundle = backstack.toBundle();
Backstack backstack2 = new Backstack();
backstack2.setScopedServices(new ServiceProvider());
StateChanger stateChanger2 = new StateChanger() {
@Override
public void handleStateChange(@Nonnull StateChange stateChange, @Nonnull Callback completionCallback) {
completionCallback.stateChangeComplete();
}
};
backstack2.setup(History.of(testKey2));
backstack2.fromBundle(stateBundle);
backstack2.setStateChanger(stateChanger2);
assertThat(backstack2.<Service>getService(testKey2.getScopeTag(), SERVICE_TAG).blah).isEqualTo(5);
}
@Test
public void nonExistentServiceShouldReturnFalseAndThrow() {
Backstack backstack = new Backstack();
backstack.setScopedServices(new ServiceProvider());
backstack.setup(History.of(testKey2));
assertThat(backstack.hasService(testKey2, SERVICE_TAG)).isFalse();
try {
backstack.getService(testKey2, SERVICE_TAG);
Assert.fail();
} catch(IllegalArgumentException e) {
// OK!
}
}
@Test
public void scopedServiceCallbackIsCalledCorrectly() {
Backstack backstack = new Backstack();
backstack.setScopedServices(new ServiceProvider());
final Service service = new Service();
TestKeyWithScope testKeyWithScope = new TestKeyWithScope("blah") {
@Override
public void bindServices(ServiceBinder serviceBinder) {
assertThat(serviceBinder.getScopeTag()).isEqualTo(getScopeTag());
serviceBinder.addService(SERVICE_TAG, service);
}
@Nonnull
@Override
public String getScopeTag() {
return "beep";
}
};
backstack.setup(History.of(testKeyWithScope));
assertThat(service.didServiceRegister).isFalse();
assertThat(service.didServiceUnregister).isFalse();
backstack.setStateChanger(stateChanger);
assertThat(service.didServiceRegister).isTrue();
assertThat(service.didServiceUnregister).isFalse();
backstack.setHistory(History.single(testKey1), StateChange.REPLACE);
assertThat(service.didServiceRegister).isTrue();
assertThat(service.didServiceUnregister).isTrue();
}
    @Test
    public void scopesAreFinalizedWhenActivityIsFinishing() {
        // When the Activity reports isFinishing(), BackstackDelegate.onDestroy()
        // must finalize all scopes: scope + services go away and the service is unregistered.
        Activity activity = Mockito.mock(Activity.class);
        Mockito.when(activity.isFinishing()).thenReturn(true); // simulate a real finish
        final Service service = new Service();
        TestKeyWithScope testKeyWithScope = new TestKeyWithScope("blah") {
            @Override
            public void bindServices(ServiceBinder serviceBinder) {
                assertThat(serviceBinder.getScopeTag()).isEqualTo(getScopeTag());
                serviceBinder.addService(SERVICE_TAG, service);
            }

            @Nonnull
            @Override
            public String getScopeTag() {
                return "beep";
            }
        };
        StateChanger stateChanger = new StateChanger() {
            @Override
            public void handleStateChange(@Nonnull StateChange stateChange, @Nonnull Callback completionCallback) {
                completionCallback.stateChangeComplete();
            }
        };
        BackstackDelegate backstackDelegate = new BackstackDelegate();
        backstackDelegate.setScopedServices(activity, new ServiceProvider());
        backstackDelegate.onCreate(null, null, History.of(testKeyWithScope));
        backstackDelegate.setStateChanger(stateChanger);
        // Drive the delegate through a resume/pause cycle; the scope stays alive.
        backstackDelegate.onPostResume();
        backstackDelegate.onPause();
        assertThat(backstackDelegate.hasScope("beep")).isTrue();
        assertThat(backstackDelegate.hasService(testKeyWithScope, SERVICE_TAG)).isTrue();
        assertThat(service.didServiceRegister).isTrue();
        assertThat(service.didServiceUnregister).isFalse();
        // isFinishing() == true, so onDestroy() destroys (not merely suspends) the scopes.
        backstackDelegate.onDestroy();
        assertThat(backstackDelegate.hasScope("beep")).isFalse();
        assertThat(backstackDelegate.hasService(testKeyWithScope, SERVICE_TAG)).isFalse();
        assertThat(service.didServiceRegister).isTrue();
        assertThat(service.didServiceUnregister).isTrue();
    }
    @Test
    public void lookupServiceNoOverlapsWorks() {
        // lookupService() walks the scope chain; with a single provider of SERVICE_TAG
        // the service is found while its scope ("beep") exists, and throws once it is gone.
        Backstack backstack = new Backstack();
        backstack.setScopedServices(new ServiceProvider());
        final Service service = new Service();
        TestKeyWithScope beep = new TestKeyWithScope("beep") {
            @Override
            public void bindServices(ServiceBinder serviceBinder) {
                assertThat(serviceBinder.getScopeTag()).isEqualTo(getScopeTag());
                serviceBinder.addService(SERVICE_TAG, service);
            }

            @Nonnull
            @Override
            public String getScopeTag() {
                return "beep";
            }
        };
        // "boop" intentionally binds nothing, so lookups fall through to "beep".
        TestKeyWithScope boop = new TestKeyWithScope("boop") {
            @Override
            public void bindServices(ServiceBinder serviceBinder) {
            }

            @Nonnull
            @Override
            public String getScopeTag() {
                return "boop";
            }
        };
        backstack.setup(History.of(beep, boop));
        assertThat(backstack.hasService("beep", SERVICE_TAG)).isFalse();
        backstack.setStateChanger(stateChanger);
        assertThat(backstack.hasService("beep", SERVICE_TAG)).isTrue();
        assertThat(backstack.hasService("boop", SERVICE_TAG)).isFalse();
        assertThat(backstack.<Object>lookupService(SERVICE_TAG)).isSameAs(service);
        backstack.goBack(); // "boop" is destroyed; "beep" still provides the service
        assertThat(backstack.hasService("beep", SERVICE_TAG)).isTrue();
        assertThat(backstack.<Object>lookupService(SERVICE_TAG)).isSameAs(service);
        backstack.setHistory(History.single(testKey1), StateChange.REPLACE); // all scopes destroyed
        assertThat(backstack.hasService("beep", SERVICE_TAG)).isFalse();
        try {
            backstack.lookupService(SERVICE_TAG);
            Assert.fail();
        } catch(IllegalStateException e) {
            assertThat(e.getMessage()).contains("does not exist in any scope");
            // OK!
        }
    }
    @Test
    public void lookupServiceWithOverlapsWorks() {
        // Both scopes bind the same SERVICE_TAG: lookupService() must resolve to the
        // top-most alive scope's binding, falling back as scopes are destroyed.
        Backstack backstack = new Backstack();
        backstack.setScopedServices(new ServiceProvider());
        final Service service1 = new Service();
        final Service service2 = new Service();
        TestKeyWithScope beep = new TestKeyWithScope("beep") {
            @Override
            public void bindServices(ServiceBinder serviceBinder) {
                assertThat(serviceBinder.getScopeTag()).isEqualTo(getScopeTag());
                serviceBinder.addService(SERVICE_TAG, service1);
            }

            @Nonnull
            @Override
            public String getScopeTag() {
                return "beep";
            }
        };
        TestKeyWithScope boop = new TestKeyWithScope("boop") {
            @Override
            public void bindServices(ServiceBinder serviceBinder) {
                assertThat(serviceBinder.getScopeTag()).isEqualTo(getScopeTag());
                serviceBinder.addService(SERVICE_TAG, service2);
            }

            @Nonnull
            @Override
            public String getScopeTag() {
                return "boop";
            }
        };
        backstack.setup(History.of(beep, boop));
        assertThat(backstack.hasScope("beep")).isFalse();
        assertThat(backstack.hasScope("boop")).isFalse();
        assertThat(backstack.hasService("beep", SERVICE_TAG)).isFalse();
        assertThat(backstack.hasService("boop", SERVICE_TAG)).isFalse();
        backstack.setStateChanger(stateChanger);
        assertThat(backstack.hasScope("beep")).isTrue();
        assertThat(backstack.hasScope("boop")).isTrue();
        assertThat(backstack.hasService("beep", SERVICE_TAG)).isTrue();
        assertThat(backstack.hasService("boop", SERVICE_TAG)).isTrue();
        assertThat(backstack.<Object>lookupService(SERVICE_TAG)).isSameAs(service2); // top scope wins
        backstack.goBack();
        assertThat(backstack.hasScope("beep")).isTrue();
        assertThat(backstack.hasScope("boop")).isFalse();
        assertThat(backstack.hasService("beep", SERVICE_TAG)).isTrue();
        assertThat(backstack.hasService("boop", SERVICE_TAG)).isFalse();
        assertThat(backstack.<Object>lookupService(SERVICE_TAG)).isSameAs(service1); // falls back to "beep"
        backstack.setHistory(History.single(testKey1), StateChange.REPLACE); // all scopes destroyed
        assertThat(backstack.hasScope("beep")).isFalse();
        assertThat(backstack.hasScope("boop")).isFalse();
        assertThat(backstack.hasService("beep", SERVICE_TAG)).isFalse();
        assertThat(backstack.hasService("boop", SERVICE_TAG)).isFalse();
        try {
            backstack.lookupService(SERVICE_TAG);
            Assert.fail();
        } catch(IllegalStateException e) {
            assertThat(e.getMessage()).contains("does not exist in any scope");
            // OK!
        }
    }
    @Test
    public void canFindServiceNoOverlapsWorks() {
        // canFindService() mirrors lookupService() but returns a boolean
        // instead of throwing when the service is absent.
        Backstack backstack = new Backstack();
        backstack.setScopedServices(new ServiceProvider());
        final Service service = new Service();
        TestKeyWithScope beep = new TestKeyWithScope("beep") {
            @Override
            public void bindServices(ServiceBinder serviceBinder) {
                assertThat(serviceBinder.getScopeTag()).isEqualTo(getScopeTag());
                serviceBinder.addService(SERVICE_TAG, service);
            }

            @Nonnull
            @Override
            public String getScopeTag() {
                return "beep";
            }
        };
        // "boop" intentionally binds nothing; finds must fall through to "beep".
        TestKeyWithScope boop = new TestKeyWithScope("boop") {
            @Override
            public void bindServices(ServiceBinder serviceBinder) {
            }

            @Nonnull
            @Override
            public String getScopeTag() {
                return "boop";
            }
        };
        backstack.setup(History.of(beep, boop));
        assertThat(backstack.hasService("beep", SERVICE_TAG)).isFalse();
        backstack.setStateChanger(stateChanger);
        assertThat(backstack.hasService("beep", SERVICE_TAG)).isTrue();
        assertThat(backstack.hasService("boop", SERVICE_TAG)).isFalse();
        assertThat(backstack.<Object>canFindService(SERVICE_TAG)).isTrue();
        backstack.goBack();
        assertThat(backstack.hasService("beep", SERVICE_TAG)).isTrue();
        assertThat(backstack.<Object>canFindService(SERVICE_TAG)).isTrue();
        backstack.setHistory(History.single(testKey1), StateChange.REPLACE); // all scopes destroyed
        assertThat(backstack.hasService("beep", SERVICE_TAG)).isFalse();
        assertThat(backstack.canFindService(SERVICE_TAG)).isFalse();
    }
    @Test
    public void canFindServiceWithOverlapsWorks() {
        // Both scopes bind SERVICE_TAG; canFindService() stays true as long as
        // at least one alive scope still provides it.
        Backstack backstack = new Backstack();
        backstack.setScopedServices(new ServiceProvider());
        final Service service1 = new Service();
        final Service service2 = new Service();
        TestKeyWithScope beep = new TestKeyWithScope("beep") {
            @Override
            public void bindServices(ServiceBinder serviceBinder) {
                assertThat(serviceBinder.getScopeTag()).isEqualTo(getScopeTag());
                serviceBinder.addService(SERVICE_TAG, service1);
            }

            @Nonnull
            @Override
            public String getScopeTag() {
                return "beep";
            }
        };
        TestKeyWithScope boop = new TestKeyWithScope("boop") {
            @Override
            public void bindServices(ServiceBinder serviceBinder) {
                assertThat(serviceBinder.getScopeTag()).isEqualTo(getScopeTag());
                serviceBinder.addService(SERVICE_TAG, service2);
            }

            @Nonnull
            @Override
            public String getScopeTag() {
                return "boop";
            }
        };
        backstack.setup(History.of(beep, boop));
        assertThat(backstack.hasService("beep", SERVICE_TAG)).isFalse();
        assertThat(backstack.hasService("boop", SERVICE_TAG)).isFalse();
        backstack.setStateChanger(stateChanger);
        assertThat(backstack.hasService("beep", SERVICE_TAG)).isTrue();
        assertThat(backstack.hasService("boop", SERVICE_TAG)).isTrue();
        assertThat(backstack.<Object>canFindService(SERVICE_TAG)).isTrue();
        backstack.goBack(); // "boop" destroyed; "beep" still provides it
        assertThat(backstack.hasService("beep", SERVICE_TAG)).isTrue();
        assertThat(backstack.hasService("boop", SERVICE_TAG)).isFalse();
        assertThat(backstack.<Object>canFindService(SERVICE_TAG)).isTrue();
        backstack.setHistory(History.single(testKey1), StateChange.REPLACE); // all scopes destroyed
        assertThat(backstack.hasService("beep", SERVICE_TAG)).isFalse();
        assertThat(backstack.hasService("boop", SERVICE_TAG)).isFalse();
        assertThat(backstack.canFindService(SERVICE_TAG)).isFalse();
    }
@Test
public void serviceBinderMethodsWork() {
Backstack backstack = new Backstack();
backstack.setScopedServices(new ServiceProvider());
final Service service1 = new Service();
final Service service2 = new Service();
TestKeyWithScope beep = new TestKeyWithScope("beep") {
@Override
public void bindServices(ServiceBinder serviceBinder) {
assertThat(serviceBinder.getScopeTag()).isEqualTo(getScopeTag());
assertThat(serviceBinder.hasService("SERVICE1")).isFalse();
assertThat(serviceBinder.canFindService("SERVICE1")).isFalse();
serviceBinder.addService("SERVICE1", service1);
assertThat(serviceBinder.hasService("SERVICE1")).isTrue();
assertThat(serviceBinder.canFindService("SERVICE1")).isTrue();
assertThat(serviceBinder.lookupService("SERVICE1")).isSameAs(service1);
}
@Nonnull
@Override
public String getScopeTag() {
return "beep";
}
};
TestKeyWithScope boop = new TestKeyWithScope("boop") {
@Override
public void bindServices(ServiceBinder serviceBinder) {
assertThat(serviceBinder.getScopeTag()).isEqualTo(getScopeTag());
assertThat(serviceBinder.hasService("SERVICE1")).isFalse();
assertThat(serviceBinder.canFindService("SERVICE1")).isTrue();
assertThat(serviceBinder.hasService("SERVICE2")).isFalse();
serviceBinder.addService("SERVICE2", service2);
assertThat(serviceBinder.hasService("SERVICE2")).isTrue();
assertThat(serviceBinder.canFindService("SERVICE2")).isTrue();
assertThat(serviceBinder.hasService("SERVICE2")).isTrue();
assertThat(serviceBinder.lookupService("SERVICE1")).isSameAs(service1);
assertThat(serviceBinder.lookupService("SERVICE2")).isSameAs(service2);
}
@Nonnull
@Override
public String getScopeTag() {
return "boop";
}
};
backstack.setup(History.of(beep, boop));
backstack.setStateChanger(stateChanger);
}
    @Test
    public void scopeCreationAndDestructionHappensInForwardAndReverseOrder() {
        // Across scopes: registration follows history order ("beep" then "boop"),
        // destruction runs in reverse ("boop" then "beep").
        Backstack backstack = new Backstack();
        backstack.setScopedServices(new ServiceProvider());
        final List<Object> serviceRegistered = new ArrayList<>();
        final List<Object> serviceUnregistered = new ArrayList<>();
        // Records itself into the shared lists so callback order can be asserted.
        class MyService implements ScopedServices.Registered {
            @Override
            public void onServiceRegistered() {
                serviceRegistered.add(this);
            }

            @Override
            public void onServiceUnregistered() {
                serviceUnregistered.add(this);
            }
        }
        final MyService service1 = new MyService();
        final MyService service2 = new MyService();
        TestKeyWithScope beep = new TestKeyWithScope("beep") {
            @Override
            public void bindServices(ServiceBinder serviceBinder) {
                assertThat(serviceBinder.getScopeTag()).isEqualTo(getScopeTag());
                serviceBinder.addService("SERVICE1", service1);
            }

            @Nonnull
            @Override
            public String getScopeTag() {
                return "beep";
            }
        };
        TestKeyWithScope boop = new TestKeyWithScope("boop") {
            @Override
            public void bindServices(ServiceBinder serviceBinder) {
                assertThat(serviceBinder.getScopeTag()).isEqualTo(getScopeTag());
                serviceBinder.addService("SERVICE2", service2);
            }

            @Nonnull
            @Override
            public String getScopeTag() {
                return "boop";
            }
        };
        TestKey bye = new TestKey("bye");
        backstack.setup(History.of(beep, boop));
        assertThat(serviceRegistered).isEmpty();
        assertThat(serviceUnregistered).isEmpty();
        backstack.setStateChanger(stateChanger);
        assertThat(serviceRegistered).containsExactly(service1, service2); // forward order
        assertThat(serviceUnregistered).isEmpty();
        backstack.setHistory(History.of(bye), StateChange.REPLACE);
        assertThat(serviceRegistered).containsExactly(service1, service2);
        assertThat(serviceUnregistered).containsExactly(service2, service1); // reverse order
    }
    @Test
    public void serviceCreationAndDestructionHappensInForwardAndReverseOrder() {
        // Within a single scope: services register in the order they were added
        // and unregister in the reverse order.
        Backstack backstack = new Backstack();
        backstack.setScopedServices(new ServiceProvider());
        final List<Object> serviceRegistered = new ArrayList<>();
        final List<Object> serviceUnregistered = new ArrayList<>();
        // Records itself into the shared lists so callback order can be asserted.
        class MyService implements ScopedServices.Registered {
            @Override
            public void onServiceRegistered() {
                serviceRegistered.add(this);
            }

            @Override
            public void onServiceUnregistered() {
                serviceUnregistered.add(this);
            }
        }
        final MyService service1 = new MyService();
        final MyService service2 = new MyService();
        TestKeyWithScope beep = new TestKeyWithScope("beep") {
            @Override
            public void bindServices(ServiceBinder serviceBinder) {
                assertThat(serviceBinder.getScopeTag()).isEqualTo(getScopeTag());
                serviceBinder.addService("SERVICE1", service1);
                serviceBinder.addService("SERVICE2", service2);
            }

            @Nonnull
            @Override
            public String getScopeTag() {
                return "beep";
            }
        };
        TestKey bye = new TestKey("bye");
        backstack.setup(History.of(beep));
        assertThat(serviceRegistered).isEmpty();
        assertThat(serviceUnregistered).isEmpty();
        backstack.setStateChanger(stateChanger);
        assertThat(serviceRegistered).containsExactly(service1, service2); // forward order
        assertThat(serviceUnregistered).isEmpty();
        backstack.setHistory(History.of(bye), StateChange.REPLACE);
        assertThat(serviceRegistered).containsExactly(service1, service2);
        assertThat(serviceUnregistered).containsExactly(service2, service1); // reverse order
    }
@Test
public void scopedServicesCanRetrieveBackstackFromServiceBinder() {
final Backstack backstack = new Backstack();
backstack.setScopedServices(new ServiceProvider());
final AtomicReference<Backstack> ref = new AtomicReference<>();
class MyService {
}
final MyService service1 = new MyService();
TestKeyWithScope beep = new TestKeyWithScope("beep") {
@Override
public void bindServices(ServiceBinder serviceBinder) {
assertThat(serviceBinder.getScopeTag()).isEqualTo(getScopeTag());
serviceBinder.addService("SERVICE1", service1);
ref.set(serviceBinder.getBackstack());
}
@Nonnull
@Override
public String getScopeTag() {
return "beep";
}
};
backstack.setup(History.of(beep));
backstack.setStateChanger(stateChanger);
assertThat(backstack).isSameAs(ref.get());
}
    @Test
    public void activatedWorks() {
        // Only the top scope's services are active; navigating back activates the
        // newly-top scope's services and deactivates the departed scope's services.
        Backstack backstack = new Backstack();
        backstack.setScopedServices(new ServiceProvider());
        final List<Object> activatedServices = new ArrayList<>();
        final List<Object> deactivatedServices = new ArrayList<>();
        // Records itself into the shared lists so callback order can be asserted.
        class MyService
                implements ScopedServices.Activated {
            @Override
            public void onServiceActive() {
                activatedServices.add(this);
            }

            @Override
            public void onServiceInactive() {
                deactivatedServices.add(this);
            }
        }
        final MyService service1 = new MyService();
        final MyService service2 = new MyService();
        TestKeyWithScope beep = new TestKeyWithScope("beep") {
            @Override
            public void bindServices(ServiceBinder serviceBinder) {
                assertThat(serviceBinder.getScopeTag()).isEqualTo(getScopeTag());
                serviceBinder.addService("SERVICE1", service1);
            }

            @Nonnull
            @Override
            public String getScopeTag() {
                return "beep";
            }
        };
        TestKeyWithScope boop = new TestKeyWithScope("boop") {
            @Override
            public void bindServices(ServiceBinder serviceBinder) {
                assertThat(serviceBinder.getScopeTag()).isEqualTo(getScopeTag());
                serviceBinder.addService("SERVICE2", service2);
            }

            @Nonnull
            @Override
            public String getScopeTag() {
                return "boop";
            }
        };
        TestKey bye = new TestKey("bye");
        backstack.setup(History.of(beep, boop));
        assertThat(activatedServices).isEmpty();
        assertThat(deactivatedServices).isEmpty();
        backstack.setStateChanger(stateChanger);
        assertThat(activatedServices).containsExactly(service2); // only the top scope activates
        assertThat(deactivatedServices).isEmpty();
        backstack.goBack();
        assertThat(activatedServices).containsExactly(service2, service1);
        assertThat(deactivatedServices).containsExactly(service2);
        backstack.setHistory(History.of(bye), StateChange.REPLACE);
        assertThat(activatedServices).containsExactly(service2, service1);
        assertThat(deactivatedServices).containsExactly(service2, service1);
    }
@Test
public void activatedIsCalledInRightOrder() {
Backstack backstack = new Backstack();
backstack.setScopedServices(new ServiceProvider());
final List<Object> activatedServices = new ArrayList<>();
final List<Object> deactivatedServices = new ArrayList<>();
class MyService
implements ScopedServices.Activated {
@Override
public void onServiceActive() {
activatedServices.add(this);
}
@Override
public void onServiceInactive() {
deactivatedServices.add(this);
}
}
final MyService service1 = new MyService();
final MyService service2 = new MyService();
final MyService service3 = new MyService();
final MyService service4 = new MyService();
final MyService service5 = new MyService();
final MyService service6 = new MyService();
TestKeyWithScope beep = new TestKeyWithScope("beep") {
@Override
public void bindServices(ServiceBinder serviceBinder) {
assertThat(serviceBinder.getScopeTag()).isEqualTo(getScopeTag());
serviceBinder.addService("SERVICE1", service1);
serviceBinder.addService("SERVICE2", service2);
serviceBinder.addService("SERVICE3", service3);
}
@Nonnull
@Override
public String getScopeTag() {
return "beep";
}
};
TestKeyWithScope boop = new TestKeyWithScope("beep") {
@Override
public void bindServices(ServiceBinder serviceBinder) {
assertThat(serviceBinder.getScopeTag()).isEqualTo(getScopeTag());
serviceBinder.addService("SERVICE4", service4);
serviceBinder.addService("SERVICE5", service5);
serviceBinder.addService("SERVICE6", service6);
}
@Nonnull
@Override
public String getScopeTag() {
return "boop";
}
};
backstack.setup(History.of(beep));
assertThat(activatedServices).isEmpty();
backstack.setStateChanger(stateChanger);
assertThat(activatedServices).containsExactly(service1, service2, service3);
backstack.goTo(boop);
assertThat(activatedServices).containsExactly(service1, service2, service3, service4, service5, service6);
assertThat(deactivatedServices).containsExactly(service3, service2, service1);
}
@Test
public void deactivatedIsCalledInReverseOrder() {
Backstack backstack = new Backstack();
backstack.setScopedServices(new ServiceProvider());
final List<Object> deactivatedServices = new ArrayList<>();
class MyService
implements ScopedServices.Activated {
@Override
public void onServiceActive() {
}
@Override
public void onServiceInactive() {
deactivatedServices.add(this);
}
}
final MyService service1 = new MyService();
final MyService service2 = new MyService();
final MyService service3 = new MyService();
final MyService service4 = new MyService();
final MyService service5 = new MyService();
final MyService service6 = new MyService();
TestKeyWithScope beep = new TestKeyWithScope("beep") {
@Override
public void bindServices(ServiceBinder serviceBinder) {
assertThat(serviceBinder.getScopeTag()).isEqualTo(getScopeTag());
serviceBinder.addService("SERVICE1", service1);
serviceBinder.addService("SERVICE2", service2);
serviceBinder.addService("SERVICE3", service3);
}
@Nonnull
@Override
public String getScopeTag() {
return "beep";
}
};
TestKeyWithScope boop = new TestKeyWithScope("beep") {
@Override
public void bindServices(ServiceBinder serviceBinder) {
assertThat(serviceBinder.getScopeTag()).isEqualTo(getScopeTag());
serviceBinder.addService("SERVICE4", service4);
serviceBinder.addService("SERVICE5", service5);
serviceBinder.addService("SERVICE6", service6);
}
@Nonnull
@Override
public String getScopeTag() {
return "boop";
}
};
backstack.setup(History.of(beep));
assertThat(deactivatedServices).isEmpty();
backstack.setStateChanger(stateChanger);
backstack.goTo(boop);
assertThat(deactivatedServices).containsExactly(service3, service2, service1);
TestKey bye = new TestKey("bye");
backstack.setHistory(History.of(bye), StateChange.REPLACE);
assertThat(deactivatedServices).containsExactly(service3, service2, service1, service6, service5, service4);
}
@Test
public void activationIsCalledOnlyOnce() {
Backstack backstack = new Backstack();
backstack.setScopedServices(new ServiceProvider());
final List<Object> activatedServices = new ArrayList<>();
final List<Object> deactivatedServices = new ArrayList<>();
class MyService
implements ScopedServices.Activated {
@Override
public void onServiceActive() {
activatedServices.add(this);
}
@Override
public void onServiceInactive() {
deactivatedServices.add(this);
}
}
final MyService service1 = new MyService();
TestKeyWithScope beep = new TestKeyWithScope("beep") {
@Override
public void bindServices(ServiceBinder serviceBinder) {
assertThat(serviceBinder.getScopeTag()).isEqualTo(getScopeTag());
serviceBinder.addService("SERVICE1", service1);
}
@Nonnull
@Override
public String getScopeTag() {
return "beep";
}
};
backstack.setup(History.of(beep));
assertThat(activatedServices).isEmpty();
assertThat(deactivatedServices).isEmpty();
backstack.setStateChanger(stateChanger);
assertThat(activatedServices).containsExactly(service1);
backstack.removeStateChanger();
backstack.setStateChanger(stateChanger);
assertThat(activatedServices).containsExactly(service1);
}
@Test
public void deactivationIsCalledOnlyOnce() {
Backstack backstack = new Backstack();
backstack.setScopedServices(new ServiceProvider());
final List<Object> activatedServices = new ArrayList<>();
final List<Object> deactivatedServices = new ArrayList<>();
class MyService
implements ScopedServices.Activated {
@Override
public void onServiceActive() {
activatedServices.add(this);
}
@Override
public void onServiceInactive() {
deactivatedServices.add(this);
}
}
final MyService service1 = new MyService();
TestKeyWithScope beep = new TestKeyWithScope("beep") {
@Override
public void bindServices(ServiceBinder serviceBinder) {
assertThat(serviceBinder.getScopeTag()).isEqualTo(getScopeTag());
serviceBinder.addService("SERVICE1", service1);
}
@Nonnull
@Override
public String getScopeTag() {
return "beep";
}
};
TestKey bye = new TestKey("bye");
backstack.setup(History.of(beep));
assertThat(activatedServices).isEmpty();
assertThat(deactivatedServices).isEmpty();
backstack.setStateChanger(stateChanger);
assertThat(activatedServices).containsExactly(service1);
assertThat(deactivatedServices).isEmpty();
backstack.removeStateChanger();
assertThat(deactivatedServices).isEmpty();
backstack.setHistory(History.of(bye), StateChange.REPLACE);
assertThat(deactivatedServices).isEmpty();
backstack.setStateChanger(stateChanger);
assertThat(deactivatedServices).containsExactly(service1);
backstack.removeStateChanger();
backstack.setStateChanger(stateChanger);
assertThat(deactivatedServices).containsExactly(service1);
}
/**
 * Activation callbacks must still be dispatched when a pending state change is
 * force-executed while no state changer is attached, and then completed after a
 * state changer is re-attached (the "initialize" state change).
 */
@Test
public void activationHappensEvenWithForceExecutedStateChangeAndInitializeStateChange() {
    Backstack backstack = new Backstack();
    backstack.setScopedServices(new ServiceProvider());
    final List<Object> activatedServices = new ArrayList<>();
    final List<Object> deactivatedServices = new ArrayList<>();

    // Never completes on its own; the test drives completion through the captured callback.
    final AtomicReference<StateChanger.Callback> callback = new AtomicReference<>();
    StateChanger pendingStateChanger = new StateChanger() {
        @Override
        public void handleStateChange(@Nonnull StateChange stateChange, @Nonnull Callback completionCallback) {
            callback.set(completionCallback);
        }
    };

    // Records activation/deactivation order per service instance.
    class MyService
            implements ScopedServices.Activated {
        @Override
        public void onServiceActive() {
            activatedServices.add(this);
        }

        @Override
        public void onServiceInactive() {
            deactivatedServices.add(this);
        }
    }
    final MyService service1 = new MyService();
    final MyService service2 = new MyService();
    final MyService service3 = new MyService();
    final MyService service4 = new MyService();
    final MyService service5 = new MyService();
    final MyService service6 = new MyService();

    TestKeyWithScope beep = new TestKeyWithScope("beep") {
        @Override
        public void bindServices(ServiceBinder serviceBinder) {
            assertThat(serviceBinder.getScopeTag()).isEqualTo(getScopeTag());
            serviceBinder.addService("SERVICE1", service1);
            serviceBinder.addService("SERVICE2", service2);
            serviceBinder.addService("SERVICE3", service3);
        }

        @Nonnull
        @Override
        public String getScopeTag() {
            return "beep";
        }
    };

    // FIX: this key was previously constructed as new TestKeyWithScope("beep")
    // (copy-paste of the key above) even though its scope tag is "boop"; the name
    // now matches the scope tag, consistent with the other tests in this file.
    TestKeyWithScope boop = new TestKeyWithScope("boop") {
        @Override
        public void bindServices(ServiceBinder serviceBinder) {
            assertThat(serviceBinder.getScopeTag()).isEqualTo(getScopeTag());
            serviceBinder.addService("SERVICE4", service4);
            serviceBinder.addService("SERVICE5", service5);
            serviceBinder.addService("SERVICE6", service6);
        }

        @Nonnull
        @Override
        public String getScopeTag() {
            return "boop";
        }
    };

    backstack.setup(History.of(beep));
    assertThat(activatedServices).isEmpty();
    backstack.setStateChanger(pendingStateChanger);
    callback.get().stateChangeComplete();
    assertThat(activatedServices).containsExactly(service1, service2, service3);

    backstack.setHistory(History.of(boop), StateChange.BACKWARD);
    backstack.removeStateChanger();
    backstack.executePendingStateChange(); // force-execute with no state changer attached
    backstack.setStateChanger(pendingStateChanger); // triggers the "initialize" state change
    callback.get().stateChangeComplete();

    assertThat(activatedServices).containsExactly(service1, service2, service3, service4, service5, service6);
    assertThat(deactivatedServices).containsExactly(service3, service2, service1);
}
/**
 * A service in an active scope must receive {@code onServiceInactive} when the
 * scopes are explicitly finalized via {@link Backstack#finalizeScopes()}, and the
 * scope/service must no longer be discoverable afterwards.
 */
@Test
public void activeScopeIsDeactivatedWhenScopesAreFinalized() {
    // Note: an unused Mockito Activity mock was previously created here; it was
    // never referenced by the test and has been removed.

    // Tracks whether the activation/deactivation callbacks fired.
    class MyService
            implements ScopedServices.Activated {
        boolean didServiceActivate = false;
        boolean didScopeDeactivate = false;

        @Override
        public void onServiceActive() {
            didServiceActivate = true;
        }

        @Override
        public void onServiceInactive() {
            didScopeDeactivate = true;
        }
    }
    final MyService service = new MyService();

    TestKeyWithScope testKeyWithScope = new TestKeyWithScope("blah") {
        @Override
        public void bindServices(ServiceBinder serviceBinder) {
            assertThat(serviceBinder.getScopeTag()).isEqualTo(getScopeTag());
            serviceBinder.addService(SERVICE_TAG, service);
        }

        @Nonnull
        @Override
        public String getScopeTag() {
            return "beep";
        }
    };

    StateChanger stateChanger = new StateChanger() {
        @Override
        public void handleStateChange(@Nonnull StateChange stateChange, @Nonnull Callback completionCallback) {
            completionCallback.stateChangeComplete();
        }
    };

    Backstack backstack = new Backstack();
    backstack.setScopedServices(new ServiceProvider());
    backstack.setup(History.of(testKeyWithScope));
    backstack.setStateChanger(stateChanger);
    assertThat(backstack.canFindService(SERVICE_TAG)).isTrue();

    // Cycling the state changer must not deactivate the still-active scope.
    backstack.reattachStateChanger();
    backstack.detachStateChanger();
    assertThat(service.didServiceActivate).isTrue();
    assertThat(service.didScopeDeactivate).isFalse();
    assertThat(backstack.hasScope("beep")).isTrue();
    assertThat(backstack.hasService(testKeyWithScope, SERVICE_TAG)).isTrue();

    backstack.finalizeScopes();
    assertThat(backstack.hasScope("beep")).isFalse();
    assertThat(backstack.hasService(testKeyWithScope, SERVICE_TAG)).isFalse();
    assertThat(service.didServiceActivate).isTrue();
    assertThat(service.didScopeDeactivate).isTrue();
}
/**
 * Lifecycle event types recorded by test services, covering the Registered
 * (CREATE/DESTROY) and Activated (ACTIVE/INACTIVE) scoped-service callbacks.
 */
private enum ServiceEvent {
CREATE,
ACTIVE,
INACTIVE,
DESTROY
}
/**
 * Minimal 2-tuple used by the ordering tests to pair a service instance with a
 * {@link ServiceEvent}. Equality and hashing delegate to both components.
 */
private static class Pair<S, T> {
    private S first;
    private T second;

    private Pair(S first, T second) {
        this.first = first;
        this.second = second;
    }

    /** Static factory, mirroring {@code Map.entry}-style construction. */
    public static <S, T> Pair<S, T> of(S first, T second) {
        return new Pair<>(first, second);
    }

    @Override
    public boolean equals(Object o) {
        if(this == o) {
            return true;
        }
        if(o == null || getClass() != o.getClass()) {
            return false;
        }
        Pair<?, ?> other = (Pair<?, ?>) o;
        return Objects.equals(first, other.first)
                && Objects.equals(second, other.second);
    }

    @Override
    public int hashCode() {
        return Objects.hash(first, second);
    }

    @Override
    public String toString() {
        return "Pair{first=" + first + ", second=" + second + '}';
    }
}
/**
 * Verifies the exact interleaving of Registered (CREATE/DESTROY) and Activated
 * (ACTIVE/INACTIVE) callbacks across: initial setup of [beep, boop], goTo(braap),
 * goBack(), and finally replacing the whole history with a scope-less key.
 * The containsExactly assertion at the end pins the full event order, so the
 * navigation sequence here must not be altered.
 */
@Test
public void registeredAndActivatedAreCalledInRightOrder() {
final List<Pair<Object, ServiceEvent>> events = new ArrayList<>();
Backstack backstack = new Backstack();
backstack.setScopedServices(new ServiceProvider());
// Appends every lifecycle callback it receives to the shared event log.
class MyService
implements ScopedServices.Activated, ScopedServices.Registered {
private int id = 0;
MyService(int id) {
this.id = id;
}
@Override
public void onServiceActive() {
events.add(Pair.of((Object) this, ServiceEvent.ACTIVE));
}
@Override
public void onServiceInactive() {
events.add(Pair.of((Object) this, ServiceEvent.INACTIVE));
}
@Override
public void onServiceRegistered() {
events.add(Pair.of((Object) this, ServiceEvent.CREATE));
}
@Override
public void onServiceUnregistered() {
events.add(Pair.of((Object) this, ServiceEvent.DESTROY));
}
@Override
public String toString() {
return "MyService{" +
"id=" + id +
'}';
}
}
final Object service1 = new MyService(1);
final Object service2 = new MyService(2);
final Object service3 = new MyService(3);
final Object service4 = new MyService(4);
final Object service5 = new MyService(5);
final Object service6 = new MyService(6);
final Object service7 = new MyService(7);
final Object service8 = new MyService(8);
final Object service9 = new MyService(9);
// Scope "beep" holds services 1-3, "boop" holds 4-6, "braap" holds 7-9.
TestKeyWithScope beep = new TestKeyWithScope("beep") {
@Override
public void bindServices(ServiceBinder serviceBinder) {
assertThat(serviceBinder.getScopeTag()).isEqualTo(getScopeTag());
serviceBinder.addService("SERVICE1", service1);
serviceBinder.addService("SERVICE2", service2);
serviceBinder.addService("SERVICE3", service3);
}
@Nonnull
@Override
public String getScopeTag() {
return "beep";
}
};
TestKeyWithScope boop = new TestKeyWithScope("boop") {
@Override
public void bindServices(ServiceBinder serviceBinder) {
assertThat(serviceBinder.getScopeTag()).isEqualTo(getScopeTag());
serviceBinder.addService("SERVICE4", service4);
serviceBinder.addService("SERVICE5", service5);
serviceBinder.addService("SERVICE6", service6);
}
@Nonnull
@Override
public String getScopeTag() {
return "boop";
}
};
TestKeyWithScope braap = new TestKeyWithScope("braap") {
@Override
public void bindServices(ServiceBinder serviceBinder) {
assertThat(serviceBinder.getScopeTag()).isEqualTo(getScopeTag());
serviceBinder.addService("SERVICE7", service7);
serviceBinder.addService("SERVICE8", service8);
serviceBinder.addService("SERVICE9", service9);
}
@Nonnull
@Override
public String getScopeTag() {
return "braap";
}
};
backstack.setup(History.of(beep, boop));
backstack.setStateChanger(stateChanger);
backstack.goTo(braap);
backstack.removeStateChanger(); // just to make sure
backstack.setStateChanger(stateChanger); // just to make sure
backstack.goBack();
// Replacing the history with a scope-less key destroys all remaining scopes.
TestKey bye = new TestKey("bye");
backstack.setHistory(History.of(bye), StateChange.REPLACE);
// Expectation: registration happens in bind order, activation in scope order;
// deactivation and destruction each run in reverse registration order.
assertThat(events).containsExactly(
Pair.of(service1, ServiceEvent.CREATE),
Pair.of(service2, ServiceEvent.CREATE),
Pair.of(service3, ServiceEvent.CREATE),
Pair.of(service4, ServiceEvent.CREATE),
Pair.of(service5, ServiceEvent.CREATE),
Pair.of(service6, ServiceEvent.CREATE),
Pair.of(service4, ServiceEvent.ACTIVE),
Pair.of(service5, ServiceEvent.ACTIVE),
Pair.of(service6, ServiceEvent.ACTIVE),
Pair.of(service7, ServiceEvent.CREATE),
Pair.of(service8, ServiceEvent.CREATE),
Pair.of(service9, ServiceEvent.CREATE),
Pair.of(service7, ServiceEvent.ACTIVE),
Pair.of(service8, ServiceEvent.ACTIVE),
Pair.of(service9, ServiceEvent.ACTIVE),
Pair.of(service6, ServiceEvent.INACTIVE),
Pair.of(service5, ServiceEvent.INACTIVE),
Pair.of(service4, ServiceEvent.INACTIVE),
Pair.of(service4, ServiceEvent.ACTIVE),
Pair.of(service5, ServiceEvent.ACTIVE),
Pair.of(service6, ServiceEvent.ACTIVE),
Pair.of(service9, ServiceEvent.INACTIVE),
Pair.of(service8, ServiceEvent.INACTIVE),
Pair.of(service7, ServiceEvent.INACTIVE),
Pair.of(service9, ServiceEvent.DESTROY),
Pair.of(service8, ServiceEvent.DESTROY),
Pair.of(service7, ServiceEvent.DESTROY),
Pair.of(service6, ServiceEvent.INACTIVE),
Pair.of(service5, ServiceEvent.INACTIVE),
Pair.of(service4, ServiceEvent.INACTIVE),
Pair.of(service6, ServiceEvent.DESTROY),
Pair.of(service5, ServiceEvent.DESTROY),
Pair.of(service4, ServiceEvent.DESTROY),
Pair.of(service3, ServiceEvent.DESTROY),
Pair.of(service2, ServiceEvent.DESTROY),
Pair.of(service1, ServiceEvent.DESTROY)
);
}
/**
 * Navigation triggered from inside {@code onServiceActive} must be enqueued and
 * executed rather than crash or get lost.
 */
@Test
public void navigationIsPossibleAndEnqueuedDuringActivationDispatch() {
    final TestKey destination = new TestKey("destination");

    Backstack backstack = new Backstack();
    backstack.setScopedServices(new ServiceProvider());

    // A service that immediately navigates away the moment it becomes active.
    class NavigatingService
            implements ScopedServices.Activated {
        private final Backstack navigator;

        public NavigatingService(Backstack navigator) {
            this.navigator = navigator;
        }

        @Override
        public void onServiceActive() {
            navigator.setHistory(History.of(destination), StateChange.REPLACE);
        }

        @Override
        public void onServiceInactive() {
            // intentionally empty
        }
    }
    final NavigatingService navigatingService = new NavigatingService(backstack);

    TestKeyWithOnlyParentServices beep = new TestKeyWithOnlyParentServices("beep",
            History.of(
                    "registration")) {
        @Override
        public void bindServices(ServiceBinder serviceBinder) {
            if(serviceBinder.getScopeTag().equals("registration")) {
                serviceBinder.addService("SERVICE", navigatingService);
            }
        }
    };

    TestKeyWithScope boop = new TestKeyWithScope("boop") {
        @Override
        public void bindServices(ServiceBinder serviceBinder) {
            // intentionally binds nothing
        }
    };

    backstack.setup(History.of(boop));
    backstack.setStateChanger(stateChanger);

    // Navigating to "beep" activates the service, whose own navigation to
    // "destination" must be enqueued and then executed.
    backstack.setHistory(History.of(beep), StateChange.REPLACE);
    assertThat(backstack.getHistory()).containsExactly(destination);
}
/**
 * Exercises hasService/getService/lookupService/lookupFromScope/canFindFromScope
 * from within bindServices and from the Backstack itself, including shadowing:
 * "service" is bound in both "beep" (service1) and "boop" (service2), and lookup
 * must resolve to the nearest scope while lookupFromScope pins an explicit one.
 */
@Test
public void lookupServiceFromScopeWorks() {
Backstack backstack = new Backstack();
backstack.setScopedServices(new ServiceProvider());
final Object service1 = new Object();
final Object service2 = new Object();
final Object service0 = new Object();
final Object service3 = new Object();
class Key1
extends TestKey
implements HasServices {
Key1(String name) {
super(name);
}
protected Key1(Parcel in) {
super(in);
}
@Override
public void bindServices(ServiceBinder serviceBinder) {
// before binding, the service must not be visible in this scope
assertThat(serviceBinder.hasService("service0")).isFalse();
assertThat(serviceBinder.canFindFromScope(serviceBinder.getScopeTag(), "service0")).isFalse();
serviceBinder.addService("service0", service0);
assertThat(serviceBinder.hasService("service0")).isTrue();
assertThat(serviceBinder.canFindFromScope(serviceBinder.getScopeTag(), "service0")).isTrue();
assertThat(serviceBinder.getService("service0")).isSameAs(service0);
assertThat(serviceBinder.lookupService("service0")).isSameAs(service0);
assertThat(serviceBinder.lookupFromScope(serviceBinder.getScopeTag(), "service0")).isSameAs(service0);
assertThat(serviceBinder.canFindFromScope(serviceBinder.getScopeTag(), "service")).isFalse();
serviceBinder.addService("service", service1);
assertThat(serviceBinder.canFindFromScope(serviceBinder.getScopeTag(), "service")).isTrue();
assertThat(serviceBinder.lookupFromScope(serviceBinder.getScopeTag(), "service")).isSameAs(service1);
}
@Nonnull
@Override
public String getScopeTag() {
return "beep";
}
}
class Key2
extends TestKey
implements HasServices {
Key2(String name) {
super(name);
}
protected Key2(Parcel in) {
super(in);
}
@Override
public void bindServices(ServiceBinder serviceBinder) {
// services bound by the earlier "beep" scope are visible from "boop"
assertThat(serviceBinder.lookupService("service0")).isSameAs(service0);
assertThat(serviceBinder.canFindFromScope(serviceBinder.getScopeTag(), "service")).isTrue();
assertThat(serviceBinder.lookupService("service")).isSameAs(service1);
serviceBinder.addService("service", service2);
// the most important assertion here: the local binding shadows "beep"'s
assertThat(serviceBinder.lookupService("service")).isSameAs(service2);
assertThat(serviceBinder.lookupFromScope("beep", "service")).isSameAs(service1);
serviceBinder.addService("service3", service3);
}
@Nonnull
@Override
public String getScopeTag() {
return "boop";
}
}
backstack.setup(History.of(new Key1("beep"), new Key2("boop")));
backstack.setStateChanger(new StateChanger() {
@Override
public void handleStateChange(@Nonnull StateChange stateChange, @Nonnull Callback completionCallback) {
completionCallback.stateChangeComplete();
}
});
assertThat(backstack.lookupService("service")).isSameAs(service2);
assertThat(backstack.canFindService("service3")).isTrue();
assertThat(backstack.canFindFromScope("boop", "service3")).isTrue();
assertThat(backstack.lookupFromScope("boop", "service3")).isSameAs(service3);
assertThat(backstack.lookupFromScope("beep", "service")).isSameAs(service1);
assertThat(backstack.lookupFromScope("boop", "service")).isSameAs(service2);
// after navigating back, the "boop" scope (and its bindings) are gone
backstack.goBack();
assertThat(backstack.canFindFromScope("boop", "service3")).isFalse();
assertThat(backstack.lookupService("service")).isSameAs(service1);
}
/**
 * A single service instance registered under two different tags in the same
 * scope must receive each lifecycle callback (registered/activated/inactivated/
 * unregistered) only once.
 */
@Test
public void sameServiceRegisteredInScopeMultipleTimesReceivesCallbackOnlyOnce() {
Backstack backstack = new Backstack();
backstack.setScopedServices(new ServiceProvider());
final List<Object> activated = new ArrayList<>();
final List<Object> inactivated = new ArrayList<>();
final List<Object> registered = new ArrayList<>();
final List<Object> unregistered = new ArrayList<>();
// Records every lifecycle callback it receives into the lists above.
class MyService
implements ScopedServices.Activated, ScopedServices.Registered {
@Override
public void onServiceActive() {
activated.add(this);
}
@Override
public void onServiceInactive() {
inactivated.add(this);
}
@Override
public void onServiceRegistered() {
registered.add(this);
}
@Override
public void onServiceUnregistered() {
unregistered.add(this);
}
}
final MyService service = new MyService();
final String serviceTag1 = "service1";
final String serviceTag2 = "service2";
TestKeyWithScope beep = new TestKeyWithScope("beep") {
@Override
public void bindServices(ServiceBinder serviceBinder) {
assertThat(serviceBinder.getScopeTag()).isEqualTo(getScopeTag());
// the same instance is deliberately added twice under distinct tags
serviceBinder.addService(serviceTag1, service);
serviceBinder.addService(serviceTag2, service);
}
@Nonnull
@Override
public String getScopeTag() {
return "beep";
}
};
TestKey clear = new TestKey("clear");
backstack.setup(History.of(beep));
// nothing is dispatched before a state changer is attached
assertThat(activated).isEmpty();
assertThat(inactivated).isEmpty();
assertThat(registered).isEmpty();
assertThat(unregistered).isEmpty();
backstack.setStateChanger(stateChanger);
assertThat(activated).isNotEmpty();
assertThat(inactivated).isEmpty();
assertThat(registered).isNotEmpty();
assertThat(unregistered).isEmpty();
assertThat(activated).containsOnlyOnce(service);
assertThat(registered).containsOnlyOnce(service);
// replacing history with a scope-less key tears the scope down
backstack.setHistory(History.of(clear), StateChange.REPLACE);
assertThat(activated).isNotEmpty();
assertThat(inactivated).isNotEmpty();
assertThat(registered).isNotEmpty();
assertThat(unregistered).isNotEmpty();
assertThat(inactivated).containsOnlyOnce(service);
assertThat(unregistered).containsOnlyOnce(service);
}
/**
 * Calling {@link Backstack#finalizeScopes()} a second time must be a silent
 * no-op rather than throw.
 */
@Test
public void finalizingScopeTwiceShouldBeNoOp() {
    // Immediately-completing state changer so navigation finishes synchronously.
    StateChanger immediateStateChanger = new StateChanger() {
        @Override
        public void handleStateChange(@Nonnull StateChange stateChange, @Nonnull Callback completionCallback) {
            completionCallback.stateChangeComplete();
        }
    };

    Backstack backstack = new Backstack();
    backstack.setScopedServices(new ServiceProvider());
    backstack.setup(History.of(testKey2));
    backstack.setStateChanger(immediateStateChanger);

    assertThat(backstack.hasService(testKey2, SERVICE_TAG)).isTrue();

    backstack.finalizeScopes();
    try {
        backstack.finalizeScopes(); // second call must not throw anything
    } catch(Throwable ignored) {
        Assert.fail("Should be no-op.");
    }
}
/**
 * While a state change away from a scope is still pending (its completion
 * callback not yet invoked), services of the outgoing scope ("world") must still
 * be reachable via lookup; once the state change completes, they are not.
 */
@Test
public void scopeBuiltByNavigationButNotInLatestKeysShouldBeAccessibleByLookup() {
Backstack backstack = new Backstack();
final Object helloService = new Object();
final Object worldService = new Object();
final Object kappaService = new Object();
backstack.setScopedServices(new ScopedServices() {
@Override
public void bindServices(@Nonnull ServiceBinder serviceBinder) {
if("hello".equals(serviceBinder.getScopeTag())) {
serviceBinder.addService("hello", helloService);
} else if("world".equals(serviceBinder.getScopeTag())) {
serviceBinder.addService("world", worldService);
} else if("kappa".equals(serviceBinder.getScopeTag())) {
serviceBinder.addService("kappa", kappaService);
}
}
});
TestKeyWithScope scopeKey1 = new TestKeyWithScope("hello");
TestKeyWithScope scopeKey2 = new TestKeyWithScope("world");
TestKeyWithScope scopeKey3 = new TestKeyWithScope("kappa");
backstack.setup(History.of(scopeKey1, scopeKey2));
// Captures the completion callback so the test controls when each state change finishes.
final AtomicReference<StateChanger.Callback> callbackRef = new AtomicReference<>();
backstack.setStateChanger(new StateChanger() {
@Override
public void handleStateChange(@Nonnull StateChange stateChange, @Nonnull Callback completionCallback) {
callbackRef.set(completionCallback);
}
});
callbackRef.get().stateChangeComplete();
// "world" is navigated away from, but the state change is left pending here.
backstack.setHistory(History.of(scopeKey1, scopeKey3), StateChange.REPLACE);
assertThat(backstack.lookupService("hello")).isSameAs(helloService);
assertThat(backstack.lookupService("kappa")).isSameAs(kappaService);
assertThat(backstack.lookupService("world")).isSameAs(worldService);
assertThat(backstack.canFindService("hello")).isTrue();
assertThat(backstack.canFindService("kappa")).isTrue();
assertThat(backstack.canFindService("world")).isTrue();
// Completing the pending state change destroys the "world" scope.
callbackRef.get().stateChangeComplete();
assertThat(backstack.canFindService("hello")).isTrue();
assertThat(backstack.canFindService("kappa")).isTrue();
assertThat(backstack.canFindService("world")).isFalse();
}
/**
 * findScopesForKey must keep reporting scopes (including explicit parent scopes)
 * for keys of a pending-outgoing history until the state change completes, and
 * stop reporting them afterwards.
 */
@Test
public void scopeBuiltByNavigationButNotInLatestKeysCanBeFoundByKey() {
Backstack backstack = new Backstack();
final Object helloService = new Object();
final Object worldService = new Object();
final Object kappaService = new Object();
backstack.setScopedServices(new ScopedServices() {
@Override
public void bindServices(@Nonnull ServiceBinder serviceBinder) {
if("hello".equals(serviceBinder.getScopeTag())) {
serviceBinder.addService("hello", helloService);
} else if("world".equals(serviceBinder.getScopeTag())) {
serviceBinder.addService("world", worldService);
} else if("kappa".equals(serviceBinder.getScopeTag())) {
serviceBinder.addService("kappa", kappaService);
}
}
});
// Key whose own scope additionally declares explicit parent scopes.
class TestKeyWithExplicitParent extends TestKeyWithScope implements ScopeKey.Child {
private String[] parentScopes;
TestKeyWithExplicitParent(String name, String... parentScopes) {
super(name);
this.parentScopes = parentScopes;
}
protected TestKeyWithExplicitParent(Parcel in) {
super(in);
}
@Nonnull
@Override
public List<String> getParentScopes() {
return History.from(Arrays.asList(parentScopes));
}
}
TestKeyWithScope scopeKey1 = new TestKeyWithScope("hello");
TestKeyWithScope scopeKey2 = new TestKeyWithExplicitParent("world", "parent");
TestKeyWithScope scopeKey3 = new TestKeyWithScope("kappa");
TestKey scopeKey4 = new TestKey("aaaaaa");
backstack.setup(History.of(scopeKey1, scopeKey2));
// Captures completion callbacks so state changes can be left pending.
final AtomicReference<StateChanger.Callback> callbackRef = new AtomicReference<>();
backstack.setStateChanger(new StateChanger() {
@Override
public void handleStateChange(@Nonnull StateChange stateChange, @Nonnull Callback completionCallback) {
callbackRef.set(completionCallback);
}
});
callbackRef.get().stateChangeComplete();
// Replace history; the outgoing "world"/"parent" scopes survive while pending.
backstack.setHistory(History.of(scopeKey1, scopeKey4, scopeKey3), StateChange.REPLACE);
assertThat(backstack.findScopesForKey(scopeKey1, ScopeLookupMode.ALL)).containsExactly("hello");
assertThat(backstack.findScopesForKey(scopeKey2, ScopeLookupMode.ALL)).containsExactly("world", "parent", "hello");
assertThat(backstack.findScopesForKey(scopeKey3, ScopeLookupMode.ALL)).containsExactly("kappa", "world", "parent", "hello");
assertThat(backstack.findScopesForKey(scopeKey1, ScopeLookupMode.EXPLICIT)).containsExactly("hello");
assertThat(backstack.findScopesForKey(scopeKey2, ScopeLookupMode.EXPLICIT)).containsExactly("world", "parent");
assertThat(backstack.findScopesForKey(scopeKey3, ScopeLookupMode.EXPLICIT)).containsExactly("kappa");
// Completing the state change drops the scopes of keys no longer in history.
callbackRef.get().stateChangeComplete();
assertThat(backstack.findScopesForKey(scopeKey1, ScopeLookupMode.ALL)).containsExactly("hello");
assertThat(backstack.findScopesForKey(scopeKey2, ScopeLookupMode.ALL)).isEmpty();
assertThat(backstack.findScopesForKey(scopeKey3, ScopeLookupMode.ALL)).containsExactly("kappa", "hello");
assertThat(backstack.findScopesForKey(scopeKey4, ScopeLookupMode.ALL)).containsExactly(
"hello");
assertThat(backstack.findScopesForKey(scopeKey1, ScopeLookupMode.EXPLICIT)).containsExactly("hello");
assertThat(backstack.findScopesForKey(scopeKey2, ScopeLookupMode.EXPLICIT)).isEmpty();
assertThat(backstack.findScopesForKey(scopeKey3, ScopeLookupMode.EXPLICIT)).containsExactly("kappa");
}
/**
 * lookupFromScope/canFindFromScope (in both ALL and EXPLICIT modes) must still
 * resolve services of scopes that are only alive because a state change away
 * from them is pending; after completion they must no longer resolve.
 */
@Test
public void scopeBuiltByNavigationButNotInLatestKeysCanBeFoundFromScope() {
Backstack backstack = new Backstack();
final Object helloService = new Object();
final Object worldService = new Object();
final Object kappaService = new Object();
final Object parentService = new Object();
backstack.setScopedServices(new ScopedServices() {
@Override
public void bindServices(@Nonnull ServiceBinder serviceBinder) {
if("hello".equals(serviceBinder.getScopeTag())) {
serviceBinder.addService("hello", helloService);
} else if("world".equals(serviceBinder.getScopeTag())) {
serviceBinder.addService("world", worldService);
} else if("kappa".equals(serviceBinder.getScopeTag())) {
serviceBinder.addService("kappa", kappaService);
} else if("parent".equals(serviceBinder.getScopeTag())) {
serviceBinder.addService("parent", parentService);
}
}
});
// Key whose own scope additionally declares explicit parent scopes.
class TestKeyWithExplicitParent extends TestKeyWithScope implements ScopeKey.Child {
private String[] parentScopes;
TestKeyWithExplicitParent(String name, String... parentScopes) {
super(name);
this.parentScopes = parentScopes;
}
protected TestKeyWithExplicitParent(Parcel in) {
super(in);
}
@Nonnull
@Override
public List<String> getParentScopes() {
return History.from(Arrays.asList(parentScopes));
}
}
TestKeyWithScope scopeKey1 = new TestKeyWithScope("hello");
TestKeyWithScope scopeKey2 = new TestKeyWithExplicitParent("world", "parent");
TestKeyWithScope scopeKey3 = new TestKeyWithScope("kappa");
backstack.setup(History.of(scopeKey1, scopeKey2));
// Captures completion callbacks so the replace below can be left pending.
final AtomicReference<StateChanger.Callback> callbackRef = new AtomicReference<>();
backstack.setStateChanger(new StateChanger() {
@Override
public void handleStateChange(@Nonnull StateChange stateChange, @Nonnull Callback completionCallback) {
callbackRef.set(completionCallback);
}
});
callbackRef.get().stateChangeComplete();
// "world" (and its explicit parent "parent") leave history, but stay pending.
backstack.setHistory(History.of(scopeKey1, scopeKey3), StateChange.REPLACE);
assertThat(backstack.lookupFromScope("hello", "hello")).isSameAs(helloService);
assertThat(backstack.lookupFromScope("world", "world")).isSameAs(worldService);
assertThat(backstack.lookupFromScope("kappa", "kappa")).isSameAs(kappaService);
assertThat(backstack.lookupFromScope("hello", "hello", ScopeLookupMode.ALL)).isSameAs(helloService);
assertThat(backstack.lookupFromScope("world", "world", ScopeLookupMode.ALL)).isSameAs(worldService);
assertThat(backstack.lookupFromScope("kappa", "kappa", ScopeLookupMode.ALL)).isSameAs(kappaService);
assertThat(backstack.lookupFromScope("hello", "hello", ScopeLookupMode.EXPLICIT)).isSameAs(helloService);
assertThat(backstack.lookupFromScope("world", "world", ScopeLookupMode.EXPLICIT)).isSameAs(worldService);
assertThat(backstack.lookupFromScope("kappa", "kappa", ScopeLookupMode.EXPLICIT)).isSameAs(kappaService);
assertThat(backstack.lookupFromScope("world", "hello")).isSameAs(helloService);
assertThat(backstack.lookupFromScope("kappa", "hello")).isSameAs(helloService);
assertThat(backstack.lookupFromScope("kappa", "world")).isSameAs(worldService);
assertThat(backstack.lookupFromScope("world", "hello", ScopeLookupMode.ALL)).isSameAs(helloService);
assertThat(backstack.lookupFromScope("kappa", "hello", ScopeLookupMode.ALL)).isSameAs(helloService);
assertThat(backstack.lookupFromScope("kappa", "world", ScopeLookupMode.ALL)).isSameAs(worldService);
assertThat(backstack.lookupFromScope("world", "parent", ScopeLookupMode.EXPLICIT)).isSameAs(parentService);
//
assertThat(backstack.canFindFromScope("hello", "hello")).isTrue();
assertThat(backstack.canFindFromScope("world", "world")).isTrue();
assertThat(backstack.canFindFromScope("kappa", "kappa")).isTrue();
assertThat(backstack.canFindFromScope("hello", "hello", ScopeLookupMode.ALL)).isTrue();
assertThat(backstack.canFindFromScope("world", "world", ScopeLookupMode.ALL)).isTrue();
assertThat(backstack.canFindFromScope("kappa", "kappa", ScopeLookupMode.ALL)).isTrue();
assertThat(backstack.canFindFromScope("hello", "hello", ScopeLookupMode.EXPLICIT)).isTrue();
assertThat(backstack.canFindFromScope("world", "world", ScopeLookupMode.EXPLICIT)).isTrue();
assertThat(backstack.canFindFromScope("kappa", "kappa", ScopeLookupMode.EXPLICIT)).isTrue();
assertThat(backstack.canFindFromScope("world", "hello")).isTrue();
assertThat(backstack.canFindFromScope("kappa", "hello")).isTrue();
assertThat(backstack.canFindFromScope("kappa", "world")).isTrue();
assertThat(backstack.canFindFromScope("world", "hello", ScopeLookupMode.ALL)).isTrue();
assertThat(backstack.canFindFromScope("kappa", "hello", ScopeLookupMode.ALL)).isTrue();
assertThat(backstack.canFindFromScope("kappa", "world", ScopeLookupMode.ALL)).isTrue();
assertThat(backstack.canFindFromScope("world", "parent", ScopeLookupMode.EXPLICIT)).isTrue();
assertThat(backstack.canFindFromScope("parent", "parent", ScopeLookupMode.EXPLICIT)).isTrue();
// Completing the pending state change destroys "world" and "parent".
callbackRef.get().stateChangeComplete();
assertThat(backstack.canFindFromScope("world", "world", ScopeLookupMode.ALL)).isFalse();
assertThat(backstack.canFindFromScope("world", "parent", ScopeLookupMode.ALL)).isFalse();
assertThat(backstack.canFindFromScope("parent", "parent", ScopeLookupMode.ALL)).isFalse();
assertThat(backstack.canFindFromScope("world", "world", ScopeLookupMode.EXPLICIT)).isFalse();
assertThat(backstack.canFindFromScope("world", "parent", ScopeLookupMode.EXPLICIT)).isFalse();
assertThat(backstack.canFindFromScope("parent", "parent", ScopeLookupMode.EXPLICIT)).isFalse();
}
/**
 * Keys that declare no scope of their own (key4, key5) must still resolve, via
 * findScopesForKey in ALL mode, the scopes of the keys beneath them in history
 * — including during a pending state change — while EXPLICIT mode stays empty.
 */
@Test
public void keyWithinNavigationButWithoutScopeStillAbleToFindScopes() {
Backstack backstack = new Backstack();
final Object helloService = new Object();
final Object worldService = new Object();
final Object kappaService = new Object();
final Object parentService = new Object();
backstack.setScopedServices(new ScopedServices() {
@Override
public void bindServices(@Nonnull ServiceBinder serviceBinder) {
if("hello".equals(serviceBinder.getScopeTag())) {
serviceBinder.addService("hello", helloService);
} else if("world".equals(serviceBinder.getScopeTag())) {
serviceBinder.addService("world", worldService);
} else if("kappa".equals(serviceBinder.getScopeTag())) {
serviceBinder.addService("kappa", kappaService);
} else if("parent".equals(serviceBinder.getScopeTag())) {
serviceBinder.addService("parent", parentService);
}
}
});
// Key whose own scope additionally declares explicit parent scopes.
class TestKeyWithExplicitParent extends TestKeyWithScope implements ScopeKey.Child {
private String[] parentScopes;
TestKeyWithExplicitParent(String name, String... parentScopes) {
super(name);
this.parentScopes = parentScopes;
}
protected TestKeyWithExplicitParent(Parcel in) {
super(in);
}
@Nonnull
@Override
public List<String> getParentScopes() {
return History.from(Arrays.asList(parentScopes));
}
}
TestKeyWithScope scopeKey1 = new TestKeyWithScope("hello");
TestKeyWithScope scopeKey2 = new TestKeyWithExplicitParent("world", "parent");
TestKeyWithScope scopeKey3 = new TestKeyWithScope("kappa");
// key4 and key5 are plain keys without any scope of their own
TestKey key4 = new TestKey("360noscope");
TestKey key5 = new TestKey("180noscope");
backstack.setup(History.of(scopeKey1, scopeKey2, key4));
final AtomicReference<StateChanger.Callback> callbackRef = new AtomicReference<>();
backstack.setStateChanger(new StateChanger() {
@Override
public void handleStateChange(@Nonnull StateChange stateChange, @Nonnull Callback completionCallback) {
callbackRef.set(completionCallback);
}
});
callbackRef.get().stateChangeComplete();
assertThat(backstack.findScopesForKey(key4, ScopeLookupMode.EXPLICIT)).isEmpty();
assertThat(backstack.findScopesForKey(key4, ScopeLookupMode.ALL)).containsExactly("world", "parent", "hello");
// Replace history, leaving the state change pending: key4's scopes survive.
backstack.setHistory(History.of(scopeKey1, scopeKey3, key5), StateChange.REPLACE);
assertThat(backstack.findScopesForKey(key4, ScopeLookupMode.EXPLICIT)).isEmpty();
assertThat(backstack.findScopesForKey(key4, ScopeLookupMode.ALL)).containsExactly("world", "parent", "hello");
assertThat(backstack.findScopesForKey(key5, ScopeLookupMode.EXPLICIT)).isEmpty();
assertThat(backstack.findScopesForKey(key5, ScopeLookupMode.ALL)).containsExactly("kappa", "world", "parent", "hello");
// After completion, key4 is no longer in history and resolves nothing.
callbackRef.get().stateChangeComplete();
assertThat(backstack.findScopesForKey(key4, ScopeLookupMode.EXPLICIT)).isEmpty();
assertThat(backstack.findScopesForKey(key4, ScopeLookupMode.ALL)).isEmpty();
assertThat(backstack.findScopesForKey(key5, ScopeLookupMode.EXPLICIT)).isEmpty();
assertThat(backstack.findScopesForKey(key5, ScopeLookupMode.ALL)).containsExactly("kappa", "hello");
}
/**
 * A key that contributes only explicit parent scopes (no scope of its own) must
 * resolve its explicit scope plus the implicit scopes beneath it while it is in
 * history or pending, and resolve nothing once it has been navigated away.
 */
@Test
public void keyWithinNavigationWithOnlyExplicitScopeStillAbleToFindScopes() {
Backstack backstack = new Backstack();
final Object helloService = new Object();
final Object worldService = new Object();
final Object kappaService = new Object();
final Object parentService = new Object();
final Object parent2Service = new Object();
backstack.setScopedServices(new ScopedServices() {
@Override
public void bindServices(@Nonnull ServiceBinder serviceBinder) {
if("hello".equals(serviceBinder.getScopeTag())) {
serviceBinder.addService("hello", helloService);
} else if("world".equals(serviceBinder.getScopeTag())) {
serviceBinder.addService("world", worldService);
} else if("kappa".equals(serviceBinder.getScopeTag())) {
serviceBinder.addService("kappa", kappaService);
} else if("parent".equals(serviceBinder.getScopeTag())) {
serviceBinder.addService("parent", parentService);
} else if("parent2".equals(serviceBinder.getScopeTag())) {
serviceBinder.addService("parent2", parent2Service);
}
}
});
// Key with its own scope plus explicit parent scopes.
class TestKeyWithExplicitParent extends TestKeyWithScope implements ScopeKey.Child {
private String[] parentScopes;
TestKeyWithExplicitParent(String name, String... parentScopes) {
super(name);
this.parentScopes = parentScopes;
}
protected TestKeyWithExplicitParent(Parcel in) {
super(in);
}
@Nonnull
@Override
public List<String> getParentScopes() {
return History.from(Arrays.asList(parentScopes));
}
}
// Key with explicit parent scopes only — no scope of its own.
class TestKeyWithOnlyExplicitParent extends TestKey implements ScopeKey.Child {
private String[] parentScopes;
TestKeyWithOnlyExplicitParent(String name, String... parentScopes) {
super(name);
this.parentScopes = parentScopes;
}
protected TestKeyWithOnlyExplicitParent(Parcel in) {
super(in);
}
@Nonnull
@Override
public List<String> getParentScopes() {
return History.from(Arrays.asList(parentScopes));
}
}
TestKeyWithScope scopeKey1 = new TestKeyWithScope("hello");
TestKeyWithScope scopeKey2 = new TestKeyWithExplicitParent("world", "parent");
TestKeyWithScope scopeKey3 = new TestKeyWithScope("kappa");
TestKey key4 = new TestKeyWithOnlyExplicitParent("parentpls", "parent2");
backstack.setup(History.of(scopeKey1, scopeKey2, key4));
final AtomicReference<StateChanger.Callback> callbackRef = new AtomicReference<>();
backstack.setStateChanger(new StateChanger() {
@Override
public void handleStateChange(@Nonnull StateChange stateChange, @Nonnull Callback completionCallback) {
callbackRef.set(completionCallback);
}
});
callbackRef.get().stateChangeComplete();
assertThat(backstack.findScopesForKey(key4, ScopeLookupMode.EXPLICIT)).containsExactly("parent2");
assertThat(backstack.findScopesForKey(key4, ScopeLookupMode.ALL)).containsExactly("parent2", "world", "parent", "hello");
// Replace history; while pending, key4's scopes are still resolvable.
backstack.setHistory(History.of(scopeKey1, scopeKey3), StateChange.REPLACE);
assertThat(backstack.findScopesForKey(key4, ScopeLookupMode.EXPLICIT)).containsExactly("parent2");
assertThat(backstack.findScopesForKey(key4, ScopeLookupMode.ALL)).containsExactly("parent2", "world", "parent", "hello");
// After completion, key4 is gone from history and resolves nothing.
callbackRef.get().stateChangeComplete();
assertThat(backstack.findScopesForKey(key4, ScopeLookupMode.EXPLICIT)).isEmpty();
assertThat(backstack.findScopesForKey(key4, ScopeLookupMode.ALL)).isEmpty();
}
/**
 * Regression test for issue #220: re-attaching a state changer after two
 * history replaces were enqueued while detached used to crash.
 */
@Test
public void reproduceCrashIssue220() {
    Backstack backstack = new Backstack();

    // A plain key without any scope at all.
    Object plainKey = new Object();

    ScopeKey secondKey = new ScopeKey() {
        @Nonnull
        @Override
        public String getScopeTag() {
            return "key2";
        }

        @Override
        public String toString() {
            return "KEY2";
        }
    };

    ScopeKey thirdKey = new ScopeKey() {
        @Nonnull
        @Override
        public String getScopeTag() {
            return "key3";
        }

        @Override
        public String toString() {
            return "KEY3";
        }
    };

    backstack.setScopedServices(new ScopedServices() {
        @Override
        public void bindServices(@Nonnull ServiceBinder serviceBinder) {
            // just be there
        }
    });
    backstack.setup(History.of(plainKey));

    final StateChanger immediateStateChanger = new StateChanger() {
        @Override
        public void handleStateChange(@Nonnull StateChange stateChange, @Nonnull Callback completionCallback) {
            completionCallback.stateChangeComplete();
        }
    };
    backstack.setStateChanger(immediateStateChanger);

    // Enqueue two replaces while no state changer is attached...
    backstack.removeStateChanger();
    backstack.setHistory(History.of(secondKey), StateChange.REPLACE);
    backstack.setHistory(History.of(thirdKey), StateChange.REPLACE);

    // ...then re-attach: this is the call that used to crash.
    backstack.setStateChanger(immediateStateChanger);
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package storm.mesos;
import backtype.storm.generated.StormTopology;
import backtype.storm.scheduler.Topologies;
import backtype.storm.scheduler.TopologyDetails;
import backtype.storm.scheduler.WorkerSlot;
import org.apache.mesos.Protos.Offer;
import org.apache.mesos.Protos.OfferID;
import org.junit.Test;
import storm.mesos.util.MesosCommon;
import storm.mesos.util.RotatingMap;
import java.net.URI;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.junit.Assert.assertEquals;
// TODO(dskarthick) : Leverage the build methods defined in TestUtils function.
public class MesosNimbusTest {
/**
 * getResourcesScalar should carve the requested amount of a named scalar
 * resource ("cpus"/"mem") out of an offered resource list, drawing from the
 * default-role resources first and only then spilling into the "reserved" role.
 * NOTE(review): assumes TestUtils.buildResourceList(1, 2, 3, 4) builds
 * cpus=1.0/mem=2.0 for the default role plus cpus=3.0/mem=4.0 for the
 * "reserved" role -- confirm against TestUtils.
 */
@Test
public void testGetResourcesScalar() throws Exception {
MesosNimbus mesosNimbus = new MesosNimbus();
// Request exactly the unreserved amount: only default-role resources are used.
assertEquals(
Arrays.asList(TestUtils.buildScalarResource("cpus", 1.0)),
mesosNimbus.getResourcesScalar(
TestUtils.buildResourceList(1, 2, 3, 4),
1.0,
"cpus"
)
);
assertEquals(
Arrays.asList(TestUtils.buildScalarResource("mem", 2.0)),
mesosNimbus.getResourcesScalar(
TestUtils.buildResourceList(1, 2, 3, 4),
2.0,
"mem"
)
);
// Request more than the unreserved amount: the remainder comes from "reserved".
assertEquals(
Arrays.asList(
TestUtils.buildScalarResource("cpus", 1.0),
TestUtils.buildScalarResourceWithRole("cpus", 1.0, "reserved")
),
mesosNimbus.getResourcesScalar(
TestUtils.buildResourceList(1, 2, 3, 4),
2.0,
"cpus"
)
);
assertEquals(
Arrays.asList(
TestUtils.buildScalarResource("mem", 2.0),
TestUtils.buildScalarResourceWithRole("mem", 1.0, "reserved")
),
mesosNimbus.getResourcesScalar(
TestUtils.buildResourceList(1, 2, 3, 4),
3.0,
"mem"
)
);
// Request the full offered amount (unreserved + reserved).
assertEquals(
Arrays.asList(
TestUtils.buildScalarResource("cpus", 1.0),
TestUtils.buildScalarResourceWithRole("cpus", 3.0, "reserved")
),
mesosNimbus.getResourcesScalar(
TestUtils.buildResourceList(1, 2, 3, 4),
4.0,
"cpus"
)
);
assertEquals(
Arrays.asList(
TestUtils.buildScalarResource("mem", 2.0),
TestUtils.buildScalarResourceWithRole("mem", 4.0, "reserved")
),
mesosNimbus.getResourcesScalar(
TestUtils.buildResourceList(1, 2, 3, 4),
6.0,
"mem"
)
);
// Fractional spill into the reserved role.
assertEquals(
Arrays.asList(
TestUtils.buildScalarResource("cpus", 1.0),
TestUtils.buildScalarResourceWithRole("cpus", 0.5, "reserved")
),
mesosNimbus.getResourcesScalar(
TestUtils.buildResourceList(1, 2, 3, 4),
1.5,
"cpus"
)
);
assertEquals(
Arrays.asList(
TestUtils.buildScalarResource("mem", 2.0),
TestUtils.buildScalarResourceWithRole("mem", 0.5, "reserved")
),
mesosNimbus.getResourcesScalar(
TestUtils.buildResourceList(1, 2, 3, 4),
2.5,
"mem"
)
);
}
/**
 * subtractResourcesScalar is the complement of getResourcesScalar: it returns
 * what REMAINS of the offered resource list after removing the requested
 * amount, again consuming default-role resources before "reserved" ones.
 * A fully-consumed entry is dropped from the result rather than kept at 0.
 */
@Test
public void testSubtractResourcesScalar() throws Exception {
MesosNimbus mesosNimbus = new MesosNimbus();
// Removing all unreserved cpus (1.0) drops that entry entirely.
assertEquals(
Arrays.asList(
TestUtils.buildScalarResource("mem", 2.0),
TestUtils.buildScalarResourceWithRole("cpus", 3.0, "reserved"),
TestUtils.buildScalarResourceWithRole("mem", 4.0, "reserved")
),
mesosNimbus.subtractResourcesScalar(
TestUtils.buildResourceList(1, 2, 3, 4),
1.0,
"cpus"
)
);
// Removing part of the unreserved mem keeps a reduced entry.
assertEquals(
Arrays.asList(
TestUtils.buildScalarResource("cpus", 1.0),
TestUtils.buildScalarResource("mem", 1.0),
TestUtils.buildScalarResourceWithRole("cpus", 3.0, "reserved"),
TestUtils.buildScalarResourceWithRole("mem", 4.0, "reserved")
),
mesosNimbus.subtractResourcesScalar(
TestUtils.buildResourceList(1, 2, 3, 4),
1.0,
"mem"
)
);
// Subtraction spills into the reserved role once the unreserved pool is gone.
assertEquals(
Arrays.asList(
TestUtils.buildScalarResource("mem", 2.0),
TestUtils.buildScalarResourceWithRole("cpus", 2.5, "reserved"),
TestUtils.buildScalarResourceWithRole("mem", 4.0, "reserved")
),
mesosNimbus.subtractResourcesScalar(
TestUtils.buildResourceList(1, 2, 3, 4),
1.5,
"cpus"
)
);
assertEquals(
Arrays.asList(
TestUtils.buildScalarResource("cpus", 1.0),
TestUtils.buildScalarResourceWithRole("cpus", 3.0, "reserved"),
TestUtils.buildScalarResourceWithRole("mem", 3.5, "reserved")
),
mesosNimbus.subtractResourcesScalar(
TestUtils.buildResourceList(1, 2, 3, 4),
2.5,
"mem"
)
);
}
/**
 * getResourcesRange should extract a single requested port out of the offered
 * port ranges, returning an empty list when the port is not on offer.
 */
@Test
public void testGetResourcesRange() throws Exception {
MesosNimbus mesosNimbus = new MesosNimbus();
// Exact single-port range.
assertEquals(
Arrays.asList(
TestUtils.buildRangeResource("ports", 100, 100)
),
mesosNimbus.getResourcesRange(
TestUtils.buildRangeResourceList(100, 100),
100,
"ports"
)
);
// Port taken from the middle of a wider range.
assertEquals(
Arrays.asList(
TestUtils.buildRangeResource("ports", 105, 105)
),
mesosNimbus.getResourcesRange(
TestUtils.buildRangeResourceList(100, 200),
105,
"ports"
)
);
// Requested port not in the offered range: nothing is returned.
assertEquals(
0,
mesosNimbus.getResourcesRange(
TestUtils.buildRangeResourceList(100, 100),
200,
"ports"
).size()
);
}
/**
 * subtractResourcesRange returns the leftovers after removing one port.
 * NOTE(review): the expected values show that buildRangeResourceList also
 * includes the standard scalar fixtures (cpus/mem for both roles), which
 * survive the subtraction untouched -- confirm against TestUtils.
 */
@Test
public void testSubtractResourcesRange() throws Exception {
MesosNimbus mesosNimbus = new MesosNimbus();
// Removing the only offered port consumes the range completely; only the
// scalar resources remain.
assertEquals(
Arrays.asList(
TestUtils.buildScalarResource("cpus", 1.0),
TestUtils.buildScalarResource("mem", 2.0),
TestUtils.buildScalarResourceWithRole("cpus", 3.0, "reserved"),
TestUtils.buildScalarResourceWithRole("mem", 4.0, "reserved")
),
mesosNimbus.subtractResourcesRange(
TestUtils.buildRangeResourceList(100, 100),
100,
"ports"
)
);
// Removing a middle port splits [100,200] into [100,104] and [106,200],
// for both the default and the "reserved" role.
assertEquals(
Arrays.asList(
TestUtils.buildMultiRangeResource("ports", 100, 104, 106, 200),
TestUtils.buildMultiRangeResourceWithRole("ports", 100, 104, 106, 200, "reserved"),
TestUtils.buildScalarResource("cpus", 1.0),
TestUtils.buildScalarResource("mem", 2.0),
TestUtils.buildScalarResourceWithRole("cpus", 3.0, "reserved"),
TestUtils.buildScalarResourceWithRole("mem", 4.0, "reserved")
),
mesosNimbus.subtractResourcesRange(
TestUtils.buildRangeResourceList(100, 200),
105,
"ports"
)
);
}
/**
 * End-to-end check of computeResourcesForSlot: given one offer and one worker
 * slot, exactly one launch task should be produced and the offer's resources
 * should be fully consumed afterwards.
 */
@Test
public void testComputeResourcesForSlot() throws Exception {
MesosNimbus mesosNimbus = new MesosNimbus();
mesosNimbus._configUrl = new URI("http://127.0.0.1/");
OfferID offerId = OfferID.newBuilder().setValue("derp").build();
RotatingMap<OfferID, Offer> offers = new RotatingMap<>(
new RotatingMap.ExpiredCallback<OfferID, Offer>() {
@Override
public void expire(OfferID key, Offer val) {
}
}
);
// Offer: 2 cpus, 2048 MB mem, port 1000.
offers.put(
offerId,
TestUtils.buildOfferWithPorts("offer1", "host1.west", 2.0, 2048, 1000, 1000)
);
HashMap<String, TopologyDetails> topologyMap = new HashMap<>();
Map conf = new HashMap<>();
// Worker + executor each need 1 cpu / 1024 MB, together consuming the offer.
conf.put(MesosCommon.WORKER_CPU_CONF, 1);
conf.put(MesosCommon.WORKER_MEM_CONF, 1024);
conf.put(MesosCommon.EXECUTOR_CPU_CONF, 1);
conf.put(MesosCommon.EXECUTOR_MEM_CONF, 1024);
conf.put(MesosNimbus.CONF_EXECUTOR_URI, "");
mesosNimbus._conf = conf;
topologyMap.put("t1", new TopologyDetails("t1", conf, new StormTopology(), 5));
HashMap<OfferID, List<LaunchTask>> launchList = new HashMap<>();
HashMap<OfferID, List<WorkerSlot>> slotList = new HashMap<>();
slotList.put(offerId, Arrays.asList(new WorkerSlot("", 1000)));
Topologies topologies = new Topologies(topologyMap);
// Note: a structurally-equal OfferID is passed here, relying on protobuf
// value equality to match the stored offerId.
mesosNimbus.computeResourcesForSlot(
offers,
topologies,
launchList,
"t1",
slotList,
OfferID.newBuilder().setValue("derp").build()
);
assertEquals(1, launchList.size());
assertEquals(1, launchList.get(offerId).size());
assertEquals(
TestUtils.buildScalarResource("cpus", 1.0),
launchList.get(offerId).get(0).getTask().getResources(0)
);
// The slot consumed everything the offer had.
assertEquals(0, offers.get(offerId).getResourcesCount());
}
}
| |
/****************************************************************
* Licensed to the AOS Community (AOS) under one or more *
* contributor license agreements. See the NOTICE file *
* distributed with this work for additional information *
* regarding copyright ownership. The AOS licenses this file *
* to you under the Apache License, Version 2.0 (the *
* "License"); you may not use this file except in compliance *
* with the License. You may obtain a copy of the License at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, *
* software distributed under the License is distributed on an *
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY *
* KIND, either express or implied. See the License for the *
* specific language governing permissions and limitations *
* under the License. *
****************************************************************/
package aos.data.yaml.collections;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.SortedMap;
import java.util.TreeMap;
import junit.framework.TestCase;
import org.junit.Ignore;
import org.yaml.snakeyaml.Yaml;
import aos.data.yaml.Util;
/**
 * Tests different {@link java.util.Map} implementations ({@link SortedMap},
 * {@link Properties}) both as JavaBean properties and as plain YAML nodes,
 * including recursive structures.
 */
@Ignore
public class TypeSafeMapImplementationsTest extends TestCase {
    /** Dumping a bean with SortedMap and Properties fields must match the stored example. */
    public void testDumpMap() {
        MapBean bean = new MapBean();
        SortedMap<String, String> sortedMap = new TreeMap<String, String>();
        sortedMap.put("2", "two");
        sortedMap.put("1", "one");
        bean.setSorted(sortedMap);
        Properties props = new Properties();
        props.setProperty("key1", "value1");
        props.setProperty("key2", "value2");
        bean.setProperties(props);
        Yaml yaml = new Yaml();
        String output = yaml.dumpAsMap(bean);
        String etalon = Util.getLocalResource("examples/map-bean-1.yaml");
        assertEquals(etalon, output);
    }

    /** Loading the example document must repopulate both map-typed bean properties. */
    public void testLoadMap() {
        String output = Util.getLocalResource("examples/map-bean-1.yaml");
        Yaml beanLoader = new Yaml();
        MapBean parsed = beanLoader.loadAs(output, MapBean.class);
        assertNotNull(parsed);
        SortedMap<String, String> sortedMap = parsed.getSorted();
        assertEquals(2, sortedMap.size());
        assertEquals("one", sortedMap.get("1"));
        assertEquals("two", sortedMap.get("2"));
        // The SortedMap property must actually sort: "1" comes first.
        String first = sortedMap.keySet().iterator().next();
        assertEquals("1", first);
        //
        Properties props = parsed.getProperties();
        assertEquals(2, props.size());
        assertEquals("value1", props.getProperty("key1"));
        assertEquals("value2", props.getProperty("key2"));
    }

    /** Simple JavaBean with SortedMap and Properties properties used as the fixture. */
    public static class MapBean {
        private SortedMap<String, String> sorted;
        private Properties properties;
        private String name;

        public MapBean() {
            name = "Bean123";
        }

        public SortedMap<String, String> getSorted() {
            return sorted;
        }

        public void setSorted(SortedMap<String, String> sorted) {
            this.sorted = sorted;
        }

        public Properties getProperties() {
            return properties;
        }

        public void setProperties(Properties properties) {
            this.properties = properties;
        }

        public String getName() {
            return name;
        }

        public void setName(String name) {
            this.name = name;
        }
    }

    /**
     * Round-trips a plain (non-bean) list containing a SortedMap, a Properties
     * and a String through dump/load and checks the re-loaded content.
     */
    @SuppressWarnings("unchecked")
    public void testNoJavaBeanMap() {
        List<Object> list = new ArrayList<Object>(3);
        SortedMap<String, String> sortedMap = new TreeMap<String, String>();
        sortedMap.put("2", "two");
        sortedMap.put("1", "one");
        list.add(sortedMap);
        Properties props = new Properties();
        props.setProperty("key1", "value1");
        props.setProperty("key2", "value2");
        list.add(props);
        list.add("aaa");
        Yaml yaml = new Yaml();
        String output = yaml.dump(list);
        String etalon = Util.getLocalResource("examples/map-bean-2.yaml");
        assertEquals(etalon, output);
        // load
        List<Object> list2 = (List<Object>) yaml.load(output);
        assertEquals(3, list2.size());
        // BUG FIX: the assertions below previously inspected the original
        // 'list' instead of the re-loaded 'list2', so the load half of the
        // round-trip was never actually verified.
        Map<Object, Object> map1 = (Map<Object, Object>) list2.get(0); // was a SortedMap before dumping
        assertEquals(2, map1.size());
        assertEquals("one", map1.get("1"));
        assertEquals("two", map1.get("2"));
        Map<Object, Object> map2 = (Map<Object, Object>) list2.get(1); // was a Properties before dumping
        assertEquals(2, map2.size());
        assertEquals("value1", map2.get("key1"));
        assertEquals("value2", map2.get("key2"));
        assertEquals("aaa", list2.get(2));
    }

    /** A SortedMap containing itself must dump with an anchor/alias and still load. */
    public void testRecursiveNoJavaBeanMap1() {
        SortedMap<String, Object> sortedMap = new TreeMap<String, Object>();
        sortedMap.put("2", "two");
        sortedMap.put("1", "one");
        sortedMap.put("3", sortedMap);
        Yaml yaml = new Yaml();
        String output = yaml.dump(sortedMap);
        String etalon = Util.getLocalResource("examples/map-recursive-1.yaml");
        assertEquals(etalon, output);
        // load with different order
        @SuppressWarnings("unchecked")
        Map<Object, Object> map1 = (Map<Object, Object>) yaml.load(Util
                .getLocalResource("examples/map-recursive-1_1.yaml"));
        assertEquals(3, map1.size());
        assertEquals("one", map1.get("1"));
        assertEquals("two", map1.get("2"));
        // test that the order is taken from YAML instead of sorting
        String first = (String) map1.keySet().iterator().next();
        assertEquals("2", first);
    }

    /** Recursive Properties can be dumped; the document loads back as a plain Map. */
    @SuppressWarnings("unchecked")
    public void testRecursiveNoJavaBeanProperties2() {
        Properties props = new Properties();
        props.setProperty("key1", "value1");
        props.setProperty("key2", "value2");
        Map<Object, Object> map = props;
        map.put("key3", props);
        Yaml yaml = new Yaml();
        String output = yaml.dump(props);
        String etalon = Util.getLocalResource("examples/map-recursive-2.yaml");
        assertEquals(etalon, output);
        // load
        Map<Object, Object> map2 = (Map<Object, Object>) yaml.load(output);
        assertEquals(3, map2.size());
        assertEquals("value1", map2.get("key1"));
        assertEquals("value2", map2.get("key2"));
    }

    /** A document tagged as a sorted map must sort its keys regardless of YAML order. */
    public void testRecursiveNoJavaBeanMap3() {
        Yaml yaml = new Yaml();
        String output = Util.getLocalResource("examples/map-recursive-3.yaml");
        @SuppressWarnings("unchecked")
        SortedMap<Object, Object> map1 = (SortedMap<Object, Object>) yaml.load(output);
        assertEquals(3, map1.size());
        assertEquals("one", map1.get("1"));
        assertEquals("two", map1.get("2"));
        // test that the order is NOT taken from YAML but sorted
        String first = (String) map1.keySet().iterator().next();
        assertEquals("1", first);
    }

    /** Loading recursive Properties is unsupported and must fail with a clear message. */
    public void testRecursiveNoJavaBeanProperties4() {
        Yaml yaml = new Yaml();
        String output = Util.getLocalResource("examples/map-recursive-4.yaml");
        try {
            yaml.load(output);
            fail("Recursive Properties are not supported.");
        } catch (Exception e) {
            assertTrue(e.getMessage(), e.getMessage().contains("Properties must not be recursive."));
        }
    }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.transfer.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
 * Describes an error raised while a workflow execution was running: an error
 * type plus a human-readable message.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/transfer-2018-11-05/ExecutionError" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ExecutionError implements Serializable, Cloneable, StructuredPojo {
    /**
     * The error type. Currently the only valid value is {@code PERMISSION_DENIED},
     * reported when the policy lacks permissions for one or more workflow steps.
     */
    private String type;
    /** The descriptive message corresponding to the {@code ErrorType}. */
    private String message;

    /**
     * Sets the error type.
     *
     * @param type the error type; currently only {@code PERMISSION_DENIED} is valid.
     * @see ExecutionErrorType
     */
    public void setType(String type) {
        this.type = type;
    }

    /**
     * @return the error type; currently only {@code PERMISSION_DENIED} is valid.
     * @see ExecutionErrorType
     */
    public String getType() {
        return this.type;
    }

    /**
     * Fluent variant of {@link #setType(String)}.
     *
     * @param type the error type.
     * @return this object, so calls can be chained.
     * @see ExecutionErrorType
     */
    public ExecutionError withType(String type) {
        setType(type);
        return this;
    }

    /**
     * Fluent setter taking the enum form; stored as its string representation.
     *
     * @param type the error type.
     * @return this object, so calls can be chained.
     * @see ExecutionErrorType
     */
    public ExecutionError withType(ExecutionErrorType type) {
        this.type = type.toString();
        return this;
    }

    /**
     * Sets the descriptive message corresponding to the {@code ErrorType}.
     *
     * @param message the descriptive message.
     */
    public void setMessage(String message) {
        this.message = message;
    }

    /** @return the descriptive message corresponding to the {@code ErrorType}. */
    public String getMessage() {
        return this.message;
    }

    /**
     * Fluent variant of {@link #setMessage(String)}.
     *
     * @param message the descriptive message.
     * @return this object, so calls can be chained.
     */
    public ExecutionError withMessage(String message) {
        setMessage(message);
        return this;
    }

    /**
     * String form for testing/debugging; sensitive members would be rendered
     * as a redaction placeholder.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        String repr = "{";
        if (getType() != null) {
            repr += "Type: " + getType() + ",";
        }
        if (getMessage() != null) {
            repr += "Message: " + getMessage();
        }
        return repr + "}";
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof ExecutionError)) {
            // also covers obj == null
            return false;
        }
        ExecutionError that = (ExecutionError) obj;
        return nullSafeEquals(getType(), that.getType())
                && nullSafeEquals(getMessage(), that.getMessage());
    }

    // Null-tolerant equality helper used by equals(); kept local to avoid
    // introducing a java.util.Objects import into this generated-style file.
    private static boolean nullSafeEquals(Object a, Object b) {
        return a == null ? b == null : a.equals(b);
    }

    @Override
    public int hashCode() {
        // Same 31-based accumulation (type first, then message) the SDK code
        // generator emits, so hash values are unchanged.
        int hash = 1;
        hash = 31 * hash + (getType() == null ? 0 : getType().hashCode());
        hash = 31 * hash + (getMessage() == null ? 0 : getMessage().hashCode());
        return hash;
    }

    @Override
    public ExecutionError clone() {
        try {
            return (ExecutionError) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.transfer.model.transform.ExecutionErrorMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| |
/*
* Copyright 2010-2013 Ning, Inc.
*
* Ning licenses this file to you under the Apache License, version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.killbill.billing.subscription.engine.dao.model;
import java.util.UUID;
import org.joda.time.DateTime;
import org.killbill.billing.subscription.events.EventBaseBuilder;
import org.killbill.billing.subscription.events.SubscriptionBaseEvent;
import org.killbill.billing.subscription.events.SubscriptionBaseEvent.EventType;
import org.killbill.billing.subscription.events.bcd.BCDEvent;
import org.killbill.billing.subscription.events.bcd.BCDEventBuilder;
import org.killbill.billing.subscription.events.phase.PhaseEvent;
import org.killbill.billing.subscription.events.phase.PhaseEventBuilder;
import org.killbill.billing.subscription.events.user.ApiEvent;
import org.killbill.billing.subscription.events.user.ApiEventBuilder;
import org.killbill.billing.subscription.events.user.ApiEventType;
import org.killbill.billing.subscription.exceptions.SubscriptionBaseError;
import org.killbill.billing.util.dao.TableName;
import org.killbill.billing.util.entity.dao.EntityModelDao;
import org.killbill.billing.util.entity.dao.EntityModelDaoBase;
/**
 * DAO model mapping a row of the subscription events table to and from
 * {@link SubscriptionBaseEvent} instances. Which of the optional columns are
 * populated depends on the event type (API_USER, PHASE or BCD_UPDATE).
 */
public class SubscriptionEventModelDao extends EntityModelDaoBase implements EntityModelDao<SubscriptionBaseEvent> {
    private long totalOrdering;
    private EventType eventType;
    // Only meaningful when eventType == API_USER; null otherwise.
    private ApiEventType userType;
    private DateTime effectiveDate;
    private UUID subscriptionId;
    // planName/priceListName are populated for API_USER events only;
    // phaseName for API_USER and PHASE events.
    private String planName;
    private String phaseName;
    private String priceListName;
    // Only meaningful for BCD_UPDATE events; 0 otherwise.
    private int billingCycleDayLocal;
    private boolean isActive;

    public SubscriptionEventModelDao() {
        /* For the DAO mapper */
    }

    public SubscriptionEventModelDao(final UUID id, final long totalOrdering, final EventType eventType, final ApiEventType userType,
                                     final DateTime effectiveDate, final UUID subscriptionId,
                                     final String planName, final String phaseName, final String priceListName, final int billingCycleDayLocal,
                                     final boolean active, final DateTime createDate, final DateTime updateDate) {
        super(id, createDate, updateDate);
        this.totalOrdering = totalOrdering;
        this.eventType = eventType;
        this.userType = userType;
        this.effectiveDate = effectiveDate;
        this.subscriptionId = subscriptionId;
        this.planName = planName;
        this.phaseName = phaseName;
        this.priceListName = priceListName;
        this.billingCycleDayLocal = billingCycleDayLocal;
        this.isActive = active;
    }

    /** Builds the row representation of a domain event, extracting only the fields valid for its type. */
    public SubscriptionEventModelDao(final SubscriptionBaseEvent src) {
        super(src.getId(), src.getCreatedDate(), src.getUpdatedDate());
        this.totalOrdering = src.getTotalOrdering();
        this.eventType = src.getType();
        this.userType = eventType == EventType.API_USER ? ((ApiEvent) src).getApiEventType() : null;
        this.effectiveDate = src.getEffectiveDate();
        this.subscriptionId = src.getSubscriptionId();
        this.planName = eventType == EventType.API_USER ? ((ApiEvent) src).getEventPlan() : null;
        if (eventType == EventType.API_USER) {
            this.phaseName = ((ApiEvent) src).getEventPlanPhase();
        } else if (eventType == EventType.PHASE) {
            this.phaseName = ((PhaseEvent) src).getPhase();
        } else {
            this.phaseName = null;
        }
        this.priceListName = eventType == EventType.API_USER ? ((ApiEvent) src).getPriceList() : null;
        this.billingCycleDayLocal = eventType == EventType.BCD_UPDATE ? ((BCDEvent) src).getBillCycleDayLocal() : 0;
        this.isActive = src.isActive();
    }

    public long getTotalOrdering() {
        return totalOrdering;
    }

    public EventType getEventType() {
        return eventType;
    }

    public ApiEventType getUserType() {
        return userType;
    }

    public DateTime getEffectiveDate() {
        return effectiveDate;
    }

    public UUID getSubscriptionId() {
        return subscriptionId;
    }

    public String getPlanName() {
        return planName;
    }

    public String getPhaseName() {
        return phaseName;
    }

    public String getPriceListName() {
        return priceListName;
    }

    public int getBillingCycleDayLocal() {
        return billingCycleDayLocal;
    }

    // Bean-style accessor required by the jdbi binder; same value as isActive().
    public boolean getIsActive() {
        return isActive;
    }

    public boolean isActive() {
        return isActive;
    }

    public void setTotalOrdering(final long totalOrdering) {
        this.totalOrdering = totalOrdering;
    }

    public void setEventType(final EventType eventType) {
        this.eventType = eventType;
    }

    public void setUserType(final ApiEventType userType) {
        this.userType = userType;
    }

    public void setEffectiveDate(final DateTime effectiveDate) {
        this.effectiveDate = effectiveDate;
    }

    public void setSubscriptionId(final UUID subscriptionId) {
        this.subscriptionId = subscriptionId;
    }

    public void setPlanName(final String planName) {
        this.planName = planName;
    }

    public void setPhaseName(final String phaseName) {
        this.phaseName = phaseName;
    }

    public void setPriceListName(final String priceListName) {
        this.priceListName = priceListName;
    }

    public void setBillingCycleDayLocal(final int billingCycleDayLocal) {
        this.billingCycleDayLocal = billingCycleDayLocal;
    }

    public void setIsActive(final boolean isActive) {
        this.isActive = isActive;
    }

    /**
     * Reconstructs the domain event from its row representation, choosing the
     * builder matching the stored event type.
     *
     * @param src the row, or null
     * @return the rebuilt event, or null when {@code src} is null
     * @throws SubscriptionBaseError for an unrecognized event type
     */
    public static SubscriptionBaseEvent toSubscriptionEvent(final SubscriptionEventModelDao src) {
        if (src == null) {
            return null;
        }
        final EventBaseBuilder<?> base;
        if (src.getEventType() == EventType.PHASE) {
            base = new PhaseEventBuilder();
        } else if (src.getEventType() == EventType.BCD_UPDATE) {
            base = new BCDEventBuilder();
        } else {
            base = new ApiEventBuilder();
        }
        base.setTotalOrdering(src.getTotalOrdering())
            .setUuid(src.getId())
            .setSubscriptionId(src.getSubscriptionId())
            .setCreatedDate(src.getCreatedDate())
            .setUpdatedDate(src.getUpdatedDate())
            .setEffectiveDate(src.getEffectiveDate())
            .setActive(src.isActive());
        SubscriptionBaseEvent result;
        if (src.getEventType() == EventType.PHASE) {
            result = (new PhaseEventBuilder(base).setPhaseName(src.getPhaseName())).build();
        } else if (src.getEventType() == EventType.API_USER) {
            // Fix: setApiEventType was previously called twice in a row;
            // the redundant duplicate has been removed.
            final ApiEventBuilder builder = new ApiEventBuilder(base)
                    .setEventPlan(src.getPlanName())
                    .setEventPlanPhase(src.getPhaseName())
                    .setEventPriceList(src.getPriceListName())
                    .setApiEventType(src.getUserType())
                    .setFromDisk(true);
            result = builder.build();
        } else if (src.getEventType() == EventType.BCD_UPDATE) {
            result = (new BCDEventBuilder(base).setBillCycleDayLocal(src.getBillingCycleDayLocal())).build();
        } else {
            throw new SubscriptionBaseError(String.format("Can't figure out event %s", src.getEventType()));
        }
        return result;
    }

    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder();
        sb.append("SubscriptionEventModelDao");
        sb.append("{totalOrdering=").append(totalOrdering);
        sb.append(", eventType=").append(eventType);
        sb.append(", userType=").append(userType);
        sb.append(", effectiveDate=").append(effectiveDate);
        sb.append(", subscriptionId=").append(subscriptionId);
        sb.append(", planName='").append(planName).append('\'');
        sb.append(", phaseName='").append(phaseName).append('\'');
        sb.append(", priceListName='").append(priceListName).append('\'');
        sb.append(", billingCycleDayLocal=").append(billingCycleDayLocal);
        sb.append(", isActive=").append(isActive);
        sb.append('}');
        return sb.toString();
    }

    @Override
    public boolean equals(final Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        if (!super.equals(o)) {
            return false;
        }
        final SubscriptionEventModelDao that = (SubscriptionEventModelDao) o;
        if (isActive != that.isActive) {
            return false;
        }
        if (totalOrdering != that.totalOrdering) {
            return false;
        }
        if (effectiveDate != null ? !effectiveDate.equals(that.effectiveDate) : that.effectiveDate != null) {
            return false;
        }
        if (eventType != that.eventType) {
            return false;
        }
        if (phaseName != null ? !phaseName.equals(that.phaseName) : that.phaseName != null) {
            return false;
        }
        if (planName != null ? !planName.equals(that.planName) : that.planName != null) {
            return false;
        }
        if (priceListName != null ? !priceListName.equals(that.priceListName) : that.priceListName != null) {
            return false;
        }
        if (subscriptionId != null ? !subscriptionId.equals(that.subscriptionId) : that.subscriptionId != null) {
            return false;
        }
        if (userType != that.userType) {
            return false;
        }
        if (billingCycleDayLocal != that.billingCycleDayLocal) {
            return false;
        }
        return true;
    }

    @Override
    public int hashCode() {
        int result = super.hashCode();
        result = 31 * result + (int) (totalOrdering ^ (totalOrdering >>> 32));
        result = 31 * result + (eventType != null ? eventType.hashCode() : 0);
        result = 31 * result + (userType != null ? userType.hashCode() : 0);
        result = 31 * result + (effectiveDate != null ? effectiveDate.hashCode() : 0);
        result = 31 * result + (subscriptionId != null ? subscriptionId.hashCode() : 0);
        result = 31 * result + (planName != null ? planName.hashCode() : 0);
        result = 31 * result + (phaseName != null ? phaseName.hashCode() : 0);
        result = 31 * result + (priceListName != null ? priceListName.hashCode() : 0);
        // Fix: billingCycleDayLocal participates in equals() but was missing
        // from hashCode(); included for consistency between the two.
        result = 31 * result + billingCycleDayLocal;
        result = 31 * result + (isActive ? 1 : 0);
        return result;
    }

    @Override
    public TableName getTableName() {
        return TableName.SUBSCRIPTION_EVENTS;
    }

    @Override
    public TableName getHistoryTableName() {
        // Subscription events keep no history table.
        return null;
    }
}
| |
package cyclops.data;
import com.oath.cyclops.types.traversable.IterableX;
import cyclops.data.*;
import cyclops.companion.Semigroups;
import cyclops.control.Maybe;
import cyclops.control.Option;
import cyclops.data.basetests.BaseImmutableSetTest;
import cyclops.data.tuple.Tuple2;
import cyclops.reactive.ReactiveSeq;
import org.hamcrest.CoreMatchers;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.function.UnaryOperator;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static java.util.Arrays.asList;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasItems;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertEquals;
/**
 * Bag-specific cases for the shared immutable-set test suite. A Bag keeps
 * duplicate elements and has no defined iteration order, so several
 * inherited tests are overridden here to assert on sizes or membership
 * rather than on element order.
 */
public class BagTest extends BaseImmutableSetTest {
    // Intentionally disabled; kept to mirror the base suite's test name.
    @Test @Ignore
    public void printNull() {
    }
    // Varargs and Stream factories must produce equal Bags.
    @Test
    public void testBag(){
        Bag<Integer> bag1 = Bag.of(1,2,3,4,10,1,1,2);
        Bag<Integer> bag2 = Bag.fromStream(Stream.of(1,2,3,4,10,1,1,2));
        assertThat(bag1,equalTo(bag2));
    }
    // removeValue drops one occurrence: three 1s become two.
    @Test
    public void minus(){
        Bag<Integer> bag1 = Bag.of(1,2,3,4,10,1,1,2);
        assertThat(bag1.removeValue(1).instances(1),equalTo(2));
    }
    // Overridden: only membership is checked, since a Bag has no order.
    @Test
    @Override
    public void insertAtIterable(){
        List<String> result = of(1,2,3).insertAt(1,of(100,200,300))
                .map(it ->it+"!!").collect(Collectors.toList());
        assertThat(result, hasItems("1!!","100!!","200!!","300!!","2!!","3!!"));
    }
    // Streaming a Bag emits every duplicate occurrence (8 elements here).
    @Test
    public void stream(){
        List<Integer> l = Bag.of(1, 2, 3, 4, 10, 1, 1, 2).stream().toList();
        assertThat(l.size(),equalTo(8));
    }
    // ---- Factory hooks consumed by the inherited base-suite tests ----
    @Override
    protected <T> ImmutableSet<T> fromStream(Stream<T> s) {
        return Bag.fromStream(s);
    }
    @Override
    public <T> Bag<T> empty() {
        return Bag.empty();
    }
    @Override
    public <T> ImmutableSet<T> of(T... values) {
        return Bag.of(values);
    }
    @Override
    public IterableX<Integer> range(int start, int end) {
        return Bag.fromStream(ReactiveSeq.range(start,end));
    }
    @Override
    public IterableX<Long> rangeLong(long start, long end) {
        return Bag.fromStream(ReactiveSeq.rangeLong(start,end));
    }
    @Override
    public <T> IterableX<T> iterate(int times, T seed, UnaryOperator<T> fn) {
        return Bag.fromStream(ReactiveSeq.iterate(seed,fn).take(times));
    }
    @Override
    public <T> IterableX<T> generate(int times, Supplier<T> fn) {
        return Bag.fromStream(ReactiveSeq.generate(fn).take(times));
    }
    @Override
    public <U, T> IterableX<T> unfold(U seed, Function<? super U, Option<Tuple2<T, U>>> unfolder) {
        return Bag.fromStream(ReactiveSeq.unfold(seed,unfolder));
    }
    // Unlike a Set, duplicates count towards size.
    @Test
    public void duplicates(){
        assertThat(of(1,2,1,2,1,2).size(),equalTo(6));
    }
    // Cross product of 3 elements with 10 yields 30 combinations.
    @Test
    public void forEach2() {
        assertThat(of(1, 2, 3).forEach2(a -> Arrays.asList(0, 1, 2, 3, 4, 5, 6, 7, 8, 9), (a , b) -> a + b).toList().size(),
                equalTo(30));
    }
    // Cycle tests below assert on sizes only — ordering is undefined for a Bag.
    @Test
    public void testCycleNoOrd() {
        assertEquals(asList(1, 2, 1, 2, 1, 2).size(),of(1, 2).cycle(3).toList().size());
        assertEquals(asList(1, 2, 3, 1, 2, 3).size(), of(1, 2, 3).cycle(2).toList().size());
    }
    @Test
    public void testCycleTimesNoOrd() {
        assertEquals(asList(1, 2, 1, 2, 1, 2).size(),of(1, 2).cycle(3).toList().size());
    }
    // Shared mutable counter driving the cycleWhile/cycleUntil predicates;
    // reset at the start of each test that uses it.
    int count =0;
    @Test
    public void testCycleWhileNoOrd() {
        count =0;
        assertEquals(asList(1, 2,3, 1, 2,3).size(),of(1, 2, 3).cycleWhile(next->count++<6).toList().size());
    }
    @Test
    public void testCycleUntilNoOrd() {
        count =0;
        assertEquals(asList(1, 2,3, 1, 2,3).size(),of(1, 2, 3).cycleUntil(next->count++==6).toList().size());
    }
    // Only the first element matching the predicate is removed.
    @Test
    public void removeFirst(){
        IterableX<Integer> vec = this.of(1,2,2,2,3);
        assertThat(vec.removeFirst(i->i==2),equalTo(of(1,2,2,3)));
    }
    // combine merges adjacent equal elements via the semigroup; the expected
    // {3,4} implies the merge cascades (1+1=2, then 2+2=4) — see combineNoOrd.
    @Test
    public void combine(){
        assertThat(of(1,1,2,3)
                   .combine((a, b)->a.equals(b), Semigroups.intSum)
                   .toSet(),equalTo(new java.util.HashSet<>(Arrays.asList(3,4))));
    }
    @Test
    public void testCycleUntil() {
        count =0;
        System.out.println("List " + of(1, 2, 3).peek(System.out::println).cycleUntil(next->count++==6).toList());
        count =0;
        assertEquals(6,of(1, 2, 3).cycleUntil(next->count++==6).toList().size());
    }
    @Test
    public void testCycleWhile() {
        count =0;
        assertEquals(6,of(1, 2, 3).cycleWhile(next->count++<6).toList().size());
    }
    @Test
    public void testCycleTimesNoOrder() {
        assertEquals(6,of(1, 2).cycle(3).toList().size());
    }
    @Test
    public void combineNoOrd(){
        assertThat(of(1,1,2,3)
                   .combine((a, b)->a.equals(b), Semigroups.intSum)
                   .toList(),equalTo(Arrays.asList(3,4)));
    }
    // Grouping six equal values into TreeSets collapses them to one element
    // in one group.
    @Test
    public void batchBySizeSet(){
        System.out.println("List = " + of(1,1,1,1,1,1).grouped(3,()->TreeSet.empty()).toList());
        assertThat(of(1,1,1,1,1,1).grouped(3,()->TreeSet.empty()).toList().get(0).size(),is(1));
        assertThat(of(1,1,1,1,1,1).grouped(3,()->TreeSet.empty()).toList().size(),is(1));
    }
    @Test
    public void testCycleNoOrder() {
        assertEquals(6,of(1, 2).cycle(3).toList().size());
        assertEquals(6, of(1, 2, 3).cycle(2).toList().size());
    }
    // Indices are long-valued, hence the 0l/1l/2l literals.
    @Test
    public void lastIndexOf(){
        assertThat(empty().lastIndexOf(e->true),equalTo(Maybe.nothing()));
        assertThat(of(1).lastIndexOf(e->true),equalTo(Maybe.just(0l)));
        assertThat(of(1).lastIndexOf(e->false),equalTo(Maybe.nothing()));
        assertThat(of(1,2,3).lastIndexOf(e-> Objects.equals(2,e)),equalTo(Maybe.just(1l)));
        assertThat(of(1,2,3,2).lastIndexOf(e->Objects.equals(2,e)),equalTo(Maybe.just(2l)));
    }
    // NOTE(review): "Slize" looks like a typo for "Slice"; renaming would be
    // safe for JUnit but is left to a dedicated cleanup.
    @Test
    public void lastIndexOfSlize(){
        assertThat(empty().lastIndexOfSlice(Seq.of(1,2,3)),equalTo(Maybe.nothing()));
        assertThat(of(1,2,3).lastIndexOfSlice(Seq.of(1,2,3)),equalTo(Maybe.just(0l)));
        assertThat(of(1).lastIndexOfSlice(Seq.of(1,2,3)),equalTo(Maybe.nothing()));
        assertThat(of(0,1,2,3,4,5,6,1,2,3).lastIndexOfSlice(Seq.of(1,2,3)),equalTo(Maybe.nothing()));
    }
}
| |
package org.ovirt.engine.core.bll.storage.domain;
import static org.junit.Assert.assertEquals;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import org.ovirt.engine.core.bll.AbstractQueryTest;
import org.ovirt.engine.core.bll.interfaces.BackendInternal;
import org.ovirt.engine.core.common.action.StorageServerConnectionParametersBase;
import org.ovirt.engine.core.common.action.VdcReturnValueBase;
import org.ovirt.engine.core.common.businessentities.StorageDomain;
import org.ovirt.engine.core.common.businessentities.StorageDomainStatic;
import org.ovirt.engine.core.common.businessentities.StorageServerConnections;
import org.ovirt.engine.core.common.businessentities.storage.LUNs;
import org.ovirt.engine.core.common.businessentities.storage.StorageType;
import org.ovirt.engine.core.common.interfaces.VDSBrokerFrontend;
import org.ovirt.engine.core.common.queries.GetDeviceListQueryParameters;
import org.ovirt.engine.core.common.queries.GetUnregisteredBlockStorageDomainsParameters;
import org.ovirt.engine.core.common.queries.VdcQueryReturnValue;
import org.ovirt.engine.core.common.utils.Pair;
import org.ovirt.engine.core.common.vdscommands.GetVGInfoVDSCommandParameters;
import org.ovirt.engine.core.common.vdscommands.HSMGetStorageDomainInfoVDSCommandParameters;
import org.ovirt.engine.core.common.vdscommands.VDSReturnValue;
import org.ovirt.engine.core.compat.Guid;
import org.ovirt.engine.core.dao.LunDao;
import org.ovirt.engine.core.dao.StorageDomainDao;
/**
 * Unit tests for {@code GetUnregisteredBlockStorageDomainsQuery}: discovery
 * of iSCSI/FCP block storage domains that are visible on storage but not yet
 * registered in the engine database. Each test stubs the VDS/query calls the
 * query makes and then inspects the (domains, connections) pair it returns.
 */
@RunWith(MockitoJUnitRunner.class)
public class GetUnregisteredBlockStorageDomainsQueryTest extends
        AbstractQueryTest<GetUnregisteredBlockStorageDomainsParameters, GetUnregisteredBlockStorageDomainsQuery<GetUnregisteredBlockStorageDomainsParameters>> {
    private final String connectionIqn1 = Guid.newGuid().toString();
    private final String connectionIqn2 = Guid.newGuid().toString();
    // VG/domain ids for a domain that is NOT in the engine DB (unregistered).
    private final String vgId = Guid.newGuid().toString();
    // VG/domain ids for a domain that IS already registered.
    private final String existingVgId = Guid.newGuid().toString();
    private final Guid storageDomainId = Guid.newGuid();
    private final Guid existingStorageDomainId = Guid.newGuid();
    @Mock
    private StorageDomainDao storageDomainDao;
    @Mock
    private LunDao lunDao;
    @Override
    @Before
    public void setUp() throws Exception {
        super.setUp();
        prepareMocks();
    }
    /** Wires the query spy with mocked broker, backend and DAOs. */
    private void prepareMocks() {
        VDSBrokerFrontend vdsBrokerFrontendMock = mock(VDSBrokerFrontend.class);
        doReturn(vdsBrokerFrontendMock).when(getQuery()).getVdsBroker();
        BackendInternal backendMock = mock(BackendInternal.class);
        doReturn(backendMock).when(getQuery()).getBackend();
        doReturn(storageDomainDao).when(getQuery()).getStorageDomainDao();
        doReturn(getExistingStorageDomains()).when(storageDomainDao).getAll();
        doReturn(lunDao).when(getQuery()).getLunDao();
    }
    /** iSCSI: LUNs of an unknown VG are reported as one unregistered domain. */
    @Test
    public void testIscsiFoundUnregisteredDomain() {
        when(getQueryParameters().getStorageType()).thenReturn(StorageType.ISCSI);
        when(getQueryParameters().getStorageServerConnections()).thenReturn(getConnections());
        when(getQueryParameters().getVdsId()).thenReturn(Guid.newGuid());
        List<LUNs> luns = getLUNs(storageDomainId, vgId);
        doReturn(Collections.emptyList()).when(lunDao).getAll();
        doReturn(createSuccessVdcReturnValue()).when(getQuery()).
                executeConnectStorageToVds(any(StorageServerConnectionParametersBase.class));
        doReturn(createGetDeviceListReturnValue(luns)).when(getQuery()).
                executeGetDeviceList(any(GetDeviceListQueryParameters.class));
        doReturn(createGetVGInfoReturnValue(luns)).when(getQuery()).
                executeGetVGInfo(any(GetVGInfoVDSCommandParameters.class));
        doReturn(createGetStorageDomainInfoReturnValue()).when(getQuery()).
                executeHSMGetStorageDomainInfo(any(HSMGetStorageDomainInfoVDSCommandParameters.class));
        // Execute query
        getQuery().executeQueryCommand();
        // Assert query's results
        Pair<List<StorageDomain>, List<StorageServerConnections>> returnValue =
                getQuery().getQueryReturnValue().getReturnValue();
        List<StorageDomain> storageDomains = returnValue.getFirst();
        assertEquals(1, storageDomains.size());
        // assertEquals takes (expected, actual) — expected id goes first.
        assertEquals(storageDomainId, storageDomains.get(0).getId());
        List<StorageServerConnections> connections = returnValue.getSecond();
        assertEquals(2, connections.size());
    }
    /** iSCSI: LUNs already known to the DB (external disks) are not reported. */
    @Test
    public void testIscsiExternalLunDiskPartOfUnregisteredDomain() {
        when(getQueryParameters().getStorageType()).thenReturn(StorageType.ISCSI);
        when(getQueryParameters().getStorageServerConnections()).thenReturn(getConnections());
        when(getQueryParameters().getVdsId()).thenReturn(Guid.newGuid());
        List<LUNs> luns = getLUNs(storageDomainId, vgId);
        // The LUNs are already present in the DB, so the domain is skipped.
        doReturn(luns).when(lunDao).getAll();
        doReturn(createSuccessVdcReturnValue()).when(getQuery()).
                executeConnectStorageToVds(any(StorageServerConnectionParametersBase.class));
        doReturn(createGetDeviceListReturnValue(luns)).when(getQuery()).
                executeGetDeviceList(any(GetDeviceListQueryParameters.class));
        doReturn(createGetVGInfoReturnValue(luns)).when(getQuery()).
                executeGetVGInfo(any(GetVGInfoVDSCommandParameters.class));
        doReturn(createGetStorageDomainInfoReturnValue()).when(getQuery()).
                executeHSMGetStorageDomainInfo(any(HSMGetStorageDomainInfoVDSCommandParameters.class));
        // Execute query
        getQuery().executeQueryCommand();
        // Assert query's results
        Pair<List<StorageDomain>, List<StorageServerConnections>> returnValue =
                getQuery().getQueryReturnValue().getReturnValue();
        List<StorageDomain> storageDomains = returnValue.getFirst();
        assertEquals(0, storageDomains.size());
    }
    /** iSCSI: LUNs of an already-registered domain yield no new domains. */
    @Test
    public void testIscsiNotFoundUnregisteredDomain() {
        when(getQueryParameters().getStorageType()).thenReturn(StorageType.ISCSI);
        when(getQueryParameters().getStorageServerConnections()).thenReturn(getConnections());
        when(getQueryParameters().getVdsId()).thenReturn(Guid.newGuid());
        List<LUNs> luns = getLUNs(existingStorageDomainId, existingVgId);
        doReturn(Collections.emptyList()).when(lunDao).getAll();
        doReturn(createSuccessVdcReturnValue()).when(getQuery()).
                executeConnectStorageToVds(any(StorageServerConnectionParametersBase.class));
        doReturn(createGetDeviceListReturnValue(luns)).when(getQuery()).
                executeGetDeviceList(any(GetDeviceListQueryParameters.class));
        doReturn(createGetVGInfoReturnValue(luns)).when(getQuery()).
                executeGetVGInfo(any(GetVGInfoVDSCommandParameters.class));
        // Execute query
        getQuery().executeQueryCommand();
        // Assert query's results
        Pair<List<StorageDomain>, List<StorageServerConnections>> returnValue =
                getQuery().getQueryReturnValue().getReturnValue();
        List<StorageDomain> storageDomains = returnValue.getFirst();
        assertEquals(0, storageDomains.size());
        List<StorageServerConnections> connections = returnValue.getSecond();
        assertEquals(2, connections.size());
    }
    /** FCP: an unknown VG is reported; no connect step is needed for FCP. */
    @Test
    public void testFcpFoundUnregisteredDomain() {
        when(getQueryParameters().getStorageType()).thenReturn(StorageType.FCP);
        when(getQueryParameters().getVdsId()).thenReturn(Guid.newGuid());
        List<LUNs> luns = getLUNs(storageDomainId, vgId);
        doReturn(Collections.emptyList()).when(lunDao).getAll();
        doReturn(createGetDeviceListReturnValue(luns)).when(getQuery()).
                executeGetDeviceList(any(GetDeviceListQueryParameters.class));
        doReturn(createGetVGInfoReturnValue(luns)).when(getQuery()).
                executeGetVGInfo(any(GetVGInfoVDSCommandParameters.class));
        doReturn(createGetStorageDomainInfoReturnValue()).when(getQuery()).
                executeHSMGetStorageDomainInfo(any(HSMGetStorageDomainInfoVDSCommandParameters.class));
        // Execute query
        getQuery().executeQueryCommand();
        // Assert query's results
        Pair<List<StorageDomain>, List<StorageServerConnections>> returnValue =
                getQuery().getQueryReturnValue().getReturnValue();
        List<StorageDomain> storageDomains = returnValue.getFirst();
        assertEquals(1, storageDomains.size());
        // assertEquals takes (expected, actual) — expected id goes first.
        assertEquals(storageDomainId, storageDomains.get(0).getId());
    }
    /** FCP: LUNs of an already-registered domain yield no new domains. */
    @Test
    public void testFcpNotFoundUnregisteredDomain() {
        when(getQueryParameters().getStorageType()).thenReturn(StorageType.FCP);
        when(getQueryParameters().getStorageServerConnections()).thenReturn(getConnections());
        when(getQueryParameters().getVdsId()).thenReturn(Guid.newGuid());
        List<LUNs> luns = getLUNs(existingStorageDomainId, existingVgId);
        doReturn(Collections.emptyList()).when(lunDao).getAll();
        doReturn(createSuccessVdcReturnValue()).when(getQuery()).
                executeConnectStorageToVds(any(StorageServerConnectionParametersBase.class));
        doReturn(createGetDeviceListReturnValue(luns)).when(getQuery()).
                executeGetDeviceList(any(GetDeviceListQueryParameters.class));
        doReturn(createGetVGInfoReturnValue(luns)).when(getQuery()).
                executeGetVGInfo(any(GetVGInfoVDSCommandParameters.class));
        // Execute query
        getQuery().executeQueryCommand();
        // Assert query's results
        Pair<List<StorageDomain>, List<StorageServerConnections>> returnValue =
                getQuery().getQueryReturnValue().getReturnValue();
        List<StorageDomain> storageDomains = returnValue.getFirst();
        assertEquals(0, storageDomains.size());
    }
    /** Two fresh iSCSI connections with the fixture IQNs. */
    private List<StorageServerConnections> getConnections() {
        StorageServerConnections connection1 = new StorageServerConnections();
        connection1.setIqn(connectionIqn1);
        StorageServerConnections connection2 = new StorageServerConnections();
        connection2.setIqn(connectionIqn2);
        return new ArrayList<>(Arrays.asList(connection1, connection2));
    }
    /** Two LUNs bound to the given storage domain / volume group. */
    private List<LUNs> getLUNs(Guid sdId, String vgId) {
        LUNs lun1 = new LUNs();
        lun1.setStorageDomainId(sdId);
        lun1.setVolumeGroupId(vgId);
        lun1.setLunConnections(new ArrayList<>(getConnections()));
        LUNs lun2 = new LUNs();
        lun2.setStorageDomainId(sdId);
        lun2.setVolumeGroupId(vgId);
        lun2.setLunConnections(new ArrayList<>(getConnections()));
        return new ArrayList<>(Arrays.asList(lun1, lun2));
    }
    /** The one storage domain the mocked DAO reports as already registered. */
    private List<StorageDomain> getExistingStorageDomains() {
        StorageDomain storageDomain = new StorageDomain();
        storageDomain.setId(existingStorageDomainId);
        return Collections.singletonList(storageDomain);
    }
    private static VdcReturnValueBase createSuccessVdcReturnValue() {
        VdcReturnValueBase returnValue = new VdcReturnValueBase();
        returnValue.setSucceeded(true);
        return returnValue;
    }
    private static VdcQueryReturnValue createGetDeviceListReturnValue(List<LUNs> luns) {
        VdcQueryReturnValue returnValue = new VdcQueryReturnValue();
        returnValue.setSucceeded(true);
        returnValue.setReturnValue(luns);
        return returnValue;
    }
    private static VDSReturnValue createGetVGInfoReturnValue(List<LUNs> luns) {
        VDSReturnValue returnValue = new VDSReturnValue();
        returnValue.setSucceeded(true);
        returnValue.setReturnValue(luns);
        return returnValue;
    }
    private VDSReturnValue createGetStorageDomainInfoReturnValue() {
        VDSReturnValue returnValue = new VDSReturnValue();
        returnValue.setSucceeded(true);
        StorageDomain storageDomain = new StorageDomain();
        storageDomain.setId(storageDomainId);
        Pair<StorageDomainStatic, Object> pair = new Pair<>(storageDomain.getStorageStaticData(), null);
        returnValue.setReturnValue(pair);
        return returnValue;
    }
}
| |
/**
* Licensed to Apereo under one or more contributor license
* agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Apereo licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at the following location:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apereo.portal.rendering;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import javax.portlet.WindowState;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apereo.portal.character.stream.CharacterEventReader;
import org.apereo.portal.character.stream.FilteringCharacterEventReader;
import org.apereo.portal.character.stream.events.CharacterDataEventImpl;
import org.apereo.portal.character.stream.events.CharacterEvent;
import org.apereo.portal.events.PortalEvent;
import org.apereo.portal.events.PortletRenderExecutionEvent;
import org.apereo.portal.events.RequestScopedEventsTracker;
import org.apereo.portal.events.aggr.tabs.AggregatedTabLookupDao;
import org.apereo.portal.events.aggr.tabs.AggregatedTabMapping;
import org.apereo.portal.portlet.om.IPortletWindowId;
import org.apereo.portal.spring.beans.factory.ObjectMapperFactoryBean;
import org.apereo.portal.url.IPortalRequestInfo;
import org.apereo.portal.url.IUrlSyntaxProvider;
import org.apereo.portal.utils.cache.CacheKey;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import com.fasterxml.jackson.annotation.JsonFilter;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import com.fasterxml.jackson.databind.ser.BeanPropertyFilter;
import com.fasterxml.jackson.databind.ser.impl.SimpleBeanPropertyFilter;
import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider;
/**
 * Character-pipeline component that splices analytics payloads into the
 * rendered output: PORTLET_ANALYTICS_DATA placeholder events are replaced
 * with a JSON map of per-portlet render events, and PAGE_ANALYTICS_DATA
 * events with JSON page-level data (timing, url state, tab mapping).
 */
public class AnalyticsIncorporationComponent extends CharacterPipelineComponentWrapper implements InitializingBean {
    private ObjectMapper mapper;
    // Writer configured (via mixin + filter) to emit only fname/executionTimeNano.
    private ObjectWriter portletEventWriter;
    private AggregatedTabLookupDao aggregatedTabLookupDao;
    private IUrlSyntaxProvider urlSyntaxProvider;
    private RequestScopedEventsTracker requestScopedEventsTracker;
    @JsonFilter(PortletRenderExecutionEventFilterMixIn.FILTER_NAME)
    private interface PortletRenderExecutionEventFilterMixIn {
        static final String FILTER_NAME = "PortletRenderExecutionEventFilter";
    }
    //Ignored until https://github.com/FasterXML/jackson-databind/issues/245 is fixed
    //Delete the mapper related code in afterPropertiesSet once the issue is fixed
    // @Autowired
    // public void setMapper(ObjectMapper mapper) {
    //
    //     //Clone the mapper so that our mixins don't break other code
    //     this.mapper = mapper.copy();
    //     initMapper();
    // }
    @Override
    public void afterPropertiesSet() throws Exception {
        // Build a private ObjectMapper so the mixin below cannot leak into
        // other users of a shared mapper (see the commented-out setter above).
        final ObjectMapperFactoryBean omfb = new ObjectMapperFactoryBean();
        omfb.afterPropertiesSet();
        this.mapper = omfb.getObject();
        initMapper();
    }
    /**
     * Configure the ObjectMapper to filter out all fields on the events except
     * those that are actually needed for the analytics reporting
     */
    private void initMapper() {
        final BeanPropertyFilter filterOutAllExcept = SimpleBeanPropertyFilter.filterOutAllExcept("fname", "executionTimeNano");
        this.mapper.addMixInAnnotations(PortalEvent.class, PortletRenderExecutionEventFilterMixIn.class);
        final SimpleFilterProvider filterProvider = new SimpleFilterProvider();
        filterProvider.addFilter(PortletRenderExecutionEventFilterMixIn.FILTER_NAME, filterOutAllExcept);
        this.portletEventWriter = this.mapper.writer(filterProvider);
    }
    @Autowired
    public void setAggregatedTabLookupDao(AggregatedTabLookupDao aggregatedTabLookupDao) {
        this.aggregatedTabLookupDao = aggregatedTabLookupDao;
    }
    @Autowired
    public void setUrlSyntaxProvider(IUrlSyntaxProvider urlSyntaxProvider) {
        this.urlSyntaxProvider = urlSyntaxProvider;
    }
    @Autowired
    public void setRequestScopedEventsTracker(RequestScopedEventsTracker requestScopedEventsTracker) {
        this.requestScopedEventsTracker = requestScopedEventsTracker;
    }
    @Override
    public CacheKey getCacheKey(HttpServletRequest request, HttpServletResponse response) {
        return this.wrappedComponent.getCacheKey(request, response);
    }
    @Override
    public PipelineEventReader<CharacterEventReader, CharacterEvent> getEventReader(HttpServletRequest request, HttpServletResponse response) {
        // Capture the start time so page data can report total render duration.
        final long startTime = System.nanoTime();
        final PipelineEventReader<CharacterEventReader, CharacterEvent> pipelineEventReader = this.wrappedComponent.getEventReader(request, response);
        final CharacterEventReader eventReader = pipelineEventReader.getEventReader();
        final AnalyticsIncorporatingEventReader portletIncorporatingEventReader = new AnalyticsIncorporatingEventReader(eventReader, request, startTime);
        final Map<String, String> outputProperties = pipelineEventReader.getOutputProperties();
        return new PipelineEventReaderImpl<CharacterEventReader, CharacterEvent>(portletIncorporatingEventReader, outputProperties);
    }
    /**
     * Serializes this request's portlet render events to a JSON map keyed by
     * portlet window id (or fname when no window id is available). Returns
     * "{}" if serialization fails, so the page still renders.
     */
    protected String serializePortletRenderExecutionEvents(final Set<PortalEvent> portalEvents) {
        //Filter to include just portlet render events
        final Map<String, PortletRenderExecutionEvent> renderEvents = new HashMap<String, PortletRenderExecutionEvent>();
        for (final PortalEvent portalEvent : portalEvents) {
            if (portalEvent instanceof PortletRenderExecutionEvent) {
                final PortletRenderExecutionEvent portletRenderEvent = (PortletRenderExecutionEvent) portalEvent;
                //Don't write out info for minimized portlets
                if (!WindowState.MINIMIZED.equals(portletRenderEvent.getWindowState())) {
                    final IPortletWindowId portletWindowId = portletRenderEvent.getPortletWindowId();
                    final String eventKey = portletWindowId != null ? portletWindowId.getStringId() : portletRenderEvent.getFname();
                    renderEvents.put(eventKey, portletRenderEvent);
                }
            }
        }
        try {
            return portletEventWriter.writeValueAsString(renderEvents);
        }
        catch (IOException e) {
            // JsonParseException and JsonMappingException are IOException
            // subtypes, so this single handler replaces the previous three
            // identical catch blocks without changing behavior.
            logger.warn("Failed to convert this request's render events to JSON, no portlet level analytics will be included", e);
        }
        return "{}";
    }
    /**
     * Serializes page-level analytics (execution time, url state, and the
     * aggregated tab for the targeted layout node, when any). Returns "{}"
     * if serialization fails, so the page still renders.
     */
    protected String serializePageData(HttpServletRequest request, long startTime) {
        final Map<String, Object> pageData = new HashMap<String, Object>();
        pageData.put("executionTimeNano", System.nanoTime() - startTime);
        final IPortalRequestInfo portalRequestInfo = urlSyntaxProvider.getPortalRequestInfo(request);
        pageData.put("urlState", portalRequestInfo.getUrlState());
        final String targetedLayoutNodeId = portalRequestInfo.getTargetedLayoutNodeId();
        if (targetedLayoutNodeId != null) {
            final AggregatedTabMapping mappedTabForLayoutId = aggregatedTabLookupDao.getMappedTabForLayoutId(targetedLayoutNodeId);
            pageData.put("tab", mappedTabForLayoutId);
        }
        try {
            return mapper.writeValueAsString(pageData);
        }
        catch (IOException e) {
            // Single handler: the Jackson exceptions extend IOException.
            logger.warn("Failed to convert this request's page data to JSON, no page level analytics will be included", e);
        }
        return "{}";
    }
    /**
     * Reader wrapper that substitutes serialized analytics JSON for the
     * analytics placeholder events as the stream is consumed.
     */
    private class AnalyticsIncorporatingEventReader extends FilteringCharacterEventReader {
        private final HttpServletRequest request;
        private final long startTime;
        public AnalyticsIncorporatingEventReader(CharacterEventReader delegate, HttpServletRequest request, final long startTime) {
            super(delegate);
            this.request = request;
            this.startTime = startTime;
        }
        @Override
        protected CharacterEvent filterEvent(CharacterEvent event, boolean peek) {
            switch (event.getEventType()) {
                case PORTLET_ANALYTICS_DATA: {
                    //Get the set of events for the request
                    final Set<PortalEvent> portalEvents = requestScopedEventsTracker.getRequestEvents(request);
                    final String data = serializePortletRenderExecutionEvents(portalEvents);
                    return CharacterDataEventImpl.create(data);
                }
                case PAGE_ANALYTICS_DATA: {
                    final String data = serializePageData(request, startTime);
                    return CharacterDataEventImpl.create(data);
                }
                default: {
                    return event;
                }
            }
        }
    }
}
| |
/*
*/
package org.apache.taverna.server.master;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import static javax.ws.rs.core.MediaType.APPLICATION_OCTET_STREAM;
import static javax.ws.rs.core.Response.created;
import static javax.ws.rs.core.Response.noContent;
import static javax.ws.rs.core.Response.ok;
import static javax.ws.rs.core.Response.seeOther;
import static javax.ws.rs.core.Response.status;
import static org.apache.commons.logging.LogFactory.getLog;
import static org.apache.taverna.server.master.api.ContentTypes.APPLICATION_ZIP_TYPE;
import static org.apache.taverna.server.master.api.ContentTypes.DIRECTORY_VARIANTS;
import static org.apache.taverna.server.master.api.ContentTypes.INITIAL_FILE_VARIANTS;
import static org.apache.taverna.server.master.common.Roles.SELF;
import static org.apache.taverna.server.master.common.Roles.USER;
import static org.apache.taverna.server.master.common.Uri.secure;
import static org.apache.taverna.server.master.utils.RestUtils.opt;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;
import javax.annotation.security.RolesAllowed;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.PathSegment;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriBuilder;
import javax.ws.rs.core.UriInfo;
import javax.ws.rs.core.Variant;
import javax.xml.ws.Holder;
import org.apache.commons.logging.Log;
import org.springframework.beans.factory.annotation.Required;
import org.apache.taverna.server.master.api.DirectoryBean;
import org.apache.taverna.server.master.exceptions.FilesystemAccessException;
import org.apache.taverna.server.master.exceptions.NoDirectoryEntryException;
import org.apache.taverna.server.master.exceptions.NoUpdateException;
import org.apache.taverna.server.master.interfaces.Directory;
import org.apache.taverna.server.master.interfaces.DirectoryEntry;
import org.apache.taverna.server.master.interfaces.File;
import org.apache.taverna.server.master.interfaces.TavernaRun;
import org.apache.taverna.server.master.rest.DirectoryContents;
import org.apache.taverna.server.master.rest.FileSegment;
import org.apache.taverna.server.master.rest.MakeOrUpdateDirEntry;
import org.apache.taverna.server.master.rest.MakeOrUpdateDirEntry.MakeDirectory;
import org.apache.taverna.server.master.rest.TavernaServerDirectoryREST;
import org.apache.taverna.server.master.utils.FilenameUtils;
import org.apache.taverna.server.master.utils.CallTimeLogger.PerfLogged;
import org.apache.taverna.server.master.utils.InvocationCounter.CallCounted;
/**
* RESTful access to the filesystem.
*
* @author Donal Fellows
*/
class DirectoryREST implements TavernaServerDirectoryREST, DirectoryBean {
private Log log = getLog("Taverna.Server.Webapp");
private TavernaServerSupport support;
private TavernaRun run;
private FilenameUtils fileUtils;
    /** Injects the shared webapp support bean (bean wiring). */
    @Override
    public void setSupport(TavernaServerSupport support) {
        this.support = support;
    }
    /** Injects the filename/path resolution utility; Spring must wire this. */
    @Override
    @Required
    public void setFileUtils(FilenameUtils fileUtils) {
        this.fileUtils = fileUtils;
    }
    /** Binds this handler to one workflow run; returns this for chaining. */
    @Override
    public DirectoryREST connect(TavernaRun run) {
        this.run = run;
        return this;
    }
    /**
     * Deletes the file or directory named by {@code path}, provided the
     * caller may update this run. Responds 204 No Content on success.
     */
    @Override
    @CallCounted
    @PerfLogged
    @RolesAllowed({ USER, SELF })
    public Response destroyDirectoryEntry(List<PathSegment> path)
            throws NoUpdateException, FilesystemAccessException,
            NoDirectoryEntryException {
        // Permission check first; throws NoUpdateException if not allowed.
        support.permitUpdate(run);
        fileUtils.getDirEntry(run, path).destroy();
        return noContent().build();
    }
    /** Lists the immediate contents of the run's working directory. */
    @Override
    @CallCounted
    @PerfLogged
    @RolesAllowed({ USER, SELF })
    public DirectoryContents getDescription(UriInfo ui)
            throws FilesystemAccessException {
        return new DirectoryContents(ui, run.getWorkingDirectory()
                .getContents());
    }
    /** Advertises the HTTP methods supported on directory-entry resources. */
    @Override
    @CallCounted
    public Response options(List<PathSegment> path) {
        return opt("PUT", "POST", "DELETE");
    }
/*
* // Nasty! This can have several different responses...
*
* @Override @CallCounted private Response
* getDirectoryOrFileContents(List<PathSegment> path, UriInfo ui, Request
* req) throws FilesystemAccessException, NoDirectoryEntryException {
*
* DirectoryEntry de = fileUtils.getDirEntry(run, path);
*
* // How did the user want the result?
*
* List<Variant> variants = getVariants(de); Variant v =
* req.selectVariant(variants); if (v == null) return
* notAcceptable(variants).type(TEXT_PLAIN)
* .entity("Do not know what type of response to produce.") .build();
*
* // Produce the content to deliver up
*
* Object result; if
* (v.getMediaType().equals(APPLICATION_OCTET_STREAM_TYPE))
*
* // Only for files...
*
* result = de; else if (v.getMediaType().equals(APPLICATION_ZIP_TYPE))
*
* // Only for directories...
*
* result = ((Directory) de).getContentsAsZip(); else
*
* // Only for directories... // XML or JSON; let CXF pick what to do
*
* result = new DirectoryContents(ui, ((Directory) de).getContents());
* return ok(result).type(v.getMediaType()).build();
*
* }
*/
private boolean matchType(MediaType a, MediaType b) {
if (log.isDebugEnabled())
log.debug("comparing " + a.getType() + "/" + a.getSubtype()
+ " and " + b.getType() + "/" + b.getSubtype());
return (a.isWildcardType() || b.isWildcardType() || a.getType().equals(
b.getType()))
&& (a.isWildcardSubtype() || b.isWildcardSubtype() || a
.getSubtype().equals(b.getSubtype()));
}
    /**
     * What are we willing to serve up a directory or file as?
     *
     * @param de
     *            The reference to the object to serve.
     * @return The variants we can serve it as.
     * @throws FilesystemAccessException
     *             If we fail to read data necessary to detection of its media
     *             type.
     */
    private List<Variant> getVariants(DirectoryEntry de)
            throws FilesystemAccessException {
        if (de instanceof Directory)
            return DIRECTORY_VARIANTS;
        else if (!(de instanceof File))
            throw new FilesystemAccessException("not a directory or file!");
        File f = (File) de;
        List<Variant> variants = new ArrayList<>(INITIAL_FILE_VARIANTS);
        // If the content type can be estimated (i.e. is not the generic
        // octet-stream fallback), insert it at the front so it wins during
        // negotiation in pickType().
        String contentType = support.getEstimatedContentType(f);
        if (!contentType.equals(APPLICATION_OCTET_STREAM)) {
            String[] ct = contentType.split("/");
            variants.add(0,
                    new Variant(new MediaType(ct[0], ct[1]), (String) null, null));
        }
        return variants;
    }
    /**
     * How did the user want the result? Walks the client's acceptable media
     * types in preference order and returns the first supported variant;
     * fails with the list of supported variants if nothing matches.
     */
    private MediaType pickType(HttpHeaders headers, DirectoryEntry de)
            throws FilesystemAccessException, NegotiationFailedException {
        List<Variant> variants = getVariants(de);
        // Manual content negotiation!!! Ugh!
        // Outer loop order matters: the client's Accept preference wins.
        for (MediaType mt : headers.getAcceptableMediaTypes())
            for (Variant v : variants)
                if (matchType(mt, v.getMediaType()))
                    return v.getMediaType();
        throw new NegotiationFailedException(
                "Do not know what type of response to produce.", variants);
    }
	/**
	 * Serves up the contents of a file, or a listing/ZIP of a directory, in
	 * the media type negotiated against the request's Accept header.
	 *
	 * @param path The path to the directory entry to serve.
	 * @param ui The URI context of the request.
	 * @param headers The request headers; used for content negotiation and
	 *            for the optional Range header.
	 * @return The response to deliver to the client.
	 * @throws FilesystemAccessException If the entry cannot be read.
	 * @throws NoDirectoryEntryException If the path names nothing.
	 * @throws NegotiationFailedException If no acceptable type exists.
	 */
	@Override
	@CallCounted
	@PerfLogged
	@RolesAllowed({ USER, SELF })
	public Response getDirectoryOrFileContents(List<PathSegment> path,
			UriInfo ui, HttpHeaders headers) throws FilesystemAccessException,
			NoDirectoryEntryException, NegotiationFailedException {
		DirectoryEntry de = fileUtils.getDirEntry(run, path);
		// How did the user want the result?
		MediaType wanted = pickType(headers, de);
		log.info("producing content of type " + wanted);
		// Produce the content to deliver up
		Object result;
		if (de instanceof File) {
			// Only for files...
			result = de;
			// If exactly one Range header was supplied, serve just that
			// segment of the file instead of the whole thing.
			List<String> range = headers.getRequestHeader("Range");
			if (range != null && range.size() == 1)
				return new FileSegment((File) de, range.get(0))
						.toResponse(wanted);
		} else {
			// Only for directories...
			Directory d = (Directory) de;
			// ZIP is produced directly; anything else is handed over as a
			// bean for the container to serialize.
			if (wanted.getType().equals(APPLICATION_ZIP_TYPE.getType())
					&& wanted.getSubtype().equals(
							APPLICATION_ZIP_TYPE.getSubtype()))
				result = d.getContentsAsZip();
			else
				// XML or JSON; let CXF pick what to do
				result = new DirectoryContents(ui, d.getContents());
		}
		return ok(result).type(wanted).build();
	}
@Override
@CallCounted
@PerfLogged
@RolesAllowed({ USER, SELF })
public Response makeDirectoryOrUpdateFile(List<PathSegment> parent,
MakeOrUpdateDirEntry op, UriInfo ui) throws NoUpdateException,
FilesystemAccessException, NoDirectoryEntryException {
support.permitUpdate(run);
DirectoryEntry container = fileUtils.getDirEntry(run, parent);
if (!(container instanceof Directory))
throw new FilesystemAccessException("You may not "
+ ((op instanceof MakeDirectory) ? "make a subdirectory of"
: "place a file in") + " a file.");
if (op.name == null || op.name.length() == 0)
throw new FilesystemAccessException("missing name attribute");
Directory d = (Directory) container;
UriBuilder ub = secure(ui).path("{name}");
// Make a directory in the context directory
if (op instanceof MakeDirectory) {
Directory target = d.makeSubdirectory(support.getPrincipal(),
op.name);
return created(ub.build(target.getName())).build();
}
// Make or set the contents of a file
File f = null;
for (DirectoryEntry e : d.getContents()) {
if (e.getName().equals(op.name)) {
if (e instanceof Directory)
throw new FilesystemAccessException(
"You may not overwrite a directory with a file.");
f = (File) e;
break;
}
}
if (f == null) {
f = d.makeEmptyFile(support.getPrincipal(), op.name);
f.setContents(op.contents);
return created(ub.build(f.getName())).build();
}
f.setContents(op.contents);
return seeOther(ub.build(f.getName())).build();
}
private File getFileForWrite(List<PathSegment> filePath,
Holder<Boolean> isNew) throws FilesystemAccessException,
NoDirectoryEntryException, NoUpdateException {
support.permitUpdate(run);
if (filePath == null || filePath.size() == 0)
throw new FilesystemAccessException(
"Cannot create a file that is not in a directory.");
List<PathSegment> dirPath = new ArrayList<>(filePath);
String name = dirPath.remove(dirPath.size() - 1).getPath();
DirectoryEntry de = fileUtils.getDirEntry(run, dirPath);
if (!(de instanceof Directory)) {
throw new FilesystemAccessException(
"Cannot create a file that is not in a directory.");
}
Directory d = (Directory) de;
File f = null;
isNew.value = false;
for (DirectoryEntry e : d.getContents())
if (e.getName().equals(name)) {
if (e instanceof File) {
f = (File) e;
break;
}
throw new FilesystemAccessException(
"Cannot create a file that is not in a directory.");
}
if (f == null) {
f = d.makeEmptyFile(support.getPrincipal(), name);
isNew.value = true;
} else
f.setContents(new byte[0]);
return f;
}
@Override
@CallCounted
@PerfLogged
@RolesAllowed({ USER, SELF })
public Response setFileContents(List<PathSegment> filePath,
InputStream contents, UriInfo ui) throws NoDirectoryEntryException,
NoUpdateException, FilesystemAccessException {
Holder<Boolean> isNew = new Holder<>(true);
support.copyStreamToFile(contents, getFileForWrite(filePath, isNew));
if (isNew.value)
return created(ui.getAbsolutePath()).build();
else
return noContent().build();
}
@Override
@CallCounted
@PerfLogged
@RolesAllowed(USER)
public Response setFileContentsFromURL(List<PathSegment> filePath,
List<URI> referenceList, UriInfo ui)
throws NoDirectoryEntryException, NoUpdateException,
FilesystemAccessException {
support.permitUpdate(run);
if (referenceList.isEmpty() || referenceList.size() > 1)
return status(422).entity("URI list must have single URI in it")
.build();
URI uri = referenceList.get(0);
try {
uri.toURL();
} catch (MalformedURLException e) {
return status(422).entity("URI list must have value URL in it")
.build();
}
Holder<Boolean> isNew = new Holder<>(true);
File f = getFileForWrite(filePath, isNew);
try {
support.copyDataToFile(uri, f);
} catch (MalformedURLException ex) {
// Should not happen; called uri.toURL() successfully above
throw new NoUpdateException("failed to parse URI", ex);
} catch (IOException ex) {
throw new FilesystemAccessException(
"failed to transfer data from URI", ex);
}
if (isNew.value)
return created(ui.getAbsolutePath()).build();
else
return noContent().build();
}
}
| |
/*
* Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.mdm.api;
import org.apache.commons.httpclient.HttpStatus;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.CarbonConstants;
import org.wso2.carbon.base.MultitenantConstants;
import org.wso2.carbon.mdm.api.common.MDMAPIException;
import org.wso2.carbon.mdm.api.util.MDMAPIUtils;
import org.wso2.carbon.mdm.api.util.ResponsePayload;
import org.wso2.carbon.mdm.beans.RoleWrapper;
import org.wso2.carbon.mdm.util.SetReferenceTransformer;
import org.wso2.carbon.user.api.*;
import org.wso2.carbon.user.core.common.AbstractUserStoreManager;
import org.wso2.carbon.user.mgt.UserRealmProxy;
import org.wso2.carbon.user.mgt.common.UIPermissionNode;
import org.wso2.carbon.user.mgt.common.UserAdminException;
import javax.ws.rs.*;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
 * JAX-RS resource exposing CRUD operations over user roles in the underlying
 * WSO2 Carbon user store, including role permissions and role membership.
 */
public class Role {

    private static final Log log = LogFactory.getLog(Role.class);

    /**
     * Strips the internal roles and the roles auto-created for service
     * providers ("Internal/..." and "Application/...") from a raw role list.
     *
     * @param roles The raw role names from the user store.
     * @return The remaining, user-visible role names.
     */
    private static List<String> filterSystemRoles(String[] roles) {
        List<String> filteredRoles = new ArrayList<String>();
        for (String role : roles) {
            if (!(role.startsWith("Internal/") || role.startsWith("Application/"))) {
                filteredRoles.add(role);
            }
        }
        return filteredRoles;
    }

    /**
     * Builds a 200-OK response payload with the given message and content.
     */
    private static ResponsePayload okPayload(String message, Object content) {
        ResponsePayload responsePayload = new ResponsePayload();
        responsePayload.setStatusCode(HttpStatus.SC_OK);
        responsePayload.setMessageFromServer(message);
        responsePayload.setResponseContent(content);
        return responsePayload;
    }

    /**
     * Hand-picks only the device-management and login permission subtrees
     * from a role's full permission tree.
     *
     * @param rolePermissions The root of the permission tree for a role.
     * @return A two-element array: [0] the "/permission/admin/device-mgt"
     *         node, [1] the "/permission/admin/login" node. Either slot may
     *         be null when the subtree is absent.
     */
    private static UIPermissionNode[] extractDeviceMgtPermissions(UIPermissionNode rolePermissions) {
        UIPermissionNode[] deviceMgtPermissions = new UIPermissionNode[2];
        for (UIPermissionNode permissionNode : rolePermissions.getNodeList()) {
            if (permissionNode.getResourcePath().equals("/permission/admin")) {
                for (UIPermissionNode node : permissionNode.getNodeList()) {
                    if (node.getResourcePath().equals("/permission/admin/device-mgt")) {
                        deviceMgtPermissions[0] = node;
                    } else if (node.getResourcePath().equals("/permission/admin/login")) {
                        deviceMgtPermissions[1] = node;
                    }
                }
            }
        }
        return deviceMgtPermissions;
    }

    /**
     * Replaces the membership of a role with exactly the given user list,
     * adding and removing only the users that actually differ.
     *
     * @param userStoreManager The store to update.
     * @param roleName The current name of the role.
     * @param newUsers The desired complete user list.
     * @throws UserStoreException If the store operation fails.
     */
    private static void replaceUsers(UserStoreManager userStoreManager, String roleName,
            List<String> newUsers) throws UserStoreException {
        SetReferenceTransformer transformer = new SetReferenceTransformer();
        transformer.transform(Arrays.asList(userStoreManager.getUserListOfRole(roleName)),
                newUsers);
        final String[] usersToAdd = (String[])
                transformer.getObjectsToAdd().toArray(new String[transformer.getObjectsToAdd().size()]);
        final String[] usersToDelete = (String[])
                transformer.getObjectsToRemove().toArray(new String[transformer.getObjectsToRemove().size()]);
        userStoreManager.updateUserListOfRole(roleName, usersToDelete, usersToAdd);
    }

    /**
     * Get user roles (except all internal roles) from system.
     *
     * @return A list of users
     * @throws org.wso2.carbon.mdm.api.common.MDMAPIException
     */
    @GET
    @Produces ({MediaType.APPLICATION_JSON})
    public Response getRoles() throws MDMAPIException {
        UserStoreManager userStoreManager = MDMAPIUtils.getUserStoreManager();
        String[] roles;
        try {
            if (log.isDebugEnabled()) {
                log.debug("Getting the list of user roles");
            }
            roles = userStoreManager.getRoleNames();
        } catch (UserStoreException e) {
            String msg = "Error occurred while retrieving the list of user roles.";
            log.error(msg, e);
            throw new MDMAPIException(msg, e);
        }
        // removing all internal roles and roles created for Service-providers
        ResponsePayload responsePayload = okPayload(
                "All user roles were successfully retrieved.", filterSystemRoles(roles));
        return Response.status(HttpStatus.SC_OK).entity(responsePayload).build();
    }

    /**
     * Get user roles by user store(except all internal roles) from system.
     *
     * @return A list of users
     * @throws org.wso2.carbon.mdm.api.common.MDMAPIException
     */
    @GET
    @Path ("{userStore}")
    @Produces ({MediaType.APPLICATION_JSON})
    public Response getRoles(@PathParam ("userStore") String userStore) throws MDMAPIException {
        AbstractUserStoreManager abstractUserStoreManager = (AbstractUserStoreManager) MDMAPIUtils.getUserStoreManager();
        String[] roles;
        try {
            if (log.isDebugEnabled()) {
                log.debug("Getting the list of user roles");
            }
            roles = abstractUserStoreManager.getRoleNames(userStore + "/*", -1, false, true, true);
        } catch (UserStoreException e) {
            String msg = "Error occurred while retrieving the list of user roles.";
            log.error(msg, e);
            throw new MDMAPIException(msg, e);
        }
        // removing all internal roles and roles created for Service-providers
        ResponsePayload responsePayload = okPayload(
                "All user roles were successfully retrieved.", filterSystemRoles(roles));
        return Response.status(HttpStatus.SC_OK).entity(responsePayload).build();
    }

    /**
     * Get user roles by providing a filtering criteria(except all internal roles & system roles) from system.
     *
     * @return A list of users
     * @throws org.wso2.carbon.mdm.api.common.MDMAPIException
     */
    @GET
    @Path ("search")
    @Produces ({MediaType.APPLICATION_JSON})
    public Response getMatchingRoles(@QueryParam ("filter") String filter) throws MDMAPIException {
        AbstractUserStoreManager abstractUserStoreManager = (AbstractUserStoreManager) MDMAPIUtils.getUserStoreManager();
        String[] roles;
        try {
            if (log.isDebugEnabled()) {
                log.debug("Getting the list of user roles using filter : " + filter);
            }
            roles = abstractUserStoreManager.getRoleNames("*" + filter + "*", -1, true, true, true);
        } catch (UserStoreException e) {
            String msg = "Error occurred while retrieving the list of user roles using the filter : " + filter;
            log.error(msg, e);
            throw new MDMAPIException(msg, e);
        }
        // removing all internal roles and roles created for Service-providers
        ResponsePayload responsePayload = okPayload(
                "All matching user roles were successfully retrieved.", filterSystemRoles(roles));
        return Response.status(HttpStatus.SC_OK).entity(responsePayload).build();
    }

    /**
     * Get role permissions.
     *
     * @return list of permissions
     * @throws MDMAPIException
     */
    @GET
    @Path ("permissions")
    @Produces ({MediaType.APPLICATION_JSON})
    public ResponsePayload getPermissions(@QueryParam ("rolename") String roleName) throws MDMAPIException {
        final UserRealm userRealm = MDMAPIUtils.getUserRealm();
        org.wso2.carbon.user.core.UserRealm userRealmCore = null;
        final UIPermissionNode rolePermissions;
        if (userRealm instanceof org.wso2.carbon.user.core.UserRealm) {
            userRealmCore = (org.wso2.carbon.user.core.UserRealm) userRealm;
        }
        // NOTE(review): if the realm is not a core UserRealm, userRealmCore is
        // still null here and the proxy construction below will misbehave —
        // verify whether that situation can occur in this deployment.
        try {
            final UserRealmProxy userRealmProxy = new UserRealmProxy(userRealmCore);
            rolePermissions = userRealmProxy.getRolePermissions(roleName, MultitenantConstants.SUPER_TENANT_ID);
            // Keep only the device-management and login permission subtrees.
            rolePermissions.setNodeList(extractDeviceMgtPermissions(rolePermissions));
        } catch (UserAdminException e) {
            String msg = "Error occurred while retrieving the user role";
            log.error(msg, e);
            throw new MDMAPIException(msg, e);
        }
        return okPayload("All permissions retrieved", rolePermissions);
    }

    /**
     * Get user role of the system
     *
     * @return user role
     * @throws org.wso2.carbon.mdm.api.common.MDMAPIException
     */
    @GET
    @Path("role")
    @Produces ({MediaType.APPLICATION_JSON})
    public ResponsePayload getRole(@QueryParam ("rolename") String roleName) throws MDMAPIException {
        final UserStoreManager userStoreManager = MDMAPIUtils.getUserStoreManager();
        final UserRealm userRealm = MDMAPIUtils.getUserRealm();
        org.wso2.carbon.user.core.UserRealm userRealmCore = null;
        if (userRealm instanceof org.wso2.carbon.user.core.UserRealm) {
            userRealmCore = (org.wso2.carbon.user.core.UserRealm) userRealm;
        }
        RoleWrapper roleWrapper = new RoleWrapper();
        try {
            final UserRealmProxy userRealmProxy = new UserRealmProxy(userRealmCore);
            if (log.isDebugEnabled()) {
                log.debug("Getting the list of user roles");
            }
            if (userStoreManager.isExistingRole(roleName)) {
                roleWrapper.setRoleName(roleName);
                roleWrapper.setUsers(userStoreManager.getUserListOfRole(roleName));
                // Get the permission nodes and hand picking only device management and login perms
                final UIPermissionNode rolePermissions =
                        userRealmProxy.getRolePermissions(roleName, MultitenantConstants.SUPER_TENANT_ID);
                rolePermissions.setNodeList(extractDeviceMgtPermissions(rolePermissions));
                ArrayList<String> permList = new ArrayList<String>();
                iteratePermissions(rolePermissions, permList);
                roleWrapper.setPermissionList(rolePermissions);
                String[] permListAr = new String[permList.size()];
                roleWrapper.setPermissions(permList.toArray(permListAr));
            }
        } catch (UserStoreException | UserAdminException e) {
            // Identical handling for both failure modes; collapsed from two
            // duplicate catch blocks.
            String msg = "Error occurred while retrieving the user role";
            log.error(msg, e);
            throw new MDMAPIException(msg, e);
        }
        return okPayload("All user roles were successfully retrieved.", roleWrapper);
    }

    /**
     * API is used to persist a new Role
     *
     * @param roleWrapper
     * @return
     * @throws MDMAPIException
     */
    @POST
    @Produces ({MediaType.APPLICATION_JSON})
    public Response addRole(RoleWrapper roleWrapper) throws MDMAPIException {
        UserStoreManager userStoreManager = MDMAPIUtils.getUserStoreManager();
        try {
            if (log.isDebugEnabled()) {
                log.debug("Persisting the role to user store");
            }
            Permission[] permissions = null;
            if (roleWrapper.getPermissions() != null && roleWrapper.getPermissions().length > 0) {
                permissions = new Permission[roleWrapper.getPermissions().length];
                for (int i = 0; i < permissions.length; i++) {
                    String permission = roleWrapper.getPermissions()[i];
                    permissions[i] = new Permission(permission, CarbonConstants.UI_PERMISSION_ACTION);
                }
            }
            userStoreManager.addRole(roleWrapper.getRoleName(), roleWrapper.getUsers(), permissions);
        } catch (UserStoreException e) {
            String msg = e.getMessage();
            log.error(msg, e);
            throw new MDMAPIException(msg, e);
        }
        return Response.status(HttpStatus.SC_CREATED).build();
    }

    /**
     * API is used to update a role Role
     *
     * @param roleWrapper
     * @return
     * @throws MDMAPIException
     */
    @PUT
    @Produces ({MediaType.APPLICATION_JSON})
    public Response updateRole(@QueryParam ("rolename") String roleName, RoleWrapper roleWrapper) throws
            MDMAPIException {
        final UserStoreManager userStoreManager = MDMAPIUtils.getUserStoreManager();
        final AuthorizationManager authorizationManager = MDMAPIUtils.getAuthorizationManager();
        String newRoleName = roleWrapper.getRoleName();
        try {
            if (log.isDebugEnabled()) {
                log.debug("Updating the role to user store");
            }
            boolean renamed = newRoleName != null && !roleName.equals(newRoleName);
            if (renamed) {
                userStoreManager.updateRoleName(roleName, newRoleName);
            }
            // The role may have just been renamed; every further operation
            // must address it by its current name. Previously the user-list
            // update used newRoleName even when it was null, and the
            // authorization updates used the stale pre-rename name.
            final String currentRoleName = renamed ? newRoleName : roleName;
            if (roleWrapper.getUsers() != null) {
                replaceUsers(userStoreManager, currentRoleName,
                        Arrays.asList(roleWrapper.getUsers()));
            }
            if (roleWrapper.getPermissions() != null) {
                // Delete all authorizations for the current role before authorizing the permission tree
                authorizationManager.clearRoleAuthorization(currentRoleName);
                if (roleWrapper.getPermissions().length > 0) {
                    for (int i = 0; i < roleWrapper.getPermissions().length; i++) {
                        String permission = roleWrapper.getPermissions()[i];
                        authorizationManager.authorizeRole(currentRoleName, permission,
                                CarbonConstants.UI_PERMISSION_ACTION);
                    }
                }
            }
        } catch (UserStoreException e) {
            String msg = e.getMessage();
            log.error(msg, e);
            throw new MDMAPIException(msg, e);
        }
        return Response.status(HttpStatus.SC_OK).build();
    }

    /**
     * API is used to delete a role and authorizations
     *
     * @param roleName
     * @return
     * @throws MDMAPIException
     */
    @DELETE
    @Produces ({MediaType.APPLICATION_JSON})
    public Response deleteRole(@QueryParam ("rolename") String roleName) throws MDMAPIException {
        final UserStoreManager userStoreManager = MDMAPIUtils.getUserStoreManager();
        final AuthorizationManager authorizationManager = MDMAPIUtils.getAuthorizationManager();
        try {
            if (log.isDebugEnabled()) {
                log.debug("Deleting the role in user store");
            }
            userStoreManager.deleteRole(roleName);
            // Remove any remaining authorizations attached to the deleted role
            authorizationManager.clearRoleAuthorization(roleName);
        } catch (UserStoreException e) {
            String msg = "Error occurred while deleting the role: " + roleName;
            log.error(msg, e);
            throw new MDMAPIException(msg, e);
        }
        return Response.status(HttpStatus.SC_OK).build();
    }

    /**
     * API is used to update users of a role
     *
     * @param roleName
     * @param userList
     * @return
     * @throws MDMAPIException
     */
    @PUT
    @Path ("users")
    @Produces ({MediaType.APPLICATION_JSON})
    public Response updateUsers(@QueryParam ("rolename") String roleName, List<String> userList)
            throws MDMAPIException {
        final UserStoreManager userStoreManager = MDMAPIUtils.getUserStoreManager();
        try {
            if (log.isDebugEnabled()) {
                log.debug("Updating the users of a role");
            }
            replaceUsers(userStoreManager, roleName, userList);
        } catch (UserStoreException e) {
            String msg = "Error occurred while saving the users of the role: " + roleName;
            log.error(msg, e);
            throw new MDMAPIException(e.getMessage(), e);
        }
        return Response.status(HttpStatus.SC_OK).build();
    }

    /**
     * Recursively collects the resource paths of every node in a permission
     * tree into the given list.
     *
     * @param uiPermissionNode The subtree to walk.
     * @param list The accumulator the paths are appended to.
     * @return The same accumulator list, for chaining.
     */
    public ArrayList<String> iteratePermissions(UIPermissionNode uiPermissionNode, ArrayList<String> list) {
        for (UIPermissionNode permissionNode : uiPermissionNode.getNodeList()) {
            list.add(permissionNode.getResourcePath());
            if (permissionNode.getNodeList() != null && permissionNode.getNodeList().length > 0) {
                iteratePermissions(permissionNode, list);
            }
        }
        return list;
    }
}
| |
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.dashbuilder.client.navigation.widget;
import java.util.ArrayList;
import java.util.List;
import javax.enterprise.event.Observes;
import javax.inject.Inject;
import com.google.gwt.user.client.ui.Widget;
import org.dashbuilder.client.navigation.NavigationManager;
import org.dashbuilder.client.navigation.event.NavTreeChangedEvent;
import org.dashbuilder.navigation.NavDivider;
import org.dashbuilder.navigation.NavGroup;
import org.dashbuilder.navigation.NavItem;
import org.dashbuilder.navigation.NavTree;
import org.uberfire.ext.security.management.client.widgets.management.events.SaveGroupEvent;
import org.uberfire.ext.security.management.client.widgets.management.events.SaveRoleEvent;
import org.uberfire.mvp.Command;
/**
 * Base implementation of {@link NavWidget}: renders a navigation structure (a
 * {@link NavTree} or a {@link NavGroup}) through a {@link NavWidgetView} and
 * tracks which item is currently selected, including selections made inside
 * nested subgroup widgets. Concrete subclasses decide how subgroups are
 * rendered by overriding {@link #lookupNavGroupWidget()}.
 */
public abstract class BaseNavWidget implements NavWidget {
    NavigationManager navigationManager;
    // When true, the item list is filtered through the authz policy in show().
    boolean secure = true;
    // Passed through to NavigationManager.secure() to drop emptied groups.
    boolean hideEmptyGroups = true;
    Command onItemSelectedCommand;
    Command onStaleCommand;
    NavItem itemSelected;
    NavGroup navGroup;
    NavWidget parent;
    NavWidgetView view;
    // Maximum nesting depth to render; any value < 1 means unlimited
    // (see areSubGroupsSupported()).
    int maxLevels = -1;
    List<NavItem> navItemList = null;
    // The subgroup widget holding the current selection, if any.
    NavWidget activeNavSubgroup = null;
    List<NavWidget> navSubgroupList = new ArrayList<>();
    @Inject
    public BaseNavWidget(NavWidgetView view, NavigationManager navigationManager) {
        this.view = view;
        this.navigationManager = navigationManager;
        view.init(this);
    }
    public NavigationManager getNavigationManager() {
        return navigationManager;
    }
    @Override
    public Widget asWidget() {
        return view.asWidget();
    }
    @Override
    public void setSecure(boolean secure) {
        this.secure = secure;
    }
    @Override
    public void setHideEmptyGroups(boolean hide) {
        this.hideEmptyGroups = hide;
    }
    @Override
    public void setOnItemSelectedCommand(Command onItemSelected) {
        this.onItemSelectedCommand = onItemSelected;
    }
    @Override
    public void setOnStaleCommand(Command onStaleCommand) {
        this.onStaleCommand = onStaleCommand;
    }
    /**
     * Looks up an item by id among the items currently shown at this level.
     * Note: does NOT descend into subgroup widgets; returns null when the id
     * is absent (or null).
     */
    public NavItem getItem(String id) {
        if (navItemList == null || id == null) {
            return null;
        }
        for (NavItem navItem : navItemList) {
            if (id.equals(navItem.getId())) {
                return navItem;
            }
        }
        return null;
    }
    // The nesting depth of this widget: 0 for a root widget, +1 per parent.
    @Override
    public int getLevel() {
        int level = 0;
        NavWidget root = parent;
        while (root != null) {
            level++;
            root = root.getParent();
        }
        return level;
    }
    @Override
    public NavWidget getParent() {
        return parent;
    }
    @Override
    public void setParent(NavWidget parent) {
        this.parent = parent;
    }
    @Override
    public int getMaxLevels() {
        return maxLevels;
    }
    @Override
    public void setMaxLevels(int maxLevels) {
        this.maxLevels = maxLevels;
    }
    @Override
    public NavGroup getNavGroup() {
        return navGroup;
    }
    // Subgroups are allowed when depth is unlimited (maxLevels < 1) or the
    // next level would still be within the configured maximum.
    public boolean areSubGroupsSupported() {
        return maxLevels < 1 || getLevel() < maxLevels-1;
    }
    // Finds the child widget rendering the subgroup with the given id.
    protected NavWidget getSubgroupNavWidget(String groupId) {
        for (NavWidget navWidget : navSubgroupList) {
            if (navWidget.getNavGroup().getId().equals(groupId)) {
                return navWidget;
            }
        }
        return null;
    }
    /**
     * Factory hook for subclasses: the widget used to render a nested
     * subgroup. Returning null (the default) makes showGroup() skip the
     * subgroup entirely.
     */
    protected NavWidget lookupNavGroupWidget() {
        return null;
    }
    @Override
    public void hide() {
        view.clearItems();
        navSubgroupList.forEach(NavWidget::hide);
    }
    @Override
    public void show(NavGroup navGroup) {
        this.navGroup = navGroup;
        if (navGroup == null) {
            view.errorNavGroupNotFound();
        } else {
            this.show(navGroup.getChildren());
        }
    }
    @Override
    public void show(NavTree navTree) {
        if (navTree == null) {
            view.errorNavItemsEmpty();
        } else {
            this.show(navTree.getRootItems());
        }
    }
    /**
     * Renders the given items, discarding whatever was shown before. Items
     * are first filtered through the authz policy (when secure), then each
     * one is rendered as a subgroup, a divider, or a plain item.
     */
    @Override
    public void show(List<NavItem> itemList) {
        this.hide();
        this.navItemList = itemList;
        this.navSubgroupList.clear();
        // Make sure the items shown comply with the authz policy defined
        if (secure) {
            // Work on a copy so the caller's list is not mutated by secure().
            navItemList = new ArrayList<>(itemList);
            navigationManager.secure(navItemList, hideEmptyGroups);
        }
        if (navItemList.isEmpty()) {
            view.errorNavItemsEmpty();
        }
        for (NavItem navChild : navItemList) {
            // A subgroup
            if (navChild instanceof NavGroup) {
                // Ensure to not exceed the maximum number of levels
                if (areSubGroupsSupported()) {
                    showGroup((NavGroup) navChild);
                } else {
                    // Too deep: render the group as a flat, plain item.
                    showItem(navChild);
                }
            }
            // A divider
            else if (navChild instanceof NavDivider) {
                view.addDivider();
            }
            // A regular item
            else {
                showItem(navChild);
            }
        }
    }
    /**
     * Renders a nested subgroup through a child widget obtained from
     * lookupNavGroupWidget(), wiring it up so its selections propagate back
     * to this widget. A null child widget means the subgroup is not shown.
     */
    protected void showGroup(NavGroup navGroup) {
        NavWidget subGroupNavWidget = lookupNavGroupWidget();
        if (subGroupNavWidget != null) {
            subGroupNavWidget.setParent(this);
            // One level is consumed by this widget; -1 keeps depth unlimited.
            subGroupNavWidget.setMaxLevels(maxLevels > 0 ? maxLevels - 1 : -1);
            subGroupNavWidget.setSecure(secure);
            subGroupNavWidget.setHideEmptyGroups(hideEmptyGroups);
            subGroupNavWidget.setOnItemSelectedCommand(() -> onSubGroupItemClicked(subGroupNavWidget));
            subGroupNavWidget.show(navGroup);
            navSubgroupList.add(subGroupNavWidget);
            view.addGroupItem(navGroup.getId(), navGroup.getName(), navGroup.getDescription(), subGroupNavWidget);
        }
    }
    protected void showItem(NavItem navItem) {
        view.addItem(navItem.getId(), navItem.getName(), navItem.getDescription(), () -> {
            onItemClicked(navItem);
        });
    }
    @Override
    public NavItem getItemSelected() {
        return itemSelected;
    }
    /**
     * Selects the item with the given id, looking first at this level and
     * then recursively inside the subgroup widgets. Any previous selection
     * (including one held by a subgroup) is cleared first.
     *
     * @return Whether an item with that id was found and selected.
     */
    @Override
    public boolean setSelectedItem(String id) {
        clearSelectedItem();
        NavItem navItem = getItem(id);
        if (navItem != null) {
            itemSelected = navItem;
            view.setSelectedItem(navItem.getId());
            return true;
        }
        for (NavWidget navWidget : navSubgroupList) {
            if (navWidget.setSelectedItem(id)) {
                // Mirror the subgroup's selection at this level and remember
                // which subgroup holds it so it can be cleared later.
                itemSelected = navWidget.getItemSelected();
                activeNavSubgroup = navWidget;
                return true;
            }
        }
        return false;
    }
    @Override
    public void clearSelectedItem() {
        itemSelected = null;
        view.clearSelectedItem();
        if (activeNavSubgroup != null) {
            activeNavSubgroup.clearSelectedItem();
            activeNavSubgroup = null;
        }
    }
    /**
     * Invoked when an item inside the given subgroup widget is clicked:
     * moves the active-selection bookkeeping to that subgroup and notifies
     * the registered selection callback.
     */
    public void onSubGroupItemClicked(NavWidget subGroup) {
        if (activeNavSubgroup != null && activeNavSubgroup != subGroup) {
            activeNavSubgroup.clearSelectedItem();
        }
        activeNavSubgroup = subGroup;
        view.clearSelectedItem();
        view.setSelectedItem(subGroup.getNavGroup().getId());
        itemSelected = subGroup.getItemSelected();
        if (onItemSelectedCommand != null) {
            onItemSelectedCommand.execute();
        }
    }
    /**
     * Invoked when a plain item at this level is clicked: records the
     * selection, notifies the navigation manager, and fires the callback.
     */
    public void onItemClicked(NavItem navItem) {
        clearSelectedItem();
        itemSelected = navItem;
        view.setSelectedItem(navItem.getId());
        navigationManager.navItemClicked(navItem);
        if (onItemSelectedCommand != null) {
            onItemSelectedCommand.execute();
        }
    }
    protected void refresh() {
        if (onStaleCommand != null) {
            onStaleCommand.execute();
        }
    }
    @Override
    public void dispose() {
        view.clearItems();
        navSubgroupList.forEach(NavWidget::dispose);
    }
    // Listen to changes in the navigation tree
    public void onNavTreeChanged(@Observes final NavTreeChangedEvent event) {
        refresh();
    }
    // Listen to authorization policy changes as it might impact the menu items shown
    public void onAuthzPolicyChanged(@Observes final SaveRoleEvent event) {
        if (secure) {
            refresh();
        }
    }
    public void onAuthzPolicyChanged(@Observes final SaveGroupEvent event) {
        if (secure) {
            refresh();
        }
    }
}
| |
/*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package com.pivotal.gemfirexd.jdbc;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.sql.Blob;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;
import java.util.Arrays;
import java.util.concurrent.CyclicBarrier;
import java.util.zip.CRC32;
import com.gemstone.gemfire.internal.cache.PartitionedRegion;
import junit.framework.TestSuite;
import junit.textui.TestRunner;
import org.apache.derbyTesting.functionTests.tests.jdbc4.BlobClobTestSetup;
import org.apache.derbyTesting.functionTests.tests.jdbc4.ClobTest;
import org.apache.derbyTesting.functionTests.util.streams.CharAlphabet;
import org.apache.derbyTesting.functionTests.util.streams.LoopingAlphabetReader;
import org.apache.derbyTesting.functionTests.util.streams.LoopingAlphabetStream;
public class BlobTest extends JdbcTestBase {
  // Fixture payloads ('e'..'i' ascending then descending, and the reverse).
  // Presumably used by blob insert/update tests defined later in this file —
  // TODO confirm against the full test class.
  private static final byte[] BYTES1 = { 0x65, 0x66, 0x67, 0x68, 0x69, 0x69,
      0x68, 0x67, 0x66, 0x65 };
  private static final byte[] BYTES2 = { 0x69, 0x68, 0x67, 0x66, 0x65, 0x65,
      0x66, 0x67, 0x68, 0x69 };
  /** Default row identifier used by the tests. */
  private final int key = -1;
public static void main(String[] args) {
TestRunner.run(new TestSuite(BlobTest.class));
}
  /**
   * Creates a named test-case instance, as required by the JUnit 3 runner.
   *
   * @param name The name of the test method to run.
   */
  public BlobTest(String name) {
    super(name);
  }
  // Requests reduced ("config") log verbosity for this suite.
  // NOTE(review): the exact effect depends on how JdbcTestBase interprets the
  // returned value — not visible from this file.
  @Override
  protected String reduceLogging() {
    return "config";
  }
public void testBlobAsString() throws SQLException {
setupConnection();
Statement stmt = getStatement();
stmt.executeUpdate("create table b (blob blob(3K))" + getSuffix());
stmt.executeUpdate("insert into b values(cast(X'0031' as blob(3K)))");
ResultSet rs = stmt.executeQuery("select blob from b");
assertEquals(true, rs.next());
assertEquals("0031", rs.getString(1));
rs.close();
stmt.close();
}
  // Size in bytes of the random job-data payload each worker thread inserts
  // into the JOB_DATA blob column in testBug43623.
  private static final int JOBDATA_LEN = 500000;
/** Test for projection on LOB/non-LOB columns */
public void testBug43623() throws Exception {
setupConnection();
final int netPort = startNetserverAndReturnPort();
Connection conn = getNetConnection(netPort, "", null);
final String tableDDL = "create table QUARTZ_TRIGGERS (TRIGGER_NAME "
+ "varchar(80) not null, TRIGGER_GROUP varchar(80) not null, "
+ "JOB_NAME varchar(80) not null, JOB_GROUP varchar(80) not null, "
+ "IS_VOLATILE smallint, DESCRIPTION varchar(120), "
+ "NEXT_FIRE_TIME bigint, PREV_FIRE_TIME bigint, PRIORITY integer, "
+ "TRIGGER_STATE varchar(16) not null, TRIGGER_TYPE varchar(8), "
+ "START_TIME bigint, END_TIME bigint, CALENDAR_NAME varchar(80), "
+ "MISFIRE_INSTR integer, JOB_DATA blob, "
+ "primary key(TRIGGER_NAME, TRIGGER_GROUP))"+ getSuffix();
final Statement stmt = conn.createStatement();
stmt.execute(tableDDL);
final int numThreads = 50;
Thread[] ts = new Thread[numThreads];
final Exception[] failure = new Exception[1];
final CyclicBarrier barrier = new CyclicBarrier(numThreads);
for (int i = 0; i < numThreads; i++) {
ts[i] = new Thread(new Runnable() {
@Override
public void run() {
try {
final Connection c = getNetConnection(netPort, "", null);
barrier.await();
// perform some inserts into the table
final byte[] jobData = new byte[JOBDATA_LEN];
PartitionedRegion.rand.nextBytes(jobData);
final String group = Thread.currentThread().getName();
try {
for (int i = 1; i <= 200; i++) {
insertData_43623_2(c, jobData, group);
selectData_43623_2(c, group);
deleteData_43623_2(c, group);
if ((i % 40) == 0) {
Statement s = c.createStatement();
ResultSet rs = s
.executeQuery("SELECT COUNT(*) FROM QUARTZ_TRIGGERS");
rs.next();
System.out.println(group + ": completed " + i
+ " ops with size=" + rs.getInt(1));
rs.close();
s.close();
}
}
} finally {
c.close();
}
} catch (Exception e) {
e.printStackTrace();
failure[0] = e;
}
}
});
}
for (int i = 0; i < numThreads; i++) {
ts[i].start();
}
for (int i = 0; i < numThreads; i++) {
ts[i].join();
}
if (failure[0] != null) {
throw failure[0];
}
stmt.execute("delete from QUARTZ_TRIGGERS where 1=1");
conn.commit();
// perform some inserts into the table
final byte[] jobData = new byte[10000000];
insertData_43623(conn, jobData);
// now fire some selects (get convertible, non-get convertible)
checkQueries_43623(conn, jobData);
// now do the same for partitioned table on pk
stmt.execute("drop table QUARTZ_TRIGGERS");
this.waitTillAllClear();
stmt.execute(tableDDL + " partition by primary key");
insertData_43623(conn, jobData);
// now fire some selects (get convertible, non-get convertible)
checkQueries_43623(conn, jobData);
// now do the same for partitioned table on non-pk
stmt.execute("drop table QUARTZ_TRIGGERS");
this.waitTillAllClear();
stmt.execute(tableDDL + " partition by column (JOB_NAME)");
insertData_43623(conn, jobData);
// now fire some selects (get convertible, non-get convertible)
checkQueries_43623(conn, jobData);
// now the same as above in a transaction
stmt.execute("drop table QUARTZ_TRIGGERS");
this.waitTillAllClear();
stmt.execute(tableDDL);
insertData_43623(conn, jobData);
// now fire some selects (get convertible, non-get convertible)
// checking for transactional data
checkQueries_43623(conn, jobData);
conn.commit();
// now fire some selects (get convertible, non-get convertible)
// after commit
checkQueries_43623(conn, jobData);
conn.commit();
stmt.execute("drop table QUARTZ_TRIGGERS");
this.waitTillAllClear();
stmt.execute(tableDDL + " partition by primary key");
insertData_43623(conn, jobData);
// now fire some selects (get convertible, non-get convertible)
// checking for transactional data
checkQueries_43623(conn, jobData);
conn.commit();
// now fire some selects (get convertible, non-get convertible)
// after commit
checkQueries_43623(conn, jobData);
conn.commit();
// now do the same for partitioned table on non-pk
stmt.execute("drop table QUARTZ_TRIGGERS");
this.waitTillAllClear();
stmt.execute(tableDDL + " partition by column (JOB_NAME)");
insertData_43623(conn, jobData);
// now fire some selects (get convertible, non-get convertible)
// checking for transactional data
checkQueries_43623(conn, jobData);
conn.commit();
// now fire some selects (get convertible, non-get convertible)
// after commit
checkQueries_43623(conn, jobData);
conn.commit();
}
/** Test for bug #42711 from derby's ResultSetTest. */
public void testUpdateBinaryStream() throws Exception {
setupConnection();
final Statement stmt = getStatement();
stmt.execute("create table UpdateTestTableResultSet ("
+ "sno int not null unique," + "dBlob BLOB," + "dClob CLOB,"
+ "dLongVarchar LONG VARCHAR," + "dLongBit LONG VARCHAR FOR BIT DATA)"+ getSuffix());
// Byte array in which the returned bytes from
// the Database after the update are stored. This
// array is then checked to determine if it
// has the same elements of the Byte array used for
// the update operation
byte[] bytes_ret = new byte[10];
// Input Stream inserted initially
InputStream is = new java.io.ByteArrayInputStream(BYTES1);
// InputStream that is used for update
InputStream is_for_update = new java.io.ByteArrayInputStream(BYTES2);
// Prepared Statement used to insert the data
PreparedStatement ps_sb = prep("dLongBit");
ps_sb.setInt(1, key);
ps_sb.setBinaryStream(2, is, BYTES1.length);
ps_sb.executeUpdate();
ps_sb.close();
// Update operation
// use a different ResultSet variable so that the
// other tests can go on unimpacted
ResultSet rs1 = fetchUpd("dLongBit", key);
rs1.next();
rs1.updateBinaryStream(1, is_for_update, BYTES2.length);
rs1.updateRow();
rs1.close();
// Query to see whether the data that has been updated
// using the updateBinaryStream method is the same
// data that we expected
rs1 = fetch("dLongBit", key);
rs1.next();
InputStream is_ret = rs1.getBinaryStream(1);
is_ret.read(bytes_ret);
is_ret.close();
for (int i = 0; i < BYTES2.length; i++) {
assertEquals("Error in updateBinaryStream", BYTES2[i], bytes_ret[i]);
}
rs1.close();
stmt.execute("drop table UpdateTestTableResultSet");
this.waitTillAllClear();
stmt.close();
}
/**
* Adapted from Derby's BlobClob4BlobTest#testGetBinaryStream().
*/
public void testGetBinaryStream() throws Exception {
setupConnection();
createBlobClobTables();
insertDefaultData();
Statement stmt = getStatement();
ResultSet rs = stmt.executeQuery("select a, b, crc32 from testBlob");
checkBlobContents(rs);
stmt.close();
}
/**
* Adapted from Derby's BlobClobTestSetup#testFreeandMethodsAfterCallingFree
*/
public void testFreeandMethodsAfterCallingFree() throws Exception {
setupConnection();
createBlobClobTable();
new ClobTest("tmp").testFreeandMethodsAfterCallingFree();
}
/**
* Prepare commonly used statement to insert a row.
*
* @param colName
* name of the column to insert into
*/
private PreparedStatement prep(String colName) throws SQLException {
return getPreparedStatement("insert into UpdateTestTableResultSet "
+ "(sno, " + colName + ") values (?,?)");
}
/**
* Fetch the specified row for update.
*
* @param colName
* name of the column to fetch
* @param key
* identifier for row to fetch
* @return a <code>ResultSet</code> with zero or one row, depending on the key
* used
*/
private ResultSet fetchUpd(String colName, int key) throws SQLException {
Statement stmt = jdbcConn.createStatement(ResultSet.TYPE_FORWARD_ONLY,
ResultSet.CONCUR_UPDATABLE);
return stmt.executeQuery("select " + colName
+ " from UpdateTestTableResultSet where sno = " + key + " for update");
}
/**
* Fetch the specified row.
*
* @param colName
* name of the column to fetch
* @param key
* identifier for row to fetch
* @return a <code>ResultSet</code> with zero or one row, depending on the key
* used
*/
private ResultSet fetch(String colName, int key) throws SQLException {
Statement stmt = getStatement();
return stmt.executeQuery("select " + colName
+ " from UpdateTestTableResultSet where sno = " + key);
}
private void createBlobClobTables() throws SQLException {
Statement stmt = getStatement();
stmt.executeUpdate("CREATE TABLE testClob (b INT, c INT)"+ getSuffix());
stmt.executeUpdate("ALTER TABLE testClob ADD COLUMN a CLOB(300K)");
//stmt.executeUpdate("CREATE TABLE testClob (a CLOB(300K), b INT, c INT)");
stmt.executeUpdate("CREATE TABLE testBlob (b INT)"+ getSuffix());
stmt.executeUpdate("ALTER TABLE testBlob ADD COLUMN a blob(300k)");
stmt.executeUpdate("ALTER TABLE testBlob ADD COLUMN crc32 BIGINT");
//stmt.executeUpdate("CREATE TABLE testBlob (a blob(300k), b INT, crc32 BIGINT)");
stmt.close();
}
private void createBlobClobTable() throws SQLException {
Statement stmt = getStatement();
stmt.execute("create table BLOBCLOB (ID int primary key, "
+ "BLOBDATA blob," + "CLOBDATA clob)"+ getSuffix());
stmt.execute("insert into BLOBCLOB VALUES " + "("
+ BlobClobTestSetup.ID_NULLVALUES + ", null, null)");
// Actual data is inserted in the getSample* methods.
stmt.execute("insert into BLOBCLOB VALUES " + "("
+ BlobClobTestSetup.ID_SAMPLEVALUES + ", null, null)");
stmt.close();
}
private void insertDefaultData() throws Exception {
PreparedStatement ps = getPreparedStatement("INSERT INTO testClob "
+ "(a, b, c) VALUES (?, ?, ?)");
String clobValue = "";
ps.setString(1, clobValue);
ps.setInt(2, clobValue.length());
ps.setLong(3, 0);
ps.addBatch();
clobValue = "you can lead a horse to water but you can't form it "
+ "into beverage";
ps.setString(1, clobValue);
ps.setInt(2, clobValue.length());
ps.setLong(3, 0);
ps.addBatch();
clobValue = "a stitch in time says ouch";
ps.setString(1, clobValue);
ps.setInt(2, clobValue.length());
ps.setLong(3, 0);
ps.addBatch();
clobValue = "here is a string with a return \n character";
ps.setString(1, clobValue);
ps.setInt(2, clobValue.length());
ps.setLong(3, 0);
ps.addBatch();
ps.executeBatch();
ps.clearBatch();
insertLoopingAlphabetStreamData(ps, CharAlphabet.modernLatinLowercase(), 0);
insertLoopingAlphabetStreamData(ps, CharAlphabet.modernLatinLowercase(), 56);
insertLoopingAlphabetStreamData(ps, CharAlphabet.modernLatinLowercase(),
5000);
insertLoopingAlphabetStreamData(ps, CharAlphabet.modernLatinLowercase(),
10000);
insertLoopingAlphabetStreamData(ps, CharAlphabet.modernLatinLowercase(),
300000);
ps.setNull(1, Types.CLOB);
ps.setInt(2, 0);
ps.setLong(3, 0);
ps.executeUpdate();
ps.close();
ps = getPreparedStatement("INSERT INTO testBlob (a, b, crc32) "
+ "VALUES (?, ?, ?)");
byte[] blobValue = "".getBytes("US-ASCII");
ps.setBytes(1, blobValue);
ps.setInt(2, blobValue.length);
ps.setLong(3, getStreamCheckSum(new ByteArrayInputStream(blobValue)));
ps.addBatch();
blobValue = ("you can lead a horse to water but you can't form it "
+ "into beverage").getBytes("US-ASCII");
ps.setBytes(1, blobValue);
ps.setInt(2, blobValue.length);
ps.setLong(3, getStreamCheckSum(new ByteArrayInputStream(blobValue)));
ps.addBatch();
blobValue = "a stitch in time says ouch".getBytes("US-ASCII");
ps.setBytes(1, blobValue);
ps.setInt(2, blobValue.length);
ps.setLong(3, getStreamCheckSum(new ByteArrayInputStream(blobValue)));
ps.addBatch();
blobValue = "here is a string with a return \n character"
.getBytes("US-ASCII");
ps.setBytes(1, blobValue);
ps.setInt(2, blobValue.length);
ps.setLong(3, getStreamCheckSum(new ByteArrayInputStream(blobValue)));
ps.addBatch();
ps.executeBatch();
ps.clearBatch();
insertLoopingAlphabetStreamData(ps, 0);
insertLoopingAlphabetStreamData(ps, 56);
insertLoopingAlphabetStreamData(ps, 5000);
insertLoopingAlphabetStreamData(ps, 10000);
insertLoopingAlphabetStreamData(ps, 300000);
ps.setNull(1, Types.BLOB);
ps.setInt(2, 0);
ps.setNull(3, Types.BIGINT);
ps.executeUpdate();
ps.close();
}
/**
 * Test the contents of the testBlob table or a ResultSet with identical shape
 * (columns: blob, length, crc32). For each row: a NULL blob must have a NULL
 * checksum; a non-NULL blob must match its stored CRC32, its stored length,
 * and its actual streamed byte count. Finally asserts exactly 9 non-null and
 * 1 null row — the fixture written by insertDefaultData.
 */
public void checkBlobContents(ResultSet rs) throws Exception {
  int nullCount = 0;
  int rowCount = 0;
  byte[] buff = new byte[128];
  // fetch row back, get the long varbinary column as a blob.
  Blob blob;
  int blobLength = 0, i = 0;
  while (rs.next()) {
    i++;
    // get the first column as a blob
    blob = rs.getBlob(1);
    long crc32 = rs.getLong(3);
    boolean crc2Null = rs.wasNull();
    if (blob == null) {
      assertTrue("FAIL - NULL BLOB but non-NULL checksum", crc2Null);
      nullCount++;
    }
    else {
      rowCount++;
      long blobcrc32 = getStreamCheckSum(blob.getBinaryStream());
      assertEquals("FAIL - mismatched checksums for blob with " + "length "
          + blob.length(), blobcrc32, crc32);
      // Count the actual bytes through a second stream; close it even on
      // assertion failure (the stream was previously leaked).
      InputStream fin = blob.getBinaryStream();
      int columnSize = 0;
      try {
        for (;;) {
          int size = fin.read(buff);
          if (size == -1)
            break;
          columnSize += size;
        }
      } finally {
        fin.close();
      }
      blobLength = rs.getInt(2);
      assertEquals("FAIL - wrong column size", blobLength, columnSize);
      assertEquals("FAIL - wrong column length", blobLength, blob.length());
    }
  }
  assertEquals("FAIL - wrong not null row count null:" + nullCount, 9,
      rowCount);
  assertEquals("FAIL - wrong null blob count", 1, nullCount);
}
private void insertLoopingAlphabetStreamData(PreparedStatement ps,
int lobLength) throws Exception {
ps.setBinaryStream(1, new LoopingAlphabetStream(lobLength), lobLength);
ps.setInt(2, lobLength);
ps.setLong(3, getStreamCheckSum(new LoopingAlphabetStream(lobLength)));
ps.executeUpdate();
}
private void insertLoopingAlphabetStreamData(PreparedStatement ps,
CharAlphabet alphabet, int lobLength) throws Exception {
ps.setCharacterStream(1, new LoopingAlphabetReader(lobLength, alphabet),
lobLength);
ps.setInt(2, lobLength);
ps.setLong(3, -1);
ps.executeUpdate();
}
/**
 * Computes the CRC32 checksum of a stream, consuming it entirely and closing
 * it before returning.
 *
 * @param in stream to checksum; always closed by this method on success
 * @return the CRC32 value of all bytes read from {@code in}
 */
private long getStreamCheckSum(InputStream in) throws Exception {
  final CRC32 checksum = new CRC32();
  final byte[] chunk = new byte[32 * 1024];
  int count;
  while ((count = in.read(chunk)) != -1) {
    checksum.update(chunk, 0, count);
  }
  in.close();
  return checksum.getValue();
}
public static void insertData_43623(final Connection conn,
final byte[] jobData) throws SQLException {
PreparedStatement pstmt = conn.prepareStatement("insert into "
+ "QUARTZ_TRIGGERS(TRIGGER_NAME, TRIGGER_GROUP, TRIGGER_STATE, "
+ "JOB_NAME, JOB_GROUP, JOB_DATA) values(?, ?, ?, ?, ?, ?)");
PartitionedRegion.rand.nextBytes(jobData);
final int numRows = 10;
for (int cnt = 1; cnt <= numRows; cnt++) {
pstmt.setString(1, "trig" + cnt);
pstmt.setString(2, "grp" + cnt);
pstmt.setString(3, "st" + cnt);
pstmt.setString(4, "job" + cnt);
pstmt.setString(5, "jgrp" + cnt);
pstmt.setBytes(6, jobData);
pstmt.execute();
}
}
public static void insertData_43623_2(final Connection conn,
final byte[] jobData, final String group) throws SQLException {
PreparedStatement pstmt = conn.prepareStatement("insert into "
+ "QUARTZ_TRIGGERS (TRIGGER_NAME, TRIGGER_GROUP, TRIGGER_STATE, "
+ "JOB_NAME, JOB_GROUP, JOB_DATA) values(?, ?, ?, ?, ?, ?)");
final int numRows = 10;
for (int cnt = 1; cnt <= numRows; cnt++) {
pstmt.setString(1, "trig" + cnt);
pstmt.setString(2, "grp" + '_' + group + '_' + cnt);
pstmt.setString(3, "st" + cnt);
pstmt.setString(4, "job" + cnt);
pstmt.setString(5, "jgrp" + cnt);
pstmt.setBytes(6, jobData);
pstmt.execute();
}
}
public static void selectData_43623_2(final Connection conn,
final String group) throws SQLException {
PreparedStatement pstmt = conn
.prepareStatement("select TRIGGER_STATE, JOB_DATA from "
+ "QUARTZ_TRIGGERS where TRIGGER_NAME = ? AND TRIGGER_GROUP = ?");
final int numRows = 10;
for (int cnt = 1; cnt <= numRows; cnt++) {
pstmt.setString(1, "trig" + cnt);
pstmt.setString(2, "grp" + '_' + group + '_' + cnt);
ResultSet rs = pstmt.executeQuery();
assertTrue(rs.next());
assertEquals("st" + cnt, rs.getString(1));
byte[] jobData = rs.getBytes(2);
assertEquals(JOBDATA_LEN, jobData.length);
assertFalse(rs.next());
}
}
/**
 * Deletes the 10 QUARTZ_TRIGGERS rows previously inserted for the given
 * group by insertData_43623_2 (keys trig1..trig10 / grp_&lt;group&gt;_1..10).
 */
public static void deleteData_43623_2(final Connection conn,
    final String group) throws SQLException {
  final PreparedStatement deleteStmt = conn.prepareStatement("delete from "
      + "QUARTZ_TRIGGERS where TRIGGER_NAME = ? AND TRIGGER_GROUP = ?");
  final int numRows = 10;
  int row = 1;
  while (row <= numRows) {
    deleteStmt.setString(1, "trig" + row);
    deleteStmt.setString(2, "grp" + '_' + group + '_' + row);
    deleteStmt.execute();
    row++;
  }
}
public static void checkQueries_43623(final Connection conn,
final byte[] jobData) throws Exception {
PreparedStatement pstmt;
ResultSet rs;
final int numRows = 10;
pstmt = conn.prepareStatement("SELECT TRIGGER_STATE FROM QUARTZ_TRIGGERS "
+ "WHERE TRIGGER_NAME = ? AND TRIGGER_GROUP = ?");
for (int cnt = 1; cnt <= numRows; cnt++) {
pstmt.setString(1, "trig" + cnt);
pstmt.setString(2, "grp" + cnt);
rs = pstmt.executeQuery();
assertTrue(rs.next());
assertEquals("st" + cnt, rs.getString(1));
assertFalse(rs.next());
rs.close();
}
pstmt = conn.prepareStatement("SELECT TRIGGER_STATE, JOB_DATA FROM "
+ "QUARTZ_TRIGGERS WHERE TRIGGER_NAME = ? AND TRIGGER_GROUP = ?");
for (int cnt = 1; cnt <= numRows; cnt++) {
pstmt.setString(1, "trig" + cnt);
pstmt.setString(2, "grp" + cnt);
rs = pstmt.executeQuery();
assertTrue(rs.next());
assertEquals("st" + cnt, rs.getString(1));
if (!Arrays.equals(jobData, rs.getBytes(2))) {
fail("LOB column not expected value");
}
assertFalse(rs.next());
rs.close();
}
pstmt = conn.prepareStatement("SELECT TRIGGER_STATE FROM QUARTZ_TRIGGERS "
+ "WHERE JOB_NAME = ?");
for (int cnt = 1; cnt <= numRows; cnt++) {
pstmt.setString(1, "job" + cnt);
rs = pstmt.executeQuery();
assertTrue(rs.next());
assertEquals("st" + cnt, rs.getString(1));
assertFalse(rs.next());
rs.close();
}
pstmt = conn.prepareStatement("SELECT TRIGGER_STATE, JOB_DATA FROM "
+ "QUARTZ_TRIGGERS WHERE JOB_NAME = ?");
for (int cnt = 1; cnt <= numRows; cnt++) {
pstmt.setString(1, "job" + cnt);
rs = pstmt.executeQuery();
assertTrue(rs.next());
assertEquals("st" + cnt, rs.getString(1));
if (!Arrays.equals(jobData, rs.getBytes(2))) {
fail("LOB column not expected value");
}
assertFalse(rs.next());
rs.close();
}
}
public String getSuffix() {
return " ";
}
public void waitTillAllClear() {
}
}
| |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ui.popup;
import com.intellij.ide.DataManager;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.popup.*;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.ui.PopupBorder;
import com.intellij.ui.ScreenUtil;
import com.intellij.ui.ScrollPaneFactory;
import com.intellij.ui.popup.async.AsyncPopupImpl;
import com.intellij.ui.popup.async.AsyncPopupStep;
import com.intellij.ui.popup.list.ComboBoxPopup;
import com.intellij.ui.popup.list.ListPopupImpl;
import com.intellij.ui.popup.tree.TreePopupImpl;
import com.intellij.ui.popup.util.MnemonicsSearch;
import com.intellij.ui.speedSearch.ElementFilter;
import com.intellij.ui.speedSearch.SpeedSearch;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.TimerUtil;
import org.intellij.lang.annotations.JdkConstants;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.awt.event.*;
import java.util.Collections;
public abstract class WizardPopup extends AbstractPopup implements ActionListener, ElementFilter {
private static final Logger LOG = Logger.getInstance(WizardPopup.class);
private static final Dimension MAX_SIZE = new Dimension(Integer.MAX_VALUE, 600);
protected static final int STEP_X_PADDING = 2;
private final WizardPopup myParent;
protected final PopupStep<Object> myStep;
protected WizardPopup myChild;
private final Timer myAutoSelectionTimer =
TimerUtil.createNamedTimer("Wizard auto-selection", Registry.intValue("ide.popup.auto.delay", 500), this);
private final MnemonicsSearch myMnemonicsSearch;
private Object myParentValue;
private Point myLastOwnerPoint;
private Window myOwnerWindow;
private MyComponentAdapter myOwnerListener;
private final ActionMap myActionMap = new ActionMap();
private final InputMap myInputMap = new InputMap();
private boolean myKeyPressedReceived;
/**
* @deprecated use {@link #WizardPopup(Project, JBPopup, PopupStep)}
*/
@Deprecated
@ApiStatus.ScheduledForRemoval(inVersion = "2021.3")
public WizardPopup(@NotNull PopupStep<Object> aStep) {
this(CommonDataKeys.PROJECT.getData(DataManager.getInstance().getDataContext()), null, aStep);
}
public WizardPopup(@Nullable Project project, @Nullable JBPopup aParent, @NotNull PopupStep<Object> aStep) {
myParent = (WizardPopup) aParent;
myStep = aStep;
mySpeedSearch.setEnabled(myStep.isSpeedSearchEnabled());
final JComponent content = createContent();
JComponent popupComponent = createPopupComponent(content);
init(project, popupComponent, getPreferredFocusableComponent(), true, true, true, null,
isResizable(), aStep.getTitle(), null, true, Collections.emptySet(), false, null, null, null, false, null, true, false, true, null, 0f,
null, true, false, new Component[0], null, SwingConstants.LEFT, true, Collections.emptyList(),
null, null, false, true, true, null, true, null);
registerAction("disposeAll", KeyEvent.VK_ESCAPE, InputEvent.SHIFT_MASK, new AbstractAction() {
@Override
public void actionPerformed(ActionEvent e) {
if (mySpeedSearch.isHoldingFilter()) {
mySpeedSearch.reset();
}
else {
disposeAll();
}
}
});
AbstractAction goBackAction = new AbstractAction() {
@Override
public void actionPerformed(ActionEvent e) {
goBack();
}
};
registerAction("goBack3", KeyEvent.VK_ESCAPE, 0, goBackAction);
myMnemonicsSearch = new MnemonicsSearch(this) {
@Override
protected void select(Object value) {
onSelectByMnemonic(value);
}
};
}
@NotNull
protected JComponent createPopupComponent(JComponent content) {
JScrollPane scrollPane = createScrollPane(content);
scrollPane.setVerticalScrollBarPolicy(ScrollPaneConstants.VERTICAL_SCROLLBAR_AS_NEEDED);
scrollPane.setHorizontalScrollBarPolicy(ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER);
scrollPane.getHorizontalScrollBar().setBorder(null);
scrollPane.getActionMap().get("unitScrollLeft").setEnabled(false);
scrollPane.getActionMap().get("unitScrollRight").setEnabled(false);
scrollPane.setBorder(JBUI.Borders.empty());
return scrollPane;
}
@NotNull
protected JScrollPane createScrollPane(JComponent content) {
return ScrollPaneFactory.createScrollPane(content);
}
private void disposeAll() {
WizardPopup root = PopupDispatcher.getActiveRoot();
disposeAllParents(null);
root.getStep().canceled();
}
public void goBack() {
if (mySpeedSearch.isHoldingFilter()) {
mySpeedSearch.reset();
return;
}
if (myParent != null) {
myParent.disposeChildren();
}
else {
disposeAll();
}
}
protected abstract JComponent createContent();
@Override
public void dispose() {
myAutoSelectionTimer.stop();
super.dispose();
PopupDispatcher.unsetShowing(this);
PopupDispatcher.clearRootIfNeeded(this);
if (myOwnerWindow != null && myOwnerListener != null) {
myOwnerWindow.removeComponentListener(myOwnerListener);
}
}
public void disposeChildren() {
if (myChild != null) {
myChild.disposeChildren();
Disposer.dispose(myChild);
myChild = null;
}
}
@Override
public void show(@NotNull final Component owner, final int aScreenX, final int aScreenY, final boolean considerForcedXY) {
LOG.assertTrue (!isDisposed());
Rectangle targetBounds = new Rectangle(new Point(aScreenX, aScreenY), getContent().getPreferredSize());
if (getParent() != null) {
final Rectangle parentBounds = getParent().getBounds();
parentBounds.x += STEP_X_PADDING;
parentBounds.width -= STEP_X_PADDING * 2;
ScreenUtil.moveToFit(targetBounds, ScreenUtil.getScreenRectangle(
parentBounds.x + parentBounds.width / 2,
parentBounds.y + parentBounds.height / 2), null);
if (parentBounds.intersects(targetBounds)) {
targetBounds.x = getParent().getBounds().x - targetBounds.width - STEP_X_PADDING;
}
} else {
ScreenUtil.moveToFit(targetBounds, ScreenUtil.getScreenRectangle(aScreenX + 1, aScreenY + 1), null);
}
if (getParent() == null) {
PopupDispatcher.setActiveRoot(this);
}
else {
PopupDispatcher.setShowing(this);
}
LOG.assertTrue (!isDisposed(), "Disposed popup, parent="+getParent());
super.show(owner, targetBounds.x, targetBounds.y, true);
}
@Override
protected void afterShow() {
super.afterShow();
registerAutoMove();
}
private void registerAutoMove() {
if (myOwner != null) {
myOwnerWindow = SwingUtilities.getWindowAncestor(myOwner);
if (myOwnerWindow != null) {
myLastOwnerPoint = myOwnerWindow.getLocationOnScreen();
myOwnerListener = new MyComponentAdapter();
myOwnerWindow.addComponentListener(myOwnerListener);
}
}
}
private void processParentWindowMoved() {
if (isDisposed()) return;
final Point newOwnerPoint = myOwnerWindow.getLocationOnScreen();
int deltaX = myLastOwnerPoint.x - newOwnerPoint.x;
int deltaY = myLastOwnerPoint.y - newOwnerPoint.y;
myLastOwnerPoint = newOwnerPoint;
final Window wnd = SwingUtilities.getWindowAncestor(getContent());
if (!wnd.isShowing()) return;
final Point current = wnd.getLocationOnScreen();
setLocation(new Point(current.x - deltaX, current.y - deltaY));
}
protected abstract JComponent getPreferredFocusableComponent();
@Override
public void cancel(InputEvent e) {
super.cancel(e);
disposeChildren();
Disposer.dispose(this);
getStep().canceled();
}
@Override
public boolean isCancelKeyEnabled() {
return super.isCancelKeyEnabled() && !mySpeedSearch.isHoldingFilter();
}
protected void disposeAllParents(InputEvent e) {
myDisposeEvent = e;
Disposer.dispose(this);
if (myParent != null) {
myParent.disposeAllParents(null);
}
}
public final void registerAction(@NonNls String aActionName, int aKeyCode, @JdkConstants.InputEventMask int aModifier, Action aAction) {
myInputMap.put(KeyStroke.getKeyStroke(aKeyCode, aModifier), aActionName);
myActionMap.put(aActionName, aAction);
}
protected String getActionForKeyStroke(final KeyStroke keyStroke) {
return (String) myInputMap.get(keyStroke);
}
public final void registerAction(@NonNls String aActionName, KeyStroke keyStroke, Action aAction) {
myInputMap.put(keyStroke, aActionName);
myActionMap.put(aActionName, aAction);
}
protected abstract InputMap getInputMap();
protected abstract ActionMap getActionMap();
protected final void setParentValue(Object parentValue) {
myParentValue = parentValue;
}
@Override
@NotNull
protected MyContentPanel createContentPanel(final boolean resizable, final @NotNull PopupBorder border, final boolean isToDrawMacCorner) {
return new MyContainer(border);
}
protected boolean isResizable() {
return false;
}
private static final class MyContainer extends MyContentPanel {
private MyContainer(@NotNull PopupBorder border) {
super(border);
setOpaque(true);
setFocusCycleRoot(true);
}
@Override
public Dimension getPreferredSize() {
if (isPreferredSizeSet()) {
return super.getPreferredSize();
}
final Component focusOwner = KeyboardFocusManager.getCurrentKeyboardFocusManager().getFocusOwner();
Point p = null;
if (focusOwner != null && focusOwner.isShowing()) {
p = focusOwner.getLocationOnScreen();
}
return computeNotBiggerDimension(super.getPreferredSize().getSize(), p);
}
private static Dimension computeNotBiggerDimension(Dimension ofContent, final Point locationOnScreen) {
int resultHeight = ofContent.height > MAX_SIZE.height + 50 ? MAX_SIZE.height : ofContent.height;
if (locationOnScreen != null) {
final Rectangle r = ScreenUtil.getScreenRectangle(locationOnScreen);
resultHeight = Math.min(ofContent.height, r.height - (r.height / 4));
}
int resultWidth = Math.min(ofContent.width, MAX_SIZE.width);
if (ofContent.height > MAX_SIZE.height) {
resultWidth += ScrollPaneFactory.createScrollPane().getVerticalScrollBar().getPreferredSize().getWidth();
}
return new Dimension(resultWidth, resultHeight);
}
}
public WizardPopup getParent() {
return myParent;
}
public PopupStep getStep() {
return myStep;
}
public final boolean dispatch(KeyEvent event) {
if (event.getID() == KeyEvent.KEY_PRESSED) {
myKeyPressedReceived = true;
final KeyStroke stroke = KeyStroke.getKeyStroke(event.getKeyCode(), event.getModifiers(), false);
if (proceedKeyEvent(event, stroke)) return true;
}
else if (!myKeyPressedReceived && !(this instanceof ComboBoxPopup)) {
// key was pressed while this popup wasn't active, ignore the event
return false;
}
if (event.getID() == KeyEvent.KEY_RELEASED) {
final KeyStroke stroke = KeyStroke.getKeyStroke(event.getKeyCode(), event.getModifiers(), true);
return proceedKeyEvent(event, stroke);
}
myMnemonicsSearch.processKeyEvent(event);
mySpeedSearch.processKeyEvent(event);
if (event.isConsumed()) return true;
process(event);
return event.isConsumed();
}
private boolean proceedKeyEvent(KeyEvent event, KeyStroke stroke) {
if (myInputMap.get(stroke) != null) {
final Action action = myActionMap.get(myInputMap.get(stroke));
if (action != null && action.isEnabled()) {
action.actionPerformed(new ActionEvent(getContent(), event.getID(), "", event.getWhen(), event.getModifiers()));
event.consume();
return true;
}
}
return false;
}
protected void process(KeyEvent aEvent) {
}
public Rectangle getBounds() {
JComponent content = isDisposed() ? null : getContent();
return content == null ? null : new Rectangle(content.getLocationOnScreen(), content.getSize());
}
protected WizardPopup createPopup(WizardPopup parent, PopupStep step, Object parentValue) {
if (step instanceof AsyncPopupStep) {
return new AsyncPopupImpl(getProject(), parent, (AsyncPopupStep)step, parentValue);
}
if (step instanceof ListPopupStep) {
return new ListPopupImpl(getProject(), parent, (ListPopupStep)step, parentValue);
}
else if (step instanceof TreePopupStep) {
return new TreePopupImpl(getProject(), parent, (TreePopupStep)step, parentValue);
}
else {
throw new IllegalArgumentException(step.getClass().toString());
}
}
@Override
public final void actionPerformed(ActionEvent e) {
myAutoSelectionTimer.stop();
if (getStep().isAutoSelectionEnabled()) {
onAutoSelectionTimer();
}
}
protected final void restartTimer() {
if (!myAutoSelectionTimer.isRunning()) {
myAutoSelectionTimer.start();
}
else {
myAutoSelectionTimer.restart();
}
}
protected final void stopTimer() {
myAutoSelectionTimer.stop();
}
protected void onAutoSelectionTimer() {
}
@Override
public boolean shouldBeShowing(Object value) {
if (!myStep.isSpeedSearchEnabled()) return true;
SpeedSearchFilter<Object> filter = myStep.getSpeedSearchFilter();
if (filter == null) return true;
if (!filter.canBeHidden(value)) return true;
if (!mySpeedSearch.isHoldingFilter()) return true;
String text = filter.getIndexedString(value);
return mySpeedSearch.shouldBeShowing(text);
}
public SpeedSearch getSpeedSearch() {
return mySpeedSearch;
}
protected void onSelectByMnemonic(Object value) {
}
protected abstract void onChildSelectedFor(Object value);
protected final void notifyParentOnChildSelection() {
if (myParent == null || myParentValue == null) return;
myParent.onChildSelectedFor(myParentValue);
}
private class MyComponentAdapter extends ComponentAdapter {
@Override
public void componentMoved(final ComponentEvent e) {
processParentWindowMoved();
}
}
@Override
public final void setFinalRunnable(Runnable runnable) {
if (getParent() == null) {
super.setFinalRunnable(runnable);
} else {
getParent().setFinalRunnable(runnable);
}
}
@Override
public void setOk(boolean ok) {
if (getParent() == null) {
super.setOk(ok);
} else {
getParent().setOk(ok);
}
}
}
| |
package de.test.antennapod.storage;
import android.content.Context;
import android.content.SharedPreferences;
import android.preference.PreferenceManager;
import android.test.FlakyTest;
import android.test.InstrumentationTestCase;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.concurrent.TimeUnit;
import de.danoeh.antennapod.core.feed.Feed;
import de.danoeh.antennapod.core.feed.FeedItem;
import de.danoeh.antennapod.core.feed.FeedMedia;
import de.danoeh.antennapod.core.preferences.UserPreferences;
import de.danoeh.antennapod.core.storage.DBReader;
import de.danoeh.antennapod.core.storage.DBTasks;
import de.danoeh.antennapod.core.storage.PodDBAdapter;
import de.danoeh.antennapod.core.util.flattr.FlattrStatus;
import static de.test.antennapod.storage.DBTestUtils.saveFeedlist;
/**
* Test class for DBTasks
*/
public class DBTasksTest extends InstrumentationTestCase {
private static final String TAG = "DBTasksTest";
private static final int EPISODE_CACHE_SIZE = 5;
private Context context;
private File destFolder;
/**
 * Deletes the test database and removes every file created in destFolder,
 * then the folder itself.
 */
@Override
protected void tearDown() throws Exception {
  super.tearDown();
  assertTrue(PodDBAdapter.deleteDatabase(context));
  // listFiles() returns null when the directory is gone or an I/O error
  // occurs; fail with an explicit assertion instead of an opaque NPE.
  File[] files = destFolder.listFiles();
  assertNotNull(files);
  for (File f : files) {
    assertTrue(f.delete());
  }
  assertTrue(destFolder.delete());
}
@Override
protected void setUp() throws Exception {
super.setUp();
context = getInstrumentation().getTargetContext();
destFolder = context.getExternalCacheDir();
assertNotNull(destFolder);
assertTrue(destFolder.exists());
assertTrue(destFolder.canWrite());
context.deleteDatabase(PodDBAdapter.DATABASE_NAME);
// make sure database is created
PodDBAdapter adapter = new PodDBAdapter(context);
adapter.open();
adapter.close();
SharedPreferences.Editor prefEdit = PreferenceManager.getDefaultSharedPreferences(context.getApplicationContext()).edit();
prefEdit.putString(UserPreferences.PREF_EPISODE_CACHE_SIZE, Integer.toString(EPISODE_CACHE_SIZE));
prefEdit.commit();
UserPreferences.init(context);
}
@FlakyTest(tolerance = 3)
public void testPerformAutoCleanupShouldDelete() throws IOException {
final int NUM_ITEMS = EPISODE_CACHE_SIZE * 2;
Feed feed = new Feed("url", new Date(), "title");
List<FeedItem> items = new ArrayList<FeedItem>();
feed.setItems(items);
List<File> files = new ArrayList<File>();
for (int i = 0; i < NUM_ITEMS; i++) {
FeedItem item = new FeedItem(0, "title", "id", "link", new Date(), FeedItem.PLAYED, feed);
File f = new File(destFolder, "file " + i);
assertTrue(f.createNewFile());
files.add(f);
item.setMedia(new FeedMedia(0, item, 1, 0, 1L, "m", f.getAbsolutePath(), "url", true, new Date(NUM_ITEMS - i), 0));
items.add(item);
}
PodDBAdapter adapter = new PodDBAdapter(context);
adapter.open();
adapter.setCompleteFeed(feed);
adapter.close();
assertTrue(feed.getId() != 0);
for (FeedItem item : items) {
assertTrue(item.getId() != 0);
assertTrue(item.getMedia().getId() != 0);
}
DBTasks.performAutoCleanup(context);
for (int i = 0; i < files.size(); i++) {
if (i < EPISODE_CACHE_SIZE) {
assertTrue(files.get(i).exists());
} else {
assertFalse(files.get(i).exists());
}
}
}
@FlakyTest(tolerance = 3)
public void testPerformAutoCleanupShouldNotDeleteBecauseUnread() throws IOException {
final int NUM_ITEMS = EPISODE_CACHE_SIZE * 2;
Feed feed = new Feed("url", new Date(), "title");
List<FeedItem> items = new ArrayList<FeedItem>();
feed.setItems(items);
List<File> files = new ArrayList<File>();
for (int i = 0; i < NUM_ITEMS; i++) {
FeedItem item = new FeedItem(0, "title", "id", "link", new Date(), FeedItem.UNPLAYED, feed);
File f = new File(destFolder, "file " + i);
assertTrue(f.createNewFile());
assertTrue(f.exists());
files.add(f);
item.setMedia(new FeedMedia(0, item, 1, 0, 1L, "m", f.getAbsolutePath(), "url", true, new Date(NUM_ITEMS - i), 0));
items.add(item);
}
PodDBAdapter adapter = new PodDBAdapter(context);
adapter.open();
adapter.setCompleteFeed(feed);
adapter.close();
assertTrue(feed.getId() != 0);
for (FeedItem item : items) {
assertTrue(item.getId() != 0);
assertTrue(item.getMedia().getId() != 0);
}
DBTasks.performAutoCleanup(context);
for (File file : files) {
assertTrue(file.exists());
}
}
@FlakyTest(tolerance = 3)
public void testPerformAutoCleanupShouldNotDeleteBecauseInQueue() throws IOException {
final int NUM_ITEMS = EPISODE_CACHE_SIZE * 2;
Feed feed = new Feed("url", new Date(), "title");
List<FeedItem> items = new ArrayList<FeedItem>();
feed.setItems(items);
List<File> files = new ArrayList<File>();
for (int i = 0; i < NUM_ITEMS; i++) {
FeedItem item = new FeedItem(0, "title", "id", "link", new Date(), FeedItem.PLAYED, feed);
File f = new File(destFolder, "file " + i);
assertTrue(f.createNewFile());
assertTrue(f.exists());
files.add(f);
item.setMedia(new FeedMedia(0, item, 1, 0, 1L, "m", f.getAbsolutePath(), "url", true, new Date(NUM_ITEMS - i), 0));
items.add(item);
}
PodDBAdapter adapter = new PodDBAdapter(context);
adapter.open();
adapter.setCompleteFeed(feed);
adapter.setQueue(items);
adapter.close();
assertTrue(feed.getId() != 0);
for (FeedItem item : items) {
assertTrue(item.getId() != 0);
assertTrue(item.getMedia().getId() != 0);
}
DBTasks.performAutoCleanup(context);
for (File file : files) {
assertTrue(file.exists());
}
}
/**
* Reproduces a bug where DBTasks.performAutoCleanup(android.content.Context) would use the ID of the FeedItem in the
* call to DBWriter.deleteFeedMediaOfItem instead of the ID of the FeedMedia. This would cause the wrong item to be deleted.
* @throws IOException
*/
@FlakyTest(tolerance = 3)
public void testPerformAutoCleanupShouldNotDeleteBecauseInQueue_withFeedsWithNoMedia() throws IOException {
// add feed with no enclosures so that item ID != media ID
saveFeedlist(context, 1, 10, false);
// add candidate for performAutoCleanup
List<Feed> feeds = saveFeedlist(context, 1, 1, true);
FeedMedia m = feeds.get(0).getItems().get(0).getMedia();
m.setDownloaded(true);
m.setFile_url("file");
PodDBAdapter adapter = new PodDBAdapter(context);
adapter.open();
adapter.setMedia(m);
adapter.close();
testPerformAutoCleanupShouldNotDeleteBecauseInQueue();
}
@FlakyTest(tolerance = 3)
public void testUpdateFeedNewFeed() {
final int NUM_ITEMS = 10;
Feed feed = new Feed("url", new Date(), "title");
feed.setItems(new ArrayList<FeedItem>());
for (int i = 0; i < NUM_ITEMS; i++) {
feed.getItems().add(new FeedItem(0, "item " + i, "id " + i, "link " + i, new Date(), FeedItem.UNPLAYED, feed));
}
Feed newFeed = DBTasks.updateFeed(context, feed)[0];
assertTrue(newFeed == feed);
assertTrue(feed.getId() != 0);
for (FeedItem item : feed.getItems()) {
assertFalse(item.isPlayed());
assertTrue(item.getId() != 0);
}
}
/** Two feeds with the same title, but different download URLs should be treated as different feeds. */
public void testUpdateFeedSameTitle() {
Feed feed1 = new Feed("url1", new Date(), "title");
Feed feed2 = new Feed("url2", new Date(), "title");
feed1.setItems(new ArrayList<FeedItem>());
feed2.setItems(new ArrayList<FeedItem>());
Feed savedFeed1 = DBTasks.updateFeed(context, feed1)[0];
Feed savedFeed2 = DBTasks.updateFeed(context, feed2)[0];
assertTrue(savedFeed1.getId() != savedFeed2.getId());
}
public void testUpdateFeedUpdatedFeed() {
final int NUM_ITEMS_OLD = 10;
final int NUM_ITEMS_NEW = 10;
final Feed feed = new Feed("url", new Date(), "title");
feed.setItems(new ArrayList<FeedItem>());
for (int i = 0; i < NUM_ITEMS_OLD; i++) {
feed.getItems().add(new FeedItem(0, "item " + i, "id " + i, "link " + i, new Date(i), FeedItem.PLAYED, feed));
}
PodDBAdapter adapter = new PodDBAdapter(context);
adapter.open();
adapter.setCompleteFeed(feed);
adapter.close();
// ensure that objects have been saved in db, then reset
assertTrue(feed.getId() != 0);
final long feedID = feed.getId();
feed.setId(0);
List<Long> itemIDs = new ArrayList<Long>();
for (FeedItem item : feed.getItems()) {
assertTrue(item.getId() != 0);
itemIDs.add(item.getId());
item.setId(0);
}
for (int i = NUM_ITEMS_OLD; i < NUM_ITEMS_NEW + NUM_ITEMS_OLD; i++) {
feed.getItems().add(0, new FeedItem(0, "item " + i, "id " + i, "link " + i, new Date(i), FeedItem.UNPLAYED, feed));
}
final Feed newFeed = DBTasks.updateFeed(context, feed)[0];
assertTrue(feed != newFeed);
updatedFeedTest(newFeed, feedID, itemIDs, NUM_ITEMS_OLD, NUM_ITEMS_NEW);
final Feed feedFromDB = DBReader.getFeed(context, newFeed.getId());
assertNotNull(feedFromDB);
assertTrue(feedFromDB.getId() == newFeed.getId());
updatedFeedTest(feedFromDB, feedID, itemIDs, NUM_ITEMS_OLD, NUM_ITEMS_NEW);
}
private void updatedFeedTest(final Feed newFeed, long feedID, List<Long> itemIDs, final int NUM_ITEMS_OLD, final int NUM_ITEMS_NEW) {
assertTrue(newFeed.getId() == feedID);
assertTrue(newFeed.getItems().size() == NUM_ITEMS_NEW + NUM_ITEMS_OLD);
Collections.reverse(newFeed.getItems());
Date lastDate = new Date(0);
for (int i = 0; i < NUM_ITEMS_OLD; i++) {
FeedItem item = newFeed.getItems().get(i);
assertTrue(item.getFeed() == newFeed);
assertTrue(item.getId() == itemIDs.get(i));
assertTrue(item.isPlayed());
assertTrue(item.getPubDate().getTime() >= lastDate.getTime());
lastDate = item.getPubDate();
}
for (int i = NUM_ITEMS_OLD; i < NUM_ITEMS_NEW + NUM_ITEMS_OLD; i++) {
FeedItem item = newFeed.getItems().get(i);
assertTrue(item.getFeed() == newFeed);
assertTrue(item.getId() != 0);
assertFalse(item.isPlayed());
assertTrue(item.getPubDate().getTime() >= lastDate.getTime());
lastDate = item.getPubDate();
}
}
}
| |
/*
* Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.device.mgt.iot.virtualfirealarm.agent.advanced.transport;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.DatagramSocket;
import java.net.HttpURLConnection;
import java.net.InetAddress;
import java.net.MalformedURLException;
import java.net.NetworkInterface;
import java.net.ServerSocket;
import java.net.SocketException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
/**
 * Network helper utilities for the virtual fire-alarm agent: endpoint parsing,
 * IPv4 validation, network-interface discovery, free-port selection and simple
 * HTTP request plumbing.
 */
public class TransportUtils {
    private static final Log log = LogFactory.getLog(TransportUtils.class);
    // Inclusive range in which getAvailablePort() searches for a free port.
    public static final int MIN_PORT_NUMBER = 9000;
    public static final int MAX_PORT_NUMBER = 11000;

    /**
     * Given a server endpoint as a String, this method splits it into Protocol, Host and Port
     *
     * @param ipString a network endpoint in the format - '<PROTOCOL>://<HOST>:<PORT>'
     * @return a map with keys "Protocol", "Host" & "Port" for the related values from the ipString
     * @throws TransportHandlerException if the endpoint string is not in the expected format
     */
    public static Map<String, String> getHostAndPort(String ipString)
            throws TransportHandlerException {
        Map<String, String> ipPortMap = new HashMap<String, String>();
        String[] ipPortArray = ipString.split(":");
        if (ipPortArray.length != 3) {
            String errorMsg =
                    "The IP String - '" + ipString +
                            "' is invalid. It needs to be in format '<PROTOCOL>://<HOST>:<PORT>'.";
            // Log at error level — this is a failure path, consistent with the
            // other error paths in this class (was previously log.info).
            log.error(errorMsg);
            throw new TransportHandlerException(errorMsg);
        }
        ipPortMap.put("Protocol", ipPortArray[0]);
        // The middle segment of '<PROTOCOL>://<HOST>:<PORT>' still carries "//".
        ipPortMap.put("Host", ipPortArray[1].replace("/", ""));
        ipPortMap.put("Port", ipPortArray[2]);
        return ipPortMap;
    }

    /**
     * This method validates whether a specific IP Address is of IPv4 type
     *
     * @param ipAddress the IP Address which needs to be validated
     * @return true if it is of IPv4 type and false otherwise
     */
    public static boolean validateIPv4(String ipAddress) {
        try {
            if (ipAddress == null || ipAddress.isEmpty()) {
                return false;
            }
            String[] parts = ipAddress.split("\\.");
            if (parts.length != 4) {
                return false;
            }
            for (String s : parts) {
                int i = Integer.parseInt(s);
                if ((i < 0) || (i > 255)) {
                    return false;
                }
            }
            // "1.2.3.4." splits into 4 parts as well; reject the trailing dot.
            return !ipAddress.endsWith(".");
        } catch (NumberFormatException nfe) {
            log.warn("The IP Address: " + ipAddress + " could not " +
                             "be validated against IPv4-style");
            return false;
        }
    }

    /**
     * Builds a map from network-interface name to its (first) IPv4 address.
     * Interfaces that expose no IPv4 address are omitted.
     *
     * @return a map of interface-name to IPv4-address strings
     * @throws TransportHandlerException if the interfaces or their addresses cannot be read
     */
    public static Map<String, String> getInterfaceIPMap() throws TransportHandlerException {
        Map<String, String> interfaceToIPMap = new HashMap<String, String>();
        Enumeration<NetworkInterface> networkInterfaces;
        String networkInterfaceName = "";
        String ipAddress;
        try {
            networkInterfaces = NetworkInterface.getNetworkInterfaces();
        } catch (SocketException exception) {
            String errorMsg =
                    "Error encountered whilst trying to get the list of network-interfaces";
            log.error(errorMsg);
            throw new TransportHandlerException(errorMsg, exception);
        }
        try {
            while (networkInterfaces.hasMoreElements()) {
                networkInterfaceName = networkInterfaces.nextElement().getName();
                if (log.isDebugEnabled()) {
                    log.debug("Network Interface: " + networkInterfaceName);
                    log.debug("------------------------------------------");
                }
                Enumeration<InetAddress> interfaceIPAddresses = NetworkInterface.getByName(
                        networkInterfaceName).getInetAddresses();
                while (interfaceIPAddresses.hasMoreElements()) {
                    ipAddress = interfaceIPAddresses.nextElement().getHostAddress();
                    if (log.isDebugEnabled()) {
                        log.debug("IP Address: " + ipAddress);
                    }
                    // Keep only IPv4 addresses; IPv6 entries are filtered out here.
                    if (TransportUtils.validateIPv4(ipAddress)) {
                        interfaceToIPMap.put(networkInterfaceName, ipAddress);
                    }
                }
                if (log.isDebugEnabled()) {
                    log.debug("------------------------------------------");
                }
            }
        } catch (SocketException exception) {
            String errorMsg =
                    "Error encountered whilst trying to get the IP Addresses of the network " +
                            "interface: " + networkInterfaceName;
            log.error(errorMsg);
            throw new TransportHandlerException(errorMsg, exception);
        }
        return interfaceToIPMap;
    }

    /**
     * Attempts to find a free port between the MIN_PORT_NUMBER(9000) and MAX_PORT_NUMBER(11000).
     * Tries 'RANDOMLY picked' port numbers between this range up-until "randomAttempts" number of
     * times. If still fails, then tries each port in descending order from the MAX_PORT_NUMBER
     * whilst skipping already attempted ones via random selection.
     *
     * @param randomAttempts no of times to TEST port numbers picked randomly over the given range
     * @return an available/free port
     * @throws IllegalStateException if no port in the range is free
     */
    public static synchronized int getAvailablePort(int randomAttempts) {
        ArrayList<Integer> failedPorts = new ArrayList<Integer>(randomAttempts);
        Random randomNum = new Random();
        int randomPort;
        while (randomAttempts > 0) {
            // nextInt's bound is exclusive; "+ 1" makes MAX_PORT_NUMBER eligible too.
            randomPort = randomNum.nextInt(MAX_PORT_NUMBER - MIN_PORT_NUMBER + 1) + MIN_PORT_NUMBER;
            if (checkIfPortAvailable(randomPort)) {
                return randomPort;
            }
            failedPorts.add(randomPort);
            randomAttempts--;
        }
        // Fall back to a linear scan from the top of the range, skipping ports that
        // already failed in the random phase. The previous implementation had no
        // lower bound here and would decrement past MIN_PORT_NUMBER into privileged
        // (and eventually invalid negative) port numbers when the range was exhausted.
        for (int port = MAX_PORT_NUMBER; port >= MIN_PORT_NUMBER; port--) {
            if (!failedPorts.contains(port) && checkIfPortAvailable(port)) {
                return port;
            }
        }
        throw new IllegalStateException(
                "No free port available between " + MIN_PORT_NUMBER + " and " + MAX_PORT_NUMBER + ".");
    }

    /**
     * Checks whether the given port is free for both TCP and UDP by attempting to
     * bind a ServerSocket and a DatagramSocket to it.
     *
     * @param port the port number to probe
     * @return true if both binds succeeded (port free), false otherwise
     */
    private static boolean checkIfPortAvailable(int port) {
        ServerSocket tcpSocket = null;
        DatagramSocket udpSocket = null;
        try {
            tcpSocket = new ServerSocket(port);
            // NOTE(review): setReuseAddress after the socket is already bound has no
            // effect on this bind; kept for parity with the original behaviour.
            tcpSocket.setReuseAddress(true);
            udpSocket = new DatagramSocket(port);
            udpSocket.setReuseAddress(true);
            return true;
        } catch (IOException ex) {
            // denotes the port is in use
        } finally {
            if (tcpSocket != null) {
                try {
                    tcpSocket.close();
                } catch (IOException e) {
                    /* not to be thrown */
                }
            }
            if (udpSocket != null) {
                udpSocket.close();
            }
        }
        return false;
    }

    /**
     * This is a utility method that creates and returns a HTTP connection object.
     *
     * @param urlString the URL pattern to which the connection needs to be created
     * @return an HTTPConnection object which cn be used to send HTTP requests
     * @throws TransportHandlerException if errors occur when creating the HTTP connection with
     *                                   the given URL string
     */
    public static HttpURLConnection getHttpConnection(String urlString) throws
                                                                        TransportHandlerException {
        URL connectionUrl;
        HttpURLConnection httpConnection;
        try {
            connectionUrl = new URL(urlString);
            httpConnection = (HttpURLConnection) connectionUrl.openConnection();
        } catch (MalformedURLException e) {
            String errorMsg = "Error occured whilst trying to form HTTP-URL from string: " + urlString;
            log.error(errorMsg);
            throw new TransportHandlerException(errorMsg, e);
        } catch (IOException exception) {
            String errorMsg = "Error occured whilst trying to open a connection to: " + urlString;
            log.error(errorMsg);
            throw new TransportHandlerException(errorMsg, exception);
        }
        return httpConnection;
    }

    /**
     * This is a utility method that reads and returns the response from a HTTP connection
     *
     * @param httpConnection the connection from which a response is expected
     * @return the response (as a string) from the given HTTP connection
     * @throws TransportHandlerException if any errors occur whilst reading the response from
     *                                   the connection stream
     */
    public static String readResponseFromHttpRequest(HttpURLConnection httpConnection)
            throws TransportHandlerException {
        BufferedReader bufferedReader;
        try {
            bufferedReader = new BufferedReader(new InputStreamReader(
                    httpConnection.getInputStream(), StandardCharsets.UTF_8));
        } catch (IOException exception) {
            String errorMsg = "There is an issue with connecting the reader to the input stream at: " +
                    httpConnection.getURL();
            log.error(errorMsg);
            throw new TransportHandlerException(errorMsg, exception);
        }
        String responseLine;
        StringBuilder completeResponse = new StringBuilder();
        try {
            // Note: line terminators are dropped by readLine(), so the response is
            // concatenated without newlines — matching the original behaviour.
            while ((responseLine = bufferedReader.readLine()) != null) {
                completeResponse.append(responseLine);
            }
        } catch (IOException exception) {
            String errorMsg = "Error occured whilst trying read from the connection stream at: " +
                    httpConnection.getURL();
            log.error(errorMsg);
            throw new TransportHandlerException(errorMsg, exception);
        }
        try {
            bufferedReader.close();
        } catch (IOException exception) {
            log.error(
                    "Could not succesfully close the bufferedReader to the connection at: " + httpConnection.getURL());
        }
        return completeResponse.toString();
    }
}
| |
/**
* Copyright (C) 2015 Stratio (http://stratio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.stratio.explorer.notebook.form;
import com.stratio.explorer.notebook.utils.*;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.util.HashMap;
import java.util.Map;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.*;
/**
 * Tests for {@code Input}: extraction of simple query parameters from scripts,
 * substitution of parameter values into scripts, and the low-level split helper.
 */
public class InputTest {
    // Expected field values that the shared assertion helper compares against.
    private InputExpectedValues expectedValues;
    // Parameter holder for Input.split(...) invocations.
    private EntrySplitParameters splitParameters;

    @Before
    public void setUp() {
        expectedValues = new InputExpectedValues();
        splitParameters = new EntrySplitParameters();
    }

    @After
    public void tearDown() {
        expectedValues = null;
    }

    @Test
    public void whenCallExtractSimpleQueryParamWithEmptyString() {
        Map<String, Input> params = Input.extractSimpleQueryParam(ScriptTypes.EMPTY);
        assertTrue(params.isEmpty());
    }

    @Test
    public void whenCallExtractSimpleQueryParamWithNotValidString() {
        Map<String, Input> params = Input.extractSimpleQueryParam(ScriptTypes.NOT_VALID);
        assertTrue(params.isEmpty());
    }

    @Test
    public void whenCallExtractSimpleQueryParamWithNullValue() {
        Map<String, Input> params = Input.extractSimpleQueryParam(ScriptTypes.NULL_VALUE);
        assertTrue(params.isEmpty());
    }

    @Test
    public void whenCallExtractSimpleQueryParamWithHiddenCharacter() {
        Map<String, Input> params = Input.extractSimpleQueryParam(ScriptTypes.HIDDEN_CHAR);
        assertTrue(params.isEmpty());
    }

    @Test
    public void whenCallExtractSimpleQueryParamWithHiddenCharacterAndDelimiter() {
        Map<String, Input> params = Input.extractSimpleQueryParam(ScriptTypes.HIDDEN_CHAR_AND_DELIMITER);
        assertTrue(params.isEmpty());
    }

    @Test
    public void whenCallExractSimpleQueryParamWithOnlyJsonObject() {
        Map<String, Input> params = Input.extractSimpleQueryParam(ScriptTypes.EMPTY_OBJECT);
        assertTrue(params.isEmpty());
    }

    @Test
    public void whenCallExtractsimpleQueryParamWithDelimiterAndEmptyObject() {
        // An empty object after the delimiter yields a single param keyed by "".
        Map<String, Input> params = Input.extractSimpleQueryParam(ScriptTypes.DELIMITER_AND_EMPTY_OBJETC);
        assertThat(params.size(), is(1));
        assertInputMatchesExpected(params.get(""));
    }

    @Test
    public void whenCallExractSimpleQueryParamWithHiddenCharDelimiterAndEmptyObject() {
        expectedValues.hidden = true;
        Map<String, Input> params = Input.extractSimpleQueryParam(ScriptTypes.HIDDEN_CHAR_DELIMITER_AND_EMPTY_OBJETC);
        assertThat(params.size(), is(1));
        assertInputMatchesExpected(params.get(""));
    }

    @Test
    public void whenCallExtractSimpleQueryParamsWithDelimiterAndFilledWithJsonObject() {
        expectedValues.type = KeyValuesStore.FIRST_KEY_VALUE.key();
        expectedValues.name = KeyValuesStore.FIRST_KEY_VALUE.value();
        Map<String, Input> params = Input.extractSimpleQueryParam(ScriptTypes.DELIMITER_WITH_ONE_KEY_VALUE_SEPARATE_WITH_DOTS_SEPARATOR);
        assertThat(params.size(), is(1));
        assertInputMatchesExpected(params.get(expectedValues.name));
    }

    @Test
    public void whenCallExtractSimpleQueryParamsWithDelimiterAndTwoSimpleJsonObject() {
        // Two objects with the same key collapse into a single param entry.
        expectedValues.type = KeyValuesStore.FIRST_KEY_VALUE.key();
        expectedValues.name = KeyValuesStore.FIRST_KEY_VALUE.value();
        Map<String, Input> params = Input.extractSimpleQueryParam(ScriptTypes.DELIMITER_WITH_TWO_KEY_VALUE_OBJECT_WITH_DOTS_SEPARATOR_SEPARATE_BY_COMMAS);
        assertThat(params.size(), is(1));
        assertInputMatchesExpected(params.get(expectedValues.name));
    }

    @Test
    public void whenCallExtractSimpleQueryParamsWithComplexObjectSeparateByEqualsSymbol() {
        expectedValues.defaultValue = KeyValuesStore.FIRST_KEY_VALUE.value();
        expectedValues.name = KeyValuesStore.FIRST_KEY_VALUE.key();
        expectedValues.options = new Input.ParamOption[]{new Input.ParamOption(KeyValuesStore.SECOND_KEY_VALUE.toStringSeparateBysimbol("="), null)};
        Map<String, Input> params = Input.extractSimpleQueryParam(ScriptTypes.DELIMITER_WITH_OBJECT_WITH_TWO_OBJECTS_WITH_EQUALS_SEPARATOR);
        assertInputMatchesExpected(params.get(expectedValues.name));
    }

    @Test
    public void whenCallExtractSimpleQueryParamsWithParenthesisInKey() {
        // The parenthesised part of the key becomes the display name (parentheses stripped).
        expectedValues.defaultValue = KeyValuesStore.KEY_WITH_PARENTESIS.value();
        expectedValues.name = KeyValuesStore.VALUES_WITH_PARENTHESIS[0];
        expectedValues.displayName = KeyValuesStore.VALUES_WITH_PARENTHESIS[1].substring(1, KeyValuesStore.VALUES_WITH_PARENTHESIS[1].length() - 1);
        Map<String, Input> params = Input.extractSimpleQueryParam(ScriptTypes.DELIMITER_WITH_KEY_VALUE_WITH_PARENTHESIS_IN_KEY);
        assertInputMatchesExpected(params.get(expectedValues.name));
    }

    /** Asserts that every field of {@code params} matches {@link #expectedValues}. */
    private void assertInputMatchesExpected(Input params) {
        assertThat("Input.params should be equals to" + expectedValues.name, params.name, is(expectedValues.name));
        assertThat("Input displayName should be equals " + expectedValues.displayName, params.displayName, is(expectedValues.displayName));
        assertThat("Input type should be equals " + expectedValues.type, params.type, is(expectedValues.type));
        assertThat("Input default name should be equals " + expectedValues.defaultValue, params.defaultValue, is(expectedValues.defaultValue));
        assertThat("Input hidden value should be equals " + expectedValues.hidden, params.hidden, is(expectedValues.hidden));
        if (params.options == null)
            assertThat("Input optons should be equals to " + expectedValues.options, params.options, is(expectedValues.options));
        if (params.options != null) {
            assertEquals("Input options should be equals to " + expectedValues.options.length, params.options.length, expectedValues.options.length);
            for (int index = 0; index < params.options.length; index++) {
                assertEquals("Input param options display name should be equals to " + expectedValues.options[index].getDisplayName(), params.options[index].getDisplayName(), expectedValues.options[index].getDisplayName());
                assertEquals("Input param options ", params.options[index].getValue(), expectedValues.options[index].getValue());
            }
        }
    }

    /*********************************************************************************************/
    // TODO: once the above is covered, split this file into three separate test classes.
    // TODO: remove all hardcoded names.
    @Test(expected = NullPointerException.class)
    public void whenCallgetSimpleQueryWithNullParams() {
        String script = "anyScrips";
        // Return value intentionally ignored; the call itself must throw.
        Input.getSimpleQuery(null, script);
    }

    @Test(expected = NullPointerException.class)
    public void whenCallGetSimpleQueryWithNullScrips() {
        Map<String, Object> params = ParamsBuilder.buildParamsByExpectedInput(expectedValues);
        Input.getSimpleQuery(params, ScriptTypes.NULL_VALUE);
    }

    @Test
    public void whenCallGetSimpleQuerywithEmptyScripts() {
        Map<String, Object> params = ParamsBuilder.buildParamsByExpectedInput(expectedValues);
        String query = Input.getSimpleQuery(params, ScriptTypes.EMPTY);
        assertQueryEquals(query, "");
    }

    @Test
    public void whenCallGetSimpleQueryWithParamsAndscriptFilled() {
        // A param that does not appear in the script leaves the script unchanged.
        expectedValues.name = "value";
        Map<String, Object> params = new HashMap<>();
        params.put(expectedValues.name, "{value = 1}");
        String query = Input.getSimpleQuery(params, ScriptTypes.EMPTY_OBJECT);
        assertQueryEquals(query, "{}");
    }

    @Test
    public void whenCallGetSimpleQueryWithParamsAndscriptFilledWithObjectWithEqualsSimbolSeparator() {
        expectedValues.name = "value";
        Map<String, Object> params = new HashMap<>();
        params.put(expectedValues.name, "{" + KeyValuesStore.FIRST_KEY_VALUE.key() + "}");
        String query = Input.getSimpleQuery(params, ScriptTypes.DELIMITER_WITH_KEY_VALUE_OBJECT_WITH_EQUALS);
        assertQueryEquals(query, KeyValuesStore.FIRST_KEY_VALUE.value());
    }

    @Test
    public void whenCallGetSimpleQueryWithParamsAndScriptFilledWithManyObjectSeparatebyComma() {
        expectedValues.name = "value";
        Map<String, Object> params = new HashMap<>();
        params.put(expectedValues.name, "{" + KeyValuesStore.FIRST_KEY_VALUE.key() + "}");
        String query = Input.getSimpleQuery(params, ScriptTypes.DELIMITER_WITH_KEY_VALUE_OBJETC_WITH_LIT_VALUES);
        assertQueryEquals(query, KeyValuesStore.LITS_VALUES[0]);
    }

    @Test
    public void whenCallGetSimpleQueryWithNestedObject() {
        expectedValues.name = "value";
        Map<String, Object> params = new HashMap<>();
        params.put(expectedValues.name, "{" + KeyValuesStore.VALUE_WITH_NESTED_OBJECT.key() + "}," + "{" + KeyValuesStore.VALUE_WITH_NESTED_OBJECT.key() + "}");
        String query = Input.getSimpleQuery(params, ScriptTypes.DELIMITER_WITH_KEY_VALUE_OBJECT_WITH_NESTED_OBJECT);
        assertQueryEquals(query, "a:b");
    }

    /** Asserts the rendered query equals the expected value. */
    private void assertQueryEquals(String query, String value) {
        assertThat("Query result should be equals to " + value, query, is(value));
    }

    /*********************************************************************************************/
    // TODO: when the tests are finished, separate into three different classes.
    @Test
    public void whenCallSplitMethodWithEmptyString() {
        String[] result = Input.split("");
        assertSplitEquals(result, new String[]{""});
    }

    @Test
    public void whenCallSplitMethodWithAnyStringNotMactherWithRegularexpresion() {
        String[] result = Input.split("asassasasas");
        assertSplitEquals(result, new String[]{"asassasasas"});
    }

    @Test
    public void whenCallSplitMethodWithAllParametersNotInitialize() {
        String[] result = Input.split(splitParameters.str, splitParameters.escapeSeq,
                                      splitParameters.escapeChar, splitParameters.blockStart,
                                      splitParameters.blockEnd, splitParameters.splitters,
                                      splitParameters.includeSplitter);
        assertSplitEquals(result, new String[]{});
    }

    @Test
    public void whenCallSplitMethodWithStrInitialized() {
        splitParameters.str = "aaaaaa-11111";
        String[] result = Input.split(splitParameters.str, splitParameters.escapeSeq,
                                      splitParameters.escapeChar, splitParameters.blockStart,
                                      splitParameters.blockEnd, splitParameters.splitters,
                                      splitParameters.includeSplitter);
        assertSplitEquals(result, new String[]{splitParameters.str});
    }

    @Test
    public void whenCallSplitMethodWithStringThatMatcherWithRegularExpresionAndScapeChar() {
        splitParameters.str = "aaaaaa-11111";
        splitParameters.escapeChar = '-';
        String[] result = Input.split(splitParameters.str, splitParameters.escapeSeq,
                                      splitParameters.escapeChar, splitParameters.blockStart,
                                      splitParameters.blockEnd, splitParameters.splitters,
                                      splitParameters.includeSplitter);
        assertSplitEquals(result, new String[]{splitParameters.str});
    }

    @Test
    public void whenCallSplitMethodWithStrScapeCharAndSplitters() {
        splitParameters.str = "aaaaaa-11111";
        splitParameters.escapeChar = '-';
        splitParameters.splitters = new String[]{"aa", "s2", "s3"};
        String[] result = Input.split(splitParameters.str, splitParameters.escapeSeq,
                                      splitParameters.escapeChar, splitParameters.blockStart,
                                      splitParameters.blockEnd, splitParameters.splitters,
                                      splitParameters.includeSplitter);
        assertSplitEquals(result, new String[]{"", "", "", "-11111"});
    }

    /** Asserts the split result array equals the expected array. */
    private void assertSplitEquals(String[] result, String[] expected) {
        assertThat("Result should be equals " + expected, result, is(expected));
    }
}
| |
// Copyright 2000-2017 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInspection.localCanBeFinal;
import com.intellij.codeInsight.daemon.GroupNames;
import com.intellij.codeInspection.*;
import com.intellij.codeInspection.ui.MultipleCheckboxOptionsPanel;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.WriteExternalException;
import com.intellij.psi.*;
import com.intellij.psi.controlFlow.*;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import org.jdom.Element;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.util.*;
/**
* @author max
*/
public class LocalCanBeFinal extends AbstractBaseJavaLocalInspectionTool {
public boolean REPORT_VARIABLES = true;
public boolean REPORT_PARAMETERS = true;
public boolean REPORT_CATCH_PARAMETERS = true;
public boolean REPORT_FOREACH_PARAMETERS = true;
public boolean REPORT_IMPLICIT_FINALS = true;
private final LocalQuickFix myQuickFix;
@NonNls public static final String SHORT_NAME = "LocalCanBeFinal";
  /** Creates the inspection with its shared "make final" quick fix. */
  public LocalCanBeFinal() {
    myQuickFix = new AcceptSuggested();
  }
@Override
public void writeSettings(@NotNull Element node) throws WriteExternalException {
node.addContent(new Element("option").setAttribute("name", "REPORT_VARIABLES").setAttribute("value", String.valueOf(REPORT_VARIABLES)));
node.addContent(new Element("option").setAttribute("name", "REPORT_PARAMETERS").setAttribute("value", String.valueOf(REPORT_PARAMETERS)));
if (!REPORT_CATCH_PARAMETERS) {
node.addContent(new Element("option").setAttribute("name", "REPORT_CATCH_PARAMETERS").setAttribute("value", "false"));
}
if (!REPORT_FOREACH_PARAMETERS) {
node.addContent(new Element("option").setAttribute("name", "REPORT_FOREACH_PARAMETERS").setAttribute("value", "false"));
}
if (!REPORT_IMPLICIT_FINALS) {
node.addContent(new Element("option").setAttribute("name", "REPORT_IMPLICIT_FINALS").setAttribute("value", "false"));
}
}
@Override
public ProblemDescriptor[] checkMethod(@NotNull PsiMethod method, @NotNull InspectionManager manager, boolean isOnTheFly) {
List<ProblemDescriptor> list = checkCodeBlock(method.getBody(), manager, isOnTheFly);
return list == null ? null : list.toArray(ProblemDescriptor.EMPTY_ARRAY);
}
@Override
public ProblemDescriptor[] checkClass(@NotNull PsiClass aClass, @NotNull InspectionManager manager, boolean isOnTheFly) {
List<ProblemDescriptor> allProblems = null;
final PsiClassInitializer[] initializers = aClass.getInitializers();
for (PsiClassInitializer initializer : initializers) {
final List<ProblemDescriptor> problems = checkCodeBlock(initializer.getBody(), manager, isOnTheFly);
if (problems != null) {
if (allProblems == null) {
allProblems = new ArrayList<>(1);
}
allProblems.addAll(problems);
}
}
return allProblems == null ? null : allProblems.toArray(ProblemDescriptor.EMPTY_ARRAY);
}
@Nullable
private List<ProblemDescriptor> checkCodeBlock(final PsiCodeBlock body, final InspectionManager manager, final boolean onTheFly) {
  // Analyzes one code block (method body, initializer, or lambda body) and reports
  // every local variable / parameter that is effectively final, i.e. assigned exactly
  // once on every path, so it can be declared 'final'. Returns null when there is
  // nothing to report or the block cannot be analyzed.
  if (body == null) return null;
  final ControlFlow flow;
  try {
    // Policy restricting control-flow tracking to variables declared in the same
    // class as this body; qualified references and outer-class variables are opaque.
    ControlFlowPolicy policy = new ControlFlowPolicy() {
      @Override
      public PsiVariable getUsedVariable(@NotNull PsiReferenceExpression refExpr) {
        if (refExpr.isQualified()) return null;
        PsiElement refElement = refExpr.resolve();
        if (refElement instanceof PsiLocalVariable || refElement instanceof PsiParameter) {
          // Ignore variables captured from an enclosing class.
          if (!isVariableDeclaredInMethod((PsiVariable)refElement)) return null;
          return (PsiVariable)refElement;
        }
        return null;
      }
      @Override
      public boolean isParameterAccepted(@NotNull PsiParameter psiParameter) {
        return isVariableDeclaredInMethod(psiParameter);
      }
      @Override
      public boolean isLocalVariableAccepted(@NotNull PsiLocalVariable psiVariable) {
        return isVariableDeclaredInMethod(psiVariable);
      }
      // True when the variable's owning class is the class containing 'body'.
      private boolean isVariableDeclaredInMethod(PsiVariable psiVariable) {
        return PsiTreeUtil.getParentOfType(psiVariable, PsiClass.class) == PsiTreeUtil.getParentOfType(body, PsiClass.class);
      }
    };
    flow = ControlFlowFactory.getInstance(body.getProject()).getControlFlow(body, policy, false);
  }
  catch (AnalysisCanceledException e) {
    // Control-flow construction was aborted (e.g. code too complex) - report nothing.
    return null;
  }
  int start = flow.getStartOffset(body);
  int end = flow.getEndOffset(body);
  // Variables written anywhere in the whole block; used later to exclude
  // reassigned parameters from the result.
  final Collection<PsiVariable> writtenVariables = ControlFlowUtil.getWrittenVariables(flow, start, end, false);
  final List<ProblemDescriptor> problems = new ArrayList<>();
  // Candidate variables that are in SSA form (single assignment) in their block.
  final HashSet<PsiVariable> result = new HashSet<>();
  body.accept(new JavaRecursiveElementWalkingVisitor() {
    @Override public void visitCodeBlock(PsiCodeBlock block) {
      // Lambda bodies get their own control flow; recurse into checkCodeBlock
      // instead of analyzing them with the enclosing body's flow.
      if (block.getParent() instanceof PsiLambdaExpression && block != body) {
        final List<ProblemDescriptor> descriptors = checkCodeBlock(block, manager, onTheFly);
        if (descriptors != null) {
          problems.addAll(descriptors);
        }
        return;
      }
      super.visitCodeBlock(block);
      PsiElement anchor = block;
      // For a switch body, offsets must be taken from the switch statement itself.
      if (block.getParent() instanceof PsiSwitchStatement) {
        anchor = block.getParent();
      }
      int from = flow.getStartOffset(anchor);
      int end = flow.getEndOffset(anchor);
      // Variables assigned exactly once within this sub-block's flow range.
      List<PsiVariable> ssa = ControlFlowUtil.getSSAVariables(flow, from, end, true);
      HashSet<PsiElement> declared = getDeclaredVariables(block);
      for (PsiVariable psiVariable : ssa) {
        // Only accept SSA variables that are also declared in this very block.
        if (declared.contains(psiVariable)) {
          result.add(psiVariable);
        }
      }
    }
    @Override
    public void visitResourceVariable(PsiResourceVariable variable) {
      // try-with-resources variables are implicitly effectively final; always candidates.
      result.add(variable);
    }
    @Override
    public void visitCatchSection(PsiCatchSection section) {
      super.visitCatchSection(section);
      if (!REPORT_CATCH_PARAMETERS) return;
      final PsiParameter parameter = section.getParameter();
      // Skip catch parameters belonging to a different (nested/outer) class.
      if (PsiTreeUtil.getParentOfType(parameter, PsiClass.class) != PsiTreeUtil.getParentOfType(body, PsiClass.class)) {
        return;
      }
      final PsiCodeBlock catchBlock = section.getCatchBlock();
      if (catchBlock == null) return;
      final int from = flow.getStartOffset(catchBlock);
      final int end = flow.getEndOffset(catchBlock);
      // A catch parameter can be final iff it is never reassigned inside its catch block.
      if (!ControlFlowUtil.getWrittenVariables(flow, from, end, false).contains(parameter)) {
        writtenVariables.remove(parameter);
        result.add(parameter);
      }
    }
    @Override public void visitForeachStatement(PsiForeachStatement statement) {
      super.visitForeachStatement(statement);
      if (!REPORT_FOREACH_PARAMETERS) return;
      final PsiParameter param = statement.getIterationParameter();
      if (PsiTreeUtil.getParentOfType(param, PsiClass.class) != PsiTreeUtil.getParentOfType(body, PsiClass.class)) {
        return;
      }
      // NOTE: this local deliberately shadows the outer 'body' parameter from here on;
      // the class comparison above still used the outer one.
      final PsiStatement body = statement.getBody();
      if (body == null) return;
      int from = flow.getStartOffset(body);
      int end = flow.getEndOffset(body);
      // The loop variable can be final iff the loop body never reassigns it.
      if (!ControlFlowUtil.getWrittenVariables(flow, from, end, false).contains(param)) {
        writtenVariables.remove(param);
        result.add(param);
      }
    }
    // Collects the variables declared directly in 'block' (plain declarations and
    // classic for-loop initializers), without descending into nested scopes.
    private HashSet<PsiElement> getDeclaredVariables(PsiCodeBlock block) {
      final HashSet<PsiElement> result = new HashSet<>();
      PsiElement[] children = block.getChildren();
      for (PsiElement child : children) {
        // JavaElementVisitor (non-recursive) visits only the child itself.
        child.accept(new JavaElementVisitor() {
          @Override public void visitReferenceExpression(PsiReferenceExpression expression) {
            visitReferenceElement(expression);
          }
          @Override public void visitDeclarationStatement(PsiDeclarationStatement statement) {
            PsiElement[] declaredElements = statement.getDeclaredElements();
            for (PsiElement declaredElement : declaredElements) {
              if (declaredElement instanceof PsiVariable) result.add(declaredElement);
            }
          }
          @Override
          public void visitForStatement(PsiForStatement statement) {
            super.visitForStatement(statement);
            final PsiStatement initialization = statement.getInitialization();
            if (!(initialization instanceof PsiDeclarationStatement)) {
              return;
            }
            final PsiDeclarationStatement declarationStatement = (PsiDeclarationStatement)initialization;
            final PsiElement[] declaredElements = declarationStatement.getDeclaredElements();
            for (final PsiElement declaredElement : declaredElements) {
              if (declaredElement instanceof PsiVariable) {
                result.add(declaredElement);
              }
            }
          }
        });
      }
      return result;
    }
    // Intentionally empty: do not walk into reference expressions while scanning.
    @Override public void visitReferenceExpression(PsiReferenceExpression expression) {
    }
  });
  // Method parameters are candidates too, unless the method is synthetic.
  if (body.getParent() instanceof PsiMethod && REPORT_PARAMETERS) {
    final PsiMethod method = (PsiMethod)body.getParent();
    if (!(method instanceof SyntheticElement)) { // e.g. JspHolderMethod
      Collections.addAll(result, method.getParameterList().getParameters());
    }
  }
  for (Iterator<PsiVariable> iterator = result.iterator(); iterator.hasNext(); ) {
    final PsiVariable variable = iterator.next();
    if (shouldBeIgnored(variable)) {
      iterator.remove();
      continue;
    }
    final PsiElement parent = variable.getParent();
    if (!(parent instanceof PsiDeclarationStatement)) {
      continue;
    }
    final PsiDeclarationStatement declarationStatement = (PsiDeclarationStatement)parent;
    final PsiElement[] elements = declarationStatement.getDeclaredElements();
    final PsiElement grandParent = parent.getParent();
    if (elements.length > 1 && grandParent instanceof PsiForStatement) {
      iterator.remove(); // do not report when more than 1 variable declared in for loop
    }
  }
  // A parameter that is written anywhere in the body cannot be final.
  for (PsiVariable writtenVariable : writtenVariables) {
    if (writtenVariable instanceof PsiParameter) {
      result.remove(writtenVariable);
    }
  }
  if (result.isEmpty() && problems.isEmpty()) return null;
  for (PsiVariable variable : result) {
    // Highlight the name identifier when available, the whole variable otherwise.
    final PsiIdentifier nameIdentifier = variable.getNameIdentifier();
    PsiElement problemElement = nameIdentifier != null ? nameIdentifier : variable;
    if (variable instanceof PsiParameter && !(((PsiParameter)variable).getDeclarationScope() instanceof PsiForeachStatement)) {
      problems.add(manager.createProblemDescriptor(problemElement,
        InspectionsBundle.message("inspection.can.be.local.parameter.problem.descriptor"),
        myQuickFix, ProblemHighlightType.GENERIC_ERROR_OR_WARNING, onTheFly));
    }
    else {
      problems.add(manager.createProblemDescriptor(problemElement,
        InspectionsBundle.message("inspection.can.be.local.variable.problem.descriptor"),
        myQuickFix, ProblemHighlightType.GENERIC_ERROR_OR_WARNING, onTheFly));
    }
  }
  return problems;
}
/**
 * Decides whether the given variable must be excluded from the "can be final" report,
 * based on its current modifiers and the user-configurable REPORT_* options.
 *
 * @param psiVariable candidate variable (local, parameter, etc.)
 * @return {@code true} when the variable should not be reported
 */
private boolean shouldBeIgnored(PsiVariable psiVariable) {
  final PsiModifierList modifiers = psiVariable.getModifierList();
  // No modifier list at all, or already explicitly final: nothing to suggest.
  if (modifiers == null || modifiers.hasExplicitModifier(PsiModifier.FINAL)) {
    return true;
  }
  // Implicitly final (e.g. resource variables) - only report when configured to.
  if (!REPORT_IMPLICIT_FINALS && modifiers.hasModifierProperty(PsiModifier.FINAL)) {
    return true;
  }
  if (psiVariable instanceof PsiLocalVariable) {
    return !REPORT_VARIABLES;
  }
  if (!(psiVariable instanceof PsiParameter)) {
    // Anything else (fields, etc.) is out of scope for this inspection.
    return true;
  }
  // Parameter kind determines which option governs it.
  final PsiElement scope = ((PsiParameter)psiVariable).getDeclarationScope();
  if (scope instanceof PsiCatchSection) {
    return !REPORT_CATCH_PARAMETERS;
  }
  if (scope instanceof PsiForeachStatement) {
    return !REPORT_FOREACH_PARAMETERS;
  }
  return !REPORT_PARAMETERS;
}
@Override
@NotNull
public String getDisplayName() {
  // Localized inspection name shown in the inspection settings tree.
  return InspectionsBundle.message("inspection.local.can.be.final.display.name");
}
@Override
@NotNull
public String getGroupDisplayName() {
  // This inspection lives under the "Code style issues" group.
  return GroupNames.STYLE_GROUP_NAME;
}
@Override
@NotNull
public String getShortName() {
  // Stable id used for suppression comments and inspection profiles.
  return SHORT_NAME;
}
/**
 * Quick fix that accepts the suggestion: it normalizes the variable's declaration
 * (splitting multi-variable declarations if needed) and adds the {@code final} modifier.
 */
private static class AcceptSuggested implements LocalQuickFix {
  @Override
  @NotNull
  public String getFamilyName() {
    return InspectionsBundle.message("inspection.can.be.final.accept.quickfix");
  }

  @Override
  public void applyFix(@NotNull Project project, @NotNull ProblemDescriptor problem) {
    // The descriptor points at the variable's name identifier (or the variable itself).
    final PsiElement element = problem.getPsiElement();
    if (element == null) {
      return;
    }
    final PsiVariable variable = PsiTreeUtil.getParentOfType(element, PsiVariable.class, false);
    if (variable == null) {
      return;
    }
    // Ensure the variable sits in its own declaration before adding the modifier.
    variable.normalizeDeclaration();
    PsiUtil.setModifierProperty(variable, PsiModifier.FINAL, true);
  }
}
@Override
public JComponent createOptionsPanel() {
  // Data-driven construction: one {bundle-key, settings-field} pair per checkbox,
  // added in the same order the options appear in the UI.
  final MultipleCheckboxOptionsPanel panel = new MultipleCheckboxOptionsPanel(this);
  final String[][] options = {
    {"inspection.local.can.be.final.option", "REPORT_VARIABLES"},
    {"inspection.local.can.be.final.option1", "REPORT_PARAMETERS"},
    {"inspection.local.can.be.final.option2", "REPORT_CATCH_PARAMETERS"},
    {"inspection.local.can.be.final.option3", "REPORT_FOREACH_PARAMETERS"},
    {"inspection.local.can.be.final.option4", "REPORT_IMPLICIT_FINALS"},
  };
  for (String[] option : options) {
    panel.addCheckbox(InspectionsBundle.message(option[0]), option[1]);
  }
  return panel;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.jdbc.thin;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.Callable;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.cache.CacheAtomicityMode;
import org.apache.ignite.cache.CacheMode;
import org.apache.ignite.cache.CacheWriteSynchronizationMode;
import org.apache.ignite.cache.query.SqlFieldsQuery;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.NearCacheConfiguration;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.testframework.GridTestUtils;
/**
* Test that checks indexes handling with JDBC.
*/
public abstract class JdbcThinDynamicIndexAbstractSelfTest extends JdbcThinAbstractDmlStatementSelfTest {
/** */
private static final String CREATE_INDEX = "create index idx on Person (id desc)";
/** */
private static final String DROP_INDEX = "drop index idx";
/** */
private static final String CREATE_INDEX_IF_NOT_EXISTS = "create index if not exists idx on Person (id desc)";
/** */
private static final String DROP_INDEX_IF_EXISTS = "drop index idx if exists";
/** {@inheritDoc} */
@Override protected void beforeTest() throws Exception {
super.beforeTest();
try (PreparedStatement ps =
conn.prepareStatement("INSERT INTO Person (_key, id, age, firstName, lastName) values (?, ?, ?, ?, ?)")) {
ps.setString(1, "j");
ps.setInt(2, 1);
ps.setInt(3, 10);
ps.setString(4, "John");
ps.setString(5, "Smith");
ps.executeUpdate();
ps.setString(1, "m");
ps.setInt(2, 2);
ps.setInt(3, 20);
ps.setString(4, "Mark");
ps.setString(5, "Stone");
ps.executeUpdate();
ps.setString(1, "s");
ps.setInt(2, 3);
ps.setInt(3, 30);
ps.setString(4, "Sarah");
ps.setString(5, "Pazzi");
ps.executeUpdate();
}
}
/** {@inheritDoc} */
@SuppressWarnings("unchecked")
@Override CacheConfiguration cacheConfig() {
CacheConfiguration ccfg = super.cacheConfig();
ccfg.setWriteSynchronizationMode(CacheWriteSynchronizationMode.FULL_SYNC);
ccfg.setCacheMode(cacheMode());
ccfg.setAtomicityMode(atomicityMode());
if (nearCache())
ccfg.setNearConfiguration(new NearCacheConfiguration());
return ccfg;
}
/**
* @return Cache mode to use.
*/
protected abstract CacheMode cacheMode();
/**
* @return Cache atomicity mode to use.
*/
protected abstract CacheAtomicityMode atomicityMode();
/**
* @return Whether to use near cache.
*/
protected abstract boolean nearCache();
/**
* Execute given SQL statement.
* @param sql Statement.
* @throws SQLException if failed.
*/
private void jdbcRun(String sql) throws SQLException {
try (Statement stmt = conn.createStatement()) {
stmt.execute(sql);
}
}
/**
* @param rs Result set.
* @return The value of the first column at the first row from result set.
* @throws SQLException If failed.
*/
private Object getSingleValue(ResultSet rs) throws SQLException {
assertEquals(1, rs.getMetaData().getColumnCount());
assertTrue(rs.next());
Object res = rs.getObject(1);
assertTrue(rs.isLast());
return res;
}
/**
* Test that after index creation index is used by queries.
* @throws SQLException If failed.
*/
public void testCreateIndex() throws SQLException {
assertSize(3);
assertColumnValues(30, 20, 10);
jdbcRun(CREATE_INDEX);
// Test that local queries on all server nodes use new index.
for (int i = 0 ; i < 3; i++) {
List<List<?>> locRes = ignite(i).cache(DEFAULT_CACHE_NAME).query(new SqlFieldsQuery("explain select id from " +
"Person where id = 5").setLocal(true)).getAll();
assertEquals(F.asList(
Collections.singletonList("SELECT\n" +
" ID\n" +
"FROM \"" + DEFAULT_CACHE_NAME + "\".PERSON\n" +
" /* \"" + DEFAULT_CACHE_NAME + "\".IDX: ID = 5 */\n" +
"WHERE ID = 5")
), locRes);
}
assertSize(3);
assertColumnValues(30, 20, 10);
}
/**
* Test that creating an index with duplicate name yields an error.
* @throws SQLException If failed.
*/
public void testCreateIndexWithDuplicateName() throws SQLException {
jdbcRun(CREATE_INDEX);
GridTestUtils.assertThrowsAnyCause(log, new Callable<Void>() {
@Override public Void call() throws Exception {
jdbcRun(CREATE_INDEX);
return null;
}
}, SQLException.class, "Index already exists: IDX");
}
/**
* Test that creating an index with duplicate name does not yield an error with {@code IF NOT EXISTS}.
* @throws SQLException If failed.
*/
public void testCreateIndexIfNotExists() throws SQLException {
jdbcRun(CREATE_INDEX);
// Despite duplicate name, this does not yield an error.
jdbcRun(CREATE_INDEX_IF_NOT_EXISTS);
}
/**
* Test that after index drop there are no attempts to use it, and data state remains intact.
* @throws SQLException If failed.
*/
public void testDropIndex() throws SQLException {
assertSize(3);
jdbcRun(CREATE_INDEX);
assertSize(3);
jdbcRun(DROP_INDEX);
// Test that no local queries on server nodes use new index.
for (int i = 0 ; i < 3; i++) {
List<List<?>> locRes = ignite(i).cache(DEFAULT_CACHE_NAME).query(new SqlFieldsQuery("explain select id from " +
"Person where id = 5").setLocal(true)).getAll();
assertEquals(F.asList(
Collections.singletonList("SELECT\n" +
" ID\n" +
"FROM \"" + DEFAULT_CACHE_NAME + "\".PERSON\n" +
" /* \"" + DEFAULT_CACHE_NAME + "\".PERSON.__SCAN_ */\n" +
"WHERE ID = 5")
), locRes);
}
assertSize(3);
}
/**
* Test that dropping a non-existent index yields an error.
*/
public void testDropMissingIndex() {
GridTestUtils.assertThrowsAnyCause(log, new Callable<Void>() {
@Override public Void call() throws Exception {
jdbcRun(DROP_INDEX);
return null;
}
}, SQLException.class, "Index doesn't exist: IDX");
}
/**
* Test that dropping a non-existent index does not yield an error with {@code IF EXISTS}.
* @throws SQLException If failed.
*/
public void testDropMissingIndexIfExists() throws SQLException {
// Despite index missing, this does not yield an error.
jdbcRun(DROP_INDEX_IF_EXISTS);
}
/**
* Test that changes in cache affect index, and vice versa.
* @throws SQLException If failed.
*/
public void testIndexState() throws SQLException {
IgniteCache<String, Person> cache = cache();
assertSize(3);
assertColumnValues(30, 20, 10);
jdbcRun(CREATE_INDEX);
assertSize(3);
assertColumnValues(30, 20, 10);
cache.remove("m");
assertColumnValues(30, 10);
cache.put("a", new Person(4, "someVal", "a", 5));
assertColumnValues(5, 30, 10);
jdbcRun(DROP_INDEX);
assertColumnValues(5, 30, 10);
}
/**
* Check that values of {@code field1} match what we expect.
* @param vals Expected values.
* @throws SQLException If failed.
*/
private void assertColumnValues(int... vals) throws SQLException {
try (Statement stmt = conn.createStatement()) {
try (ResultSet rs = stmt.executeQuery("SELECT age FROM Person ORDER BY id desc")) {
assertEquals(1, rs.getMetaData().getColumnCount());
for (int i = 0; i < vals.length; i++) {
assertTrue("Result set must have " + vals.length + " rows, got " + i, rs.next());
assertEquals(vals[i], rs.getInt(1));
}
assertFalse("Result set must have exactly " + vals.length + " rows", rs.next());
}
}
}
/**
* Do a {@code SELECT COUNT(*)} query to check index state correctness.
* @param expSize Expected number of items in table.
* @throws SQLException If failed.
*/
private void assertSize(long expSize) throws SQLException {
assertEquals(expSize, cache().size());
try (Statement stmt = conn.createStatement()) {
conn.setSchema('"' + DEFAULT_CACHE_NAME + '"');
try (ResultSet rs = stmt.executeQuery("SELECT COUNT(*) from Person")) {
assertEquals(expSize, getSingleValue(rs));
}
}
}
/**
* @return Cache.
*/
private IgniteCache<String, Person> cache() {
return grid(0).cache(DEFAULT_CACHE_NAME);
}
}
| |
/*
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.batik.util;
import java.util.Iterator;
import java.util.NoSuchElementException;
/**
* This class represents an object which queues Runnable objects for
* invocation in a single thread.
*
* @author <a href="mailto:stephane@hillion.org">Stephane Hillion</a>
* @version $Id$
*/
public class RunnableQueue implements Runnable {

    /**
     * Type-safe enumeration of queue states.
     */
    public static final class RunnableQueueState {
        // Human-readable state name; instances are compared by identity.
        private final String value;
        private RunnableQueueState(String value) {
            this.value = value; }
        public String getValue() { return value; }
        public String toString() {
            return "[RunnableQueueState: " + value + ']'; }
    }

    /**
     * The queue is in the process of running tasks.
     */
    public static final RunnableQueueState RUNNING
        = new RunnableQueueState("Running");

    /**
     * The queue may still be running tasks but as soon as possible
     * will go to SUSPENDED state.
     */
    public static final RunnableQueueState SUSPENDING
        = new RunnableQueueState("Suspending");

    /**
     * The queue is no longer running any tasks and will not
     * run any tasks until resumeExecution is called.
     */
    public static final RunnableQueueState SUSPENDED
        = new RunnableQueueState("Suspended");

    /**
     * The Suspension state of this thread.
     */
    protected volatile RunnableQueueState state;

    /**
     * Object to synchronize/wait/notify for suspension
     * issues.
     */
    protected final Object stateLock = new Object();

    /**
     * Used to indicate if the queue was resumed while
     * still running, so a 'resumed' event can be sent.
     */
    protected boolean wasResumed;

    /**
     * The Runnable objects list, also used as synchronization point
     * for pushing/popping runnables.
     */
    private final DoublyLinkedList list = new DoublyLinkedList();

    /**
     * Count of preempt entries in queue, so preempt entries
     * can be kept properly ordered.
     */
    protected int preemptCount;

    /**
     * The object which handles run events.
     */
    protected RunHandler runHandler;

    /**
     * The current thread.
     */
    protected volatile HaltingThread runnableQueueThread;

    /**
     * The {@link IdleRunnable} to run if the queue is empty.
     */
    private IdleRunnable idleRunnable;

    /**
     * The time (in milliseconds) that the idle runnable should be run next.
     */
    private long idleRunnableWaitTime;

    /**
     * Creates a new RunnableQueue started in a new thread.
     * @return a RunnableQueue which is guaranteed to have entered its
     *         <code>run()</code> method.
     */
    public static RunnableQueue createRunnableQueue() {
        RunnableQueue result = new RunnableQueue();
        synchronized (result) {
            // Sync on the new object, so we can wait until the new
            // thread is ready to go.
            HaltingThread ht = new HaltingThread
                (result, "RunnableQueue-" + threadCount++);
            ht.setDaemon(true);
            ht.start();
            while (result.getThread() == null) {
                try {
                    // run() notifies 'result' once runnableQueueThread is set.
                    result.wait();
                } catch (InterruptedException ie) {
                }
            }
        }
        return result;
    }

    // Counter used only to give each queue thread a distinct name.
    private static volatile int threadCount;

    /**
     * Runs this queue.
     */
    public void run() {
        synchronized (this) {
            runnableQueueThread = (HaltingThread)Thread.currentThread();
            // Wake the create method so it knows we are in
            // our run and ready to go.
            notify();
        }

        Link l;
        Runnable rable;
        try {
            while (!HaltingThread.hasBeenHalted()) {
                boolean callSuspended = false;
                boolean callResumed   = false;

                // Mutex for suspension work.
                // Two-phase handshake: first observe a pending SUSPENDING request
                // and mark SUSPENDED, then (outside the lock) fire the handler,
                // then park in the second block until RUNNING is restored.
                synchronized (stateLock) {
                    if (state != RUNNING) {
                        state = SUSPENDED;
                        callSuspended = true;
                    }
                }
                if (callSuspended)
                    executionSuspended();

                synchronized (stateLock) {
                    while (state != RUNNING) {
                        state = SUSPENDED;
                        // notify suspendExecution in case it is
                        // waiting til we shut down.
                        stateLock.notifyAll();

                        // Wait until resumeExecution called.
                        try {
                            stateLock.wait();
                        } catch(InterruptedException ie) { }
                    }
                    if (wasResumed) {
                        wasResumed = false;
                        callResumed = true;
                    }
                }
                if (callResumed)
                    executionResumed();

                // The following seriously stress tests the class
                // for stuff happening between the two sync blocks.
                //
                // try {
                //     Thread.sleep(1);
                // } catch (InterruptedException ie) { }

                synchronized (list) {
                    if (state == SUSPENDING)
                        continue;
                    l = (Link)list.pop();
                    // Preempt entries live at the head, so popping consumes them first.
                    if (preemptCount != 0) preemptCount--;
                    if (l == null) {
                        // No item to run, see if there is an idle runnable
                        // to run instead.
                        if (idleRunnable != null &&
                            (idleRunnableWaitTime = idleRunnable.getWaitTime())
                                < System.currentTimeMillis()) {
                            rable = idleRunnable;
                        } else {
                            // Wait for a runnable.
                            try {
                                if (idleRunnable != null && idleRunnableWaitTime
                                        != Long.MAX_VALUE) {
                                    long t = idleRunnableWaitTime
                                        - System.currentTimeMillis();
                                    if (t > 0) {
                                        list.wait(t);
                                    }
                                } else {
                                    list.wait();
                                }
                            } catch (InterruptedException ie) {
                                // just loop again.
                            }

                            continue; // start loop over again...
                        }
                    } else {
                        rable = l.runnable;
                    }
                }

                try {
                    runnableStart(rable);
                    rable.run();
                } catch (ThreadDeath td) {
                    // Let it kill us...
                    throw td;
                } catch (Throwable t) {
                    // Might be nice to notify someone directly.
                    // But this is more or less what Swing does.
                    t.printStackTrace();
                }

                // Notify something waiting on the runnable just completed,
                // if we just ran one from the queue.
                if (l != null) {
                    l.unlock();
                }

                try {
                    runnableInvoked(rable);
                } catch (ThreadDeath td) {
                    // Let it kill us...
                    throw td;
                } catch (Throwable t) {
                    // Might be nice to notify someone directly.
                    // But this is more or less what Swing does.
                    t.printStackTrace();
                }
            }
        } finally {
            do {
                // Empty the list of pending runnables and unlock them (so
                // invokeAndWait will return).
                // It's up to the runnables to check if the runnable actually
                // ran, if that is important.
                synchronized (list) {
                    l = (Link)list.pop();
                }
                if (l == null) break;
                else          l.unlock();
            } while (true);

            synchronized (this) {
                // Mark the queue as dead; public entry points check this field.
                runnableQueueThread = null;
            }
        }
    }

    /**
     * Returns the thread in which the RunnableQueue is currently running.
     * @return null if the RunnableQueue has not entered his
     *         <code>run()</code> method.
     */
    public HaltingThread getThread() {
        return runnableQueueThread;
    }

    /**
     * Schedules the given Runnable object for a later invocation, and
     * returns.
     * An exception is thrown if the RunnableQueue was not started.
     * @throws IllegalStateException if getThread() is null.
     */
    public void invokeLater(Runnable r) {
        if (runnableQueueThread == null) {
            throw new IllegalStateException
                ("RunnableQueue not started or has exited");
        }
        synchronized (list) {
            list.push(new Link(r));
            // Wake the queue thread if it is blocked in list.wait().
            list.notify();
        }
    }

    /**
     * Waits until the given Runnable's <code>run()</code> has returned.
     * <em>Note: <code>invokeAndWait()</code> must not be called from the
     * current thread (for example from the <code>run()</code> method of the
     * argument).</em>
     * @throws IllegalStateException if getThread() is null or if the
     *         thread returned by getThread() is the current one.
     */
    public void invokeAndWait(Runnable r) throws InterruptedException {
        if (runnableQueueThread == null) {
            throw new IllegalStateException
                ("RunnableQueue not started or has exited");
        }
        if (runnableQueueThread == Thread.currentThread()) {
            throw new IllegalStateException
                ("Cannot be called from the RunnableQueue thread");
        }

        LockableLink l = new LockableLink(r);
        synchronized (list) {
            list.push(l);
            list.notify();
        }
        l.lock(); // todo: the 'other side' of list may retrieve the l before it is locked...
    }

    /**
     * Schedules the given Runnable object for a later invocation, and
     * returns. The given runnable preempts any runnable that is not
     * currently executing (ie the next runnable started will be the
     * one given).  An exception is thrown if the RunnableQueue was
     * not started.
     * @throws IllegalStateException if getThread() is null.
     */
    public void preemptLater(Runnable r) {
        if (runnableQueueThread == null) {
            throw new IllegalStateException
                ("RunnableQueue not started or has exited");
        }
        synchronized (list) {
            // Insert after any earlier preempt entries to keep them ordered.
            list.add(preemptCount, new Link(r));
            preemptCount++;
            list.notify();
        }
    }

    /**
     * Waits until the given Runnable's <code>run()</code> has returned.
     * The given runnable preempts any runnable that is not currently
     * executing (ie the next runnable started will be the one given).
     * <em>Note: <code>preemptAndWait()</code> must not be called from the
     * current thread (for example from the <code>run()</code> method of the
     * argument).</em>
     * @throws IllegalStateException if getThread() is null or if the
     *         thread returned by getThread() is the current one.
     */
    public void preemptAndWait(Runnable r) throws InterruptedException {
        if (runnableQueueThread == null) {
            throw new IllegalStateException
                ("RunnableQueue not started or has exited");
        }
        if (runnableQueueThread == Thread.currentThread()) {
            throw new IllegalStateException
                ("Cannot be called from the RunnableQueue thread");
        }

        LockableLink l = new LockableLink(r);
        synchronized (list) {
            list.add(preemptCount, l);
            preemptCount++;
            list.notify();
        }
        l.lock(); // todo: the 'other side' of list may retrieve the l before it is locked...
    }

    // Returns the current suspension state under stateLock.
    public RunnableQueueState getQueueState() {
        synchronized (stateLock) {
            return state;
        }
    }

    /**
     * Suspends the execution of this queue after the current runnable
     * completes.
     * @param waitTillSuspended if true this method will not return
     *        until the queue has suspended (no runnable in progress
     *        or about to be in progress). If resumeExecution is
     *        called while waiting will simply return (this really
     *        indicates a race condition in your code).  This may
     *        return before an associated RunHandler is notified.
     * @throws IllegalStateException if getThread() is null.
     */
    public void suspendExecution(boolean waitTillSuspended) {
        if (runnableQueueThread == null) {
            throw new IllegalStateException
                ("RunnableQueue not started or has exited");
        }
        // System.err.println("Suspend Called");
        synchronized (stateLock) {
            wasResumed = false;

            if (state == SUSPENDED) {
                // already suspended, notify stateLock so an event is
                // generated.
                stateLock.notifyAll();
                return;
            }

            if (state == RUNNING) {
                state = SUSPENDING;
                synchronized (list) {
                    // Wake up run thread if it is waiting for jobs,
                    // so we go into the suspended case (notifying
                    // run-handler etc...)
                    list.notify();
                }
            }

            if (waitTillSuspended) {
                while (state == SUSPENDING) {
                    try {
                        stateLock.wait();
                    } catch(InterruptedException ie) { }
                }
            }
        }
    }

    /**
     * Resumes the execution of this queue.
     * @throws IllegalStateException if getThread() is null.
     */
    public void resumeExecution() {
        // System.err.println("Resume Called");
        if (runnableQueueThread == null) {
            throw new IllegalStateException
                ("RunnableQueue not started or has exited");
        }

        synchronized (stateLock) {
            wasResumed = true;

            if (state != RUNNING) {
                state = RUNNING;
                stateLock.notifyAll(); // wake it up.
            }
        }
    }

    /**
     * Returns iterator lock to use to work with the iterator
     * returned by iterator().
     */
    public Object getIteratorLock() {
        return list;
    }

    /**
     * Returns an iterator over the runnables.
     * The caller must hold {@link #getIteratorLock()} while iterating;
     * the iterator itself performs no synchronization.
     */
    public Iterator iterator() {
        return new Iterator() {
                Link head = (Link)list.getHead();
                Link link;
                public boolean hasNext() {
                    if (head == null) {
                        return false;
                    }
                    if (link == null) {
                        return true;
                    }
                    // The list is circular: back at head means done.
                    return link != head;
                }
                public Object next() {
                    if (head == null || head == link) {
                        throw new NoSuchElementException();
                    }
                    if (link == null) {
                        link = (Link)head.getNext();
                        return head.runnable;
                    }
                    Object result = link.runnable;
                    link = (Link)link.getNext();
                    return result;
                }
                public void remove() {
                    throw new UnsupportedOperationException();
                }
            };
    }

    /**
     * Sets the RunHandler for this queue.
     */
    public synchronized void setRunHandler(RunHandler rh) {
        runHandler = rh;
    }

    /**
     * Returns the RunHandler or null.
     */
    public synchronized RunHandler getRunHandler() {
        return runHandler;
    }

    /**
     * Sets a Runnable to be run whenever the queue is empty.
     */
    public void setIdleRunnable(IdleRunnable r) {
        synchronized (list) {
            idleRunnable = r;
            // Reset so the new idle runnable is considered immediately.
            idleRunnableWaitTime = 0;
            list.notify();
        }
    }

    /**
     * Called when execution is being suspended.
     * Currently just notifies runHandler
     */
    protected synchronized void executionSuspended() {
        // System.err.println("Suspend Sent");
        if (runHandler != null) {
            runHandler.executionSuspended(this);
        }
    }

    /**
     * Called when execution is being resumed.
     * Currently just notifies runHandler
     */
    protected synchronized void executionResumed() {
        // System.err.println("Resumed Sent");
        if (runHandler != null) {
            runHandler.executionResumed(this);
        }
    }

    /**
     * Called just prior to executing a Runnable.
     * Currently just notifies runHandler
     * @param rable The runnable that is about to start
     */
    protected synchronized void runnableStart(Runnable rable ) {
        if (runHandler != null) {
            runHandler.runnableStart(this, rable);
        }
    }

    /**
     * Called when a Runnable completes.
     * Currently just notifies runHandler
     * @param rable The runnable that just completed.
     */
    protected synchronized void runnableInvoked(Runnable rable ) {
        if (runHandler != null) {
            runHandler.runnableInvoked(this, rable);
        }
    }

    /**
     * A {@link Runnable} that can also inform the caller how long it should
     * be until it is run again.
     */
    public interface IdleRunnable extends Runnable {

        /**
         * Returns the system time that can be safely waited until before this
         * {@link Runnable} is run again.
         *
         * @return time to wait until, <code>0</code> if no waiting can
         *         be done, or {@link Long#MAX_VALUE} if the {@link Runnable}
         *         should not be run again at this time
         */
        long getWaitTime();
    }

    /**
     * This interface must be implemented by an object which wants to
     * be notified of run events.
     */
    public interface RunHandler {

        /**
         * Called just prior to invoking the runnable
         */
        void runnableStart(RunnableQueue rq, Runnable r);

        /**
         * Called when the given Runnable has just been invoked and
         * has returned.
         */
        void runnableInvoked(RunnableQueue rq, Runnable r);

        /**
         * Called when the execution of the queue has been suspended.
         */
        void executionSuspended(RunnableQueue rq);

        /**
         * Called when the execution of the queue has been resumed.
         */
        void executionResumed(RunnableQueue rq);
    }

    /**
     * This is an adapter class that implements the RunHandler interface.
     * It simply does nothing in response to the calls.
     */
    public static class RunHandlerAdapter implements RunHandler {

        /**
         * Called just prior to invoking the runnable
         */
        public void runnableStart(RunnableQueue rq, Runnable r) { }

        /**
         * Called when the given Runnable has just been invoked and
         * has returned.
         */
        public void runnableInvoked(RunnableQueue rq, Runnable r) { }

        /**
         * Called when the execution of the queue has been suspended.
         */
        public void executionSuspended(RunnableQueue rq) { }

        /**
         * Called when the execution of the queue has been resumed.
         */
        public void executionResumed(RunnableQueue rq) { }
    }

    /**
     * To store a Runnable.
     */
    protected static class Link extends DoublyLinkedList.Node {

        /**
         * The Runnable.
         */
        private final Runnable runnable;

        /**
         * Creates a new link.
         */
        public Link(Runnable r) {
            runnable = r;
        }

        /**
         * unlock link and notify locker.
         * Basic implementation does nothing.
         */
        public void unlock() { return; }
    }

    /**
     * To store a Runnable with an object waiting for him to be executed.
     */
    protected static class LockableLink extends Link {

        /**
         * Whether this link is actually locked.
         */
        private volatile boolean locked;

        /**
         * Creates a new link.
         */
        public LockableLink(Runnable r) {
            super(r);
        }

        /**
         * Whether the link is actually locked.
         */
        public boolean isLocked() {
            return locked;
        }

        /**
         * Locks this link.
         * Pairs with {@link #unlock()}: lock() waits until the queue thread has
         * finished running the runnable and called unlock().
         */
        public synchronized void lock() throws InterruptedException {
            locked = true;
            // Wake a thread blocked in unlock() waiting for the lock flag...
            notify();
            // ...then wait until unlock() signals completion.
            wait();
        }

        /**
         * unlocks this link.
         */
        public synchronized void unlock() {
            while (!locked) {
                try {
                    wait(); // Wait until lock is called...
                } catch (InterruptedException ie) {
                    // Loop again...
                }
            }
            locked = false;
            // Wake the locking thread...
            notify();
        }
    }
}
| |
/*
* Autopsy Forensic Browser
*
* Copyright 2013-2014 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.datamodel;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Observable;
import java.util.Observer;
import java.util.logging.Level;
import javax.swing.JOptionPane;
import javax.swing.SwingUtilities;
import org.openide.nodes.AbstractNode;
import org.openide.nodes.ChildFactory;
import org.openide.nodes.Children;
import org.openide.nodes.Node;
import org.openide.nodes.Sheet;
import org.openide.util.NbBundle;
import org.openide.util.lookup.Lookups;
import org.openide.windows.WindowManager;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.ContentVisitor;
import org.sleuthkit.datamodel.Directory;
import org.sleuthkit.datamodel.File;
import org.sleuthkit.datamodel.FsContent;
import org.sleuthkit.datamodel.LayoutFile;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
/**
 * Nodes for the deleted-content area of the results tree: the subtree root,
 * one node per deleted-content filter, and the factories that populate each
 * filter node with matching deleted files from the case database.
 */
public class DeletedContent implements AutopsyVisitableItem {

    // Case database queried for deleted files; set once at construction.
    private SleuthkitCase skCase;

    /**
     * The filters available for the deleted-content subtree. Each value
     * becomes one child node; the matching WHERE clause is built in
     * DeletedContentChildren.makeQuery().
     */
    public enum DeletedContentFilter implements AutopsyVisitableItem {

        FS_DELETED_FILTER(0,
                "FS_DELETED_FILTER", //NON-NLS
                NbBundle.getMessage(DeletedContent.class, "DeletedContent.fsDelFilter.text")),
        ALL_DELETED_FILTER(1,
                "ALL_DELETED_FILTER", //NON-NLS
                NbBundle.getMessage(DeletedContent.class, "DeletedContent.allDelFilter.text"));

        private int id;             // stable numeric id of this filter
        private String name;        // programmatic (non-localized) name
        private String displayName; // localized name shown in the tree

        private DeletedContentFilter(int id, String name, String displayName) {
            this.id = id;
            this.name = name;
            this.displayName = displayName;
        }

        // Programmatic name; used as the node name.
        public String getName() {
            return this.name;
        }

        public int getId() {
            return this.id;
        }

        // Localized name shown to the user.
        public String getDisplayName() {
            return this.displayName;
        }

        @Override
        public <T> T accept(AutopsyItemVisitor<T> v) {
            return v.visit(this);
        }
    }

    /**
     * @param skCase the open case database to query for deleted files.
     */
    public DeletedContent(SleuthkitCase skCase) {
        this.skCase = skCase;
    }

    @Override
    public <T> T accept(AutopsyItemVisitor<T> v) {
        return v.visit(this);
    }

    public SleuthkitCase getSleuthkitCase() {
        return this.skCase;
    }

    /**
     * Root node of the deleted-content subtree; its children are one node
     * per DeletedContentFilter value.
     */
    public static class DeletedContentsNode extends DisplayableItemNode {

        // Localized name of the root node.
        private static final String NAME = NbBundle.getMessage(DeletedContent.class,
                "DeletedContent.deletedContentsNode.name");

        private SleuthkitCase skCase;

        DeletedContentsNode(SleuthkitCase skCase) {
            super(Children.create(new DeletedContentsChildren(skCase), true), Lookups.singleton(NAME));
            super.setName(NAME);
            super.setDisplayName(NAME);
            this.skCase = skCase;
            this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/file-icon-deleted.png"); //NON-NLS
        }

        @Override
        public boolean isLeafTypeNode() {
            return false;
        }

        @Override
        public <T> T accept(DisplayableItemNodeVisitor<T> v) {
            return v.visit(this);
        }

        @Override
        protected Sheet createSheet() {
            Sheet s = super.createSheet();
            Sheet.Set ss = s.get(Sheet.PROPERTIES);
            if (ss == null) {
                ss = Sheet.createPropertiesSet();
                s.put(ss);
            }

            ss.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "DeletedContent.createSheet.name.name"),
                    NbBundle.getMessage(this.getClass(), "DeletedContent.createSheet.name.displayName"),
                    NbBundle.getMessage(this.getClass(), "DeletedContent.createSheet.name.desc"),
                    NAME));
            return s;
        }
    }

    /**
     * Factory that creates one DeletedContentNode per filter value and owns
     * the Observable used to refresh those nodes on case/ingest events.
     */
    public static class DeletedContentsChildren extends ChildFactory<DeletedContent.DeletedContentFilter> {

        private SleuthkitCase skCase;
        private Observable notifier; // fires on case/ingest events; child nodes observe it

        // true if we have already told user that not all files will be shown
        // NOTE(review): static flag shared across instances and written from
        // both the event-dispatch path (createKeys) and the property-change
        // path without synchronization or volatile — confirm thread safety.
        private static boolean maxFilesDialogShown = false;

        public DeletedContentsChildren(SleuthkitCase skCase) {
            this.skCase = skCase;
            this.notifier = new DeletedContentsChildrenObservable();
        }

        /**
         * Listens for case and ingest invest. Updates observers when events are
         * fired. Other nodes are listening to this for changes.
         */
        private final class DeletedContentsChildrenObservable extends Observable {

            DeletedContentsChildrenObservable() {
                // Register for module/job ingest events and case events.
                IngestManager.getInstance().addIngestJobEventListener(pcl);
                IngestManager.getInstance().addIngestModuleEventListener(pcl);
                Case.addPropertyChangeListener(pcl);
            }

            // Detach from all event sources and drop observers; called when
            // the case closes so no stale case handle is retained.
            private void removeListeners() {
                deleteObservers();
                IngestManager.getInstance().removeIngestJobEventListener(pcl);
                IngestManager.getInstance().removeIngestModuleEventListener(pcl);
                Case.removePropertyChangeListener(pcl);
            }

            private final PropertyChangeListener pcl = new PropertyChangeListener() {
                @Override
                public void propertyChange(PropertyChangeEvent evt) {
                    String eventType = evt.getPropertyName();

                    // new file was added
                    if (eventType.equals(IngestManager.IngestModuleEvent.CONTENT_CHANGED.toString())) {
                        // @@@ COULD CHECK If the new file is deleted before notifying...
                        update();
                    } else if (eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString())
                            || eventType.equals(IngestManager.IngestJobEvent.CANCELLED.toString())) {
                        update();
                    } else if (eventType.equals(Case.Events.DATA_SOURCE_ADDED.toString())) {
                        update();
                    } else if (eventType.equals(Case.Events.CURRENT_CASE.toString())) {
                        // case was closed. Remove listeners so that we don't get called with a stale case handle
                        if (evt.getNewValue() == null) {
                            removeListeners();
                        }
                        // Re-arm the "too many files" dialog for the next case.
                        maxFilesDialogShown = false;
                    }
                }
            };

            // Mark this Observable changed and push the update to observers.
            private void update() {
                setChanged();
                notifyObservers();
            }
        }

        @Override
        protected boolean createKeys(List<DeletedContent.DeletedContentFilter> list) {
            // One key (and therefore one node) per filter value.
            list.addAll(Arrays.asList(DeletedContent.DeletedContentFilter.values()));
            return true;
        }

        @Override
        protected Node createNodeForKey(DeletedContent.DeletedContentFilter key) {
            return new DeletedContentNode(skCase, key, notifier);
        }

        /**
         * Node for one deleted-content filter; its display name carries a
         * live count of the matching files.
         */
        public class DeletedContentNode extends DisplayableItemNode {

            private DeletedContent.DeletedContentFilter filter;
            private final Logger logger = Logger.getLogger(DeletedContentNode.class.getName());

            // Use version that has observer for updates
            @Deprecated
            DeletedContentNode(SleuthkitCase skCase, DeletedContent.DeletedContentFilter filter) {
                super(Children.create(new DeletedContentChildren(filter, skCase, null), true), Lookups.singleton(filter.getDisplayName()));
                this.filter = filter;
                init();
            }

            DeletedContentNode(SleuthkitCase skCase, DeletedContent.DeletedContentFilter filter, Observable o) {
                super(Children.create(new DeletedContentChildren(filter, skCase, o), true), Lookups.singleton(filter.getDisplayName()));
                this.filter = filter;
                init();
                // Refresh the displayed count whenever the notifier fires.
                o.addObserver(new DeletedContentNodeObserver());
            }

            private void init() {
                super.setName(filter.getName());

                String tooltip = filter.getDisplayName();
                this.setShortDescription(tooltip);
                this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/file-icon-deleted.png"); //NON-NLS
                updateDisplayName();
            }

            // update the display name when new events are fired
            private class DeletedContentNodeObserver implements Observer {

                @Override
                public void update(Observable o, Object arg) {
                    updateDisplayName();
                }
            }

            private void updateDisplayName() {
                //get count of children without preloading all children nodes
                final long count = DeletedContentChildren.calculateItems(skCase, filter);
                //final long count = getChildren().getNodesCount(true);
                super.setDisplayName(filter.getDisplayName() + " (" + count + ")");
            }

            @Override
            public <T> T accept(DisplayableItemNodeVisitor<T> v) {
                return v.visit(this);
            }

            @Override
            protected Sheet createSheet() {
                Sheet s = super.createSheet();
                Sheet.Set ss = s.get(Sheet.PROPERTIES);
                if (ss == null) {
                    ss = Sheet.createPropertiesSet();
                    s.put(ss);
                }

                ss.put(new NodeProperty<>(
                        NbBundle.getMessage(this.getClass(), "DeletedContent.createSheet.filterType.name"),
                        NbBundle.getMessage(this.getClass(), "DeletedContent.createSheet.filterType.displayName"),
                        NbBundle.getMessage(this.getClass(), "DeletedContent.createSheet.filterType.desc"),
                        filter.getDisplayName()));
                return s;
            }

            @Override
            public boolean isLeafTypeNode() {
                return true;
            }
        }

        /**
         * Factory producing the file nodes under one filter node. Results
         * are capped: the query is limited to MAX_OBJECTS rows and at most
         * MAX_OBJECTS - 1 are shown.
         */
        static class DeletedContentChildren extends ChildFactory.Detachable<AbstractFile> {

            private SleuthkitCase skCase;
            private DeletedContent.DeletedContentFilter filter;
            private static final Logger logger = Logger.getLogger(DeletedContentChildren.class.getName());

            // Query LIMIT; one more than the number of rows actually shown,
            // so that receiving exactly MAX_OBJECTS rows signals truncation.
            private static final int MAX_OBJECTS = 10001;

            private final Observable notifier; // may be null (deprecated ctor path)

            DeletedContentChildren(DeletedContent.DeletedContentFilter filter, SleuthkitCase skCase, Observable o) {
                this.skCase = skCase;
                this.filter = filter;
                this.notifier = o;
            }

            private final Observer observer = new DeletedContentChildrenObserver();

            // Cause refresh of children if there are changes
            private class DeletedContentChildrenObserver implements Observer {

                @Override
                public void update(Observable o, Object arg) {
                    refresh(true);
                }
            }

            @Override
            protected void addNotify() {
                // Start watching for updates only while this factory is live.
                if (notifier != null) {
                    notifier.addObserver(observer);
                }
            }

            @Override
            protected void removeNotify() {
                if (notifier != null) {
                    notifier.deleteObserver(observer);
                }
            }

            @Override
            protected boolean createKeys(List<AbstractFile> list) {
                List<AbstractFile> queryList = runFsQuery();
                // Hitting the LIMIT exactly means the result set was
                // truncated: drop the sentinel row and warn the user once.
                if (queryList.size() == MAX_OBJECTS) {
                    queryList.remove(queryList.size() - 1);

                    // only show the dialog once - not each time we refresh
                    if (maxFilesDialogShown == false) {
                        maxFilesDialogShown = true;
                        SwingUtilities.invokeLater(new Runnable() {
                            @Override
                            public void run() {
                                // NOTE(review): this.getClass() here is the anonymous
                                // Runnable, not DeletedContent; the bundle lookup still
                                // resolves because the anonymous class is in the same
                                // package, but an explicit class would be clearer —
                                // confirm before changing.
                                JOptionPane.showMessageDialog(WindowManager.getDefault().getMainWindow(), NbBundle.getMessage(this.getClass(),
                                        "DeletedContent.createKeys.maxObjects.msg",
                                        MAX_OBJECTS - 1));
                            }
                        });
                    }
                }
                list.addAll(queryList);
                return true;
            }

            // Build the WHERE clause (plus LIMIT) for the given filter.
            static private String makeQuery(DeletedContent.DeletedContentFilter filter) {
                String query = "";
                switch (filter) {
                    case FS_DELETED_FILTER:
                        // Unallocated, non-orphan file-system files.
                        query = "dir_flags = " + TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC.getValue() //NON-NLS
                                + " AND meta_flags != " + TskData.TSK_FS_META_FLAG_ENUM.ORPHAN.getValue() //NON-NLS
                                + " AND type = " + TskData.TSK_DB_FILES_TYPE_ENUM.FS.getFileType(); //NON-NLS
                        break;
                    case ALL_DELETED_FILTER:
                        // Unallocated or orphan file-system files, plus carved files.
                        query = " ( "
                                + "( "
                                + "(dir_flags = " + TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC.getValue() //NON-NLS
                                + " OR " //NON-NLS
                                + "meta_flags = " + TskData.TSK_FS_META_FLAG_ENUM.ORPHAN.getValue() //NON-NLS
                                + ")"
                                + " AND type = " + TskData.TSK_DB_FILES_TYPE_ENUM.FS.getFileType() //NON-NLS
                                + " )"
                                + " OR type = " + TskData.TSK_DB_FILES_TYPE_ENUM.CARVED.getFileType() //NON-NLS
                                + " )";
                        //+ " AND type != " + TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS.getFileType()
                        //+ " AND type != " + TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS.getFileType()
                        //+ " AND type != " + TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS.getFileType()
                        //+ " AND type != " + TskData.TSK_DB_FILES_TYPE_ENUM.DERIVED.getFileType()
                        //+ " AND type != " + TskData.TSK_DB_FILES_TYPE_ENUM.LOCAL.getFileType()
                        //+ " AND type != " + TskData.TSK_DB_FILES_TYPE_ENUM.VIRTUAL_DIR.getFileType();
                        break;
                    default:
                        logger.log(Level.SEVERE, "Unsupported filter type to get deleted content: {0}", filter); //NON-NLS
                }

                query += " LIMIT " + MAX_OBJECTS; //NON-NLS
                return query;
            }

            // Run the filter's query; returns an empty list on database error.
            private List<AbstractFile> runFsQuery() {
                List<AbstractFile> ret = new ArrayList<>();

                String query = makeQuery(filter);
                try {
                    ret = skCase.findAllFilesWhere(query);
                } catch (TskCoreException e) {
                    logger.log(Level.SEVERE, "Error getting files for the deleted content view using: " + query, e); //NON-NLS
                }

                return ret;
            }

            /**
             * Get children count without actually loading all nodes.
             *
             * @param sleuthkitCase the case database to query.
             * @param filter        the deleted-content filter to count for.
             * @return number of matching files (capped by the query's
             *         LIMIT), or 0 if the query fails.
             */
            static long calculateItems(SleuthkitCase sleuthkitCase, DeletedContent.DeletedContentFilter filter) {
                try {
                    return sleuthkitCase.countFilesWhere(makeQuery(filter));
                } catch (TskCoreException ex) {
                    logger.log(Level.SEVERE, "Error getting deleted files search view count", ex); //NON-NLS
                    return 0;
                }
            }

            @Override
            protected Node createNodeForKey(AbstractFile key) {
                return key.accept(new ContentVisitor.Default<AbstractNode>() {

                    // NOTE(review): the next two overloads carry no @Override —
                    // if ContentVisitor declares no matching visit methods they
                    // are never dispatched; confirm against the ContentVisitor
                    // API before relying on them.
                    public FileNode visit(AbstractFile f) {
                        return new FileNode(f, false);
                    }

                    public FileNode visit(FsContent f) {
                        return new FileNode(f, false);
                    }

                    @Override
                    public FileNode visit(LayoutFile f) {
                        return new FileNode(f, false);
                    }

                    @Override
                    public FileNode visit(File f) {
                        return new FileNode(f, false);
                    }

                    @Override
                    public FileNode visit(Directory f) {
                        return new FileNode(f, false);
                    }

                    @Override
                    protected AbstractNode defaultVisit(Content di) {
                        throw new UnsupportedOperationException(NbBundle.getMessage(this.getClass(),
                                "DeletedContent.createNodeForKey.typeNotSupported.msg",
                                di.toString()));
                    }
                });
            }
        }
    }
}
| |
/*
* Copyright (C) 2012 FoxLabs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.foxlabs.util.reflect;
import java.lang.annotation.Annotation;
import java.lang.reflect.Array;
import java.lang.reflect.Type;
import java.lang.reflect.GenericArrayType;
import java.lang.reflect.ParameterizedType;
import java.util.*;
import java.util.concurrent.*;
/**
* Helper class used for working with java types.
*
* @author Fox Mulder
*/
@SuppressWarnings("unchecked")
public abstract class Types {
/**
* Casts an object.
*
* @param <T> The object type.
* @param obj The object to be cast.
* @return The object after casting.
*/
public static <T> T cast(Object obj) {
return (T) obj;
}
/**
* Returns array of types for the specified array of objects. Note that
* type for <code>null</code> elements is <code>java.lang.Object</code>
* type.
*
* @param objs Array of objects.
* @return Array of types for the specified array of objects.
*/
public static Class<?>[] typesOf(Object... objs) {
Class<?>[] types = new Class<?>[objs.length];
for (int i = 0; i < objs.length; i++) {
types[i] = objs[i] == null
? Object.class
: objs[i] instanceof Annotation
? ((Annotation) objs[i]).annotationType()
: objs[i].getClass();
}
return types;
}
/**
* Determines if the specified type is a boolean type (either primitive or
* object).
*
* @param type The type to test.
* @return <code>true</code> if the specified type is a boolean type;
* <code>false</code> otherwise.
*/
public static boolean isBoolean(Class<?> type) {
return type == Boolean.class || type == Boolean.TYPE;
}
/**
* Determines if the specified type is a byte type (either primitive or
* object).
*
* @param type The type to test.
* @return <code>true</code> if the specified type is a byte type;
* <code>false</code> otherwise.
*/
public static boolean isByte(Class<?> type) {
return type == Byte.class || type == Byte.TYPE;
}
/**
* Determines if the specified type is a short type (either primitive or
* object).
*
* @param type The type to test.
* @return <code>true</code> if the specified type is a short type;
* <code>false</code> otherwise.
*/
public static boolean isShort(Class<?> type) {
return type == Short.class || type == Short.TYPE;
}
/**
* Determines if the specified type is an integer type (either primitive or
* object).
*
* @param type The type to test.
* @return <code>true</code> if the specified type is an integer type;
* <code>false</code> otherwise.
*/
public static boolean isInteger(Class<?> type) {
return type == Integer.class || type == Integer.TYPE;
}
/**
* Determines if the specified type is a long type (either primitive or
* object).
*
* @param type The type to test.
* @return <code>true</code> if the specified type is a long type;
* <code>false</code> otherwise.
*/
public static boolean isLong(Class<?> type) {
return type == Long.class || type == Long.TYPE;
}
/**
* Determines if the specified type is a float type (either primitive or
* object).
*
* @param type The type to test.
* @return <code>true</code> if the specified type is a float type;
* <code>false</code> otherwise.
*/
public static boolean isFloat(Class<?> type) {
return type == Float.class || type == Float.TYPE;
}
/**
* Determines if the specified type is a double type (either primitive or
* object).
*
* @param type The type to test.
* @return <code>true</code> if the specified type is a double type;
* <code>false</code> otherwise.
*/
public static boolean isDouble(Class<?> type) {
return type == Double.class || type == Double.TYPE;
}
/**
* Determines if the specified type is an object type (not a primitive,
* array, enum or annotation).
*
* @param type The type to test.
* @return <code>true</code> if the specified type is an object type;
* <code>false</code> otherwise.
*/
public static boolean isObject(Class<?> type) {
return !(type == null || type.isPrimitive() || type.isArray() || type.isEnum() || type.isAnnotation());
}
/**
* Returns corresponding wrapper type for the specified primitive type.
* This method has no effect if the specified type is not primitive type.
*
* @param <T> The wrapper type.
* @param type Primitive type.
* @return Wrapper type for the specified primitive type.
*/
public static <T> Class<T> wrapperTypeOf(Class<T> type) {
if (type != null && type.isPrimitive()) {
if (type == Boolean.TYPE) {
return (Class<T>) Boolean.class;
} else if (type == Character.TYPE) {
return (Class<T>) Character.class;
} else if (type == Byte.TYPE) {
return (Class<T>) Byte.class;
} else if (type == Short.TYPE) {
return (Class<T>) Short.class;
} else if (type == Integer.TYPE) {
return (Class<T>) Integer.class;
} else if (type == Long.TYPE) {
return (Class<T>) Long.class;
} else if (type == Float.TYPE) {
return (Class<T>) Float.class;
} else if (type == Double.TYPE) {
return (Class<T>) Double.class;
}
}
return type;
}
/**
* Returns corresponding primitive type for the specified wrapper type.
* This method has no effect if the specified type is not wrapper type.
*
* @param type Wrapper type.
* @return Primitive type for the specified wrapper type.
*/
public static Class<?> primitiveTypeOf(Class<?> type) {
if (type == Boolean.class) {
return Boolean.TYPE;
} else if (type == Character.class) {
return Character.TYPE;
} else if (type == Byte.class) {
return Byte.TYPE;
} else if (type == Short.class) {
return Short.TYPE;
} else if (type == Integer.class) {
return Integer.TYPE;
} else if (type == Long.class) {
return Long.TYPE;
} else if (type == Float.class) {
return Float.TYPE;
} else if (type == Double.class) {
return Double.TYPE;
} else {
return type;
}
}
/**
* Returns default value for the specified type. Note that this method can
* return default value for primitive types only. For other types it
* returns <code>null</code>.
*
* @param <T> The value type.
* @param type The type for which default value should be returned.
* @return Default value for the specified type.
*/
public static <T> T defaultValueOf(Class<T> type) {
if (type != null && type.isPrimitive()) {
if (type == Boolean.TYPE) {
return (T) Boolean.FALSE;
} else if (type == Character.TYPE) {
return (T) Character.valueOf((char) 0);
} else if (type == Byte.TYPE) {
return (T) Byte.valueOf((byte) 0);
} else if (type == Short.TYPE) {
return (T) Short.valueOf((short) 0);
} else if (type == Integer.TYPE) {
return (T) Integer.valueOf(0);
} else if (type == Long.TYPE) {
return (T) Long.valueOf(0L);
} else if (type == Float.TYPE) {
return (T) Float.valueOf(0f);
} else if (type == Double.TYPE) {
return (T) Double.valueOf(0d);
}
}
return null;
}
/**
* Returns super type for the specified type. Actually, this method just
* checks if the specified type is <code>null</code> and returns
* <code>java.lang.Object</code> type if so.
*
* @param type The type to test.
* @return Super type for the specified type.
*/
public static Class<?> superTypeOf(Class<?> type) {
return type == null ? Object.class : type;
}
/**
* Returns super type for the specified two types.
*
* @param type1 First type.
* @param type2 Second type.
* @return Super type for the specified two types.
*/
public static Class<?> superTypeOf(Class<?> type1, Class<?> type2) {
if (type1 == type2) {
return type1;
} else if (type1 == null) {
return type2;
} else if (type2 == null) {
return type1;
} else if (type1.isAssignableFrom(type2)) {
return type1;
} else if (type2.isAssignableFrom(type1)) {
return type2;
} else {
return Object.class;
}
}
/**
* Returns super type for the specified array of types.
*
* @param types Array of types.
* @return Super type for the specified array of types.
*/
public static Class<?> superTypeOf(Class<?>... types) {
int count = types.length;
if (count == 0) {
return Object.class;
}
Class<?> type = types[0];
if (count == 1) {
return superTypeOf(type);
}
for (int i = 1; i < count; i++) {
type = superTypeOf(type, types[i]);
if (type == Object.class) {
return Object.class;
}
}
return type;
}
/**
* Returns raw type for the specified generic type.
*
* @param <T> The raw type.
* @param type Generic type.
* @return Raw type for the specified generic type.
*/
public static <T> Class<T> rawTypeOf(Type type) {
if (type instanceof Class) {
return (Class<T>) type;
} else if (type instanceof ParameterizedType) {
return (Class<T>) ((ParameterizedType) type).getRawType();
} else if (type instanceof GenericArrayType) {
return (Class<T>) arrayTypeOf(rawTypeOf(((GenericArrayType) type).getGenericComponentType()));
} else {
return (Class<T>) Object.class;
}
}
/**
* Returns array type for the specified type of array elements.
*
* @param <T> The array type.
* @param <E> The array element type.
* @param elementType Type of array elements.
* @return Array type for the specified type of array elements.
*/
public static <T, E> Class<T> arrayTypeOf(Class<E> elementType) {
if (elementType == null) {
return (Class<T>) Object[].class;
} else {
return (Class<T>) Array.newInstance(elementType, 0).getClass();
}
}
/**
* Returns raw element (value) type for the specified generic array,
* collection or map type. Note that this method returns <code>null</code>
* if the specified type is not array, collection or map type.
*
* @param <T> The element type.
* @param type Generic array, collection or map type.
* @return Raw element (value) type for the specified type.
*/
public static <T> Class<T> elementTypeOf(Type type) {
Class<?> rawtype = rawTypeOf(type);
if (rawtype.isArray()) {
return (Class<T>) rawtype.getComponentType();
} else if (Collection.class.isAssignableFrom(rawtype)) {
if (type instanceof Class) {
return (Class<T>) parameterTypeOf((Class<?>) type, Collection.class, 0);
} else if (type instanceof ParameterizedType) {
Type[] argtypes = ((ParameterizedType) type).getActualTypeArguments();
return (Class<T>) (argtypes.length == 0 ? Object.class : rawTypeOf(argtypes[0]));
}
} else if (Map.class.isAssignableFrom(rawtype)) {
if (type instanceof Class) {
return (Class<T>) parameterTypeOf((Class<?>) type, Map.class, 1);
} else if (type instanceof ParameterizedType) {
Type[] argtypes = ((ParameterizedType) type).getActualTypeArguments();
return (Class<T>) (argtypes.length < 2 ? Object.class : rawTypeOf(argtypes[1]));
}
}
return null;
}
/**
* Returns raw key type for the specified generic map type. Note that this
* method returns <code>null</code> if the specified type is not map type.
*
* @param <T> The key type.
* @param type Generic map type.
* @return Raw key type for the specified type.
*/
public static <T> Class<T> keyTypeOf(Type type) {
if (Map.class.isAssignableFrom(rawTypeOf(type))) {
if (type instanceof Class) {
return (Class<T>) parameterTypeOf((Class<?>) type, Map.class, 0);
} else if (type instanceof ParameterizedType) {
Type[] argtypes = ((ParameterizedType) type).getActualTypeArguments();
return (Class<T>) (argtypes.length < 1 ? Object.class : rawTypeOf(argtypes[0]));
}
}
return null;
}
/**
* Returns the type of parameter with the specified index for the specified
* type.
*
* @param type Subtype.
* @param base Base super type.
* @param index Parameter index.
* @return The type of parameter with the specified index for the specified
* type.
*/
public static Class<?> parameterTypeOf(Class<?> type, Class<?> base, int index) {
List<Type> supertypes = new LinkedList<Type>();
if (type.getGenericSuperclass() != null) {
supertypes.add(type.getGenericSuperclass());
}
for (Type intf : type.getGenericInterfaces()) {
supertypes.add(intf);
}
for (Type supertype : supertypes) {
Class<?> rawtype = rawTypeOf(supertype);
if (base.isAssignableFrom(rawtype)) {
Class<?> paramtype = parameterTypeOf(rawtype, base, index);
if (paramtype != Object.class) {
return paramtype;
} else if (supertype instanceof ParameterizedType) {
Type[] argtypes = ((ParameterizedType) supertype).getActualTypeArguments();
if (index < argtypes.length && argtypes[index] instanceof Class) {
return (Class<?>) argtypes[index];
}
}
}
}
return Object.class;
}
/**
* Creates a new object instance of the specified type.
*
* @param <T> The object instance type.
* @param type The type of object to be created.
* @return A new object instance of the specified type.
* @throws RuntimeException if object instantiation fails.
*/
public static <T> T newInstance(Class<T> type) {
try {
return type.newInstance();
} catch (Exception e) {
throw new RuntimeException(e);
}
}
/**
* Creates a new array for the specified type of array elements.
*
* @param <T> The array type.
* @param <E> The array element type.
* @param elementType The type of array elements.
* @param length Length of the new array.
* @return A new array for the specified type of array elements.
*/
public static <T, E> T newArray(Class<E> elementType, int length) {
return (T) Array.newInstance(elementType, length);
}
/**
* Creates a new collection of the specified type.
*
* @param <T> The collection type.
* @param <E> The collection element type.
* @param type The type of collection to be created.
* @param size Initial size of the new collection.
* @return A new collection of the specified type.
*/
public static <T extends Collection<E>, E> T newCollection(Class<T> type, int size) {
if (Collection.class.equals(type)) {
return type.cast(new ArrayList<E>(size));
} else if (List.class.isAssignableFrom(type)) {
return type.cast(newList(cast(type.asSubclass(List.class)), size));
} else if (Set.class.isAssignableFrom(type)) {
return type.cast(newSet(cast(type.asSubclass(Set.class)), size));
} else if (Queue.class.isAssignableFrom(type)) {
return type.cast(newQueue(cast(type.asSubclass(Queue.class)), size));
} else {
return newInstance(type);
}
}
/**
* Creates a new list of the specified type.
*
* @param <T> The list type.
* @param <E> The list element type.
* @param type The type of list to be created.
* @param size Initial size of the new list.
* @return A new list of the specified type.
*/
public static <T extends List<E>, E> T newList(Class<T> type, int size) {
if (List.class.equals(type) || ArrayList.class.equals(type)) {
return type.cast(new ArrayList<E>(size));
} else if (LinkedList.class.equals(type)) {
return type.cast(new LinkedList<E>());
} else {
return newInstance(type);
}
}
/**
* Creates a new set of the specified type.
*
* @param <T> The set type.
* @param <E> The set element type.
* @param type The type of set to be created.
* @param size Initial size of the new set.
* @return A new set of the specified type.
*/
public static <T extends Set<E>, E> T newSet(Class<T> type, int size) {
if (Set.class.equals(type) || LinkedHashSet.class.equals(type)) {
return type.cast(new LinkedHashSet<E>(size));
} else if (HashSet.class.equals(type)) {
return type.cast(new HashSet<E>(size));
} else if (SortedSet.class.equals(type) || NavigableSet.class.equals(type) || TreeSet.class.equals(type)) {
return type.cast(new TreeSet<E>());
} else if (ConcurrentSkipListSet.class.equals(type)) {
return type.cast(new ConcurrentSkipListSet<E>());
} else {
return newInstance(type);
}
}
/**
* Creates a new queue of the specified type.
*
* @param <T> The queue type.
* @param <E> The queue element type.
* @param type The type of queue to be created.
* @param size Initial size of the new queue.
* @return A new queue of the specified type.
*/
public static <T extends Queue<E>, E> T newQueue(Class<T> type, int size) {
if (Queue.class.equals(type) || PriorityQueue.class.equals(type)) {
return type.cast(new PriorityQueue<E>(size));
} else if (Deque.class.equals(type) || LinkedList.class.equals(type)) {
return type.cast(new LinkedList<E>());
} else if (ArrayDeque.class.equals(type)) {
return type.cast(new ArrayDeque<E>(size));
} else if (ConcurrentLinkedQueue.class.equals(type)) {
return type.cast(new ConcurrentLinkedQueue<E>());
} else if (BlockingQueue.class.equals(type) || LinkedBlockingQueue.class.equals(type)) {
return type.cast(new LinkedBlockingQueue<E>(size));
} else if (ArrayBlockingQueue.class.equals(type)) {
return type.cast(new ArrayBlockingQueue<E>(size));
} else if (PriorityBlockingQueue.class.equals(type)) {
return type.cast(new PriorityBlockingQueue<E>(size));
} else if (SynchronousQueue.class.equals(type)) {
return type.cast(new SynchronousQueue<E>());
} else if (BlockingDeque.class.equals(type) || LinkedBlockingDeque.class.equals(type)) {
return type.cast(new LinkedBlockingDeque<E>(size));
} else {
return newInstance(type);
}
}
/**
* Creates a new map of the specified type.
*
* @param <T> The map type.
* @param <K> The map key type.
* @param <V> The map value type.
* @param type The type of map to be created.
* @param size Initial size of the new map.
* @return A new map of the specified type.
*/
public static <T extends Map<K, V>, K, V> T newMap(Class<T> type, int size) {
if (Map.class.equals(type) || LinkedHashMap.class.equals(type)) {
return type.cast(new LinkedHashMap<K, V>(size));
} else if (HashMap.class.equals(type)) {
return type.cast(new HashMap<K, V>(size));
} else if (IdentityHashMap.class.equals(type)) {
return type.cast(new IdentityHashMap<K, V>(size));
} else if (SortedMap.class.equals(type) || NavigableMap.class.equals(type) || TreeMap.class.equals(type)) {
return type.cast(new TreeMap<K, V>());
} else if (ConcurrentMap.class.equals(type) || ConcurrentHashMap.class.equals(type)) {
return type.cast(new ConcurrentHashMap<K, V>(size));
} else if (ConcurrentNavigableMap.class.equals(type) || ConcurrentSkipListMap.class.equals(type)) {
return type.cast(new ConcurrentSkipListMap<K, V>());
} else {
return newInstance(type);
}
}
/**
* Returns a hash code value for the specified array of types.
*
* @param types Array of types.
* @return A hash code value for the specified array of types.
*/
public static int hashCode(Class<?>... types) {
if (types == null || types.length == 0) {
return 0;
}
int hash = 1;
for (Class<?> type : types) {
hash = 31 * hash + (type == null ? 0 : type.hashCode());
}
return hash;
}
}
| |
/*
* The MIT License
*
* Copyright 2012 mpower.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.boazglean.kathab.api;
import javax.sql.DataSource;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.DefaultValue;
import lombok.Data;
import lombok.extern.slf4j.Slf4j;
import org.boazglean.kathab.api.summarization.*;
import org.h2.jdbcx.JdbcDataSource;
/**
*
* @author mpower
*/
@Path(value="/summary")
@Data
@Slf4j
public class LogSummarizerService extends JdbcLogSummarizer implements LogSummarizer {

    /**
     * Builds the service on top of an in-memory H2 database that stays alive
     * for the lifetime of the JVM ({@code DB_CLOSE_DELAY=-1}).
     */
    public LogSummarizerService() {
        JdbcDataSource jdbcSource = new JdbcDataSource();
        jdbcSource.setURL("jdbc:h2:mem:api-webapp;DB_CLOSE_DELAY=-1");
        jdbcSource.setUser("apiwebapp");
        // NOTE(review): credentials are hard-coded; move them to external
        // configuration if this service is ever deployed beyond tests.
        jdbcSource.setPassword("password");
        this.setSource(jdbcSource);
    }

    @Override
    public DataSource getSource() {
        return super.getSource();
    }

    @Override
    public void setSource(DataSource source) {
        super.setSource(source);
    }

    /** Summarizes event counts per log level over all data. */
    @Override
    @GET
    @Path(value="/level/all")
    @Produces(value="application/json")
    public LevelSummary summarizeLevel() {
        log.info("Call to summary");
        return super.summarizeLevel();
    }

    /** Summarizes event counts restricted to the given levels. */
    @Override
    @GET
    @Path(value="/level/level={level}")
    @Produces(value="application/json")
    public LevelSummary summarizeLevel(@PathParam(value="level") LogLevel... levels) {
        log.info("Call to summary");
        log.debug("Call to summary, levels: {}", levels);
        return super.summarizeLevel(levels);
    }

    /** Summarizes event counts for a single prefix and the given levels. */
    @GET
    @Path(value="/prefix/level={level}&prefix={prefix}")
    @Produces(value="application/json")
    public LevelSummary summarizeLevel(@PathParam(value="prefix") String includePrefix, @PathParam(value="level") LogLevel... levels) {
        log.info("Call to summary");
        log.debug("Call to summary, prefix: {}, levels: {}", includePrefix, levels);
        return super.summarizeLevel(System.currentTimeMillis(), TimePeriod.DEFAULT, new String[] {includePrefix}, levels);
    }

    /**
     * Summarizes event counts for the given period, prefixes and levels.
     * The {@code endMillis} argument is ignored; the current wall-clock time
     * is always used as the end of the window.
     */
    @Override
    @GET
    @Path(value="/level/level={level}&period={period}&prefix={prefix}")
    @Produces(value="application/json")
    public LevelSummary summarizeLevel(
            @DefaultValue("0")long endMillis, /* not used */
            @PathParam(value="period") TimePeriod period,
            @PathParam(value="prefix") String[] includePrefix,
            @PathParam(value="level") LogLevel... levels) {
        log.info("Call to summary");
        log.debug("Call to summary, period: {}, prefix: {}, levels: {}", new Object[]{period, includePrefix, levels});
        return super.summarizeLevel(System.currentTimeMillis(), period, includePrefix, levels);
    }

    /** Summarizes event counts restricted to the given prefixes. */
    @Override
    @GET
    @Path(value="/level/prefix={prefix}")
    @Produces(value="application/json")
    public LevelSummary summarizeLevel(@PathParam(value="prefix") String... includePrefix) {
        log.info("Call to summary");
        log.debug("Call to summary, prefix: {}", includePrefix);
        return super.summarizeLevel(includePrefix);
    }

    /** Summarizes event counts for the given time period. */
    @Override
    @GET
    @Path(value="/level/period={period}")
    @Produces(value="application/json")
    public LevelSummary summarizeLevel(@PathParam(value="period") TimePeriod period) {
        log.info("Call to summary");
        log.debug("Call to summary, period: {}", period);
        return super.summarizeLevel(period);
    }

    /** Summarizes event counts per prefix over all data. */
    @Override
    @GET
    @Path(value="/prefix/all")
    @Produces(value="application/json")
    public PrefixSummary summarizePrefix() {
        log.info("Call to summary");
        return super.summarizePrefix();
    }

    /** Summarizes event counts restricted to the given prefixes. */
    @Override
    @GET
    @Path(value="/prefix/prefix={prefix}")
    @Produces(value="application/json")
    public PrefixSummary summarizePrefix(@PathParam(value="prefix") String... includePrefixes) {
        log.info("Call to summary");
        log.debug("Call to summary, prefixes: {}", includePrefixes);
        return super.summarizePrefix(includePrefixes);
    }

    /** Summarizes event counts per prefix for the given time period. */
    @Override
    @GET
    @Path(value="/prefix/period={period}")
    @Produces(value="application/json")
    public PrefixSummary summarizePrefix(@PathParam(value="period") TimePeriod period) {
        log.info("Call to summary");
        log.debug("Call to summary, period: {}", period);
        return super.summarizePrefix(period);
    }

    /**
     * Summarizes event counts per prefix for the given period, prefixes and
     * levels. The {@code endMillis} argument is ignored; the current time is
     * always used.
     */
    @Override
    @GET
    @Path(value="/prefix/level={level}&period={period}&prefix={prefix}")
    @Produces(value="application/json")
    public PrefixSummary summarizePrefix(
            @DefaultValue("0")long endMillis, /* not used */
            @PathParam(value="period") TimePeriod period,
            @PathParam(value="prefix") String[] includePrefixes,
            @PathParam(value="level") LogLevel... levels) {
        log.info("Call to summary");
        log.debug("Call to summary, period: {}, prefixes: {}, levels: {}", new Object[]{period, includePrefixes, levels});
        return super.summarizePrefix(System.currentTimeMillis(), period, includePrefixes, levels);
    }

    /** Summarizes event counts per prefix restricted to the given levels. */
    @Override
    @GET
    @Path(value="/prefix/level={level}")
    @Produces(value="application/json")
    public PrefixSummary summarizePrefix(@PathParam(value="level") LogLevel... levels) {
        log.info("Call to summary");
        log.debug("Call to summary, levels: {}", levels);
        return super.summarizePrefix(levels);
    }

    /** Summarizes event counts over time restricted to the given levels. */
    @Override
    @GET
    @Path(value="/period/level={level}")
    @Produces(value="application/json")
    public TimeSummary summarizeTime(@PathParam(value="level") LogLevel... levels) {
        log.info("Call to summary");
        log.debug("Call to summary, levels: {}", levels);
        return super.summarizeTime(levels);
    }

    /**
     * Summarizes event counts over time for the given period, prefixes and
     * levels. The {@code endMillis} argument is ignored; the current time is
     * always used.
     */
    @Override
    @GET
    @Path(value="/period/level={level}&period={period}&prefix={prefix}")
    @Produces(value="application/json")
    public TimeSummary summarizeTime(
            @DefaultValue("0")long endMillis, /* not used */
            @PathParam(value="period") TimePeriod period,
            @PathParam(value="prefix") String[] includePrefix,
            @PathParam(value="level") LogLevel... levels) {
        log.info("Call to summary");
        log.debug("Call to summary, period: {}, prefix: {}, levels: {}", new Object[]{period, includePrefix, levels});
        return super.summarizeTime(System.currentTimeMillis(), period, includePrefix, levels);
    }

    /** Summarizes event counts over time restricted to the given prefixes. */
    @Override
    @GET
    @Path(value="/period/prefix={prefix}")
    @Produces(value="application/json")
    public TimeSummary summarizeTime(String... includePrefix) {
        log.info("Call to summary");
        log.debug("Call to summary, prefix: {}", includePrefix);
        return super.summarizeTime(includePrefix);
    }

    /** Summarizes event counts over time for all data. */
    @Override
    @GET
    @Path(value="/period/all")
    @Produces(value="application/json")
    public TimeSummary summarizeTime() {
        log.info("Call to summary");
        return super.summarizeTime();
    }

    /** Summarizes event counts over time bucketed by the given period. */
    @Override
    @GET
    @Path(value="/period/period={period}")
    @Produces(value="application/json")
    public TimeSummary summarizeTime(@PathParam(value="period") TimePeriod period) {
        log.info("Call to summary");
        log.debug("Call to summary, period: {}", period);
        return super.summarizeTime(period);
    }
}
| |
/*
* Copyright 2012 GitHub Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.mobile.util;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.net.Uri;
import android.os.AsyncTask;
import android.support.v4.os.AsyncTaskCompat;
import android.text.Html.ImageGetter;
import android.text.TextUtils;
import android.widget.TextView;
import com.alorma.github.emoji.EmojiBitmapLoader;
import com.alorma.github.sdk.bean.info.RepoInfo;
import com.gh4a.utils.FileUtils;
import com.mikepenz.iconics.IconicsDrawable;
import com.mikepenz.octicons_typeface_library.Octicons;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.lang.ref.WeakReference;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import static android.view.View.GONE;
import static android.view.View.VISIBLE;
/**
* Original source https://github.com/github/android/blob/master/app/src/main/java/com/github/mobile/util/HttpImageGetter.java
* Getter for an image
*/
public class HttpImageGetter implements ImageGetter {

    private final Context context;

    /** Encoded HTML that still contains unresolved image placeholders. */
    private final Map<Object, CharSequence> rawHtmlCache = new HashMap<>();

    /** Encoded HTML with images fully resolved (or containing none). */
    private final Map<Object, CharSequence> fullHtmlCache = new HashMap<>();

    private RepoInfo repoInfo;
    private LoadingImageGetter loading;
    private File dir;
    private int width;
    private int height;
    private ArrayList<WeakReference<Bitmap>> loadedBitmaps;
    private boolean destroyed;

    /**
     * Create image getter for context.
     *
     * @param context context used to resolve the cache dir, display metrics
     *                and resources; may be null, in which case this getter
     *                only serves null drawables
     */
    public HttpImageGetter(Context context) {
        this.context = context;
        if (context != null) {
            dir = context.getCacheDir();
            width = context.getResources().getDisplayMetrics().widthPixels / 2;
            height = context.getResources().getDisplayMetrics().heightPixels / 2;
            loadedBitmaps = new ArrayList<>();
            loading = new LoadingImageGetter(context, 24);
        }
    }

    /** True if the HTML contains at least one img tag. */
    private static boolean containsImages(final String html) {
        return html.contains("<img");
    }

    /**
     * Recycle every bitmap loaded so far and stop serving real images;
     * after this call {@link #getDrawable(String)} returns the placeholder.
     */
    public void destroy() {
        // Guard against the null-context constructor path where the list was
        // never created.
        if (loadedBitmaps != null) {
            for (WeakReference<Bitmap> ref : loadedBitmaps) {
                Bitmap bitmap = ref.get();
                if (bitmap != null) {
                    bitmap.recycle();
                }
            }
            // Drop the references so a bitmap is never recycled twice if
            // destroy() is invoked again.
            loadedBitmaps.clear();
        }
        destroyed = true;
    }

    /** Display the given encoded HTML in the view, or hide it when empty. */
    private HttpImageGetter show(final TextView view, final CharSequence html) {
        if (TextUtils.isEmpty(html)) return hide(view);
        view.setText(html);
        new EmojiBitmapLoader().parseTextView(view);
        view.setVisibility(VISIBLE);
        view.setTag(null);
        return this;
    }

    /** Clear and hide the view. */
    private HttpImageGetter hide(final TextView view) {
        view.setText(null);
        view.setVisibility(GONE);
        view.setTag(null);
        return this;
    }

    /**
     * Encode given HTML string and map it to the given id. HTML without img
     * tags is cached as fully resolved; HTML with img tags is cached raw so
     * {@link #bind(TextView, String, Object)} can resolve the images later.
     */
    public void encode(final Object id, final String html) {
        if (TextUtils.isEmpty(html)) return;
        CharSequence encoded = HtmlUtils.encode(html, loading);
        if (containsImages(html)) {
            rawHtmlCache.put(id, encoded);
        } else {
            rawHtmlCache.remove(id);
            fullHtmlCache.put(id, encoded);
        }
    }

    /**
     * Bind text view to HTML string. Shows cached content immediately when
     * available; otherwise shows the placeholder version and resolves images
     * asynchronously.
     *
     * @return this image getter
     */
    public HttpImageGetter bind(final TextView view, final String html, final Object id) {
        if (TextUtils.isEmpty(html)) return hide(view);

        CharSequence encoded = fullHtmlCache.get(id);
        if (encoded != null) return show(view, encoded);

        encoded = rawHtmlCache.get(id);
        if (encoded == null) {
            encoded = HtmlUtils.encode(html, loading);
            if (containsImages(html)) {
                rawHtmlCache.put(id, encoded);
            } else {
                rawHtmlCache.remove(id);
                fullHtmlCache.put(id, encoded);
                return show(view, encoded);
            }
        }

        if (TextUtils.isEmpty(encoded)) return hide(view);

        show(view, encoded);
        // Tag the view so onPostExecute only updates it if it has not been
        // rebound to a different id in the meantime.
        view.setTag(id);
        ImageGetterAsyncTask asyncTask = new ImageGetterAsyncTask();
        AsyncTaskCompat.executeParallel(asyncTask, html, id, view);
        return this;
    }

    /** Set the repository used to resolve relative image paths. */
    public void repoInfo(RepoInfo repoInfo) {
        this.repoInfo = repoInfo;
    }

    /**
     * Open a stream for the given image URL. Relative paths are resolved
     * against the repository's raw-content URL on github.com.
     */
    private InputStream fetch(String urlString) throws IOException {
        // Treat only URLs that actually begin with http/https as absolute.
        // The previous contains("http") check wrongly matched any relative
        // path that merely contained the substring "http".
        if (!urlString.startsWith("http")) {
            Uri.Builder builder = Uri.parse("https://github.com/").buildUpon();
            builder.appendPath(repoInfo.owner);
            builder.appendPath(repoInfo.name);
            builder.appendPath("raw");
            builder.appendPath(repoInfo.branch);
            if (urlString.startsWith("./")) {
                urlString = urlString.replace("./", "");
            }
            builder.appendPath(urlString);
            urlString = builder.build().toString();
        }
        URL url = new URL(urlString);
        return url.openStream();
    }

    /**
     * Resolve an image source to a drawable: download the image into a temp
     * file in the cache dir, decode it scaled to the view width, and return
     * it. Falls back to the loading placeholder on any failure or after
     * {@link #destroy()}; returns null when constructed without a context.
     */
    @Override
    public Drawable getDrawable(String source) {
        if (loading != null) {
            File output = null;
            if (destroyed) {
                return loading.getDrawable(source);
            }
            try {
                output = File.createTempFile("image", ".jpg", dir);
                InputStream is = fetch(source);
                // NOTE(review): the stream is assumed to be consumed and
                // closed by FileUtils.save — verify against its contract.
                if (is != null) {
                    boolean success = FileUtils.save(output, is);
                    if (success) {
                        Bitmap bitmap = ImageUtils.getBitmap(output, width, Integer.MAX_VALUE);
                        if (bitmap == null) {
                            return loading.getDrawable(source);
                        }
                        loadedBitmaps.add(new WeakReference<Bitmap>(bitmap));
                        BitmapDrawable drawable = new BitmapDrawable(context.getResources(), bitmap);
                        drawable.setBounds(0, 0, bitmap.getWidth(), bitmap.getHeight());
                        return drawable;
                    } else {
                        return loading.getDrawable(source);
                    }
                } else {
                    return loading.getDrawable(source);
                }
            } catch (IOException e) {
                return loading.getDrawable(source);
            } finally {
                if (output != null) output.delete();
            }
        }
        return null;
    }

    /** Serves a fixed placeholder icon while the real image loads. */
    private static class LoadingImageGetter implements ImageGetter {

        private final Drawable image;

        private LoadingImageGetter(final Context context, final int size) {
            int imageSize = Math.round(context.getResources().getDisplayMetrics().density * size + 0.5F);
            image = new IconicsDrawable(context, Octicons.Icon.oct_file_media).sizePx(imageSize);
        }

        @Override
        public Drawable getDrawable(String source) {
            return image;
        }
    }

    /** Encodes HTML with real images off the UI thread, then rebinds. */
    public class ImageGetterAsyncTask extends AsyncTask<Object, Void, CharSequence> {
        String html;
        Object id;
        TextView view;

        @Override
        protected CharSequence doInBackground(Object... params) {
            html = (String) params[0];
            id = params[1];
            view = (TextView) params[2];
            return HtmlUtils.encode(html, HttpImageGetter.this);
        }

        @Override
        protected void onPostExecute(CharSequence result) {
            if (result != null) {
                rawHtmlCache.remove(id);
                fullHtmlCache.put(id, result);
                // Only update the view if it is still bound to this id.
                if (id.equals(view.getTag())) {
                    show(view, result);
                }
            }
        }
    }
}
| |
/*
* Copyright (c) 2002, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
import com.sun.javadoc.*;
import java.util.*;
import java.io.*;
/**
* Runs javadoc and then runs regression tests on the resulting output.
* This class currently contains three tests:
* <ul>
* <li> String search: Reads each file, complete with newlines,
* into a string. Lets you search for strings that contain
* newlines. String matching is case-sensitive.
* You can run javadoc multiple times with different arguments,
* generating output into different destination directories, and
* then perform a different array of tests on each one.
* To do this, the run method accepts a test array for testing
* that a string is found, and a negated test array for testing
* that a string is not found.
* <li> Run diffs: Iterate through the list of given file pairs
* and diff the pairs.
* <li> Check exit code: Check the exit code of Javadoc and
* record whether the test passed or failed.
* </ul>
*
* @author Doug Kramer
* @author Jamie Ho
* @since 1.4.2
*/
public abstract class JavadocTester {

    protected static final String FS = System.getProperty("file.separator");
    protected static final String PS = System.getProperty("path.separator");
    protected static final String NL = System.getProperty("line.separator");
    protected static final String SRC_DIR = System.getProperty("test.src", ".");
    protected static final String JAVA_VERSION = System.getProperty("java.version");
    protected static final String[][] NO_TEST = new String[][] {};
    protected static final String[] NO_FILE_TEST = new String[] {};

    /**
     * Use this as the file name in the test array when you want to search
     * for a string in the error output.
     */
    public static final String ERROR_OUTPUT = "ERROR_OUTPUT";

    /**
     * Use this as the file name in the test array when you want to search
     * for a string in the notice output.
     */
    public static final String NOTICE_OUTPUT = "NOTICE_OUTPUT";

    /**
     * Use this as the file name in the test array when you want to search
     * for a string in the warning output.
     */
    public static final String WARNING_OUTPUT = "WARNING_OUTPUT";

    /**
     * Use this as the file name in the test array when you want to search
     * for a string in standard output.
     */
    public static final String STANDARD_OUTPUT = "STANDARD_OUTPUT";

    /**
     * The default doclet.
     */
    public static final String DEFAULT_DOCLET_CLASS = "com.sun.tools.doclets.formats.html.HtmlDoclet";
    public static final String DEFAULT_DOCLET_CLASS_OLD = "com.sun.tools.doclets.standard.Standard";

    /**
     * The writer to write error messages.
     */
    public StringWriter errors;

    /**
     * The writer to write notices.
     */
    public StringWriter notices;

    /**
     * The writer to write warnings.
     */
    public StringWriter warnings;

    /**
     * The buffer of standard output.
     */
    public StringBuffer standardOut;

    /**
     * The current subtest number.
     */
    private static int numTestsRun = 0;

    /**
     * The number of subtests passed.
     */
    private static int numTestsPassed = 0;

    /**
     * The current run of javadoc
     */
    private static int javadocRunNum = 0;

    /**
     * Whether or not to match newlines exactly.
     * Set this value to false if the match strings
     * contain text from javadoc comments containing
     * non-platform newlines.
     */
    protected boolean exactNewlineMatch = true;

    /**
     * Construct a JavadocTester.
     */
    public JavadocTester() {
    }

    /**
     * Return the bug id.
     * @return the bug id
     */
    public abstract String getBugId();

    /**
     * Return the name of the bug.
     * @return the name of the bug
     */
    public abstract String getBugName();

    /**
     * Execute the tests.
     *
     * @param tester the tester to execute
     * @param args the arguments to pass to Javadoc
     * @param testArray the array of tests
     * @param negatedTestArray the array of negated tests
     * @return the return code for the execution of Javadoc
     */
    public static int run(JavadocTester tester, String[] args,
            String[][] testArray, String[][] negatedTestArray) {
        int returnCode = tester.runJavadoc(args);
        tester.runTestsOnHTML(testArray, negatedTestArray);
        return returnCode;
    }

    /**
     * Execute the tests.
     *
     * @param tester the tester to execute
     * @param args the arguments to pass to Javadoc
     * @param testArray the array of tests
     * @param negatedTestArray the array of negated tests
     * @param fileTestArray the array of file tests
     * @param negatedFileTestArray the array of negated file tests
     * @return the return code for the execution of Javadoc
     */
    public static int run(JavadocTester tester, String[] args,
            String[][] testArray, String[][] negatedTestArray, String[] fileTestArray,
            String[] negatedFileTestArray) {
        int returnCode = tester.runJavadoc(args);
        tester.runTestsOnHTML(testArray, negatedTestArray);
        tester.runTestsOnFile(fileTestArray, negatedFileTestArray);
        return returnCode;
    }

    /**
     * Execute Javadoc using the default doclet.
     *
     * @param args the arguments to pass to Javadoc
     * @return the return code from the execution of Javadoc
     */
    public int runJavadoc(String[] args) {
        // Parse the leading version component defensively: the original
        // fixed-width substring(0, 3) throws on short version strings such
        // as "9", and parsing can fail on other non-"1.x" formats. Any
        // unparseable (i.e. modern) version uses the current doclet.
        float javaVersion;
        try {
            javaVersion = Float.parseFloat(
                    JAVA_VERSION.substring(0, Math.min(3, JAVA_VERSION.length())));
        } catch (NumberFormatException e) {
            javaVersion = Float.MAX_VALUE;
        }
        String docletClass = javaVersion < 1.5 ?
            DEFAULT_DOCLET_CLASS_OLD : DEFAULT_DOCLET_CLASS;
        return runJavadoc(docletClass, args);
    }

    /**
     * Execute Javadoc.
     *
     * @param docletClass the doclet being tested.
     * @param args the arguments to pass to Javadoc
     * @return the return code from the execution of Javadoc
     */
    public int runJavadoc(String docletClass, String[] args) {
        javadocRunNum++;
        if (javadocRunNum == 1) {
            System.out.println("\n" + "Running javadoc...");
        } else {
            System.out.println("\n" + "Running javadoc (run "
                                    + javadocRunNum + ")...");
        }
        initOutputBuffers();

        // Capture stdout/stderr produced during the javadoc run; the previous
        // streams are restored afterwards.
        ByteArrayOutputStream stdout = new ByteArrayOutputStream();
        PrintStream prevOut = System.out;
        System.setOut(new PrintStream(stdout));

        ByteArrayOutputStream stderr = new ByteArrayOutputStream();
        PrintStream prevErr = System.err;
        System.setErr(new PrintStream(stderr));

        int returnCode = com.sun.tools.javadoc.Main.execute(
                getBugName(),
                new PrintWriter(errors, true),
                new PrintWriter(warnings, true),
                new PrintWriter(notices, true),
                docletClass,
                getClass().getClassLoader(),
                args);
        System.setOut(prevOut);
        standardOut = new StringBuffer(stdout.toString());
        System.setErr(prevErr);
        errors.write(NL + stderr.toString());

        printJavadocOutput();
        return returnCode;
    }

    /**
     * Create new string writer buffers
     */
    private void initOutputBuffers() {
        errors   = new StringWriter();
        notices  = new StringWriter();
        warnings = new StringWriter();
    }

    /**
     * Run array of tests on the resulting HTML.
     * This method accepts a testArray for testing that a string is found
     * and a negatedTestArray for testing that a string is not found.
     *
     * @param testArray the array of tests
     * @param negatedTestArray the array of negated tests
     */
    public void runTestsOnHTML(String[][] testArray, String[][] negatedTestArray) {
        runTestsOnHTML(testArray, false);
        runTestsOnHTML(negatedTestArray, true);
    }

    /**
     * Run array of tests on the generated files.
     * This method accepts a fileTestArray for testing if a file is generated
     * and a negatedFileTestArray for testing if a file is not found.
     *
     * @param fileTestArray the array of file tests
     * @param negatedFileTestArray the array of negated file tests
     */
    public void runTestsOnFile(String[] fileTestArray, String[] negatedFileTestArray) {
        runTestsOnFile(fileTestArray, false);
        runTestsOnFile(negatedFileTestArray, true);
    }

    /**
     * Run the array of tests on the resulting HTML.
     *
     * @param testArray the array of tests
     * @param isNegated true if test is negated; false otherwise
     */
    private void runTestsOnHTML(String[][] testArray, boolean isNegated) {
        for (int i = 0; i < testArray.length; i++) {
            numTestsRun++;
            System.out.print("Running subtest #" + numTestsRun + "... ");

            // Get string to find
            String stringToFind = testArray[i][1];

            // Read contents of file into a string
            String fileString;
            try {
                fileString = readFileToString(testArray[i][0]);
            } catch (Error e) {
                // For a negated test an unreadable file is only a subtest
                // failure; for a positive test it is fatal.
                if (isNegated) {
                  System.out.println( "FAILED" + "\n"
                                    + "for bug " + getBugId()
                                    + " (" + getBugName() + ") "
                                    + "due to "
                                    + e + "\n");
                  continue;
                }
                throw e;
            }

            // Find string in file's contents
            boolean isFound = findString(fileString, stringToFind);
            if ((isNegated && !isFound) || (!isNegated && isFound) ) {
                numTestsPassed += 1;
                System.out.println( "Passed" + "\n"
                                    + (isNegated ? "not found:" : "found:") + "\n"
                                    + stringToFind + " in " + testArray[i][0] + "\n");
            } else {
                System.out.println( "FAILED" + "\n"
                                    + "for bug " + getBugId()
                                    + " (" + getBugName() + ")" + "\n"
                                    + "when searching for:" + "\n"
                                    + stringToFind
                                    + " in " + testArray[i][0] + "\n");
            }
        }
    }

    /**
     * Run the array of file tests on the generated files.
     *
     * @param testArray the array of file tests
     * @param isNegated true if test is negated; false otherwise
     */
    private void runTestsOnFile(String[] testArray, boolean isNegated) {
        String fileName;
        String failedString;
        String passedString;
        for (int i = 0; i < testArray.length; i++) {
            numTestsRun++;
            fileName = testArray[i];
            failedString = "FAILED" + "\n"
                + "for bug " + getBugId() + " (" + getBugName() + ") "
                + "file (" + fileName + ") found" + "\n";
            passedString = "Passed" + "\n" +
                "file (" + fileName + ") not found" + "\n";
            System.out.print("Running subtest #" + numTestsRun + "... ");
            try {
                File file = new File(fileName);
                if ((file.exists() && !isNegated) || (!file.exists() && isNegated)) {
                    numTestsPassed += 1;
                    System.out.println(passedString);
                } else {
                    System.out.println(failedString);
                }
            } catch (Error e) {
                System.err.println(e);
            }
        }
    }

    /**
     * Iterate through the list of given file pairs and diff each file.
     *
     * @param filePairs the pairs of files to diff.
     * @throws Error if any differences are found between file pairs.
     */
    public void runDiffs(String[][] filePairs) throws Error {
        runDiffs(filePairs, true);
    }

    /**
     * Iterate through the list of given file pairs and diff each file.
     *
     * @param filePairs the pairs of files to diff.
     * @param throwErrorIfNoMatch flag to indicate whether or not to throw
     * an error if the files do not match.
     *
     * @throws Error if any differences are found between file pairs
     * and throwErrorIfNoMatch is true.
     */
    public void runDiffs(String[][] filePairs, boolean throwErrorIfNoMatch) throws Error {
        for (int i = 0; i < filePairs.length; i++) {
            diff(filePairs[i][0], filePairs[i][1], throwErrorIfNoMatch);
        }
    }

    /**
     * Check the exit code of Javadoc and record whether the test passed
     * or failed.
     *
     * @param expectedExitCode The exit code that is required for the test
     * to pass.
     * @param actualExitCode The actual exit code from the previous run of
     * Javadoc.
     */
    public void checkExitCode(int expectedExitCode, int actualExitCode) {
        numTestsRun++;
        if (expectedExitCode == actualExitCode) {
            System.out.println( "Passed" + "\n" + " got return code " +
                actualExitCode);
            numTestsPassed++;
        } else {
            System.out.println( "FAILED" + "\n" + "for bug " + getBugId()
                + " (" + getBugName() + ")" + "\n" + "Expected return code " +
                expectedExitCode + " but got " + actualExitCode);
        }
    }

    /**
     * Print a summary of the test results.
     */
    protected void printSummary() {
        if ( numTestsRun != 0 && numTestsPassed == numTestsRun ) {
            // Test passed
            System.out.println("\n" + "All " + numTestsPassed
                                             + " subtests passed");
        } else {
            // Test failed
            throw new Error("\n" + (numTestsRun - numTestsPassed)
                                    + " of " + (numTestsRun)
                                    + " subtests failed for bug " + getBugId()
                                    + " (" + getBugName() + ")" + "\n");
        }
    }

    /**
     * Print the output stored in the buffers.
     */
    protected void printJavadocOutput() {
        System.out.println(STANDARD_OUTPUT + " : \n" + getStandardOutput());
        System.err.println(ERROR_OUTPUT + " : \n" + getErrorOutput());
        System.err.println(WARNING_OUTPUT + " : \n" + getWarningOutput());
        System.out.println(NOTICE_OUTPUT + " : \n" + getNoticeOutput());
    }

    /**
     * Read the file and return it as a string.
     *
     * @param fileName the name of the file to read
     * @return the file in string format
     * @throws Error if the file cannot be found or read
     */
    public String readFileToString(String fileName) throws Error {
        if (fileName.equals(ERROR_OUTPUT)) {
            return getErrorOutput();
        } else if (fileName.equals(NOTICE_OUTPUT)) {
            return getNoticeOutput();
        } else if (fileName.equals(WARNING_OUTPUT)) {
            return getWarningOutput();
        } else if (fileName.equals(STANDARD_OUTPUT)) {
            return getStandardOutput();
        }
        try {
            File file = new File(fileName);
            if ( !file.exists() ) {
                System.out.println("\n" + "FILE DOES NOT EXIST: " + fileName);
            }
            // Read the whole file in a loop: a single Reader.read() call is
            // not guaranteed to fill the buffer, and the char count can be
            // smaller than the byte length for multi-byte encodings (the
            // previous file.length()-sized single read relied on both).
            StringBuilder contents = new StringBuilder();
            BufferedReader in = new BufferedReader(new FileReader(file));
            try {
                char[] buf = new char[4096];
                int nread;
                while ((nread = in.read(buf)) != -1) {
                    contents.append(buf, 0, nread);
                }
            } finally {
                // Close even when a read fails.
                in.close();
            }
            return contents.toString();
        } catch (FileNotFoundException e) {
            System.err.println(e);
            throw new Error("File not found: " + fileName);
        } catch (IOException e) {
            System.err.println(e);
            throw new Error("Error reading file: " + fileName);
        }
    }

    /**
     * Compare the two given files.
     *
     * @param file1 the first file to compare.
     * @param file2 the second file to compare.
     * @param throwErrorIFNoMatch flag to indicate whether or not to throw
     * an error if the files do not match.
     * @return true if the files are the same and false otherwise.
     */
    public boolean diff(String file1, String file2, boolean throwErrorIFNoMatch) throws Error {
        String file1Contents = readFileToString(file1);
        String file2Contents = readFileToString(file2);
        numTestsRun++;
        if (file1Contents.trim().compareTo(file2Contents.trim()) == 0) {
            System.out.println("Diff successful: " + file1 + ", " + file2);
            numTestsPassed++;
            return true;
        } else if (throwErrorIFNoMatch) {
            throw new Error("Diff failed: " + file1 + ", " + file2);
        } else {
            return false;
        }
    }

    /**
     * Search for the string in the given file and return true
     * if the string was found.
     * If exactNewlineMatch is false, newlines will be normalized
     * before the comparison.
     *
     * @param fileString the contents of the file to search through
     * @param stringToFind the string to search for
     * @return true if the string was found
     */
    private boolean findString(String fileString, String stringToFind) {
        if (exactNewlineMatch) {
            return fileString.indexOf(stringToFind) >= 0;
        } else {
            return fileString.replace(NL, "\n").indexOf(stringToFind.replace(NL, "\n")) >= 0;
        }
    }

    /**
     * Return the standard output.
     * @return the standard output
     */
    public String getStandardOutput() {
        return standardOut.toString();
    }

    /**
     * Return the error output.
     * @return the error output
     */
    public String getErrorOutput() {
        return errors.getBuffer().toString();
    }

    /**
     * Return the notice output.
     * @return the notice output
     */
    public String getNoticeOutput() {
        return notices.getBuffer().toString();
    }

    /**
     * Return the warning output.
     * @return the warning output
     */
    public String getWarningOutput() {
        return warnings.getBuffer().toString();
    }

    /**
     * A utility to copy a directory from one place to another.
     * We may possibly want to move this to our doclet toolkit in
     * the near future and maintain it from there.
     *
     * @param targetDir the directory to copy.
     * @param destDir the destination to copy the directory to.
     */
    public static void copyDir(String targetDir, String destDir) {
        if (targetDir.endsWith("SCCS")) {
            return;
        }
        try {
            File targetDirObj = new File(targetDir);
            File destDirParentObj = new File(destDir);
            File destDirObj = new File(destDirParentObj, targetDirObj.getName());
            if (! destDirParentObj.exists()) {
                destDirParentObj.mkdir();
            }
            if (! destDirObj.exists()) {
                destDirObj.mkdir();
            }
            String[] files = targetDirObj.list();
            // File.list() returns null for a non-directory or an I/O error.
            if (files == null) {
                throw new IOException("Could not list directory: " + targetDir);
            }
            for (int i = 0; i < files.length; i++) {
                File srcFile = new File(targetDirObj, files[i]);
                File destFile = new File(destDirObj, files[i]);
                if (srcFile.isFile()) {
                    System.out.println("Copying " + srcFile + " to " + destFile);
                    copyFile(destFile, srcFile);
                } else if(srcFile.isDirectory()) {
                    copyDir(srcFile.getAbsolutePath(), destDirObj.getAbsolutePath());
                }
            }
        } catch (IOException exc) {
            throw new Error("Could not copy " + targetDir + " to " + destDir);
        }
    }

    /**
     * Copy source file to destination file.
     *
     * @throws SecurityException
     * @throws IOException
     */
    public static void copyFile(File destfile, File srcfile)
        throws IOException {
        byte[] bytearr = new byte[512];
        int len = 0;
        FileInputStream input = new FileInputStream(srcfile);
        try {
            File destDir = destfile.getParentFile();
            destDir.mkdirs();
            FileOutputStream output = new FileOutputStream(destfile);
            try {
                while ((len = input.read(bytearr)) != -1) {
                    output.write(bytearr, 0, len);
                }
            } finally {
                output.close();
            }
        } finally {
            // Previously I/O and security exceptions during the copy were
            // silently swallowed and the streams leaked on failure; now they
            // propagate (as declared) and both streams are always closed.
            input.close();
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.streaming.api.functions.source;
import org.apache.flink.annotation.PublicEvolving;
import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.state.ListState;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.typeutils.TypeExtractor;
import org.apache.flink.runtime.state.CheckpointListener;
import org.apache.flink.runtime.state.FunctionInitializationContext;
import org.apache.flink.runtime.state.FunctionSnapshotContext;
import org.apache.flink.runtime.state.SerializedCheckpointData;
import org.apache.flink.streaming.api.checkpoint.CheckpointedFunction;
import org.apache.flink.util.Preconditions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
/**
* Abstract base class for data sources that receive elements from a message queue and
* acknowledge them back by IDs.
*
* <p>The mechanism for this source assumes that messages are identified by a unique ID.
* When messages are taken from the message queue, the message must not be dropped immediately,
* but must be retained until acknowledged. Messages that are not acknowledged within a certain
* time interval will be served again (to a different connection, established by the recovered source).
*
* <p>Note that this source can give no guarantees about message order in the case of failures,
* because messages that were retrieved but not yet acknowledged will be returned later again, after
* a set of messages that was not retrieved before the failure.
*
* <p>Internally, this source gathers the IDs of elements it emits. Per checkpoint, the IDs are stored and
* acknowledged when the checkpoint is complete. That way, no message is acknowledged unless it is certain
* that it has been successfully processed throughout the topology and the updates to any state caused by
* that message are persistent.
*
* <p>All messages that are emitted and successfully processed by the streaming program will eventually be
* acknowledged. In corner cases, the source may receive certain IDs multiple times, if a
* failure occurs while acknowledging. To cope with this situation, an additional Set stores all
* processed IDs. IDs are only removed after they have been acknowledged.
*
* <p>A typical way to use this base in a source function is by implementing a run() method as follows:
* <pre>{@code
* public void run(SourceContext<Type> ctx) throws Exception {
* while (running) {
* Message msg = queue.retrieve();
* synchronized (ctx.getCheckpointLock()) {
* ctx.collect(msg.getMessageData());
* addId(msg.getMessageId());
* }
* }
* }
* }</pre>
*
* <b>NOTE:</b> This source has a parallelism of {@code 1}.
*
* @param <Type> The type of the messages created by the source.
* @param <UId> The type of unique IDs which may be used to acknowledge elements.
*/
@PublicEvolving
public abstract class MessageAcknowledgingSourceBase<Type, UId>
		extends RichSourceFunction<Type>
		implements CheckpointedFunction, CheckpointListener {

	private static final long serialVersionUID = -8689291992192955579L;

	private static final Logger LOG = LoggerFactory.getLogger(MessageAcknowledgingSourceBase.class);

	/** Serializer used to serialize the IDs for checkpoints. */
	private final TypeSerializer<UId> idSerializer;

	/** The list gathering the IDs of messages emitted during the current checkpoint. */
	private transient List<UId> idsForCurrentCheckpoint;

	/**
	 * The list with IDs from checkpoints that were triggered, but not yet completed or notified of
	 * completion.
	 */
	protected transient ArrayDeque<Tuple2<Long, List<UId>>> pendingCheckpoints;

	/**
	 * Set which contains all processed ids. Ids are acknowledged after checkpoints. When restoring
	 * a checkpoint, ids may be processed again. This happens when the checkpoint completed but the
	 * ids for a checkpoint haven't been acknowledged yet.
	 */
	private transient Set<UId> idsProcessedButNotAcknowledged;

	/** Operator state handle holding the serialized pending checkpoints across restarts. */
	private transient ListState<SerializedCheckpointData[]> checkpointedState;

	// ------------------------------------------------------------------------

	/**
	 * Creates a new MessageAcknowledgingSourceBase for IDs of the given type.
	 *
	 * @param idClass The class of the message ID type, used to create a serializer for the message IDs.
	 */
	protected MessageAcknowledgingSourceBase(Class<UId> idClass) {
		this(TypeExtractor.getForClass(idClass));
	}

	/**
	 * Creates a new MessageAcknowledgingSourceBase for IDs of the given type.
	 *
	 * @param idTypeInfo The type information of the message ID type, used to create a serializer for the message IDs.
	 */
	protected MessageAcknowledgingSourceBase(TypeInformation<UId> idTypeInfo) {
		this.idSerializer = idTypeInfo.createSerializer(new ExecutionConfig());
	}

	@Override
	public void initializeState(FunctionInitializationContext context) throws Exception {
		// initializeState must only run once per function instance
		Preconditions.checkState(this.checkpointedState == null,
			"The " + getClass().getSimpleName() + " has already been initialized.");

		this.checkpointedState = context
			.getOperatorStateStore()
			.getSerializableListState("message-acknowledging-source-state");

		this.idsForCurrentCheckpoint = new ArrayList<>(64);
		this.pendingCheckpoints = new ArrayDeque<>();
		this.idsProcessedButNotAcknowledged = new HashSet<>();

		if (context.isRestored()) {
			LOG.info("Restoring state for the {}.", getClass().getSimpleName());

			List<SerializedCheckpointData[]> retrievedStates = new ArrayList<>();
			for (SerializedCheckpointData[] entry : this.checkpointedState.get()) {
				retrievedStates.add(entry);
			}

			// given that the parallelism of the function is 1, we can only have at most 1 state
			Preconditions.checkArgument(retrievedStates.size() == 1,
				getClass().getSimpleName() + " retrieved invalid state.");

			pendingCheckpoints = SerializedCheckpointData.toDeque(retrievedStates.get(0), idSerializer);
			// build a set which contains all processed ids. It may be used to check if we have
			// already processed an incoming message.
			for (Tuple2<Long, List<UId>> checkpoint : pendingCheckpoints) {
				idsProcessedButNotAcknowledged.addAll(checkpoint.f1);
			}
		} else {
			LOG.info("No state to restore for the {}.", getClass().getSimpleName());
		}
	}

	@Override
	public void close() throws Exception {
		// The transient collections are only created in initializeState(); guard against
		// close() being invoked on a task that failed before initialization completed.
		if (idsForCurrentCheckpoint != null) {
			idsForCurrentCheckpoint.clear();
		}
		if (pendingCheckpoints != null) {
			pendingCheckpoints.clear();
		}
	}

	// ------------------------------------------------------------------------
	//  ID Checkpointing
	// ------------------------------------------------------------------------

	/**
	 * This method must be implemented to acknowledge the given set of IDs back to the message queue.
	 *
	 * @param checkpointId The checkpoint whose completion triggers this acknowledgement.
	 * @param uIds The list of IDs to acknowledge.
	 */
	protected abstract void acknowledgeIDs(long checkpointId, List<UId> uIds);

	/**
	 * Adds an ID to be stored with the current checkpoint.
	 *
	 * @param uid The ID to add.
	 * @return True if the id has not been processed previously.
	 */
	protected boolean addId(UId uid) {
		idsForCurrentCheckpoint.add(uid);
		// Set.add returns false for duplicates, which signals an already-seen message.
		return idsProcessedButNotAcknowledged.add(uid);
	}

	// ------------------------------------------------------------------------
	//  Checkpointing the data
	// ------------------------------------------------------------------------

	@Override
	public void snapshotState(FunctionSnapshotContext context) throws Exception {
		Preconditions.checkState(this.checkpointedState != null,
			"The " + getClass().getSimpleName() + " has not been properly initialized.");

		if (LOG.isDebugEnabled()) {
			// BUGFIX: the format string has four '{}' placeholders but the source-name
			// argument was missing, shifting all logged values by one position.
			LOG.debug("{} checkpointing: Messages: {}, checkpoint id: {}, timestamp: {}",
				getClass().getSimpleName(), idsForCurrentCheckpoint,
				context.getCheckpointId(), context.getCheckpointTimestamp());
		}

		pendingCheckpoints.addLast(new Tuple2<>(context.getCheckpointId(), idsForCurrentCheckpoint));
		// start a fresh list; the old one is now owned by the pending-checkpoint entry
		idsForCurrentCheckpoint = new ArrayList<>(64);

		this.checkpointedState.clear();
		this.checkpointedState.add(SerializedCheckpointData.fromDeque(pendingCheckpoints, idSerializer));
	}

	@Override
	public void notifyCheckpointComplete(long checkpointId) throws Exception {
		LOG.debug("Committing Messages externally for checkpoint {}", checkpointId);

		// Acknowledge every pending checkpoint up to and including the completed one.
		// The deque is ordered by checkpoint id, so we can stop at the first newer entry.
		for (Iterator<Tuple2<Long, List<UId>>> iter = pendingCheckpoints.iterator(); iter.hasNext();) {
			Tuple2<Long, List<UId>> checkpoint = iter.next();
			long id = checkpoint.f0;

			if (id <= checkpointId) {
				LOG.trace("Committing Messages with following IDs {}", checkpoint.f1);
				acknowledgeIDs(checkpointId, checkpoint.f1);

				// remove deduplication data
				idsProcessedButNotAcknowledged.removeAll(checkpoint.f1);

				// remove checkpoint data
				iter.remove();
			}
			else {
				break;
			}
		}
	}
}
| |
/*******************************************************************************
* Copyright (c) 2000, 2011 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
* Ferenc Hechler, ferenc_hechler@users.sourceforge.net - 83258 [jar exporter] Deploy java application as executable jar
*******************************************************************************/
package org.eclipse.jdt.internal.ui;
import java.net.URL;
import java.util.HashMap;
import java.util.Iterator;
import org.osgi.framework.Bundle;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.graphics.ImageData;
import org.eclipse.core.runtime.FileLocator;
import org.eclipse.core.runtime.IPath;
import org.eclipse.core.runtime.Path;
import org.eclipse.jface.action.IAction;
import org.eclipse.jface.resource.ImageDescriptor;
import org.eclipse.jface.resource.ImageRegistry;
/**
* Bundle of most images used by the Java plug-in.
*/
public class JavaPluginImages {
public static final IPath ICONS_PATH= new Path("$nl$/icons/full"); //$NON-NLS-1$
private static final String NAME_PREFIX= "org.eclipse.jdt.ui."; //$NON-NLS-1$
private static final int NAME_PREFIX_LENGTH= NAME_PREFIX.length();
// The plug-in registry
private static ImageRegistry fgImageRegistry= null;
private static HashMap<String, ImageDescriptor> fgAvoidSWTErrorMap= null;
private static final String T_OBJ= "obj16"; //$NON-NLS-1$
private static final String T_OVR= "ovr16"; //$NON-NLS-1$
private static final String T_WIZBAN= "wizban"; //$NON-NLS-1$
private static final String T_ELCL= "elcl16"; //$NON-NLS-1$
private static final String T_DLCL= "dlcl16"; //$NON-NLS-1$
private static final String T_ETOOL= "etool16"; //$NON-NLS-1$
private static final String T_EVIEW= "eview16"; //$NON-NLS-1$
/*
* Keys for images available from the Java-UI plug-in image registry.
*/
public static final String IMG_MISC_PUBLIC= NAME_PREFIX + "methpub_obj.gif"; //$NON-NLS-1$
public static final String IMG_MISC_PROTECTED= NAME_PREFIX + "methpro_obj.gif"; //$NON-NLS-1$
public static final String IMG_MISC_PRIVATE= NAME_PREFIX + "methpri_obj.gif"; //$NON-NLS-1$
public static final String IMG_MISC_DEFAULT= NAME_PREFIX + "methdef_obj.gif"; //$NON-NLS-1$
public static final String IMG_FIELD_PUBLIC= NAME_PREFIX + "field_public_obj.gif"; //$NON-NLS-1$
public static final String IMG_FIELD_PROTECTED= NAME_PREFIX + "field_protected_obj.gif"; //$NON-NLS-1$
public static final String IMG_FIELD_PRIVATE= NAME_PREFIX + "field_private_obj.gif"; //$NON-NLS-1$
public static final String IMG_FIELD_DEFAULT= NAME_PREFIX + "field_default_obj.gif"; //$NON-NLS-1$
public static final String IMG_ELCL_VIEW_MENU= NAME_PREFIX + T_ELCL + "view_menu.gif"; //$NON-NLS-1$
public static final String IMG_DLCL_VIEW_MENU= NAME_PREFIX + T_DLCL + "view_menu.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_GHOST= NAME_PREFIX + "ghost.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_SEARCH_TSK= NAME_PREFIX + "search_tsk.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_PACKDECL= NAME_PREFIX + "packd_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_IMPDECL= NAME_PREFIX + "imp_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_IMPCONT= NAME_PREFIX + "impc_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_JSEARCH= NAME_PREFIX + "jsearch_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_SEARCH_DECL= NAME_PREFIX + "search_decl_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_SEARCH_REF= NAME_PREFIX + "search_ref_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_CLASS= NAME_PREFIX + "class_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_CLASSALT= NAME_PREFIX + "classfo_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_CLASS_DEFAULT= NAME_PREFIX + "class_default_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_INNER_CLASS_PUBLIC= NAME_PREFIX + "innerclass_public_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_INNER_CLASS_DEFAULT= NAME_PREFIX + "innerclass_default_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_INNER_CLASS_PROTECTED= NAME_PREFIX + "innerclass_protected_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_INNER_CLASS_PRIVATE= NAME_PREFIX + "innerclass_private_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_INTERFACE= NAME_PREFIX + "int_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_INTERFACEALT= NAME_PREFIX + "intf_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_INTERFACE_DEFAULT= NAME_PREFIX + "int_default_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_INNER_INTERFACE_PUBLIC= NAME_PREFIX + "innerinterface_public_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_INNER_INTERFACE_DEFAULT= NAME_PREFIX + "innerinterface_default_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_INNER_INTERFACE_PROTECTED= NAME_PREFIX + "innerinterface_protected_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_INNER_INTERFACE_PRIVATE= NAME_PREFIX + "innerinterface_private_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_ANNOTATION= NAME_PREFIX + "annotation_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_ANNOTATION_DEFAULT= NAME_PREFIX + "annotation_default_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_ANNOTATION_PROTECTED= NAME_PREFIX + "annotation_protected_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_ANNOTATION_PRIVATE= NAME_PREFIX + "annotation_private_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_ANNOTATION_ALT= NAME_PREFIX + "annotation_alt_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_ENUM= NAME_PREFIX + "enum_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_ENUM_DEFAULT= NAME_PREFIX + "enum_default_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_ENUM_PROTECTED= NAME_PREFIX + "enum_protected_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_ENUM_PRIVATE= NAME_PREFIX + "enum_private_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_ENUM_ALT= NAME_PREFIX + "enum_alt_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_CUNIT= NAME_PREFIX + "jcu_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_CUNIT_RESOURCE= NAME_PREFIX + "jcu_resource_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_CFILE= NAME_PREFIX + "classf_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_CFILECLASS= NAME_PREFIX + "class_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_CFILEINT= NAME_PREFIX + "int_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_LOGICAL_PACKAGE= NAME_PREFIX + "logical_package_obj.gif";//$NON-NLS-1$
public static final String IMG_OBJS_EMPTY_LOGICAL_PACKAGE= NAME_PREFIX + "empty_logical_package_obj.gif";//$NON-NLS-1$
public static final String IMG_OBJS_PACKAGE= NAME_PREFIX + "package_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_EMPTY_PACK_RESOURCE= NAME_PREFIX + "empty_pack_fldr_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_EMPTY_PACKAGE= NAME_PREFIX + "empty_pack_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_PACKFRAG_ROOT= NAME_PREFIX + "packagefolder_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_JAR= NAME_PREFIX + "jar_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_EXTJAR= NAME_PREFIX + "jar_l_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_JAR_WSRC= NAME_PREFIX + "jar_src_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_EXTJAR_WSRC= NAME_PREFIX + "jar_lsrc_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_CLASSFOLDER= NAME_PREFIX + "cf_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_CLASSFOLDER_WSRC= NAME_PREFIX + "cf_src_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_ENV_VAR= NAME_PREFIX + "envvar_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_JAVA_MODEL= NAME_PREFIX + "java_model_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_UNKNOWN= NAME_PREFIX + "unknown_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_LOCAL_VARIABLE= NAME_PREFIX + "localvariable_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_PROJECT_SETTINGS= NAME_PREFIX + "settings_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_LIBRARY= NAME_PREFIX + "library_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_JAVADOCTAG= NAME_PREFIX + "jdoc_tag_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_HTMLTAG= NAME_PREFIX + "html_tag_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_TEMPLATE= NAME_PREFIX + "template_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_SWT_TEMPLATE= NAME_PREFIX + "template_swt_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_TYPEVARIABLE= NAME_PREFIX + "typevariable_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_EXCEPTION= NAME_PREFIX + "jexception_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_ERROR= NAME_PREFIX + "jrtexception_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_BREAKPOINT_INSTALLED= NAME_PREFIX + "brkpi_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_QUICK_ASSIST= NAME_PREFIX + "quickassist_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_QUICK_FIX= NAME_PREFIX + "quickfix_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_FIXABLE_PROBLEM= NAME_PREFIX + "quickfix_warning_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_FIXABLE_ERROR= NAME_PREFIX + "quickfix_error_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_ACCESSRULES_ATTRIB= NAME_PREFIX + "access_restriction_attrib.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_REFACTORING_FATAL= NAME_PREFIX + "fatalerror_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_REFACTORING_ERROR= NAME_PREFIX + "error_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_REFACTORING_WARNING= NAME_PREFIX + "warning_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_REFACTORING_INFO= NAME_PREFIX + "info_obj.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_NLS_TRANSLATE= NAME_PREFIX + "translate.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_NLS_NEVER_TRANSLATE= NAME_PREFIX + "never_translate.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_NLS_SKIP= NAME_PREFIX + "skip.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_SEARCH_READACCESS= NAME_PREFIX + "occ_read.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_SEARCH_WRITEACCESS= NAME_PREFIX + "occ_write.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_SEARCH_OCCURRENCE= NAME_PREFIX + "occ_match.gif"; //$NON-NLS-1$
public static final String IMG_OBJS_HELP= NAME_PREFIX + "help.gif"; //$NON-NLS-1$
/*
* Set of predefined Image Descriptors.
*/
public static final ImageDescriptor DESC_VIEW_ERRORWARNING_TAB= createUnManaged(T_EVIEW, "errorwarning_tab.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_VIEW_CLASSFILEGENERATION_TAB= createUnManaged(T_EVIEW, "classfilegeneration_tab.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_VIEW_JDKCOMPLIANCE_TAB= createUnManaged(T_EVIEW, "jdkcompliance_tab.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_ELCL_FILTER= createUnManaged(T_ELCL, "filter_ps.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_DLCL_FILTER= createUnManaged(T_DLCL, "filter_ps.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_ELCL_CODE_ASSIST= createUnManaged(T_ELCL, "metharg_obj.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_DLCL_CODE_ASSIST= createUnManaged(T_DLCL, "metharg_obj.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_ELCL_VIEW_MENU= createManaged(T_ELCL, "view_menu.gif", IMG_ELCL_VIEW_MENU); //$NON-NLS-1$
public static final ImageDescriptor DESC_DLCL_VIEW_MENU= createManaged(T_DLCL, "view_menu.gif", IMG_DLCL_VIEW_MENU); //$NON-NLS-1$
public static final ImageDescriptor DESC_MISC_PUBLIC= createManagedFromKey(T_OBJ, IMG_MISC_PUBLIC);
public static final ImageDescriptor DESC_MISC_PROTECTED= createManagedFromKey(T_OBJ, IMG_MISC_PROTECTED);
public static final ImageDescriptor DESC_MISC_PRIVATE= createManagedFromKey(T_OBJ, IMG_MISC_PRIVATE);
public static final ImageDescriptor DESC_MISC_DEFAULT= createManagedFromKey(T_OBJ, IMG_MISC_DEFAULT);
public static final ImageDescriptor DESC_FIELD_PUBLIC= createManagedFromKey(T_OBJ, IMG_FIELD_PUBLIC);
public static final ImageDescriptor DESC_FIELD_PROTECTED= createManagedFromKey(T_OBJ, IMG_FIELD_PROTECTED);
public static final ImageDescriptor DESC_FIELD_PRIVATE= createManagedFromKey(T_OBJ, IMG_FIELD_PRIVATE);
public static final ImageDescriptor DESC_FIELD_DEFAULT= createManagedFromKey(T_OBJ, IMG_FIELD_DEFAULT);
public static final ImageDescriptor DESC_MENU_SHIFT_RIGHT= createUnManaged(T_ETOOL, "shift_r_edit.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_MENU_SHIFT_LEFT= createUnManaged(T_ETOOL, "shift_l_edit.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_OBJS_GHOST= createManagedFromKey(T_OBJ, IMG_OBJS_GHOST);
public static final ImageDescriptor DESC_OBJS_PACKDECL= createManagedFromKey(T_OBJ, IMG_OBJS_PACKDECL);
public static final ImageDescriptor DESC_OBJS_IMPDECL= createManagedFromKey(T_OBJ, IMG_OBJS_IMPDECL);
public static final ImageDescriptor DESC_OBJS_IMPCONT= createManagedFromKey(T_OBJ, IMG_OBJS_IMPCONT);
public static final ImageDescriptor DESC_OBJS_JSEARCH= createManagedFromKey(T_OBJ, IMG_OBJS_JSEARCH);
public static final ImageDescriptor DESC_OBJS_SEARCH_DECL= createManagedFromKey(T_OBJ, IMG_OBJS_SEARCH_DECL);
public static final ImageDescriptor DESC_OBJS_SEARCH_REF= createManagedFromKey(T_OBJ, IMG_OBJS_SEARCH_REF);
public static final ImageDescriptor DESC_OBJS_CUNIT= createManagedFromKey(T_OBJ, IMG_OBJS_CUNIT);
public static final ImageDescriptor DESC_OBJS_CUNIT_RESOURCE= createManagedFromKey(T_OBJ, IMG_OBJS_CUNIT_RESOURCE);
public static final ImageDescriptor DESC_OBJS_CFILE= createManagedFromKey(T_OBJ, IMG_OBJS_CFILE);
public static final ImageDescriptor DESC_OBJS_CFILECLASS= createManagedFromKey(T_OBJ, IMG_OBJS_CFILECLASS);
public static final ImageDescriptor DESC_ELCL_CLEAR= createUnManaged(T_ELCL, "clear_co.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_DLCL_CLEAR= createUnManaged(T_DLCL, "clear_co.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_OBJS_CFILEINT= createManagedFromKey(T_OBJ, IMG_OBJS_CFILEINT);
public static final ImageDescriptor DESC_OBJS_PACKAGE= createManagedFromKey(T_OBJ, IMG_OBJS_PACKAGE);
public static final ImageDescriptor DESC_OBJS_EMPTY_LOGICAL_PACKAGE= createManagedFromKey(T_OBJ, IMG_OBJS_EMPTY_LOGICAL_PACKAGE);
public static final ImageDescriptor DESC_OBJS_LOGICAL_PACKAGE= createManagedFromKey(T_OBJ, IMG_OBJS_LOGICAL_PACKAGE);
public static final ImageDescriptor DESC_OBJS_EMPTY_PACKAGE_RESOURCES= createManagedFromKey(T_OBJ, IMG_OBJS_EMPTY_PACK_RESOURCE);
public static final ImageDescriptor DESC_OBJS_EMPTY_PACKAGE= createManagedFromKey(T_OBJ, IMG_OBJS_EMPTY_PACKAGE);
public static final ImageDescriptor DESC_OBJS_PACKFRAG_ROOT= createManagedFromKey(T_OBJ, IMG_OBJS_PACKFRAG_ROOT);
public static final ImageDescriptor DESC_OBJS_PROJECT_SETTINGS= createManagedFromKey(T_OBJ, IMG_OBJS_PROJECT_SETTINGS);
public static final ImageDescriptor DESC_OBJS_JAVA_MODEL= createManagedFromKey(T_OBJ, IMG_OBJS_JAVA_MODEL);
public static final ImageDescriptor DESC_OBJS_CLASS= createManagedFromKey(T_OBJ, IMG_OBJS_CLASS);
public static final ImageDescriptor DESC_OBJS_CLASS_DEFAULT= createManagedFromKey(T_OBJ, IMG_OBJS_CLASS_DEFAULT);
public static final ImageDescriptor DESC_OBJS_INNER_CLASS_PUBLIC= createManagedFromKey(T_OBJ, IMG_OBJS_INNER_CLASS_PUBLIC);
public static final ImageDescriptor DESC_OBJS_INNER_CLASS_DEFAULT= createManagedFromKey(T_OBJ, IMG_OBJS_INNER_CLASS_DEFAULT);
public static final ImageDescriptor DESC_OBJS_INNER_CLASS_PROTECTED= createManagedFromKey(T_OBJ, IMG_OBJS_INNER_CLASS_PROTECTED);
public static final ImageDescriptor DESC_OBJS_INNER_CLASS_PRIVATE= createManagedFromKey(T_OBJ, IMG_OBJS_INNER_CLASS_PRIVATE);
public static final ImageDescriptor DESC_OBJS_CLASSALT= createManagedFromKey(T_OBJ, IMG_OBJS_CLASSALT);
public static final ImageDescriptor DESC_OBJS_INTERFACE= createManagedFromKey(T_OBJ, IMG_OBJS_INTERFACE);
public static final ImageDescriptor DESC_OBJS_INTERFACE_DEFAULT= createManagedFromKey(T_OBJ, IMG_OBJS_INTERFACE_DEFAULT);
public static final ImageDescriptor DESC_OBJS_INNER_INTERFACE_PUBLIC= createManagedFromKey(T_OBJ, IMG_OBJS_INNER_INTERFACE_PUBLIC);
public static final ImageDescriptor DESC_OBJS_INNER_INTERFACE_DEFAULT= createManagedFromKey(T_OBJ, IMG_OBJS_INNER_INTERFACE_DEFAULT);
public static final ImageDescriptor DESC_OBJS_INNER_INTERFACE_PROTECTED= createManagedFromKey(T_OBJ, IMG_OBJS_INNER_INTERFACE_PROTECTED);
public static final ImageDescriptor DESC_OBJS_INNER_INTERFACE_PRIVATE= createManagedFromKey(T_OBJ, IMG_OBJS_INNER_INTERFACE_PRIVATE);
public static final ImageDescriptor DESC_OBJS_INTERFACEALT= createManagedFromKey(T_OBJ, IMG_OBJS_INTERFACEALT);
public static final ImageDescriptor DESC_OBJS_ANNOTATION= createManagedFromKey(T_OBJ, IMG_OBJS_ANNOTATION);
public static final ImageDescriptor DESC_OBJS_ANNOTATION_DEFAULT= createManagedFromKey(T_OBJ, IMG_OBJS_ANNOTATION_DEFAULT);
public static final ImageDescriptor DESC_OBJS_ANNOTATION_PROTECTED= createManagedFromKey(T_OBJ, IMG_OBJS_ANNOTATION_PROTECTED);
public static final ImageDescriptor DESC_OBJS_ANNOTATION_PRIVATE= createManagedFromKey(T_OBJ, IMG_OBJS_ANNOTATION_PRIVATE);
public static final ImageDescriptor DESC_OBJS_ANNOTATION_ALT= createManagedFromKey(T_OBJ, IMG_OBJS_ANNOTATION_ALT);
public static final ImageDescriptor DESC_OBJS_ENUM= createManagedFromKey(T_OBJ, IMG_OBJS_ENUM);
public static final ImageDescriptor DESC_OBJS_ENUM_DEFAULT= createManagedFromKey(T_OBJ, IMG_OBJS_ENUM_DEFAULT);
public static final ImageDescriptor DESC_OBJS_ENUM_PROTECTED= createManagedFromKey(T_OBJ, IMG_OBJS_ENUM_PROTECTED);
public static final ImageDescriptor DESC_OBJS_ENUM_PRIVATE= createManagedFromKey(T_OBJ, IMG_OBJS_ENUM_PRIVATE);
public static final ImageDescriptor DESC_OBJS_ENUM_ALT= createManagedFromKey(T_OBJ, IMG_OBJS_ENUM_ALT);
public static final ImageDescriptor DESC_OBJS_JAR= createManagedFromKey(T_OBJ, IMG_OBJS_JAR);
public static final ImageDescriptor DESC_OBJS_EXTJAR= createManagedFromKey(T_OBJ, IMG_OBJS_EXTJAR);
public static final ImageDescriptor DESC_OBJS_JAR_WSRC= createManagedFromKey(T_OBJ, IMG_OBJS_JAR_WSRC);
public static final ImageDescriptor DESC_OBJS_EXTJAR_WSRC= createManagedFromKey(T_OBJ, IMG_OBJS_EXTJAR_WSRC);
public static final ImageDescriptor DESC_OBJS_CLASSFOLDER= createManagedFromKey(T_OBJ, IMG_OBJS_CLASSFOLDER);
public static final ImageDescriptor DESC_OBJS_CLASSFOLDER_WSRC= createManagedFromKey(T_OBJ, IMG_OBJS_CLASSFOLDER_WSRC);
public static final ImageDescriptor DESC_OBJS_ENV_VAR= createManagedFromKey(T_OBJ, IMG_OBJS_ENV_VAR);
public static final ImageDescriptor DESC_OBJS_LIBRARY= createManagedFromKey(T_OBJ, IMG_OBJS_LIBRARY);
public static final ImageDescriptor DESC_OBJS_JAVADOCTAG= createManagedFromKey(T_OBJ, IMG_OBJS_JAVADOCTAG);
public static final ImageDescriptor DESC_OBJS_HTMLTAG= createManagedFromKey(T_OBJ, IMG_OBJS_HTMLTAG);
public static final ImageDescriptor DESC_OBJS_TEMPLATE= createManagedFromKey(T_OBJ, IMG_OBJS_TEMPLATE);
public static final ImageDescriptor DESC_OBJS_SWT_TEMPLATE= createManagedFromKey(T_OBJ, IMG_OBJS_SWT_TEMPLATE);
public static final ImageDescriptor DESC_OBJS_TYPEVARIABLE= createManagedFromKey(T_OBJ, IMG_OBJS_TYPEVARIABLE);
public static final ImageDescriptor DESC_OBJS_EXCEPTION= createManagedFromKey(T_OBJ, IMG_OBJS_EXCEPTION);
public static final ImageDescriptor DESC_OBJS_BREAKPOINT_INSTALLED= createManagedFromKey(T_OBJ, IMG_OBJS_BREAKPOINT_INSTALLED);
public static final ImageDescriptor DESC_OBJS_ERROR= createManagedFromKey(T_OBJ, IMG_OBJS_ERROR);
public static final ImageDescriptor DESC_OBJS_QUICK_ASSIST= createManagedFromKey(T_OBJ, IMG_OBJS_QUICK_ASSIST);
public static final ImageDescriptor DESC_OBJS_QUICK_FIX= createManagedFromKey(T_OBJ, IMG_OBJS_QUICK_FIX);
public static final ImageDescriptor DESC_OBJS_FIXABLE_PROBLEM= createManagedFromKey(T_OBJ, IMG_OBJS_FIXABLE_PROBLEM);
public static final ImageDescriptor DESC_OBJS_FIXABLE_ERROR= createManagedFromKey(T_OBJ, IMG_OBJS_FIXABLE_ERROR);
// public static final ImageDescriptor DESC_OBJS_SNIPPET_EVALUATING= createManaged(T_OBJ, IMG_OBJS_SNIPPET_EVALUATING);
public static final ImageDescriptor DESC_OBJS_DEFAULT_CHANGE= createUnManaged(T_OBJ, "change.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_OBJS_EXCLUSION_FILTER_ATTRIB= createUnManaged(T_OBJ, "exclusion_filter_attrib.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_OBJS_INCLUSION_FILTER_ATTRIB= createUnManaged(T_OBJ, "inclusion_filter_attrib.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_OBJS_OUTPUT_FOLDER_ATTRIB= createUnManaged(T_OBJ, "output_folder_attrib.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_OBJS_SOURCE_ATTACH_ATTRIB= createUnManaged(T_OBJ, "source_attach_attrib.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_OBJS_JAVADOC_LOCATION_ATTRIB= createUnManaged(T_OBJ, "javadoc_location_attrib.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_OBJS_ACCESSRULES_ATTRIB= createManagedFromKey(T_OBJ, IMG_OBJS_ACCESSRULES_ATTRIB);
public static final ImageDescriptor DESC_OBJS_NATIVE_LIB_PATH_ATTRIB= createUnManaged(T_OBJ, "native_lib_path_attrib.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_OBJS_REFACTORING_FATAL= createManagedFromKey(T_OBJ, IMG_OBJS_REFACTORING_FATAL);
public static final ImageDescriptor DESC_OBJS_REFACTORING_ERROR= createManagedFromKey(T_OBJ, IMG_OBJS_REFACTORING_ERROR);
public static final ImageDescriptor DESC_OBJS_REFACTORING_WARNING= createManagedFromKey(T_OBJ, IMG_OBJS_REFACTORING_WARNING);
// Managed object icons: registered under their IMG_* key so they can also be looked up by key.
public static final ImageDescriptor DESC_OBJS_REFACTORING_INFO= createManagedFromKey(T_OBJ, IMG_OBJS_REFACTORING_INFO);
public static final ImageDescriptor DESC_OBJS_NLS_TRANSLATE= createManagedFromKey(T_OBJ, IMG_OBJS_NLS_TRANSLATE);
public static final ImageDescriptor DESC_OBJS_NLS_NEVER_TRANSLATE= createManagedFromKey(T_OBJ, IMG_OBJS_NLS_NEVER_TRANSLATE);
public static final ImageDescriptor DESC_OBJS_NLS_SKIP= createManagedFromKey(T_OBJ, IMG_OBJS_NLS_SKIP);
public static final ImageDescriptor DESC_OBJS_UNKNOWN= createManagedFromKey(T_OBJ, IMG_OBJS_UNKNOWN);
public static final ImageDescriptor DESC_OBJS_TYPE_SEPARATOR= createUnManaged(T_OBJ, "type_separator.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_OBJS_SEARCH_READACCESS= createManagedFromKey(T_OBJ, IMG_OBJS_SEARCH_READACCESS);
public static final ImageDescriptor DESC_OBJS_SEARCH_WRITEACCESS= createManagedFromKey(T_OBJ, IMG_OBJS_SEARCH_WRITEACCESS);
public static final ImageDescriptor DESC_OBJS_SEARCH_OCCURRENCE= createManagedFromKey(T_OBJ, IMG_OBJS_SEARCH_OCCURRENCE);
public static final ImageDescriptor DESC_OBJS_LOCAL_VARIABLE= createManagedFromKey(T_OBJ, IMG_OBJS_LOCAL_VARIABLE);
public static final ImageDescriptor DESC_OBJS_HELP= createManagedFromKey(T_ELCL, IMG_OBJS_HELP);
// Build path actions: enabled (T_ELCL, "e" folders) and disabled (T_DLCL, "d" folders) variants
// of the same icon file (see setImageDescriptors(), which resolves "d"/"e" + type).
public static final ImageDescriptor DESC_ELCL_ADD_TO_BP= createUnManaged(T_ELCL, "add_to_buildpath.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_ELCL_REMOVE_FROM_BP= createUnManaged(T_ELCL, "remove_from_buildpath.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_ELCL_INCLUSION= createUnManaged(T_ELCL, "inclusion_filter_attrib.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_ELCL_EXCLUSION= createUnManaged(T_ELCL, "exclusion_filter_attrib.gif"); //$NON-NLS-1$
// public static final ImageDescriptor DESC_ELCL_INCLUSION_UNDO= createUnManaged(T_ELCL, "inclusion_filter_attrib_undo.gif"); //$NON-NLS-1$
// public static final ImageDescriptor DESC_ELCL_EXCLUSION_UNDO= createUnManaged(T_ELCL, "exclusion_filter_attrib_undo.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_DLCL_ADD_TO_BP= createUnManaged(T_DLCL, "add_to_buildpath.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_DLCL_REMOVE_FROM_BP= createUnManaged(T_DLCL, "remove_from_buildpath.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_DLCL_INCLUSION= createUnManaged(T_DLCL, "inclusion_filter_attrib.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_DLCL_EXCLUSION= createUnManaged(T_DLCL, "exclusion_filter_attrib.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_DLCL_OUTPUT_FOLDER_ATTRIB= createUnManaged(T_DLCL, "output_folder_attrib.gif"); //$NON-NLS-1$
// public static final ImageDescriptor DESC_DLCL_INCLUSION_UNDO= createUnManaged(T_DLCL, "inclusion_filter_attrib_undo.gif"); //$NON-NLS-1$
// public static final ImageDescriptor DESC_DLCL_EXCLUSION_UNDO= createUnManaged(T_DLCL, "exclusion_filter_attrib_undo.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_DLCL_ADD_LINKED_SOURCE_TO_BUILDPATH= createUnManaged(T_DLCL, "add_linked_source_to_buildpath.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_ELCL_ADD_LINKED_SOURCE_TO_BUILDPATH= createUnManaged(T_ELCL, "add_linked_source_to_buildpath.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_DLCL_CONFIGURE_BUILDPATH= createUnManaged(T_DLCL, "configure_build_path.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_ELCL_CONFIGURE_BUILDPATH= createUnManaged(T_ELCL, "configure_build_path.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_DLCL_CONFIGURE_ANNOTATIONS= createUnManaged(T_DLCL, "configure_annotations.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_ELCL_CONFIGURE_ANNOTATIONS= createUnManaged(T_ELCL, "configure_annotations.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_DLCL_CONFIGURE_BUILDPATH_FILTERS= createUnManaged(T_DLCL, "configure_buildpath_filters.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_ELCL_CONFIGURE_BUILDPATH_FILTERS= createUnManaged(T_ELCL, "configure_buildpath_filters.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_DLCL_CONFIGURE_OUTPUT_FOLDER= createUnManaged(T_DLCL, "configure_output_folder.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_ELCL_CONFIGURE_OUTPUT_FOLDER= createUnManaged(T_ELCL, "configure_output_folder.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_DLCL_CONFIGURE_PROBLEM_SEVERITIES= createUnManaged(T_DLCL, "configure_problem_severity.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_ELCL_CONFIGURE_PROBLEM_SEVERITIES= createUnManaged(T_ELCL, "configure_problem_severity.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_DLCL_EXCLUDE_FROM_BUILDPATH= createUnManaged(T_DLCL, "exclude_from_buildpath.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_ELCL_EXCLUDE_FROM_BUILDPATH= createUnManaged(T_ELCL, "exclude_from_buildpath.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_DLCL_INCLUDE_ON_BUILDPATH= createUnManaged(T_DLCL, "include_on_buildpath.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_ELCL_INCLUDE_ON_BUILDPATH= createUnManaged(T_ELCL, "include_on_buildpath.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_DLCL_ADD_AS_SOURCE_FOLDER= createUnManaged(T_DLCL, "add_as_source_folder.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_ELCL_ADD_AS_SOURCE_FOLDER= createUnManaged(T_ELCL, "add_as_source_folder.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_DLCL_REMOVE_AS_SOURCE_FOLDER= createUnManaged(T_DLCL, "remove_as_source_folder.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_ELCL_REMOVE_AS_SOURCE_FOLDER= createUnManaged(T_ELCL, "remove_as_source_folder.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_DLCL_COPY_QUALIFIED_NAME= createUnManaged(T_DLCL, "cpyqual_menu.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_ELCL_COPY_QUALIFIED_NAME= createUnManaged(T_ELCL, "cpyqual_menu.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_DLCL_OPEN_BROWSER= createUnManaged(T_DLCL, "open_browser.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_ELCL_OPEN_BROWSER= createUnManaged(T_ELCL, "open_browser.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_OBJ_OVERRIDES= createUnManaged(T_OBJ, "over_co.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_OBJ_IMPLEMENTS= createUnManaged(T_OBJ, "implm_co.gif"); //$NON-NLS-1$
// Overlay (decorator) icons: created through createUnManagedCached() so the underlying
// image data is read at most once (see CachedImageDescriptor below).
public static final ImageDescriptor DESC_OVR_STATIC= createUnManagedCached(T_OVR, "static_co.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_OVR_FINAL= createUnManagedCached(T_OVR, "final_co.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_OVR_ABSTRACT= createUnManagedCached(T_OVR, "abstract_co.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_OVR_SYNCH= createUnManagedCached(T_OVR, "synch_co.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_OVR_VOLATILE= createUnManagedCached(T_OVR, "volatile_co.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_OVR_TRANSIENT= createUnManagedCached(T_OVR, "transient_co.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_OVR_NATIVE= createUnManagedCached(T_OVR, "native_co.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_OVR_RUN= createUnManagedCached(T_OVR, "run_co.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_OVR_WARNING= createUnManagedCached(T_OVR, "warning_co.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_OVR_ERROR= createUnManagedCached(T_OVR, "error_co.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_OVR_BUILDPATH_ERROR= createUnManagedCached(T_OVR, "error_co_buildpath.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_OVR_OVERRIDES= createUnManagedCached(T_OVR, "over_co.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_OVR_IMPLEMENTS= createUnManagedCached(T_OVR, "implm_co.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_OVR_SYNCH_AND_OVERRIDES= createUnManagedCached(T_OVR, "sync_over.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_OVR_SYNCH_AND_IMPLEMENTS= createUnManagedCached(T_OVR, "sync_impl.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_OVR_CONSTRUCTOR= createUnManagedCached(T_OVR, "constr_ovr.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_OVR_DEPRECATED= createUnManagedCached(T_OVR, "deprecated.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_OVR_DEFAULT= createUnManagedCached(T_OVR, "default_tsk.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_OVR_FOCUS= createUnManagedCached(T_OVR, "focus_ovr.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_OVR_ANNOTATION= createUnManagedCached(T_OVR, "annotation_tsk.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_OVR_ENUM= createUnManagedCached(T_OVR, "enum_tsk.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_OVR_INTERFACE= createUnManagedCached(T_OVR, "interface_tsk.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_OVR_CLASS= createUnManagedCached(T_OVR, "class_tsk.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_OVR_ABSTRACT_CLASS= createUnManagedCached(T_OVR, "class_abs_tsk.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_OVR_LIBRARY= createUnManagedCached(T_OVR, "library_ovr.gif"); //$NON-NLS-1$
// Call Hierarchy
public static final ImageDescriptor DESC_OVR_RECURSIVE= createUnManaged(T_OVR, "recursive_co.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_OVR_MAX_LEVEL= createUnManaged(T_OVR, "maxlevel_co.gif"); //$NON-NLS-1$
// Wizard banner images.
public static final ImageDescriptor DESC_WIZBAN_NEWCLASS= createUnManaged(T_WIZBAN, "newclass_wiz.png"); //$NON-NLS-1$
public static final ImageDescriptor DESC_WIZBAN_NEWINT= createUnManaged(T_WIZBAN, "newint_wiz.png"); //$NON-NLS-1$
public static final ImageDescriptor DESC_WIZBAN_NEWENUM= createUnManaged(T_WIZBAN, "newenum_wiz.png"); //$NON-NLS-1$
public static final ImageDescriptor DESC_WIZBAN_NEWANNOT= createUnManaged(T_WIZBAN, "newannotation_wiz.png"); //$NON-NLS-1$
public static final ImageDescriptor DESC_WIZBAN_NEWJPRJ= createUnManaged(T_WIZBAN, "newjprj_wiz.png"); //$NON-NLS-1$
public static final ImageDescriptor DESC_WIZBAN_NEWSRCFOLDR= createUnManaged(T_WIZBAN, "newsrcfldr_wiz.png"); //$NON-NLS-1$
public static final ImageDescriptor DESC_WIZBAN_NEWMETH= createUnManaged(T_WIZBAN, "newmeth_wiz.png"); //$NON-NLS-1$
public static final ImageDescriptor DESC_WIZBAN_NEWPACK= createUnManaged(T_WIZBAN, "newpack_wiz.png"); //$NON-NLS-1$
public static final ImageDescriptor DESC_WIZBAN_NEWSCRAPPAGE= createUnManaged(T_WIZBAN, "newsbook_wiz.png"); //$NON-NLS-1$
public static final ImageDescriptor DESC_WIZBAN_JAVA_LAUNCH= createUnManaged(T_WIZBAN, "java_app_wiz.png"); //$NON-NLS-1$
public static final ImageDescriptor DESC_WIZBAN_JAVA_ATTACH= createUnManaged(T_WIZBAN, "java_attach_wiz.png"); //$NON-NLS-1$
public static final ImageDescriptor DESC_WIZBAN_REFACTOR= createUnManaged(T_WIZBAN, "refactor_wiz.png"); //$NON-NLS-1$
public static final ImageDescriptor DESC_WIZBAN_REFACTOR_FIELD= createUnManaged(T_WIZBAN, "fieldrefact_wiz.png"); //$NON-NLS-1$
public static final ImageDescriptor DESC_WIZBAN_REFACTOR_METHOD= createUnManaged(T_WIZBAN, "methrefact_wiz.png"); //$NON-NLS-1$
public static final ImageDescriptor DESC_WIZBAN_REFACTOR_TYPE= createUnManaged(T_WIZBAN, "typerefact_wiz.png"); //$NON-NLS-1$
public static final ImageDescriptor DESC_WIZBAN_REFACTOR_PACKAGE= createUnManaged(T_WIZBAN, "packrefact_wiz.png"); //$NON-NLS-1$
public static final ImageDescriptor DESC_WIZBAN_REFACTOR_CODE= createUnManaged(T_WIZBAN, "coderefact_wiz.png"); //$NON-NLS-1$
public static final ImageDescriptor DESC_WIZBAN_REFACTOR_CU= createUnManaged(T_WIZBAN, "compunitrefact_wiz.png"); //$NON-NLS-1$
public static final ImageDescriptor DESC_WIZBAN_REFACTOR_PULL_UP= createUnManaged(T_WIZBAN, "pullup_wiz.png"); //$NON-NLS-1$
public static final ImageDescriptor DESC_WIZBAN_REFACTOR_FIX_DEPRECATION= createUnManaged(T_WIZBAN, "fixdepr_wiz.png"); //$NON-NLS-1$
public static final ImageDescriptor DESC_WIZBAN_JAR_PACKAGER= createUnManaged(T_WIZBAN, "jar_pack_wiz.png"); //$NON-NLS-1$
public static final ImageDescriptor DESC_WIZBAN_FAT_JAR_PACKAGER= createUnManaged(T_WIZBAN, "export_runnable_jar_wiz.png"); //$NON-NLS-1$
public static final ImageDescriptor DESC_WIZBAN_REFACTOR_EXTRACT_SUPERTYPE= createUnManaged(T_WIZBAN, "extractsupertype_wiz.png"); //$NON-NLS-1$
public static final ImageDescriptor DESC_WIZBAN_REPLACE_JAR= createUnManaged(T_WIZBAN, "replacejar_wiz.png"); //$NON-NLS-1$
public static final ImageDescriptor DESC_WIZBAN_JAVA_WORKINGSET= createUnManaged(T_WIZBAN, "java_workingset_wiz.png");//$NON-NLS-1$
public static final ImageDescriptor DESC_WIZBAN_EXPORT_JAVADOC= createUnManaged(T_WIZBAN, "export_javadoc_wiz.png");//$NON-NLS-1$
public static final ImageDescriptor DESC_WIZBAN_EXTERNALIZE_STRINGS= createUnManaged(T_WIZBAN, "extstr_wiz.png");//$NON-NLS-1$
public static final ImageDescriptor DESC_WIZBAN_ADD_LIBRARY= createUnManaged(T_WIZBAN, "addlibrary_wiz.png");//$NON-NLS-1$
public static final ImageDescriptor DESC_WIZBAN_CLEAN_UP= createUnManaged(T_WIZBAN, "cleanup_wiz.png"); //$NON-NLS-1$
// Toolbar icons.
public static final ImageDescriptor DESC_TOOL_SHOW_EMPTY_PKG= createUnManaged(T_ETOOL, "show_empty_pkg.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_TOOL_SHOW_SEGMENTS= createUnManaged(T_ETOOL, "segment_edit.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_TOOL_OPENTYPE= createUnManaged(T_ETOOL, "opentype.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_TOOL_NEWPROJECT= createUnManaged(T_ETOOL, "newjprj_wiz.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_TOOL_NEWPACKAGE= createUnManaged(T_ETOOL, "newpack_wiz.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_TOOL_NEWCLASS= createUnManaged(T_ETOOL, "newclass_wiz.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_TOOL_NEWINTERFACE= createUnManaged(T_ETOOL, "newint_wiz.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_TOOL_NEWSNIPPET= createUnManaged(T_ETOOL, "newsbook_wiz.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_TOOL_NEWPACKROOT= createUnManaged(T_ETOOL, "newpackfolder_wiz.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_DLCL_NEWPACKROOT= createUnManaged(T_DLCL, "newpackfolder_wiz.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_TOOL_CLASSPATH_ORDER= createUnManaged(T_OBJ, "cp_order_obj.gif"); //$NON-NLS-1$
public static final ImageDescriptor DESC_ELCL_COLLAPSEALL= createUnManaged(T_ELCL, "collapseall.gif"); //$NON-NLS-1$
// Keys for correction proposal. We have to put the image into the registry since "code assist" doesn't
// have a life cycle. So no chance to dispose icons.
public static final String IMG_CORRECTION_CHANGE= NAME_PREFIX + "correction_change.gif"; //$NON-NLS-1$
public static final String IMG_CORRECTION_MOVE= NAME_PREFIX + "correction_move.gif"; //$NON-NLS-1$
public static final String IMG_CORRECTION_RENAME= NAME_PREFIX + "correction_rename.gif"; //$NON-NLS-1$
public static final String IMG_CORRECTION_LINKED_RENAME= NAME_PREFIX + "correction_linked_rename.gif"; //$NON-NLS-1$
public static final String IMG_CORRECTION_DELETE_IMPORT= NAME_PREFIX + "correction_delete_import.gif"; //$NON-NLS-1$
public static final String IMG_CORRECTION_LOCAL= NAME_PREFIX + "localvariable_obj.gif"; //$NON-NLS-1$
public static final String IMG_CORRECTION_REMOVE= NAME_PREFIX + "remove_correction.gif"; //$NON-NLS-1$
public static final String IMG_CORRECTION_ADD= NAME_PREFIX + "add_correction.gif"; //$NON-NLS-1$
public static final String IMG_CORRECTION_CAST= NAME_PREFIX + "correction_cast.gif"; //$NON-NLS-1$
public static final String IMG_CORRECTION_MULTI_FIX= NAME_PREFIX + "correction_multi_fix.gif"; //$NON-NLS-1$
// Eagerly register the correction-proposal images under their keys; they are later
// retrieved by key only (no DESC_* constant is kept for them).
static {
	createManagedFromKey(T_OBJ, IMG_CORRECTION_CHANGE);
	createManagedFromKey(T_OBJ, IMG_CORRECTION_MOVE);
	createManagedFromKey(T_OBJ, IMG_CORRECTION_RENAME);
	createManagedFromKey(T_OBJ, IMG_CORRECTION_LINKED_RENAME);
	createManagedFromKey(T_OBJ, IMG_CORRECTION_DELETE_IMPORT);
	createManagedFromKey(T_OBJ, IMG_CORRECTION_LOCAL);
	createManagedFromKey(T_OBJ, IMG_CORRECTION_REMOVE);
	createManagedFromKey(T_OBJ, IMG_CORRECTION_ADD);
	createManagedFromKey(T_OBJ, IMG_CORRECTION_CAST);
	createManagedFromKey(T_OBJ, IMG_CORRECTION_MULTI_FIX);
}
/*
 * Image descriptor that caches the image data of a wrapped descriptor, so the
 * underlying image file is read at most once. Used for the overlay (OVR) icons,
 * which are requested repeatedly when decorated images are composed.
 *
 * NOTE(review): the lazy initialization of fData is not synchronized - assumed to be
 * accessed from the UI thread only; confirm before using from other threads.
 */
private static final class CachedImageDescriptor extends ImageDescriptor {
	// The wrapped descriptor; final since it never changes after construction.
	private final ImageDescriptor fDescriptor;
	// Lazily initialized cache of the wrapped descriptor's image data.
	private ImageData fData;

	public CachedImageDescriptor(ImageDescriptor descriptor) {
		fDescriptor = descriptor;
	}

	@Override
	public ImageData getImageData() {
		if (fData == null) {
			fData= fDescriptor.getImageData();
		}
		return fData;
	}
}
/**
 * Returns the image managed under the given key in this registry.
 * Forces creation of the image registry if it does not exist yet
 * (see {@link #getImageRegistry()}).
 *
 * @param key the image's key
 * @return the image managed under the given key
 */
public static Image get(String key) {
	return getImageRegistry().get(key);
}
/**
 * Returns the image descriptor for the given key in this registry. Might be called in a non-UI thread.
 * Before the image registry has been created, the descriptor is served from the
 * plain map filled by {@link #createManaged(String, String, String)} so that no SWT
 * resources are touched.
 *
 * NOTE(review): {@link #getImageRegistry()} sets fgAvoidSWTErrorMap to null after
 * transferring its entries; a concurrent call racing that transfer could observe
 * fgImageRegistry == null and then hit a null map - confirm whether this can happen
 * in practice.
 *
 * @param key the image's key
 * @return the image descriptor for the given key
 */
public static ImageDescriptor getDescriptor(String key) {
	if (fgImageRegistry == null) {
		return fgAvoidSWTErrorMap.get(key);
	}
	return getImageRegistry().getDescriptor(key);
}
/**
 * Sets the image descriptors for the enabled and disabled state to an action. The icons
 * are retrieved from the *tool16 folders. The hover descriptor is set to the enabled
 * descriptor (see {@link #setImageDescriptors(IAction, String, String)}).
 *
 * @param action the action
 * @param iconName the icon name
 */
public static void setToolImageDescriptors(IAction action, String iconName) {
	setImageDescriptors(action, "tool16", iconName); //$NON-NLS-1$
}
/**
 * Sets the image descriptors for the enabled and disabled state to an action. The icons
 * are retrieved from the *lcl16 folders. The hover descriptor is set to the enabled
 * descriptor (see {@link #setImageDescriptors(IAction, String, String)}).
 *
 * @param action the action
 * @param iconName the icon name
 */
public static void setLocalImageDescriptors(IAction action, String iconName) {
	setImageDescriptors(action, "lcl16", iconName); //$NON-NLS-1$
}
/*
 * Helper method to access the image registry from the JavaPlugin class.
 * Lazily creates the registry on first use and transfers all descriptors that were
 * collected in fgAvoidSWTErrorMap before the registry existed; the map is then
 * released, since from that point on all lookups go through the registry.
 */
/* package */ static ImageRegistry getImageRegistry() {
	if (fgImageRegistry == null) {
		fgImageRegistry= new ImageRegistry();
		// Enhanced for-loop instead of an explicit Iterator (the file already uses generics).
		for (String key : fgAvoidSWTErrorMap.keySet()) {
			fgImageRegistry.put(key, fgAvoidSWTErrorMap.get(key));
		}
		fgAvoidSWTErrorMap= null;
	}
	return fgImageRegistry;
}
//---- Helper methods to access icons on the file system --------------------------------------
/*
 * Installs the disabled ("d" + type) and the enabled ("e" + type) variant of the given
 * icon on the action. The enabled variant doubles as the hover image.
 */
private static void setImageDescriptors(IAction action, String type, String relPath) {
	ImageDescriptor disabledDescriptor= create("d" + type, relPath, false); //$NON-NLS-1$
	if (disabledDescriptor != null) {
		action.setDisabledImageDescriptor(disabledDescriptor);
	}
	ImageDescriptor enabledDescriptor= create("e" + type, relPath, true); //$NON-NLS-1$
	action.setHoverImageDescriptor(enabledDescriptor);
	action.setImageDescriptor(enabledDescriptor);
}
/*
 * Registers a managed image whose icon file name is derived from the given registry key
 * by stripping the plug-in name prefix.
 */
private static ImageDescriptor createManagedFromKey(String prefix, String key) {
	return createManaged(prefix, key.substring(NAME_PREFIX_LENGTH), key);
}
/*
 * Creates the descriptor and records it under the given key, so it can be served
 * before the image registry exists; getImageRegistry() later transfers the recorded
 * entries into the registry.
 */
private static ImageDescriptor createManaged(String prefix, String name, String key) {
	ImageDescriptor descriptor= create(prefix, name, true);
	if (fgAvoidSWTErrorMap == null) {
		fgAvoidSWTErrorMap= new HashMap<String, ImageDescriptor>();
	}
	fgAvoidSWTErrorMap.put(key, descriptor);
	if (fgImageRegistry != null) {
		// Managed images are expected to be registered before the registry is created.
		JavaPlugin.logErrorMessage("Image registry already defined"); //$NON-NLS-1$
	}
	return descriptor;
}
/*
 * Creates an image descriptor for the given prefix and name in the JDT UI bundle. The path can
 * contain variables like $NL$.
 * If no image could be found, <code>useMissingImageDescriptor</code> decides if either
 * the 'missing image descriptor' is returned or <code>null</code>.
 */
private static ImageDescriptor create(String prefix, String name, boolean useMissingImageDescriptor) {
	IPath path= ICONS_PATH.append(prefix).append(name);
	return createImageDescriptor(JavaPlugin.getDefault().getBundle(), path, useMissingImageDescriptor);
}
/*
 * Creates an image descriptor for the given prefix and name in the JDT UI bundle. The path can
 * contain variables like $NL$.
 * If no image could be found, the 'missing image descriptor' is returned.
 * Unlike createManaged(), the descriptor is not registered in the image registry.
 */
private static ImageDescriptor createUnManaged(String prefix, String name) {
	return create(prefix, name, true);
}
/*
 * Creates an image descriptor for the given prefix and name in the JDT UI bundle and lets the
 * descriptor cache the image data (see CachedImageDescriptor).
 * If no image could be found, the 'missing image descriptor' is returned.
 */
private static ImageDescriptor createUnManagedCached(String prefix, String name) {
	return new CachedImageDescriptor(create(prefix, name, true));
}
/*
 * Creates an image descriptor for the given path in a bundle. The path can contain variables
 * like $NL$.
 * If no image could be found, <code>useMissingImageDescriptor</code> decides if either
 * the 'missing image descriptor' is returned or <code>null</code>.
 * Added for 3.1.1.
 */
public static ImageDescriptor createImageDescriptor(Bundle bundle, IPath path, boolean useMissingImageDescriptor) {
	URL url= FileLocator.find(bundle, path, null);
	if (url == null) {
		// Icon file not present in the bundle.
		return useMissingImageDescriptor ? ImageDescriptor.getMissingImageDescriptor() : null;
	}
	return ImageDescriptor.createFromURL(url);
}
}
| |
/*
* Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.transaction.impl;
import com.hazelcast.cluster.ClusterState;
import com.hazelcast.core.Member;
import com.hazelcast.instance.MemberImpl;
import com.hazelcast.internal.cluster.ClusterService;
import com.hazelcast.internal.metrics.Probe;
import com.hazelcast.internal.metrics.ProbeLevel;
import com.hazelcast.internal.util.counters.Counter;
import com.hazelcast.internal.util.counters.MwCounter;
import com.hazelcast.logging.ILogger;
import com.hazelcast.nio.Address;
import com.hazelcast.spi.ClientAwareService;
import com.hazelcast.spi.ExecutionService;
import com.hazelcast.spi.ManagedService;
import com.hazelcast.spi.MemberAttributeServiceEvent;
import com.hazelcast.spi.MembershipAwareService;
import com.hazelcast.spi.MembershipServiceEvent;
import com.hazelcast.spi.NodeEngine;
import com.hazelcast.spi.Operation;
import com.hazelcast.spi.OperationService;
import com.hazelcast.spi.impl.NodeEngineImpl;
import com.hazelcast.transaction.TransactionContext;
import com.hazelcast.transaction.TransactionException;
import com.hazelcast.transaction.TransactionManagerService;
import com.hazelcast.transaction.TransactionOptions;
import com.hazelcast.transaction.TransactionalTask;
import com.hazelcast.transaction.impl.operations.BroadcastTxRollbackOperation;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import static com.hazelcast.transaction.impl.Transaction.State;
import static com.hazelcast.transaction.impl.Transaction.State.ACTIVE;
import static com.hazelcast.transaction.impl.Transaction.State.COMMITTING;
import static com.hazelcast.transaction.impl.Transaction.State.ROLLING_BACK;
import static com.hazelcast.util.FutureUtil.ExceptionHandler;
import static com.hazelcast.util.FutureUtil.logAllExceptions;
import static com.hazelcast.util.FutureUtil.waitWithDeadline;
import static com.hazelcast.util.Preconditions.checkNotNull;
import static java.util.Collections.shuffle;
/**
 * Server-side implementation of {@link TransactionManagerService}.
 * <p>
 * Besides creating {@link TransactionContext}s and plain {@link Transaction}s, this service keeps
 * backup logs ({@link TxBackupLog}) for transactions owned by other members or by clients. When
 * such an owner disappears (member removed / client disconnected), the backup logs are used to
 * commit or roll back the orphaned transactions.
 */
public class TransactionManagerServiceImpl implements TransactionManagerService, ManagedService,
        MembershipAwareService, ClientAwareService {

    public static final String SERVICE_NAME = "hz:core:txManagerService";

    private static final Address[] EMPTY_ADDRESSES = new Address[0];

    /** Backup logs of in-flight transactions, keyed by transaction id. */
    final ConcurrentMap<String, TxBackupLog> txBackupLogs = new ConcurrentHashMap<String, TxBackupLog>();

    // Due to mocking; the probes can't be made final.
    @Probe(level = ProbeLevel.MANDATORY)
    Counter startCount = MwCounter.newMwCounter();
    @Probe(level = ProbeLevel.MANDATORY)
    Counter rollbackCount = MwCounter.newMwCounter();
    @Probe(level = ProbeLevel.MANDATORY)
    Counter commitCount = MwCounter.newMwCounter();

    /** Logs (but does not propagate) failures while finalizing orphaned transactions. */
    private final ExceptionHandler finalizeExceptionHandler;

    private final NodeEngineImpl nodeEngine;
    private final ILogger logger;

    public TransactionManagerServiceImpl(NodeEngineImpl nodeEngine) {
        this.nodeEngine = nodeEngine;
        this.logger = nodeEngine.getLogger(TransactionManagerService.class);
        this.finalizeExceptionHandler = logAllExceptions(logger, "Error while rolling-back tx!", Level.WARNING);
        nodeEngine.getMetricsRegistry().scanAndRegister(this, "transactions");
    }

    public String getGroupName() {
        return nodeEngine.getConfig().getGroupConfig().getName();
    }

    @Override
    public <T> T executeTransaction(TransactionOptions options, TransactionalTask<T> task) throws TransactionException {
        checkNotNull(task, "TransactionalTask is required!");

        TransactionContext context = newTransactionContext(options);
        context.beginTransaction();
        try {
            T value = task.execute(context);
            context.commitTransaction();
            return value;
        } catch (Throwable e) {
            // Roll back, but never let a rollback failure hide the original problem.
            try {
                context.rollbackTransaction();
            } catch (Throwable rollbackFailure) {
                logger.warning("Error while rolling back failed transaction!", rollbackFailure);
            }
            // Unwrap/rethrow in the most specific form the caller can handle.
            if (e instanceof TransactionException) {
                throw (TransactionException) e;
            }
            if (e.getCause() instanceof TransactionException) {
                throw (TransactionException) e.getCause();
            }
            if (e instanceof RuntimeException) {
                throw (RuntimeException) e;
            }
            throw new TransactionException(e);
        }
    }

    @Override
    public TransactionContext newTransactionContext(TransactionOptions options) {
        return new TransactionContextImpl(this, nodeEngine, options, null, false);
    }

    @Override
    public TransactionContext newClientTransactionContext(TransactionOptions options, String clientUuid) {
        return new TransactionContextImpl(this, nodeEngine, options, clientUuid, true);
    }

    /**
     * Creates a plain transaction object, without wrapping it
     * inside a TransactionContext.
     * <p/>
     * A Transaction is a lower level API than TransactionContext.
     * It's not possible to create/access transactional
     * data structures without TransactionContext.
     * <p/>
     * A Transaction object
     * only allows starting/committing/rolling back transaction,
     * accessing state of the transaction
     * and adding TransactionLogRecord to the transaction.
     *
     * @param options transaction options
     * @return a new transaction
     */
    public Transaction newTransaction(TransactionOptions options) {
        return new TransactionImpl(this, nodeEngine, options, null);
    }

    /**
     * Creates a plain transaction object which can be used while cluster state is {@link ClusterState#PASSIVE},
     * without wrapping it inside a TransactionContext.
     * <p/>
     * Also see {@link TransactionManagerServiceImpl#newTransaction(TransactionOptions)} for more details
     *
     * @param options transaction options
     * @return a new transaction which can be used while cluster state is {@link ClusterState#PASSIVE}
     */
    public Transaction newAllowedDuringPassiveStateTransaction(TransactionOptions options) {
        return new AllowedDuringPassiveStateTransactionImpl(this, nodeEngine, options, null);
    }

    @Override
    public void init(NodeEngine nodeEngine, Properties properties) {
        // No initialization needed; all state is created in the constructor.
    }

    @Override
    public void reset() {
        txBackupLogs.clear();
    }

    @Override
    public void shutdown(boolean terminate) {
        reset();
    }

    @Override
    public void memberAdded(MembershipServiceEvent event) {
        // Not interested in joins; only departures require finalizing transactions.
    }

    @Override
    public void memberRemoved(MembershipServiceEvent event) {
        MemberImpl member = event.getMember();
        final String uuid = member.getUuid();
        if (nodeEngine.isRunning()) {
            logger.info("Committing/rolling-back alive transactions of " + member + ", UUID: " + uuid);
            // Finalization may block (remote invocations), so run it off the event thread.
            nodeEngine.getExecutionService().execute(ExecutionService.SYSTEM_EXECUTOR, new Runnable() {
                @Override
                public void run() {
                    finalizeTransactionsOf(uuid);
                }
            });
        } else if (logger.isFinestEnabled()) {
            logger.finest("Will not commit/roll-back transactions of " + member + ", UUID: " + uuid
                    + " because this member is not running");
        }
    }

    @Override
    public void memberAttributeChanged(MemberAttributeServiceEvent event) {
        // Attribute changes have no effect on transaction ownership.
    }

    /**
     * Commits or rolls back every backup log owned by the given caller (member or client)
     * and removes the finalized entries from {@link #txBackupLogs}.
     */
    private void finalizeTransactionsOf(String callerUuid) {
        final Iterator<Map.Entry<String, TxBackupLog>> it = txBackupLogs.entrySet().iterator();
        while (it.hasNext()) {
            final Map.Entry<String, TxBackupLog> entry = it.next();
            final String txnId = entry.getKey();
            final TxBackupLog log = entry.getValue();
            if (finalizeTransaction(callerUuid, txnId, log)) {
                it.remove();
            }
        }
    }

    /**
     * Finalizes a single backup log if it is owned by the given endpoint.
     * ACTIVE transactions are rolled back cluster-wide; COMMITTING ones are committed
     * from the backup records; anything else is rolled back from the backup records.
     * (Renamed from {@code finalize} to avoid confusion with {@link Object#finalize()}.)
     *
     * @return {@code true} if the log was finalized and may be removed, {@code false}
     *         if it belongs to a different caller
     */
    private boolean finalizeTransaction(String uuid, String txnId, TxBackupLog log) {
        OperationService operationService = nodeEngine.getOperationService();
        if (!uuid.equals(log.callerUuid)) {
            return false;
        }

        if (log.state == ACTIVE) {
            if (logger.isFinestEnabled()) {
                logger.finest("Rolling-back transaction[id:" + txnId + ", state:ACTIVE] of endpoint " + uuid);
            }
            // No commit was in progress: broadcast a rollback to all members.
            Collection<Member> memberList = nodeEngine.getClusterService().getMembers();
            Collection<Future> futures = new ArrayList<Future>(memberList.size());
            for (Member member : memberList) {
                Operation op = new BroadcastTxRollbackOperation(txnId);
                Future f = operationService.invokeOnTarget(SERVICE_NAME, op, member.getAddress());
                futures.add(f);
            }
            long timeoutMillis = TransactionOptions.getDefault().getTimeoutMillis();
            waitWithDeadline(futures, timeoutMillis, TimeUnit.MILLISECONDS, finalizeExceptionHandler);
        } else {
            // Reconstruct the transaction from the backup records so it can be finalized locally.
            TransactionImpl tx;
            if (log.allowedDuringPassiveState) {
                tx = new AllowedDuringPassiveStateTransactionImpl(this, nodeEngine, txnId, log.records,
                        log.timeoutMillis, log.startTime, log.callerUuid);
            } else {
                tx = new TransactionImpl(this, nodeEngine, txnId, log.records,
                        log.timeoutMillis, log.startTime, log.callerUuid);
            }

            if (log.state == COMMITTING) {
                if (logger.isFinestEnabled()) {
                    logger.finest("Committing transaction[id:" + txnId + ", state:COMMITTING] of endpoint " + uuid);
                }
                try {
                    tx.commit();
                } catch (Throwable e) {
                    logger.warning("Error during committing from tx backup!", e);
                }
            } else {
                if (logger.isFinestEnabled()) {
                    logger.finest("Rolling-back transaction[id:" + txnId + ", state:" + log.state
                            + "] of endpoint " + uuid);
                }
                try {
                    tx.rollback();
                } catch (Throwable e) {
                    logger.warning("Error during rolling-back from tx backup!", e);
                }
            }
        }
        return true;
    }

    @Override
    public void clientDisconnected(String clientUuid) {
        logger.info("Committing/rolling-back alive transactions of client, UUID: " + clientUuid);
        finalizeTransactionsOf(clientUuid);
    }

    /**
     * Picks the addresses of up to {@code durability} random remote members that will
     * keep a backup of the transaction log. Returns an empty array when durability is 0
     * or when this member is the only one in the cluster.
     */
    Address[] pickBackupLogAddresses(int durability) {
        if (durability == 0) {
            return EMPTY_ADDRESSES;
        }

        // This should be cleaned up because this is quite a complex approach since it depends on
        // the number of members in the cluster and creates litter.
        ClusterService clusterService = nodeEngine.getClusterService();
        List<MemberImpl> members = new ArrayList<MemberImpl>(clusterService.getMemberImpls());
        members.remove(nodeEngine.getLocalMember());
        int c = Math.min(members.size(), durability);
        shuffle(members);
        Address[] addresses = new Address[c];
        for (int i = 0; i < c; i++) {
            addresses[i] = members.get(i).getAddress();
        }
        return addresses;
    }

    public void createBackupLog(String callerUuid, String txnId) {
        createBackupLog(callerUuid, txnId, false);
    }

    public void createAllowedDuringPassiveStateBackupLog(String callerUuid, String txnId) {
        createBackupLog(callerUuid, txnId, true);
    }

    /**
     * Creates a fresh ACTIVE backup log for the given transaction.
     *
     * @throws TransactionException if a log already exists for {@code txnId}
     */
    private void createBackupLog(String callerUuid, String txnId, boolean allowedDuringPassiveState) {
        TxBackupLog log = new TxBackupLog(Collections.<TransactionLogRecord>emptyList(), callerUuid,
                ACTIVE, -1, -1, allowedDuringPassiveState);
        if (txBackupLogs.putIfAbsent(txnId, log) != null) {
            throw new TransactionException("TxLog already exists! txnId: " + txnId);
        }
    }

    /**
     * Replaces the ACTIVE backup log with a COMMITTING one that carries the actual
     * transaction records, using an atomic replace so concurrent modifications are detected.
     *
     * @throws TransactionException if no log exists, the log is no longer ACTIVE, or it
     *         was modified concurrently
     */
    public void replicaBackupLog(List<TransactionLogRecord> records, String callerUuid, String txnId,
                                 long timeoutMillis, long startTime) {
        TxBackupLog beginLog = txBackupLogs.get(txnId);
        if (beginLog == null) {
            throw new TransactionException("Could not find begin tx log! txnId: " + txnId);
        }
        if (beginLog.state != ACTIVE) {
            // Previously threw the misleading "TxLog already exists!" message.
            throw new TransactionException("TxLog is no longer ACTIVE! txnId: " + txnId
                    + ", state: " + beginLog.state);
        }
        TxBackupLog newTxBackupLog = new TxBackupLog(records, callerUuid, COMMITTING, timeoutMillis, startTime,
                beginLog.allowedDuringPassiveState);
        if (!txBackupLogs.replace(txnId, beginLog, newTxBackupLog)) {
            throw new TransactionException("TxLog was modified concurrently! txnId: " + txnId);
        }
    }

    /** Marks the backup log of the given transaction as ROLLING_BACK (no-op with a warning if absent). */
    public void rollbackBackupLog(String txnId) {
        TxBackupLog log = txBackupLogs.get(txnId);
        if (log == null) {
            logger.warning("No tx backup log is found, tx -> " + txnId);
        } else {
            log.state = ROLLING_BACK;
        }
    }

    /** Removes the backup log of the given transaction, if any. */
    public void purgeBackupLog(String txnId) {
        txBackupLogs.remove(txnId);
    }

    /**
     * Immutable snapshot of a transaction's backup state; only {@link #state} is mutable
     * (volatile, flipped to ROLLING_BACK by {@link #rollbackBackupLog(String)}).
     */
    static final class TxBackupLog {
        final List<TransactionLogRecord> records;
        final String callerUuid;
        final long timeoutMillis;
        final long startTime;
        final boolean allowedDuringPassiveState;
        volatile State state;

        private TxBackupLog(List<TransactionLogRecord> records, String callerUuid, State state,
                            long timeoutMillis, long startTime, boolean allowedDuringPassiveState) {
            this.records = records;
            this.callerUuid = callerUuid;
            this.state = state;
            this.timeoutMillis = timeoutMillis;
            this.startTime = startTime;
            this.allowedDuringPassiveState = allowedDuringPassiveState;
        }

        @Override
        public String toString() {
            return "TxBackupLog{"
                    + "records=" + records
                    + ", callerUuid='" + callerUuid + '\''
                    + ", timeoutMillis=" + timeoutMillis
                    + ", startTime=" + startTime
                    + ", state=" + state
                    + ", allowedDuringPassiveState=" + allowedDuringPassiveState
                    + '}';
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.notifications.dispatchers;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.ambari.server.notifications.DispatchCallback;
import org.apache.ambari.server.notifications.Notification;
import org.apache.ambari.server.notifications.NotificationDispatcher;
import org.apache.ambari.server.notifications.Recipient;
import org.apache.ambari.server.notifications.TargetConfigurationResult;
import org.apache.ambari.server.orm.entities.AlertDefinitionEntity;
import org.apache.ambari.server.orm.entities.AlertHistoryEntity;
import org.apache.ambari.server.state.AlertState;
import org.apache.ambari.server.state.alert.AlertNotification;
import org.apache.ambari.server.state.services.AlertNoticeDispatchService;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.snmp4j.PDU;
import org.snmp4j.Snmp;
import org.snmp4j.Target;
import org.snmp4j.mp.SnmpConstants;
import org.snmp4j.smi.Integer32;
import org.snmp4j.smi.OID;
import org.snmp4j.smi.OctetString;
import org.snmp4j.smi.VariableBinding;
public class AmbariSNMPDispatcherTest {
private static final int DEFAULT_SNMP_PORT = 31444;
public static final String DEFINITION_NAME = "definition name";
public static final String ALERT_LABEL = "alert name";
public static final String ALERT_TEXT = "alert text";
public static final String ALERT_HOSTNAME = "hostname";
public static final String ALERT_SERVICE_NAME = "service name";
public static final String ALERT_COMPONENT_NAME = "component name";
public static final Long DEFINITION_ID = 1L;
public static final AlertState ALERT_STATE = AlertState.OK;
@Test
public void testDispatch_nullProperties() throws Exception {
AmbariSNMPDispatcher dispatcher = new AmbariSNMPDispatcher(DEFAULT_SNMP_PORT);
Notification notification = mock(AlertNotification.class);
notification.Callback = mock(DispatchCallback.class);
notification.CallbackIds = mock(List.class);
dispatcher.dispatch(notification);
verify(notification.Callback).onFailure(notification.CallbackIds);
verify(notification.Callback, never()).onSuccess(notification.CallbackIds);
}
@Test
public void testDispatchUdpTransportMappingCrash() throws Exception {
AmbariSNMPDispatcher dispatcher = spy(new AmbariSNMPDispatcher(DEFAULT_SNMP_PORT));
AmbariSNMPDispatcher.SnmpVersion snmpVersion = AmbariSNMPDispatcher.SnmpVersion.SNMPv1;
Notification notification = mock(AlertNotification.class);
notification.Callback = mock(DispatchCallback.class);
notification.CallbackIds = mock(List.class);
Map<String, String> properties = new HashMap<>();
properties.put(AmbariSNMPDispatcher.PORT_PROPERTY, "3");
properties.put(AmbariSNMPDispatcher.COMMUNITY_PROPERTY, "4");
properties.put(AmbariSNMPDispatcher.SNMP_VERSION_PROPERTY, "SNMPv1");
notification.DispatchProperties = properties;
notification.Recipients = Arrays.asList(new Recipient());
doThrow(new IOException()).when(dispatcher).sendTraps(notification, snmpVersion);
dispatcher.dispatch(notification);
verify(notification.Callback).onFailure(notification.CallbackIds);
verify(notification.Callback, never()).onSuccess(notification.CallbackIds);
assertNull(dispatcher.getTransportMapping());
}
@Test
public void testDispatch_notDefinedProperties() throws Exception {
AmbariSNMPDispatcher dispatcher = new AmbariSNMPDispatcher(DEFAULT_SNMP_PORT);
Notification notification = mock(AlertNotification.class);
notification.Callback = mock(DispatchCallback.class);
notification.CallbackIds = mock(List.class);
notification.DispatchProperties = new HashMap<>();
dispatcher.dispatch(notification);
verify(notification.Callback).onFailure(notification.CallbackIds);
verify(notification.Callback, never()).onSuccess(notification.CallbackIds);
}
@Test
public void testDispatch_nullRecipients() throws Exception {
AmbariSNMPDispatcher dispatcher = new AmbariSNMPDispatcher(DEFAULT_SNMP_PORT);
Notification notification = getAlertNotification(true);
notification.Callback = mock(DispatchCallback.class);
notification.CallbackIds = mock(List.class);
Map<String, String> properties = new HashMap<>();
properties.put(AmbariSNMPDispatcher.PORT_PROPERTY, "3");
properties.put(AmbariSNMPDispatcher.COMMUNITY_PROPERTY, "4");
properties.put(AmbariSNMPDispatcher.SNMP_VERSION_PROPERTY, "SNMPv1");
notification.DispatchProperties = properties;
dispatcher.dispatch(notification);
verify(notification.Callback).onFailure(notification.CallbackIds);
verify(notification.Callback, never()).onSuccess(notification.CallbackIds);
}
@Test
public void testDispatch_noRecipients() throws Exception {
AmbariSNMPDispatcher dispatcher = new AmbariSNMPDispatcher(DEFAULT_SNMP_PORT);
Notification notification = getAlertNotification(true);
notification.Callback = mock(DispatchCallback.class);
notification.CallbackIds = mock(List.class);
Map<String, String> properties = new HashMap<>();
properties.put(AmbariSNMPDispatcher.PORT_PROPERTY, "3");
properties.put(AmbariSNMPDispatcher.COMMUNITY_PROPERTY, "4");
properties.put(AmbariSNMPDispatcher.SNMP_VERSION_PROPERTY, "SNMPv1");
notification.DispatchProperties = properties;
notification.Recipients = new ArrayList<>();
dispatcher.dispatch(notification);
verify(notification.Callback).onFailure(notification.CallbackIds);
verify(notification.Callback, never()).onSuccess(notification.CallbackIds);
}
@Test
public void testDispatch_sendTrapError() throws Exception {
AmbariSNMPDispatcher dispatcher = spy(new AmbariSNMPDispatcher(DEFAULT_SNMP_PORT));
Notification notification = mock(AlertNotification.class);
notification.Callback = mock(DispatchCallback.class);
notification.CallbackIds = mock(List.class);
Map<String, String> properties = new HashMap<>();
properties.put(AmbariSNMPDispatcher.PORT_PROPERTY, "3");
properties.put(AmbariSNMPDispatcher.COMMUNITY_PROPERTY, "4");
properties.put(AmbariSNMPDispatcher.SNMP_VERSION_PROPERTY, "SNMPv1");
notification.DispatchProperties = properties;
notification.Recipients = Arrays.asList(new Recipient());
doThrow(new RuntimeException()).when(dispatcher).sendTraps(eq(notification), any(AmbariSNMPDispatcher.SnmpVersion.class));
dispatcher.dispatch(notification);
verify(notification.Callback).onFailure(notification.CallbackIds);
verify(notification.Callback, never()).onSuccess(notification.CallbackIds);
}
@Test
public void testDispatch_incorrectSnmpVersion() throws Exception {
AmbariSNMPDispatcher dispatcher = spy(new AmbariSNMPDispatcher(DEFAULT_SNMP_PORT));
Notification notification = mock(AlertNotification.class);
notification.Callback = mock(DispatchCallback.class);
notification.CallbackIds = mock(List.class);
Map<String, String> properties = new HashMap<>();
properties.put(AmbariSNMPDispatcher.PORT_PROPERTY, "3");
properties.put(AmbariSNMPDispatcher.COMMUNITY_PROPERTY, "4");
properties.put(AmbariSNMPDispatcher.SNMP_VERSION_PROPERTY, "SNMPv11");
notification.DispatchProperties = properties;
notification.Recipients = Arrays.asList(new Recipient());
dispatcher.dispatch(notification);
verify(notification.Callback).onFailure(notification.CallbackIds);
verify(notification.Callback, never()).onSuccess(notification.CallbackIds);
}
@Test
public void testDispatch_successful_v1() throws Exception {
AmbariSNMPDispatcher dispatcher = spy(new AmbariSNMPDispatcher(DEFAULT_SNMP_PORT));
AmbariSNMPDispatcher.SnmpVersion snmpVersion = AmbariSNMPDispatcher.SnmpVersion.SNMPv1;
Notification notification = mock(AlertNotification.class);
notification.Callback = mock(DispatchCallback.class);
notification.CallbackIds = mock(List.class);
Map<String, String> properties = new HashMap<>();
properties.put(AmbariSNMPDispatcher.PORT_PROPERTY, "3");
properties.put(AmbariSNMPDispatcher.COMMUNITY_PROPERTY, "4");
properties.put(AmbariSNMPDispatcher.SNMP_VERSION_PROPERTY, "SNMPv1");
notification.DispatchProperties = properties;
notification.Recipients = Arrays.asList(new Recipient());
doNothing().when(dispatcher).sendTraps(notification, snmpVersion);
dispatcher.dispatch(notification);
verify(notification.Callback, never()).onFailure(notification.CallbackIds);
verify(notification.Callback).onSuccess(notification.CallbackIds);
}
@Test
public void testDispatch_successful_v2() throws Exception {
AmbariSNMPDispatcher dispatcher = spy(new AmbariSNMPDispatcher(DEFAULT_SNMP_PORT));
AmbariSNMPDispatcher.SnmpVersion snmpVersion = AmbariSNMPDispatcher.SnmpVersion.SNMPv2c;
Notification notification = mock(AlertNotification.class);
notification.Callback = mock(DispatchCallback.class);
notification.CallbackIds = mock(List.class);
Map<String, String> properties = new HashMap<>();
properties.put(AmbariSNMPDispatcher.PORT_PROPERTY, "3");
properties.put(AmbariSNMPDispatcher.COMMUNITY_PROPERTY, "4");
properties.put(AmbariSNMPDispatcher.SNMP_VERSION_PROPERTY, "SNMPv2c");
notification.DispatchProperties = properties;
notification.Recipients = Arrays.asList(new Recipient());
doNothing().when(dispatcher).sendTraps(notification, snmpVersion);
dispatcher.dispatch(notification);
verify(notification.Callback, never()).onFailure(notification.CallbackIds);
verify(notification.Callback).onSuccess(notification.CallbackIds);
}
@Test
public void testDispatch_successful_v3() throws Exception {
AmbariSNMPDispatcher dispatcher = new AmbariSNMPDispatcher(DEFAULT_SNMP_PORT);
Notification notification = getAlertNotification(true);
notification.Callback = mock(DispatchCallback.class);
notification.CallbackIds = mock(List.class);
Map<String, String> properties = new HashMap<>();
properties.put(AmbariSNMPDispatcher.PORT_PROPERTY, "162");
properties.put(AmbariSNMPDispatcher.COMMUNITY_PROPERTY, "public");
properties.put(AmbariSNMPDispatcher.SNMP_VERSION_PROPERTY, "SNMPv3");
properties.put(AmbariSNMPDispatcher.SECURITY_USERNAME_PROPERTY, "USER");
properties.put(AmbariSNMPDispatcher.SECURITY_AUTH_PASSPHRASE_PROPERTY, "PASSPHRASE1");
properties.put(AmbariSNMPDispatcher.SECURITY_PRIV_PASSPHRASE_PROPERTY, "PASSPHRASE2");
properties.put(AmbariSNMPDispatcher.SECURITY_LEVEL_PROPERTY, "AUTH_NOPRIV");
notification.DispatchProperties = properties;
Recipient recipient = new Recipient();
recipient.Identifier = "192.168.0.2";
notification.Recipients = Arrays.asList(recipient);
dispatcher.dispatch(notification);
verify(notification.Callback, never()).onFailure(notification.CallbackIds);
verify(notification.Callback).onSuccess(notification.CallbackIds);
}
@Test
public void testPrepareTrap_v1() throws Exception {
AmbariSNMPDispatcher.SnmpVersion snmpVersion = AmbariSNMPDispatcher.SnmpVersion.SNMPv1;
AmbariSNMPDispatcher dispatcher = new AmbariSNMPDispatcher(DEFAULT_SNMP_PORT);
Notification notification = getAlertNotification(true);
PDU pdu = dispatcher.prepareTrap(notification, snmpVersion);
assertEquals(PDU.V1TRAP, pdu.getType());
Map<String, VariableBinding> variableBindings = new HashMap<>();
for (VariableBinding variableBinding : pdu.toArray()) {
variableBindings.put(variableBinding.getOid().toString(), variableBinding);
}
assertEquals(11, variableBindings.size());
assertEquals(AmbariSNMPDispatcher.AMBARI_ALERT_TRAP_OID, variableBindings.get(SnmpConstants.snmpTrapOID.toString()).toValueString());
assertTrue(variableBindings.get(SnmpConstants.snmpTrapOID.toString()).getVariable() instanceof OID);
assertEquals(String.valueOf(DEFINITION_ID), variableBindings.get(AmbariSNMPDispatcher.AMBARI_ALERT_DEFINITION_ID_OID).toValueString());
assertTrue(variableBindings.get(AmbariSNMPDispatcher.AMBARI_ALERT_DEFINITION_ID_OID).getVariable() instanceof Integer32);
assertEquals(DEFINITION_NAME, variableBindings.get(AmbariSNMPDispatcher.AMBARI_ALERT_DEFINITION_NAME_OID).toValueString());
assertTrue(variableBindings.get(AmbariSNMPDispatcher.AMBARI_ALERT_DEFINITION_NAME_OID).getVariable() instanceof OctetString);
assertEquals(ALERT_LABEL, variableBindings.get(AmbariSNMPDispatcher.AMBARI_ALERT_NAME_OID).toValueString());
assertTrue(variableBindings.get(AmbariSNMPDispatcher.AMBARI_ALERT_NAME_OID).getVariable() instanceof OctetString);
assertEquals(ALERT_TEXT, variableBindings.get(AmbariSNMPDispatcher.AMBARI_ALERT_TEXT_OID).toValueString());
assertTrue(variableBindings.get(AmbariSNMPDispatcher.AMBARI_ALERT_TEXT_OID).getVariable() instanceof OctetString);
assertEquals(String.valueOf(ALERT_STATE.getIntValue()), variableBindings.get(AmbariSNMPDispatcher.AMBARI_ALERT_STATE_OID).toValueString());
assertTrue(variableBindings.get(AmbariSNMPDispatcher.AMBARI_ALERT_STATE_OID).getVariable() instanceof Integer32);
assertEquals(ALERT_HOSTNAME, variableBindings.get(AmbariSNMPDispatcher.AMBARI_ALERT_HOST_NAME_OID).toValueString());
assertTrue(variableBindings.get(AmbariSNMPDispatcher.AMBARI_ALERT_HOST_NAME_OID).getVariable() instanceof OctetString);
assertEquals(ALERT_SERVICE_NAME, variableBindings.get(AmbariSNMPDispatcher.AMBARI_ALERT_SERVICE_NAME_OID).toValueString());
assertTrue(variableBindings.get(AmbariSNMPDispatcher.AMBARI_ALERT_SERVICE_NAME_OID).getVariable() instanceof OctetString);
assertEquals(ALERT_COMPONENT_NAME, variableBindings.get(AmbariSNMPDispatcher.AMBARI_ALERT_COMPONENT_NAME_OID).toValueString());
assertTrue(variableBindings.get(AmbariSNMPDispatcher.AMBARI_ALERT_COMPONENT_NAME_OID).getVariable() instanceof OctetString);
}
@Test
public void testPrepareTrapNull() throws Exception {
AmbariSNMPDispatcher.SnmpVersion snmpVersion = AmbariSNMPDispatcher.SnmpVersion.SNMPv1;
AmbariSNMPDispatcher dispatcher = new AmbariSNMPDispatcher(DEFAULT_SNMP_PORT);
AlertNotification notification = (AlertNotification) getAlertNotification(false);
PDU pdu = dispatcher.prepareTrap(notification, snmpVersion);
assertEquals(PDU.V1TRAP, pdu.getType());
Map<String, VariableBinding> variableBindings = new HashMap<>();
for (VariableBinding variableBinding : pdu.toArray()) {
variableBindings.put(variableBinding.getOid().toString(), variableBinding);
}
assertEquals(11, variableBindings.size());
assertEquals("null", variableBindings.get(AmbariSNMPDispatcher.AMBARI_ALERT_COMPONENT_NAME_OID).toValueString());
}
@Test
public void testPrepareTrap_v2c() throws Exception {
AmbariSNMPDispatcher.SnmpVersion snmpVersion = AmbariSNMPDispatcher.SnmpVersion.SNMPv2c;
AmbariSNMPDispatcher dispatcher = new AmbariSNMPDispatcher(DEFAULT_SNMP_PORT);
Notification notification = getAlertNotification(true);
PDU pdu = dispatcher.prepareTrap(notification, snmpVersion);
assertEquals(PDU.TRAP, pdu.getType());
Map<String, VariableBinding> variableBindings = new HashMap<>();
for (VariableBinding variableBinding : pdu.toArray()) {
variableBindings.put(variableBinding.getOid().toString(), variableBinding);
}
assertEquals(11, variableBindings.size());
assertEquals(AmbariSNMPDispatcher.AMBARI_ALERT_TRAP_OID, variableBindings.get(SnmpConstants.snmpTrapOID.toString()).toValueString());
assertEquals(String.valueOf(DEFINITION_ID), variableBindings.get(AmbariSNMPDispatcher.AMBARI_ALERT_DEFINITION_ID_OID).toValueString());
assertEquals(DEFINITION_NAME, variableBindings.get(AmbariSNMPDispatcher.AMBARI_ALERT_DEFINITION_NAME_OID).toValueString());
assertEquals(ALERT_LABEL, variableBindings.get(AmbariSNMPDispatcher.AMBARI_ALERT_NAME_OID).toValueString());
assertEquals(ALERT_TEXT, variableBindings.get(AmbariSNMPDispatcher.AMBARI_ALERT_TEXT_OID).toValueString());
assertEquals(String.valueOf(ALERT_STATE.getIntValue()), variableBindings.get(AmbariSNMPDispatcher.AMBARI_ALERT_STATE_OID).toValueString());
assertEquals(ALERT_HOSTNAME, variableBindings.get(AmbariSNMPDispatcher.AMBARI_ALERT_HOST_NAME_OID).toValueString());
assertEquals(ALERT_SERVICE_NAME, variableBindings.get(AmbariSNMPDispatcher.AMBARI_ALERT_SERVICE_NAME_OID).toValueString());
assertEquals(ALERT_COMPONENT_NAME, variableBindings.get(AmbariSNMPDispatcher.AMBARI_ALERT_COMPONENT_NAME_OID).toValueString());
}
@Test
public void testSendTraps_v1() throws Exception {
AmbariSNMPDispatcher.SnmpVersion snmpVersion = AmbariSNMPDispatcher.SnmpVersion.SNMPv1;
Snmp snmp = mock(Snmp.class);
AmbariSNMPDispatcher dispatcher = spy(new AmbariSNMPDispatcher(snmp));
PDU trap = mock(PDU.class);
Notification notification = new AlertNotification();
Map<String, String> properties = new HashMap<>();
properties.put(AmbariSNMPDispatcher.COMMUNITY_PROPERTY, "public");
properties.put(AmbariSNMPDispatcher.PORT_PROPERTY, "162");
notification.DispatchProperties = properties;
Recipient rec1 = new Recipient();
rec1.Identifier = "192.168.0.2";
notification.Recipients = Arrays.asList(rec1);
doReturn(trap).when(dispatcher).prepareTrap(notification, snmpVersion);
dispatcher.sendTraps(notification, snmpVersion);
ArgumentCaptor<Target> argument = ArgumentCaptor.forClass(Target.class);
verify(snmp, times(1)).send(eq(trap), argument.capture());
assertEquals("192.168.0.2/162", argument.getValue().getAddress().toString());
assertEquals(SnmpConstants.version1, argument.getValue().getVersion());
}
@Test
public void testSendTraps_v2() throws Exception {
AmbariSNMPDispatcher.SnmpVersion snmpVersion = AmbariSNMPDispatcher.SnmpVersion.SNMPv2c;
Snmp snmp = mock(Snmp.class);
AmbariSNMPDispatcher dispatcher = spy(new AmbariSNMPDispatcher(snmp));
PDU trap = mock(PDU.class);
Notification notification = new AlertNotification();
Map<String, String> properties = new HashMap<>();
properties.put(AmbariSNMPDispatcher.COMMUNITY_PROPERTY, "public");
properties.put(AmbariSNMPDispatcher.PORT_PROPERTY, "162");
notification.DispatchProperties = properties;
Recipient rec1 = new Recipient();
rec1.Identifier = "192.168.0.2";
notification.Recipients = Arrays.asList(rec1);
doReturn(trap).when(dispatcher).prepareTrap(notification, snmpVersion);
dispatcher.sendTraps(notification, snmpVersion);
ArgumentCaptor<Target> argument = ArgumentCaptor.forClass(Target.class);
verify(snmp, times(1)).send(eq(trap), argument.capture());
assertEquals("192.168.0.2/162", argument.getValue().getAddress().toString());
assertEquals(SnmpConstants.version2c, argument.getValue().getVersion());
}
@Test
public void testSendTraps_v3() throws Exception {
AmbariSNMPDispatcher.SnmpVersion snmpVersion = AmbariSNMPDispatcher.SnmpVersion.SNMPv3;
Snmp snmp = mock(Snmp.class);
AmbariSNMPDispatcher dispatcher = spy(new AmbariSNMPDispatcher(snmp));
PDU trap = mock(PDU.class);
Notification notification = new AlertNotification();
Map<String, String> properties = new HashMap<>();
properties.put(AmbariSNMPDispatcher.PORT_PROPERTY, "162");
properties.put(AmbariSNMPDispatcher.SNMP_VERSION_PROPERTY, "SNMPv3");
properties.put(AmbariSNMPDispatcher.SECURITY_USERNAME_PROPERTY, "USER");
properties.put(AmbariSNMPDispatcher.SECURITY_AUTH_PASSPHRASE_PROPERTY, "PASSPHRASE1");
properties.put(AmbariSNMPDispatcher.SECURITY_PRIV_PASSPHRASE_PROPERTY, "PASSPHRASE2");
properties.put(AmbariSNMPDispatcher.SECURITY_LEVEL_PROPERTY, "AUTH_NOPRIV");
notification.DispatchProperties = properties;
Recipient rec1 = new Recipient();
rec1.Identifier = "192.168.0.2";
notification.Recipients = Arrays.asList(rec1);
doReturn(trap).when(dispatcher).prepareTrap(notification, snmpVersion);
dispatcher.sendTraps(notification, snmpVersion);
ArgumentCaptor<Target> argument = ArgumentCaptor.forClass(Target.class);
verify(snmp, times(1)).send(eq(trap), argument.capture());
assertEquals("192.168.0.2/162", argument.getValue().getAddress().toString());
assertEquals(SnmpConstants.version3, argument.getValue().getVersion());
}
@Test(expected = AmbariSNMPDispatcher.InvalidSnmpConfigurationException.class)
public void testSendTraps_v3_incorrectSecurityLevelVersion() throws Exception {
AmbariSNMPDispatcher.SnmpVersion snmpVersion = AmbariSNMPDispatcher.SnmpVersion.SNMPv3;
Snmp snmp = mock(Snmp.class);
AmbariSNMPDispatcher dispatcher = spy(new AmbariSNMPDispatcher(snmp));
PDU trap = mock(PDU.class);
Notification notification = new AlertNotification();
Map<String, String> properties = new HashMap<>();
properties.put(AmbariSNMPDispatcher.PORT_PROPERTY, "162");
properties.put(AmbariSNMPDispatcher.SNMP_VERSION_PROPERTY, "SNMPv3");
properties.put(AmbariSNMPDispatcher.SECURITY_USERNAME_PROPERTY, "USER");
properties.put(AmbariSNMPDispatcher.SECURITY_AUTH_PASSPHRASE_PROPERTY, "PASSPHRASE1");
properties.put(AmbariSNMPDispatcher.SECURITY_PRIV_PASSPHRASE_PROPERTY, "PASSPHRASE2");
properties.put(AmbariSNMPDispatcher.SECURITY_LEVEL_PROPERTY, "INCORRECT");
notification.DispatchProperties = properties;
Recipient rec1 = new Recipient();
rec1.Identifier = "192.168.0.2";
notification.Recipients = Arrays.asList(rec1);
doReturn(trap).when(dispatcher).prepareTrap(notification, snmpVersion);
dispatcher.sendTraps(notification, snmpVersion);
}
@Test
public void testValidateAlertValidation_SNMPv1() throws Exception {
Map<String, Object> properties = new HashMap<>();
properties.put(SNMPDispatcher.PORT_PROPERTY, "162");
properties.put(AmbariSNMPDispatcher.SNMP_VERSION_PROPERTY, "SNMPv1");
properties.put(AmbariSNMPDispatcher.COMMUNITY_PROPERTY, "public");
NotificationDispatcher dispatcher = new AmbariSNMPDispatcher(DEFAULT_SNMP_PORT);
TargetConfigurationResult configValidationResult = dispatcher.validateTargetConfig(properties);
assertEquals(TargetConfigurationResult.Status.VALID, configValidationResult.getStatus());
}
@Test
public void testValidateAlertValidation_incorrectSNMPversion() throws Exception {
Map<String, Object> properties = new HashMap<>();
properties.put(AmbariSNMPDispatcher.PORT_PROPERTY, "162");
properties.put(AmbariSNMPDispatcher.SNMP_VERSION_PROPERTY, "SNMPv4");
properties.put(AmbariSNMPDispatcher.COMMUNITY_PROPERTY, "public");
NotificationDispatcher dispatcher = new AmbariSNMPDispatcher(DEFAULT_SNMP_PORT);
TargetConfigurationResult configValidationResult = dispatcher.validateTargetConfig(properties);
assertEquals(TargetConfigurationResult.Status.INVALID, configValidationResult.getStatus());
}
@Test
public void testValidateAlertValidation_SNMPv1_invalid_noPort() throws Exception {
Map<String, Object> properties = new HashMap<>();
properties.put(AmbariSNMPDispatcher.SNMP_VERSION_PROPERTY, "SNMPv1");
properties.put(AmbariSNMPDispatcher.COMMUNITY_PROPERTY, "public");
NotificationDispatcher dispatcher = new AmbariSNMPDispatcher(DEFAULT_SNMP_PORT);
TargetConfigurationResult configValidationResult = dispatcher.validateTargetConfig(properties);
assertEquals(TargetConfigurationResult.Status.INVALID, configValidationResult.getStatus());
}
@Test
public void testValidateAlertValidation_SNMPv2c() throws Exception {
Map<String, Object> properties = new HashMap<>();
properties.put(AmbariSNMPDispatcher.PORT_PROPERTY, "162");
properties.put(AmbariSNMPDispatcher.SNMP_VERSION_PROPERTY, "SNMPv2c");
properties.put(AmbariSNMPDispatcher.COMMUNITY_PROPERTY, "public");
NotificationDispatcher dispatcher = new AmbariSNMPDispatcher(DEFAULT_SNMP_PORT);
TargetConfigurationResult configValidationResult = dispatcher.validateTargetConfig(properties);
assertEquals(TargetConfigurationResult.Status.VALID, configValidationResult.getStatus());
}
@Test
public void testValidateAlertValidation_SNMPv2c_invalid() throws Exception {
Map<String, Object> properties = new HashMap<>();
properties.put(AmbariSNMPDispatcher.PORT_PROPERTY, "162");
properties.put(AmbariSNMPDispatcher.SNMP_VERSION_PROPERTY, "SNMPv2c");
NotificationDispatcher dispatcher = new AmbariSNMPDispatcher(DEFAULT_SNMP_PORT);
TargetConfigurationResult configValidationResult = dispatcher.validateTargetConfig(properties);
assertEquals(TargetConfigurationResult.Status.INVALID, configValidationResult.getStatus());
}
@Test
public void testValidateAlertValidation_SNMPv3_incorrectSecurityLevel() throws Exception {
Map<String, Object> properties = new HashMap<>();
properties.put(AmbariSNMPDispatcher.PORT_PROPERTY, "162");
properties.put(AmbariSNMPDispatcher.SNMP_VERSION_PROPERTY, "SNMPv3");
properties.put(AmbariSNMPDispatcher.SECURITY_USERNAME_PROPERTY, "USER");
properties.put(AmbariSNMPDispatcher.SECURITY_AUTH_PASSPHRASE_PROPERTY, "PASSPHRASE1");
properties.put(AmbariSNMPDispatcher.SECURITY_PRIV_PASSPHRASE_PROPERTY, "PASSPHRASE2");
properties.put(AmbariSNMPDispatcher.SECURITY_LEVEL_PROPERTY, "INCORRECT");
NotificationDispatcher dispatcher = new AmbariSNMPDispatcher(DEFAULT_SNMP_PORT);
TargetConfigurationResult configValidationResult = dispatcher.validateTargetConfig(properties);
assertEquals(TargetConfigurationResult.Status.INVALID, configValidationResult.getStatus());
}
@Test
public void testValidateAlertValidation_SNMPv3_noAuthNoPriv() throws Exception {
Map<String, Object> properties = new HashMap<>();
properties.put(AmbariSNMPDispatcher.PORT_PROPERTY, "162");
properties.put(AmbariSNMPDispatcher.SNMP_VERSION_PROPERTY, "SNMPv3");
properties.put(AmbariSNMPDispatcher.SECURITY_USERNAME_PROPERTY, "USER");
properties.put(AmbariSNMPDispatcher.SECURITY_LEVEL_PROPERTY, "NOAUTH_NOPRIV");
NotificationDispatcher dispatcher = new AmbariSNMPDispatcher(DEFAULT_SNMP_PORT);
TargetConfigurationResult configValidationResult = dispatcher.validateTargetConfig(properties);
assertEquals(TargetConfigurationResult.Status.VALID, configValidationResult.getStatus());
}
@Test
public void testValidateAlertValidation_SNMPv3_AuthNoPriv_valid() throws Exception {
Map<String, Object> properties = new HashMap<>();
properties.put(AmbariSNMPDispatcher.PORT_PROPERTY, "162");
properties.put(AmbariSNMPDispatcher.SNMP_VERSION_PROPERTY, "SNMPv3");
properties.put(AmbariSNMPDispatcher.SECURITY_USERNAME_PROPERTY, "USER");
properties.put(AmbariSNMPDispatcher.SECURITY_AUTH_PASSPHRASE_PROPERTY, "PASSPHRASE1");
properties.put(AmbariSNMPDispatcher.SECURITY_LEVEL_PROPERTY, "AUTH_NOPRIV");
NotificationDispatcher dispatcher = new AmbariSNMPDispatcher(DEFAULT_SNMP_PORT);
TargetConfigurationResult configValidationResult = dispatcher.validateTargetConfig(properties);
assertEquals(TargetConfigurationResult.Status.VALID, configValidationResult.getStatus());
}
@Test
public void testValidateAlertValidation_SNMPv3_AuthNoPriv_invalid() throws Exception {
Map<String, Object> properties = new HashMap<>();
properties.put(AmbariSNMPDispatcher.PORT_PROPERTY, "162");
properties.put(AmbariSNMPDispatcher.SNMP_VERSION_PROPERTY, "SNMPv3");
properties.put(AmbariSNMPDispatcher.SECURITY_USERNAME_PROPERTY, "USER");
properties.put(AmbariSNMPDispatcher.SECURITY_LEVEL_PROPERTY, "AUTH_NOPRIV");
NotificationDispatcher dispatcher = new AmbariSNMPDispatcher(DEFAULT_SNMP_PORT);
TargetConfigurationResult configValidationResult = dispatcher.validateTargetConfig(properties);
assertEquals(TargetConfigurationResult.Status.INVALID, configValidationResult.getStatus());
}
@Test
public void testValidateAlertValidation_SNMPv3_AuthPriv_valid() throws Exception {
Map<String, Object> properties = new HashMap<>();
properties.put(AmbariSNMPDispatcher.PORT_PROPERTY, "162");
properties.put(AmbariSNMPDispatcher.SNMP_VERSION_PROPERTY, "SNMPv3");
properties.put(AmbariSNMPDispatcher.SECURITY_USERNAME_PROPERTY, "USER");
properties.put(AmbariSNMPDispatcher.SECURITY_AUTH_PASSPHRASE_PROPERTY, "PASSPHRASE1");
properties.put(AmbariSNMPDispatcher.SECURITY_PRIV_PASSPHRASE_PROPERTY, "PASSPHRASE2");
properties.put(AmbariSNMPDispatcher.SECURITY_LEVEL_PROPERTY, "AUTH_PRIV");
NotificationDispatcher dispatcher = new AmbariSNMPDispatcher(DEFAULT_SNMP_PORT);
TargetConfigurationResult configValidationResult = dispatcher.validateTargetConfig(properties);
assertEquals(TargetConfigurationResult.Status.VALID, configValidationResult.getStatus());
}
@Test
public void testValidateAlertValidation_SNMPv3_AuthPriv_noPassphrases() throws Exception {
Map<String, Object> properties = new HashMap<>();
properties.put(AmbariSNMPDispatcher.PORT_PROPERTY, "162");
properties.put(AmbariSNMPDispatcher.SNMP_VERSION_PROPERTY, "SNMPv3");
properties.put(AmbariSNMPDispatcher.SECURITY_USERNAME_PROPERTY, "USER");
properties.put(AmbariSNMPDispatcher.SECURITY_LEVEL_PROPERTY, "AUTH_PRIV");
NotificationDispatcher dispatcher = new AmbariSNMPDispatcher(DEFAULT_SNMP_PORT);
TargetConfigurationResult configValidationResult = dispatcher.validateTargetConfig(properties);
assertEquals(TargetConfigurationResult.Status.INVALID, configValidationResult.getStatus());
}
@Test
public void testValidateAlertValidation_SNMPv3_AuthPriv_onlyAuthPassphrase() throws Exception {
Map<String, Object> properties = new HashMap<>();
properties.put(AmbariSNMPDispatcher.PORT_PROPERTY, "162");
properties.put(AmbariSNMPDispatcher.SNMP_VERSION_PROPERTY, "SNMPv3");
properties.put(AmbariSNMPDispatcher.SECURITY_USERNAME_PROPERTY, "USER");
properties.put(AmbariSNMPDispatcher.SECURITY_AUTH_PASSPHRASE_PROPERTY, "PASSPHRASE1");
properties.put(AmbariSNMPDispatcher.SECURITY_LEVEL_PROPERTY, "AUTH_PRIV");
NotificationDispatcher dispatcher = new AmbariSNMPDispatcher(DEFAULT_SNMP_PORT);
TargetConfigurationResult configValidationResult = dispatcher.validateTargetConfig(properties);
assertEquals(TargetConfigurationResult.Status.INVALID, configValidationResult.getStatus());
}
private Notification getAlertNotification(boolean hasComponent) {
AlertNotification notification = new AlertNotification();
AlertDefinitionEntity alertDefinitionEntity = new AlertDefinitionEntity();
alertDefinitionEntity.setDefinitionName(DEFINITION_NAME);
alertDefinitionEntity.setLabel(ALERT_LABEL);
alertDefinitionEntity.setDefinitionId(DEFINITION_ID);
AlertHistoryEntity alertHistoryEntity = new AlertHistoryEntity();
alertHistoryEntity.setAlertDefinition(alertDefinitionEntity);
alertHistoryEntity.setAlertLabel(ALERT_LABEL);
alertHistoryEntity.setAlertState(ALERT_STATE);
alertHistoryEntity.setAlertText(ALERT_TEXT);
alertHistoryEntity.setHostName(ALERT_HOSTNAME);
alertHistoryEntity.setServiceName(ALERT_SERVICE_NAME);
if (hasComponent) {
alertHistoryEntity.setComponentName(ALERT_COMPONENT_NAME);
}
AlertNoticeDispatchService.AlertInfo alertInfo = new AlertNoticeDispatchService.AlertInfo(alertHistoryEntity);
notification.setAlertInfo(alertInfo);
return notification;
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.index;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.RoutingMissingException;
import org.elasticsearch.action.WriteFailureException;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.AutoCreateIndex;
import org.elasticsearch.action.support.replication.TransportShardReplicationOperationAction;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.action.index.MappingUpdatedAction;
import org.elasticsearch.cluster.action.shard.ShardStateAction;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.routing.ShardIterator;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.SourceToParse;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.indices.IndexAlreadyExistsException;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
/**
* Performs the index operation.
* <p/>
* <p>Allows for the following settings:
* <ul>
* <li><b>autoCreateIndex</b>: When set to <tt>true</tt>, will automatically create an index if one does not exists.
* Defaults to <tt>true</tt>.
 * <li><b>allowIdGeneration</b>: If the id is not set, should it be generated. Defaults to <tt>true</tt>.
* </ul>
*/
public class TransportIndexAction extends TransportShardReplicationOperationAction<IndexRequest, IndexRequest, IndexResponse> {
// Policy deciding whether a missing target index may be auto-created before indexing.
private final AutoCreateIndex autoCreateIndex;
// When true, an id is generated for requests that do not supply one (applied in resolveRequest).
private final boolean allowIdGeneration;
// Transport action used to create the target index on the master when it does not exist yet.
private final TransportCreateIndexAction createIndexAction;
// Propagates dynamic mapping changes detected during document parsing to the master node.
private final MappingUpdatedAction mappingUpdatedAction;
@Inject
public TransportIndexAction(Settings settings, TransportService transportService, ClusterService clusterService,
IndicesService indicesService, ThreadPool threadPool, ShardStateAction shardStateAction,
TransportCreateIndexAction createIndexAction, MappingUpdatedAction mappingUpdatedAction, ActionFilters actionFilters) {
super(settings, IndexAction.NAME, transportService, clusterService, indicesService, threadPool, shardStateAction, actionFilters);
this.createIndexAction = createIndexAction;
this.mappingUpdatedAction = mappingUpdatedAction;
this.autoCreateIndex = new AutoCreateIndex(settings);
this.allowIdGeneration = settings.getAsBoolean("action.allow_id_generation", true);
}
/**
 * Entry point: auto-creates the target index when the policy allows it, then
 * delegates the actual write to the replication machinery via {@link #innerExecute}.
 */
@Override
protected void doExecute(final IndexRequest request, final ActionListener<IndexResponse> listener) {
// if we don't have a master, we don't have metadata, that's fine, let it find a master using create index API
if (autoCreateIndex.shouldAutoCreate(request.index(), clusterService.state())) {
CreateIndexRequest createIndexRequest = new CreateIndexRequest(request);
createIndexRequest.index(request.index());
createIndexRequest.mapping(request.type());
createIndexRequest.cause("auto(index api)");
createIndexRequest.masterNodeTimeout(request.timeout());
createIndexAction.execute(createIndexRequest, new ActionListener<CreateIndexResponse>() {
@Override
public void onResponse(CreateIndexResponse result) {
innerExecute(request, listener);
}
@Override
public void onFailure(Throwable e) {
// A concurrent request may have created the index already; that is not a failure.
if (ExceptionsHelper.unwrapCause(e) instanceof IndexAlreadyExistsException) {
// we have the index, do it
try {
innerExecute(request, listener);
} catch (Throwable e1) {
listener.onFailure(e1);
}
} else {
listener.onFailure(e);
}
}
});
} else {
innerExecute(request, listener);
}
}
@Override
protected boolean resolveIndex() {
return true;
}
/**
 * Fills in routing/id/timestamp on the request from the index mapping; may
 * generate a missing id when allowIdGeneration is enabled.
 */
@Override
protected void resolveRequest(ClusterState state, InternalRequest request, ActionListener<IndexResponse> indexResponseActionListener) {
MetaData metaData = clusterService.state().metaData();
MappingMetaData mappingMd = null;
if (metaData.hasIndex(request.concreteIndex())) {
mappingMd = metaData.index(request.concreteIndex()).mappingOrDefault(request.request().type());
}
request.request().process(metaData, mappingMd, allowIdGeneration, request.concreteIndex());
}
// Hands the request to the base class' replication flow (primary then replicas).
private void innerExecute(final IndexRequest request, final ActionListener<IndexResponse> listener) {
super.doExecute(request, listener);
}
@Override
protected boolean checkWriteConsistency() {
return true;
}
@Override
protected IndexRequest newRequestInstance() {
return new IndexRequest();
}
// Replica requests reuse the same request type as primary requests.
@Override
protected IndexRequest newReplicaRequestInstance() {
return newRequestInstance();
}
@Override
protected IndexResponse newResponseInstance() {
return new IndexResponse();
}
@Override
protected String executor() {
return ThreadPool.Names.INDEX;
}
// Routes the request to the shard(s) owning this document (by type/id/routing).
@Override
protected ShardIterator shards(ClusterState clusterState, InternalRequest request) {
return clusterService.operationRouting()
.indexShards(clusterService.state(), request.concreteIndex(), request.request().type(), request.request().id(), request.request().routing());
}
/**
 * Executes the index/create operation on the primary shard. Pushes any dynamic
 * mapping update to the master before indexing, then copies the resolved version
 * back onto the request so replicas apply the identical operation.
 */
@Override
protected Tuple<IndexResponse, IndexRequest> shardOperationOnPrimary(ClusterState clusterState, PrimaryOperationRequest shardRequest) throws Throwable {
final IndexRequest request = shardRequest.request;
// validate, if routing is required, that we got routing
IndexMetaData indexMetaData = clusterState.metaData().index(shardRequest.shardId.getIndex());
MappingMetaData mappingMd = indexMetaData.mappingOrDefault(request.type());
if (mappingMd != null && mappingMd.routing().required()) {
if (request.routing() == null) {
throw new RoutingMissingException(shardRequest.shardId.getIndex(), request.type(), request.id());
}
}
IndexService indexService = indicesService.indexServiceSafe(shardRequest.shardId.getIndex());
IndexShard indexShard = indexService.shardSafe(shardRequest.shardId.id());
SourceToParse sourceToParse = SourceToParse.source(SourceToParse.Origin.PRIMARY, request.source()).type(request.type()).id(request.id())
.routing(request.routing()).parent(request.parent()).timestamp(request.timestamp()).ttl(request.ttl());
long version;
boolean created;
try {
if (request.opType() == IndexRequest.OpType.INDEX) {
Engine.Index index = indexShard.prepareIndex(sourceToParse, request.version(), request.versionType(), Engine.Operation.Origin.PRIMARY, request.canHaveDuplicates());
// Parsing may have introduced new mappings; the master must learn them before we index.
if (index.parsedDoc().mappingsModified()) {
mappingUpdatedAction.updateMappingOnMaster(shardRequest.shardId.getIndex(), index.docMapper(), indexService.indexUUID());
}
indexShard.index(index);
version = index.version();
created = index.created();
} else {
Engine.Create create = indexShard.prepareCreate(sourceToParse,
request.version(), request.versionType(), Engine.Operation.Origin.PRIMARY, request.canHaveDuplicates(), request.autoGeneratedId());
if (create.parsedDoc().mappingsModified()) {
mappingUpdatedAction.updateMappingOnMaster(shardRequest.shardId.getIndex(), create.docMapper(), indexService.indexUUID());
}
indexShard.create(create);
version = create.version();
// An explicit create either succeeds as a new document or throws; so always "created" here.
created = true;
}
if (request.refresh()) {
try {
indexShard.refresh("refresh_flag_index");
} catch (Throwable e) {
// ignore
}
}
// update the version on the request, so it will be used for the replicas
request.version(version);
request.versionType(request.versionType().versionTypeForReplicationAndRecovery());
assert request.versionType().validateVersionForWrites(request.version());
return new Tuple<>(new IndexResponse(shardRequest.shardId.getIndex(), request.type(), request.id(), version, created), shardRequest.request);
} catch (WriteFailureException e) {
// Even on failure, a mapping change may have been parsed; propagate it so retries succeed.
if (e.getMappingTypeToUpdate() != null){
DocumentMapper docMapper = indexService.mapperService().documentMapper(e.getMappingTypeToUpdate());
if (docMapper != null) {
mappingUpdatedAction.updateMappingOnMaster(indexService.index().name(), docMapper, indexService.indexUUID());
}
}
throw e.getCause();
}
}
/**
 * Replays the operation on a replica shard using the version/versionType the
 * primary stamped onto the request; no mapping updates happen here.
 */
@Override
protected void shardOperationOnReplica(ReplicaOperationRequest shardRequest) {
IndexShard indexShard = indicesService.indexServiceSafe(shardRequest.shardId.getIndex()).shardSafe(shardRequest.shardId.id());
IndexRequest request = shardRequest.request;
SourceToParse sourceToParse = SourceToParse.source(SourceToParse.Origin.REPLICA, request.source()).type(request.type()).id(request.id())
.routing(request.routing()).parent(request.parent()).timestamp(request.timestamp()).ttl(request.ttl());
if (request.opType() == IndexRequest.OpType.INDEX) {
Engine.Index index = indexShard.prepareIndex(sourceToParse, request.version(), request.versionType(), Engine.Operation.Origin.REPLICA, request.canHaveDuplicates());
indexShard.index(index);
} else {
Engine.Create create = indexShard.prepareCreate(sourceToParse,
request.version(), request.versionType(), Engine.Operation.Origin.REPLICA, request.canHaveDuplicates(), request.autoGeneratedId());
indexShard.create(create);
}
if (request.refresh()) {
try {
indexShard.refresh("refresh_flag_index");
} catch (Exception e) {
// ignore
}
}
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.jstorm.task.group;
import backtype.storm.generated.GlobalStreamId;
import backtype.storm.generated.Grouping;
import backtype.storm.generated.JavaObject;
import backtype.storm.grouping.CustomStreamGrouping;
import backtype.storm.task.TopologyContext;
import backtype.storm.tuple.Fields;
import backtype.storm.utils.Utils;
import com.alibaba.jstorm.daemon.worker.WorkerData;
import com.alibaba.jstorm.task.execute.MsgInfo;
import com.alibaba.jstorm.utils.JStormUtils;
import com.alibaba.jstorm.utils.RandomRange;
import com.alibaba.jstorm.utils.Thrift;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
/**
 * Grouper: determines which task(s) a tuple should be sent to
*
* @author yannian
*
*/
public class MkGrouper {
private static final Logger LOG = LoggerFactory.getLogger(MkGrouper.class);
private TopologyContext topology_context;
// this component output fields
private Fields out_fields;
private Grouping thrift_grouping;
private Grouping._Fields fields;
// Resolved grouping strategy; decided once in parseGroupType and never changed.
private GrouperType grouptype;
// Target task ids, sorted ascending so index 0 is the deterministic "first" task for global grouping.
private List<Integer> out_tasks;
private List<Integer> local_tasks;
private String streamId;
// grouping method
private RandomRange randomrange;
private Random random;
private MkShuffer shuffer;
private MkCustomGrouper custom_grouper;
private MkFieldsGrouper fields_grouper;
private MkLocalShuffer local_shuffer_grouper;
private MkLocalFirst localFirst;
/**
 * Builds a grouper for one outgoing stream of this task.
 *
 * @param _topology_context topology context of the emitting task
 * @param _out_fields output fields declared by this component
 * @param _thrift_grouping thrift grouping definition for the stream
 * @param _outTasks ids of the downstream tasks
 * @param streamId id of the stream being grouped
 * @param workerData worker-level shared state (used by shuffle-style groupers)
 */
public MkGrouper(TopologyContext _topology_context, Fields _out_fields, Grouping _thrift_grouping, List<Integer> _outTasks, String streamId,
WorkerData workerData) {
this.topology_context = _topology_context;
this.out_fields = _out_fields;
this.thrift_grouping = _thrift_grouping;
this.streamId = streamId;
this.out_tasks = new ArrayList<Integer>();
this.out_tasks.addAll(_outTasks);
Collections.sort(this.out_tasks);
this.local_tasks = _topology_context.getThisWorkerTasks();
this.fields = Thrift.groupingType(thrift_grouping);
this.grouptype = this.parseGroupType(workerData);
String id = _topology_context.getThisTaskId() + ":" + streamId;
LOG.info(id + " grouptype is " + grouptype + ", out_tasks is " + out_tasks + ", local_tasks" + local_tasks);
}
public GrouperType gettype() {
return grouptype;
}
/**
 * Maps the thrift grouping definition to a {@link GrouperType} and instantiates
 * the matching helper grouper. Returns null for unrecognized grouping fields.
 */
private GrouperType parseGroupType(WorkerData workerData) {
GrouperType grouperType = null;
if (Grouping._Fields.FIELDS.equals(fields)) {
if (Thrift.isGlobalGrouping(thrift_grouping)) {
// global grouping, just send tuple to first task
grouperType = GrouperType.global;
} else {
List<String> fields_group = Thrift.fieldGrouping(thrift_grouping);
Fields fields = new Fields(fields_group);
fields_grouper = new MkFieldsGrouper(out_fields, fields, out_tasks);
// hashcode by fields
grouperType = GrouperType.fields;
}
} else if (Grouping._Fields.ALL.equals(fields)) {
// send to every task
grouperType = GrouperType.all;
} else if (Grouping._Fields.SHUFFLE.equals(fields)) {
grouperType = GrouperType.shuffle;
shuffer = new MkShuffer(out_tasks, workerData);
} else if (Grouping._Fields.NONE.equals(fields)) {
// random send one task
this.random = new Random();
grouperType = GrouperType.none;
} else if (Grouping._Fields.CUSTOM_OBJECT.equals(fields)) {
// user custom grouping by JavaObject
JavaObject jobj = thrift_grouping.get_custom_object();
CustomStreamGrouping g = Thrift.instantiateJavaObject(jobj);
int myTaskId = topology_context.getThisTaskId();
String componentId = topology_context.getComponentId(myTaskId);
GlobalStreamId stream = new GlobalStreamId(componentId, streamId);
custom_grouper = new MkCustomGrouper(topology_context, g, stream, out_tasks, myTaskId);
grouperType = GrouperType.custom_obj;
} else if (Grouping._Fields.CUSTOM_SERIALIZED.equals(fields)) {
// user custom group by serialized Object
byte[] obj = thrift_grouping.get_custom_serialized();
CustomStreamGrouping g = (CustomStreamGrouping) Utils.javaDeserialize(obj);
int myTaskId = topology_context.getThisTaskId();
String componentId = topology_context.getComponentId(myTaskId);
GlobalStreamId stream = new GlobalStreamId(componentId, streamId);
custom_grouper = new MkCustomGrouper(topology_context, g, stream, out_tasks, myTaskId);
grouperType = GrouperType.custom_serialized;
} else if (Grouping._Fields.DIRECT.equals(fields)) {
// directly send to a special task
grouperType = GrouperType.direct;
} else if (Grouping._Fields.LOCAL_OR_SHUFFLE.equals(fields)) {
grouperType = GrouperType.local_or_shuffle;
local_shuffer_grouper = new MkLocalShuffer(local_tasks, out_tasks, workerData);
} else if (Grouping._Fields.LOCAL_FIRST.equals(fields)) {
grouperType = GrouperType.localFirst;
localFirst = new MkLocalFirst(local_tasks, out_tasks, workerData);
}
return grouperType;
}
/**
 * get which task should tuple be sent to
 *
 * @param values the tuple's values
 * @return the target task ids; empty list for unsupported group types
 */
public List<Integer> grouper(List<Object> values) {
if (GrouperType.global.equals(grouptype)) {
// send to task which taskId is 0
return JStormUtils.mk_list(out_tasks.get(0));
} else if (GrouperType.fields.equals(grouptype)) {
// field grouping
return fields_grouper.grouper(values);
} else if (GrouperType.all.equals(grouptype)) {
// send to every task
return out_tasks;
} else if (GrouperType.shuffle.equals(grouptype)) {
// random, but the random is different from none
return shuffer.grouper(values);
} else if (GrouperType.none.equals(grouptype)) {
int rnd = Math.abs(random.nextInt() % out_tasks.size());
return JStormUtils.mk_list(out_tasks.get(rnd));
} else if (GrouperType.custom_obj.equals(grouptype)) {
return custom_grouper.grouper(values);
} else if (GrouperType.custom_serialized.equals(grouptype)) {
return custom_grouper.grouper(values);
} else if (GrouperType.local_or_shuffle.equals(grouptype)) {
return local_shuffer_grouper.grouper(values);
} else if (GrouperType.localFirst.equals(grouptype)) {
return localFirst.grouper(values);
} else {
// fixed typo in the log message ("Unsupportted" -> "Unsupported")
LOG.warn("Unsupported group type");
}
return new ArrayList<Integer>();
}
/**
 * Groups a whole batch of messages at once, returning target-task-list to
 * message-sublist. For most group types the whole batch shares one target;
 * only custom groupings are resolved per message.
 */
public Map<List<Integer>, List<MsgInfo>> grouperBatch(List<MsgInfo> batch) {
Map<List<Integer>, List<MsgInfo>> ret = new HashMap<List<Integer>, List<MsgInfo>>();
//optimize fieldGrouping & customGrouping
if (GrouperType.local_or_shuffle.equals(grouptype)) {
ret.put(local_shuffer_grouper.grouper(null), batch);
} else if (GrouperType.global.equals(grouptype)) {
// send to task which taskId is 0
ret.put(JStormUtils.mk_list(out_tasks.get(0)), batch);
} else if (GrouperType.fields.equals(grouptype)) {
fields_grouper.batchGrouper(batch, ret);
} else if (GrouperType.all.equals(grouptype)) {
// send to every task
ret.put(out_tasks, batch);
} else if (GrouperType.shuffle.equals(grouptype)) {
// random, but the random is different from none
ret.put(shuffer.grouper(null), batch);
} else if (GrouperType.none.equals(grouptype)) {
int rnd = Math.abs(random.nextInt() % out_tasks.size());
ret.put(JStormUtils.mk_list(out_tasks.get(rnd)), batch);
} else if (GrouperType.custom_obj.equals(grouptype) || GrouperType.custom_serialized.equals(grouptype)) {
// custom groupers may route every message differently, so bucket per message
for (int i = 0; i < batch.size(); i++ ) {
MsgInfo msg = batch.get(i);
List<Integer> out = custom_grouper.grouper(msg.values);
List<MsgInfo> customBatch = ret.get(out);
if (customBatch == null) {
customBatch = JStormUtils.mk_list();
ret.put(out, customBatch);
}
customBatch.add(msg);
}
} else if (GrouperType.localFirst.equals(grouptype)) {
ret.put(localFirst.grouper(null), batch);
} else {
// fixed typo in the log message ("Unsupportted" -> "Unsupported")
LOG.warn("Unsupported group type");
}
return ret;
}
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.xdebugger.impl.actions;
import com.intellij.execution.ExecutionException;
import com.intellij.execution.process.OSProcessUtil;
import com.intellij.execution.process.ProcessInfo;
import com.intellij.execution.runners.ExecutionUtil;
import com.intellij.internal.statistic.UsageTrigger;
import com.intellij.internal.statistic.beans.ConvertUsagesUtil;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.popup.*;
import com.intellij.openapi.ui.popup.util.BaseListPopupStep;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.UserDataHolderBase;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.wm.ToolWindowId;
import com.intellij.ui.popup.list.ListPopupImpl;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.MultiMap;
import com.intellij.util.containers.hash.LinkedHashMap;
import com.intellij.util.ui.StatusText;
import com.intellij.xdebugger.XDebuggerBundle;
import com.intellij.xdebugger.attach.XLocalAttachDebugger;
import com.intellij.xdebugger.attach.XLocalAttachDebuggerProvider;
import com.intellij.xdebugger.attach.XLocalAttachGroup;
import org.intellij.lang.annotations.MagicConstant;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import java.awt.event.InputEvent;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
/**
 * Action that shows a popup of local OS processes and attaches a debugger to the
 * process the user selects. Keeps a small per-project history of recent attaches
 * and surfaces them at the top of the popup.
 */
public class AttachToLocalProcessAction extends AnAction {
// Per-project attach history, keyed by process command line (most recent last).
private static final Key<LinkedHashMap<String, HistoryItem>> HISTORY_KEY = Key.create("AttachToLocalProcessAction.HISTORY_KEY");
public AttachToLocalProcessAction() {
super(XDebuggerBundle.message("xdebugger.attach.toLocal.action"),
XDebuggerBundle.message("xdebugger.attach.toLocal.action.description"), null);
}
// Enabled only when a project is open and at least one attach-debugger provider is registered.
@Override
public void update(AnActionEvent e) {
super.update(e);
Project project = getEventProject(e);
boolean enabled = project != null && Extensions.getExtensions(XLocalAttachDebuggerProvider.EP).length > 0;
e.getPresentation().setEnabledAndVisible(enabled);
}
// Builds the process list popup and shows it centered in the current window.
@Override
public void actionPerformed(AnActionEvent e) {
final Project project = getEventProject(e);
if (project == null) return;
ProcessInfo[] processList = OSProcessUtil.getProcessList();
XLocalAttachDebuggerProvider[] providers = Extensions.getExtensions(XLocalAttachDebuggerProvider.EP);
ProcessListStep step = new ProcessListStep(collectAttachItems(project, processList, providers), project);
final ListPopup popup = JBPopupFactory.getInstance().createListPopup(step);
final JList mainList = ((ListPopupImpl)popup).getList();
// Updates the popup caption with the debugger name of the highlighted item.
ListSelectionListener listener = event -> {
if (event.getValueIsAdjusting()) return;
Object item = ((JList)event.getSource()).getSelectedValue();
// if a sub-list is closed, fallback to the selected value from the main list
if (item == null) {
item = mainList.getSelectedValue();
}
if (item instanceof AttachItem) {
String debuggerName = ((AttachItem)item).getSelectedDebugger().getDebuggerDisplayName();
debuggerName = StringUtil.shortenTextWithEllipsis(debuggerName, 50, 0);
((ListPopupImpl)popup).setCaption(XDebuggerBundle.message("xdebugger.attach.toLocal.popup.adText", debuggerName));
}
};
popup.addListSelectionListener(listener);
// force first valueChanged event
listener.valueChanged(new ListSelectionEvent(mainList, mainList.getMinSelectionIndex(), mainList.getMaxSelectionIndex(), false));
popup.showCenteredInCurrentWindow(project);
}
/**
 * Collects attachable processes into popup items: asks every provider for the
 * debuggers it offers per process, groups and sorts them, then prepends items
 * matching the attach history (marked as "recent") to the result.
 */
@NotNull
public static List<AttachItem> collectAttachItems(@NotNull final Project project,
@NotNull ProcessInfo[] processList,
@NotNull XLocalAttachDebuggerProvider... providers) {
MultiMap<XLocalAttachGroup, Pair<ProcessInfo, ArrayList<XLocalAttachDebugger>>> groupWithItems = new MultiMap<>();
// Shared scratch holder providers may use to cache data across processes.
UserDataHolderBase dataHolder = new UserDataHolderBase();
for (ProcessInfo eachInfo : processList) {
MultiMap<XLocalAttachGroup, XLocalAttachDebugger> groupsWithDebuggers = new MultiMap<>();
for (XLocalAttachDebuggerProvider eachProvider : providers) {
groupsWithDebuggers.putValues(eachProvider.getAttachGroup(), eachProvider.getAvailableDebuggers(project, eachInfo, dataHolder));
}
for (XLocalAttachGroup eachGroup : groupsWithDebuggers.keySet()) {
Collection<XLocalAttachDebugger> debuggers = groupsWithDebuggers.get(eachGroup);
if (!debuggers.isEmpty()) {
groupWithItems.putValue(eachGroup, Pair.create(eachInfo, new ArrayList<>(debuggers)));
}
}
}
// Order groups by their declared order, and items inside a group by the group's comparator.
ArrayList<XLocalAttachGroup> sortedGroups = new ArrayList<>(groupWithItems.keySet());
Collections.sort(sortedGroups, (a, b) -> a.getOrder() - b.getOrder());
List<AttachItem> currentItems = new ArrayList<>();
for (final XLocalAttachGroup eachGroup : sortedGroups) {
List<Pair<ProcessInfo, ArrayList<XLocalAttachDebugger>>> sortedItems
= new ArrayList<>(groupWithItems.get(eachGroup));
Collections.sort(sortedItems, (a, b) -> eachGroup.compare(project, a.first, b.first));
// Only the first item of each group carries the group separator title.
boolean first = true;
for (Pair<ProcessInfo, ArrayList<XLocalAttachDebugger>> eachItem : sortedItems) {
currentItems.add(new AttachItem(eachGroup, first, eachItem.first, eachItem.second));
first = false;
}
}
// Walk history newest-first and promote matching items to a "recent" section on top.
List<AttachItem> currentHistoryItems = new ArrayList<>();
List<HistoryItem> history = getHistory(project);
for (int i = history.size() - 1; i >= 0; i--) {
HistoryItem eachHistoryItem = history.get(i);
for (AttachItem eachCurrentItem : currentItems) {
// A history entry matches a live process by group and full command line.
boolean isSuitableItem = eachHistoryItem.getGroup().equals(eachCurrentItem.getGroup()) &&
eachHistoryItem.getProcessInfo().getCommandLine().equals(eachCurrentItem.getProcessInfo().getCommandLine());
if (!isSuitableItem) continue;
List<XLocalAttachDebugger> debuggers = eachCurrentItem.getDebuggers();
// Pre-select the same debugger that was used last time, if still offered.
int selectedDebugger = -1;
for (int j = 0; j < debuggers.size(); j++) {
XLocalAttachDebugger eachDebugger = debuggers.get(j);
if (eachDebugger.getDebuggerDisplayName().equals(eachHistoryItem.getDebuggerName())) {
selectedDebugger = j;
break;
}
}
if (selectedDebugger == -1) continue;
currentHistoryItems.add(new AttachItem(eachCurrentItem.getGroup(),
currentHistoryItems.isEmpty(),
XDebuggerBundle.message("xdebugger.attach.toLocal.popup.recent"),
eachCurrentItem.getProcessInfo(),
debuggers,
selectedDebugger));
}
}
currentHistoryItems.addAll(currentItems);
return currentHistoryItems;
}
/**
 * Records an attach in the project's history, keyed by command line; re-attaching
 * moves the entry to the most-recent position. History is capped at 4 entries.
 */
public static void addToHistory(@NotNull Project project, @NotNull AttachItem item) {
LinkedHashMap<String, HistoryItem> history = project.getUserData(HISTORY_KEY);
if (history == null) {
project.putUserData(HISTORY_KEY, history = new LinkedHashMap<>());
}
ProcessInfo processInfo = item.getProcessInfo();
// Remove-then-put moves an existing entry to the end (most recent).
history.remove(processInfo.getCommandLine());
history.put(processInfo.getCommandLine(), new HistoryItem(processInfo, item.getGroup(),
item.getSelectedDebugger().getDebuggerDisplayName()));
// Evict oldest entries beyond the cap of 4.
while (history.size() > 4) {
history.remove(history.keySet().iterator().next());
}
}
// Returns the attach history, oldest first, as an unmodifiable snapshot.
@NotNull
public static List<HistoryItem> getHistory(@NotNull Project project) {
LinkedHashMap<String, HistoryItem> history = project.getUserData(HISTORY_KEY);
return history == null ? Collections.emptyList()
: Collections.unmodifiableList(new ArrayList<>(history.values()));
}
/** Immutable record of one past attach: process, group, and debugger name. */
public static class HistoryItem {
@NotNull private final ProcessInfo myProcessInfo;
@NotNull private final XLocalAttachGroup myGroup;
@NotNull private final String myDebuggerName;
public HistoryItem(@NotNull ProcessInfo processInfo,
@NotNull XLocalAttachGroup group,
@NotNull String debuggerName) {
myProcessInfo = processInfo;
myGroup = group;
myDebuggerName = debuggerName;
}
@NotNull
public ProcessInfo getProcessInfo() {
return myProcessInfo;
}
@NotNull
public XLocalAttachGroup getGroup() {
return myGroup;
}
@NotNull
public String getDebuggerName() {
return myDebuggerName;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
HistoryItem item = (HistoryItem)o;
if (!myProcessInfo.equals(item.myProcessInfo)) return false;
if (!myGroup.equals(item.myGroup)) return false;
if (!myDebuggerName.equals(item.myDebuggerName)) return false;
return true;
}
@Override
public int hashCode() {
int result = myProcessInfo.hashCode();
result = 31 * result + myGroup.hashCode();
result = 31 * result + myDebuggerName.hashCode();
return result;
}
}
/**
 * One popup entry: a process plus the debuggers that can attach to it. When more
 * than one debugger is available, sub-items (one per debugger) form a sub-menu.
 */
public static class AttachItem {
@NotNull private final XLocalAttachGroup myGroup;
// True only for the first item of a group; drives the separator title.
private final boolean myIsFirstInGroup;
@NotNull private final String myGroupName;
@NotNull private final ProcessInfo myProcessInfo;
@NotNull private final List<XLocalAttachDebugger> myDebuggers;
// Index into myDebuggers of the debugger to use by default.
private final int mySelectedDebugger;
@NotNull private final List<AttachItem> mySubItems;
public AttachItem(@NotNull XLocalAttachGroup group,
boolean isFirstInGroup,
@NotNull ProcessInfo info,
@NotNull List<XLocalAttachDebugger> debuggers) {
this(group, isFirstInGroup, group.getGroupName(), info, debuggers, 0);
}
public AttachItem(@NotNull XLocalAttachGroup group,
boolean isFirstInGroup,
@NotNull String groupName,
@NotNull ProcessInfo info,
@NotNull List<XLocalAttachDebugger> debuggers,
int selectedDebugger) {
myGroupName = groupName;
assert !debuggers.isEmpty() : "debugger list should not be empty";
assert selectedDebugger >= 0 && selectedDebugger < debuggers.size() : "wrong selected debugger index";
myGroup = group;
myIsFirstInGroup = isFirstInGroup;
myProcessInfo = info;
myDebuggers = debuggers;
mySelectedDebugger = selectedDebugger;
// With multiple debuggers, build a single-debugger sub-item for each one.
if (debuggers.size() > 1) {
mySubItems = ContainerUtil.map(debuggers, debugger -> {
return new AttachItem(myGroup, false, myProcessInfo, Collections.singletonList(debugger));
});
}
else {
mySubItems = Collections.emptyList();
}
}
@NotNull
public ProcessInfo getProcessInfo() {
return myProcessInfo;
}
@NotNull
public XLocalAttachGroup getGroup() {
return myGroup;
}
// Non-null only for the first item in a group (used as the separator title).
@Nullable
public String getSeparatorTitle() {
return myIsFirstInGroup ? myGroupName : null;
}
@Nullable
public Icon getIcon(@NotNull Project project) {
return myGroup.getProcessIcon(project, myProcessInfo);
}
// Display text: "<pid> <display text truncated to 80 chars>".
@NotNull
public String getText(@NotNull Project project) {
String shortenedText = StringUtil.shortenTextWithEllipsis(myGroup.getProcessDisplayText(project, myProcessInfo), 80, 0);
return myProcessInfo.getPid() + " " + shortenedText;
}
@NotNull
public List<XLocalAttachDebugger> getDebuggers() {
return myDebuggers;
}
@NotNull
public XLocalAttachDebugger getSelectedDebugger() {
return myDebuggers.get(mySelectedDebugger);
}
@NotNull
public List<AttachItem> getSubItems() {
return mySubItems;
}
// Attaches the selected debugger to the process, reporting usage statistics
// and surfacing execution errors through the standard error UI.
public void startDebugSession(@NotNull Project project) {
XLocalAttachDebugger debugger = getSelectedDebugger();
UsageTrigger.trigger(ConvertUsagesUtil.ensureProperKey("debugger.attach.local"));
UsageTrigger.trigger(ConvertUsagesUtil.ensureProperKey("debugger.attach.local." + debugger.getDebuggerDisplayName()));
try {
debugger.attachDebugSession(project, myProcessInfo);
}
catch (ExecutionException e) {
ExecutionUtil.handleExecutionError(project, ToolWindowId.DEBUG, myProcessInfo.getExecutableName(), e);
}
}
}
/** Base popup step: choosing an item records it in history and starts the session. */
private static class MyBasePopupStep extends BaseListPopupStep<AttachItem> {
@NotNull final Project myProject;
public MyBasePopupStep(@NotNull Project project,
@Nullable String title,
List<? extends AttachItem> values) {
super(title, values);
myProject = project;
}
@Override
public boolean isSpeedSearchEnabled() {
return true;
}
@Override
public boolean isAutoSelectionEnabled() {
return false;
}
@Override
public boolean hasSubstep(AttachItem selectedValue) {
return !selectedValue.getSubItems().isEmpty();
}
@Override
public PopupStep onChosen(AttachItem selectedValue, boolean finalChoice) {
addToHistory(myProject, selectedValue);
selectedValue.startDebugSession(myProject);
return FINAL_CHOICE;
}
}
/** Top-level step listing processes; opens a debugger-selection sub-step when needed. */
private static class ProcessListStep extends MyBasePopupStep implements ListPopupStepEx<AttachItem> {
public ProcessListStep(@NotNull List<AttachItem> items, @NotNull Project project) {
super(project, XDebuggerBundle.message("xdebugger.attach.toLocal.popup.adText", ""), items);
}
@Nullable
@Override
public ListSeparator getSeparatorAbove(AttachItem value) {
String separatorTitle = value.getSeparatorTitle();
return separatorTitle == null ? null : new ListSeparator(separatorTitle);
}
@Override
public Icon getIconFor(AttachItem value) {
return value.getIcon(myProject);
}
@NotNull
@Override
public String getTextFor(AttachItem value) {
return value.getText(myProject);
}
@Nullable
@Override
public String getTooltipTextFor(AttachItem value) {
return null;
}
@Override
public void setEmptyText(@NotNull StatusText emptyText) {
emptyText.setText(XDebuggerBundle.message("xdebugger.attach.toLocal.popup.emptyText"));
}
// Non-final choice (item has sub-items): descend into the debugger list instead.
@Override
public PopupStep onChosen(AttachItem selectedValue, boolean finalChoice) {
if (finalChoice) {
return super.onChosen(selectedValue, true);
}
return new DebuggerListStep(selectedValue.getSubItems(), selectedValue.mySelectedDebugger);
}
@Override
public PopupStep onChosen(AttachItem selectedValue,
boolean finalChoice,
@MagicConstant(flagsFromClass = InputEvent.class) int eventModifiers) {
return onChosen(selectedValue, finalChoice);
}
/** Sub-step listing the debuggers available for one process. */
private class DebuggerListStep extends MyBasePopupStep {
public DebuggerListStep(List<AttachItem> items, int selectedItem) {
super(ProcessListStep.this.myProject,
XDebuggerBundle.message("xdebugger.attach.toLocal.popup.selectDebugger.title"), items);
setDefaultOptionIndex(selectedItem);
}
@NotNull
@Override
public String getTextFor(AttachItem value) {
return value.getSelectedDebugger().getDebuggerDisplayName();
}
}
}
}
| |
/**
* <copyright>
* </copyright>
*
* $Id$
*/
package net.opengis.gml.provider;
import java.util.Collection;
import java.util.List;
import net.opengis.citygml.building.provider.CityGMLEditPlugin;
import net.opengis.gml.GmlFactory;
import net.opengis.gml.GmlPackage;
import net.opengis.gml.LinearCSRefType;
import org.eclipse.emf.common.notify.AdapterFactory;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.util.ResourceLocator;
import org.eclipse.emf.ecore.EStructuralFeature;
import org.eclipse.emf.edit.provider.ComposeableAdapterFactory;
import org.eclipse.emf.edit.provider.IEditingDomainItemProvider;
import org.eclipse.emf.edit.provider.IItemLabelProvider;
import org.eclipse.emf.edit.provider.IItemPropertyDescriptor;
import org.eclipse.emf.edit.provider.IItemPropertySource;
import org.eclipse.emf.edit.provider.IStructuredItemContentProvider;
import org.eclipse.emf.edit.provider.ITreeItemContentProvider;
import org.eclipse.emf.edit.provider.ItemPropertyDescriptor;
import org.eclipse.emf.edit.provider.ItemProviderAdapter;
import org.eclipse.emf.edit.provider.ViewerNotification;
import org.w3._1999.xlink.ActuateType;
/**
* This is the item provider adapter for a {@link net.opengis.gml.LinearCSRefType} object.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public class LinearCSRefTypeItemProvider
  extends ItemProviderAdapter
  implements
    IEditingDomainItemProvider,
    IStructuredItemContentProvider,
    ITreeItemContentProvider,
    IItemLabelProvider,
    IItemPropertySource {
  /**
   * This constructs an instance from a factory and a notifier.
   * <!-- begin-user-doc -->
   * NOTE: this class is EMF-generated ({@code @generated}); edits outside the
   * begin-user-doc/end-user-doc regions are lost when the model is regenerated.
   * <!-- end-user-doc -->
   * @generated
   */
  public LinearCSRefTypeItemProvider(AdapterFactory adapterFactory) {
    super(adapterFactory);
  }

  /**
   * This returns the property descriptors for the adapted class.
   * <!-- begin-user-doc -->
   * Descriptors are built lazily on first access and cached in {@code itemPropertyDescriptors};
   * they cover the standard xlink attributes (actuate, arcrole, href, remoteSchema, role,
   * show, title, type) of {@code LinearCSRefType}.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public List<IItemPropertyDescriptor> getPropertyDescriptors(Object object) {
    if (itemPropertyDescriptors == null) {
      super.getPropertyDescriptors(object);

      addActuatePropertyDescriptor(object);
      addArcrolePropertyDescriptor(object);
      addHrefPropertyDescriptor(object);
      addRemoteSchemaPropertyDescriptor(object);
      addRolePropertyDescriptor(object);
      addShowPropertyDescriptor(object);
      addTitlePropertyDescriptor(object);
      addTypePropertyDescriptor(object);
    }
    return itemPropertyDescriptors;
  }

  /**
   * This adds a property descriptor for the Actuate feature.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  protected void addActuatePropertyDescriptor(Object object) {
    itemPropertyDescriptors.add
      (createItemPropertyDescriptor
        (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
         getResourceLocator(),
         getString("_UI_LinearCSRefType_actuate_feature"),
         getString("_UI_PropertyDescriptor_description", "_UI_LinearCSRefType_actuate_feature", "_UI_LinearCSRefType_type"),
         GmlPackage.eINSTANCE.getLinearCSRefType_Actuate(),
         true,
         false,
         false,
         ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
         null,
         null));
  }

  /**
   * This adds a property descriptor for the Arcrole feature.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  protected void addArcrolePropertyDescriptor(Object object) {
    itemPropertyDescriptors.add
      (createItemPropertyDescriptor
        (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
         getResourceLocator(),
         getString("_UI_LinearCSRefType_arcrole_feature"),
         getString("_UI_PropertyDescriptor_description", "_UI_LinearCSRefType_arcrole_feature", "_UI_LinearCSRefType_type"),
         GmlPackage.eINSTANCE.getLinearCSRefType_Arcrole(),
         true,
         false,
         false,
         ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
         null,
         null));
  }

  /**
   * This adds a property descriptor for the Href feature.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  protected void addHrefPropertyDescriptor(Object object) {
    itemPropertyDescriptors.add
      (createItemPropertyDescriptor
        (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
         getResourceLocator(),
         getString("_UI_LinearCSRefType_href_feature"),
         getString("_UI_PropertyDescriptor_description", "_UI_LinearCSRefType_href_feature", "_UI_LinearCSRefType_type"),
         GmlPackage.eINSTANCE.getLinearCSRefType_Href(),
         true,
         false,
         false,
         ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
         null,
         null));
  }

  /**
   * This adds a property descriptor for the Remote Schema feature.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  protected void addRemoteSchemaPropertyDescriptor(Object object) {
    itemPropertyDescriptors.add
      (createItemPropertyDescriptor
        (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
         getResourceLocator(),
         getString("_UI_LinearCSRefType_remoteSchema_feature"),
         getString("_UI_PropertyDescriptor_description", "_UI_LinearCSRefType_remoteSchema_feature", "_UI_LinearCSRefType_type"),
         GmlPackage.eINSTANCE.getLinearCSRefType_RemoteSchema(),
         true,
         false,
         false,
         ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
         null,
         null));
  }

  /**
   * This adds a property descriptor for the Role feature.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  protected void addRolePropertyDescriptor(Object object) {
    itemPropertyDescriptors.add
      (createItemPropertyDescriptor
        (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
         getResourceLocator(),
         getString("_UI_LinearCSRefType_role_feature"),
         getString("_UI_PropertyDescriptor_description", "_UI_LinearCSRefType_role_feature", "_UI_LinearCSRefType_type"),
         GmlPackage.eINSTANCE.getLinearCSRefType_Role(),
         true,
         false,
         false,
         ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
         null,
         null));
  }

  /**
   * This adds a property descriptor for the Show feature.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  protected void addShowPropertyDescriptor(Object object) {
    itemPropertyDescriptors.add
      (createItemPropertyDescriptor
        (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
         getResourceLocator(),
         getString("_UI_LinearCSRefType_show_feature"),
         getString("_UI_PropertyDescriptor_description", "_UI_LinearCSRefType_show_feature", "_UI_LinearCSRefType_type"),
         GmlPackage.eINSTANCE.getLinearCSRefType_Show(),
         true,
         false,
         false,
         ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
         null,
         null));
  }

  /**
   * This adds a property descriptor for the Title feature.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  protected void addTitlePropertyDescriptor(Object object) {
    itemPropertyDescriptors.add
      (createItemPropertyDescriptor
        (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
         getResourceLocator(),
         getString("_UI_LinearCSRefType_title_feature"),
         getString("_UI_PropertyDescriptor_description", "_UI_LinearCSRefType_title_feature", "_UI_LinearCSRefType_type"),
         GmlPackage.eINSTANCE.getLinearCSRefType_Title(),
         true,
         false,
         false,
         ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
         null,
         null));
  }

  /**
   * This adds a property descriptor for the Type feature.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  protected void addTypePropertyDescriptor(Object object) {
    itemPropertyDescriptors.add
      (createItemPropertyDescriptor
        (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
         getResourceLocator(),
         getString("_UI_LinearCSRefType_type_feature"),
         getString("_UI_PropertyDescriptor_description", "_UI_LinearCSRefType_type_feature", "_UI_LinearCSRefType_type"),
         GmlPackage.eINSTANCE.getLinearCSRefType_Type(),
         true,
         false,
         false,
         ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
         null,
         null));
  }

  /**
   * This specifies how to implement {@link #getChildren} and is used to deduce an appropriate feature for an
   * {@link org.eclipse.emf.edit.command.AddCommand}, {@link org.eclipse.emf.edit.command.RemoveCommand} or
   * {@link org.eclipse.emf.edit.command.MoveCommand} in {@link #createCommand}.
   * <!-- begin-user-doc -->
   * The only containment child of a LinearCSRefType is its LinearCS element.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public Collection<? extends EStructuralFeature> getChildrenFeatures(Object object) {
    if (childrenFeatures == null) {
      super.getChildrenFeatures(object);
      childrenFeatures.add(GmlPackage.eINSTANCE.getLinearCSRefType_LinearCS());
    }
    return childrenFeatures;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  protected EStructuralFeature getChildFeature(Object object, Object child) {
    // Check the type of the specified child object and return the proper feature to use for
    // adding (see {@link AddCommand}) it as a child.

    return super.getChildFeature(object, child);
  }

  /**
   * This returns LinearCSRefType.gif.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public Object getImage(Object object) {
    return overlayImage(object, getResourceLocator().getImage("full/obj16/LinearCSRefType"));
  }

  /**
   * This returns the label text for the adapted class.
   * <!-- begin-user-doc -->
   * The label shows the type name plus the actuate attribute value, when present.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public String getText(Object object) {
    ActuateType labelValue = ((LinearCSRefType)object).getActuate();
    String label = labelValue == null ? null : labelValue.toString();
    return label == null || label.length() == 0 ?
      getString("_UI_LinearCSRefType_type") :
      getString("_UI_LinearCSRefType_type") + " " + label;
  }

  /**
   * This handles model notifications by calling {@link #updateChildren} to update any cached
   * children and by creating a viewer notification, which it passes to {@link #fireNotifyChanged}.
   * <!-- begin-user-doc -->
   * Attribute changes refresh only the label; a LinearCS change refreshes the children tree.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public void notifyChanged(Notification notification) {
    updateChildren(notification);

    switch (notification.getFeatureID(LinearCSRefType.class)) {
      case GmlPackage.LINEAR_CS_REF_TYPE__ACTUATE:
      case GmlPackage.LINEAR_CS_REF_TYPE__ARCROLE:
      case GmlPackage.LINEAR_CS_REF_TYPE__HREF:
      case GmlPackage.LINEAR_CS_REF_TYPE__REMOTE_SCHEMA:
      case GmlPackage.LINEAR_CS_REF_TYPE__ROLE:
      case GmlPackage.LINEAR_CS_REF_TYPE__SHOW:
      case GmlPackage.LINEAR_CS_REF_TYPE__TITLE:
      case GmlPackage.LINEAR_CS_REF_TYPE__TYPE:
        fireNotifyChanged(new ViewerNotification(notification, notification.getNotifier(), false, true));
        return;
      case GmlPackage.LINEAR_CS_REF_TYPE__LINEAR_CS:
        fireNotifyChanged(new ViewerNotification(notification, notification.getNotifier(), true, false));
        return;
    }
    super.notifyChanged(notification);
  }

  /**
   * This adds {@link org.eclipse.emf.edit.command.CommandParameter}s describing the children
   * that can be created under this object.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  protected void collectNewChildDescriptors(Collection<Object> newChildDescriptors, Object object) {
    super.collectNewChildDescriptors(newChildDescriptors, object);

    newChildDescriptors.add
      (createChildParameter
        (GmlPackage.eINSTANCE.getLinearCSRefType_LinearCS(),
         GmlFactory.eINSTANCE.createLinearCSType()));
  }

  /**
   * Return the resource locator for this item provider's resources.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public ResourceLocator getResourceLocator() {
    return CityGMLEditPlugin.INSTANCE;
  }

}
| |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.keymap.impl;
import com.intellij.ide.WelcomeWizardUtil;
import com.intellij.openapi.actionSystem.IdeActions;
import com.intellij.openapi.components.*;
import com.intellij.openapi.keymap.Keymap;
import com.intellij.openapi.keymap.KeymapManagerListener;
import com.intellij.openapi.keymap.ex.KeymapManagerEx;
import com.intellij.openapi.options.BaseSchemeProcessor;
import com.intellij.openapi.options.SchemesManager;
import com.intellij.openapi.options.SchemesManagerFactory;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.InvalidDataException;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.util.containers.ContainerUtil;
import org.jdom.Element;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.awt.event.KeyEvent;
import java.util.*;
@State(
  name = "KeymapManager",
  storages = @Storage(file = StoragePathMacros.APP_CONFIG + "/keymap.xml", roamingType = RoamingType.PER_PLATFORM),
  additionalExportFile = KeymapManagerImpl.KEYMAPS_DIR_PATH
)
public class KeymapManagerImpl extends KeymapManagerEx implements PersistentStateComponent<Element>, ApplicationComponent {
  static final String KEYMAPS_DIR_PATH = StoragePathMacros.ROOT_CONFIG + "/keymaps";

  // Copy-on-write list: listeners may be removed while the list is being iterated
  // (see pollQueue/removeWeakListener) without ConcurrentModificationException.
  private final List<KeymapManagerListener> myListeners = ContainerUtil.createLockFreeCopyOnWriteList();
  // Keymap name restored from persisted state; resolved against loaded schemes in loadState().
  private String myActiveKeymapName;
  // Maps target action id -> source action id whose shortcuts the target reuses.
  private final Map<String, String> myBoundShortcuts = new HashMap<String, String>();

  @NonNls private static final String ACTIVE_KEYMAP = "active_keymap";
  @NonNls private static final String NAME_ATTRIBUTE = "name";

  private final SchemesManager<Keymap, KeymapImpl> mySchemesManager;

  public static boolean ourKeymapManagerInitialized = false;

  /**
   * Registers the bundled default keymaps, loads user-defined keymap schemes from disk and
   * selects the platform/wizard default as the active keymap.
   */
  KeymapManagerImpl(DefaultKeymap defaultKeymap, SchemesManagerFactory factory) {
    mySchemesManager = factory.createSchemesManager(KEYMAPS_DIR_PATH,
      new BaseSchemeProcessor<KeymapImpl>() {
        @NotNull
        @Override
        public KeymapImpl readScheme(@NotNull Element element) throws InvalidDataException {
          KeymapImpl keymap = new KeymapImpl();
          // A scheme may declare a parent keymap, so resolution needs every keymap, including defaults.
          keymap.readExternal(element, getAllIncludingDefaultsKeymaps());
          return keymap;
        }

        @Override
        public Element writeScheme(@NotNull final KeymapImpl scheme) {
          return scheme.writeExternal();
        }

        @NotNull
        @Override
        public State getState(@NotNull KeymapImpl scheme) {
          // Bundled (read-only) keymaps are never persisted.
          return scheme.canModify() ? State.POSSIBLY_CHANGED : State.NON_PERSISTENT;
        }
      },
      RoamingType.PER_USER);

    Keymap[] keymaps = defaultKeymap.getKeymaps();
    // The welcome wizard may have chosen a Mac keymap; otherwise fall back to the platform default.
    String systemDefaultKeymap = WelcomeWizardUtil.getWizardMacKeymap() != null
                                 ? WelcomeWizardUtil.getWizardMacKeymap()
                                 : defaultKeymap.getDefaultKeymapName();
    for (Keymap keymap : keymaps) {
      addKeymap(keymap);
      if (keymap.getName().equals(systemDefaultKeymap)) {
        setActiveKeymap(keymap);
      }
    }
    mySchemesManager.loadSchemes();

    if (Registry.is("editor.add.carets.on.double.control.arrows")) {
      // Double-tap-and-hold of the modifier key plus arrows drives multi-caret actions.
      int modifierKeyCode = SystemInfo.isMac ? KeyEvent.VK_ALT : KeyEvent.VK_CONTROL;
      ModifierKeyDoubleClickHandler.getInstance().registerAction(IdeActions.ACTION_EDITOR_CLONE_CARET_ABOVE, modifierKeyCode, KeyEvent.VK_UP);
      ModifierKeyDoubleClickHandler.getInstance().registerAction(IdeActions.ACTION_EDITOR_CLONE_CARET_BELOW, modifierKeyCode, KeyEvent.VK_DOWN);
      ModifierKeyDoubleClickHandler.getInstance().registerAction(IdeActions.ACTION_EDITOR_MOVE_CARET_LEFT_WITH_SELECTION, modifierKeyCode, KeyEvent.VK_LEFT);
      ModifierKeyDoubleClickHandler.getInstance().registerAction(IdeActions.ACTION_EDITOR_MOVE_CARET_RIGHT_WITH_SELECTION, modifierKeyCode, KeyEvent.VK_RIGHT);
      ModifierKeyDoubleClickHandler.getInstance().registerAction(IdeActions.ACTION_EDITOR_MOVE_LINE_START_WITH_SELECTION, modifierKeyCode, KeyEvent.VK_HOME);
      ModifierKeyDoubleClickHandler.getInstance().registerAction(IdeActions.ACTION_EDITOR_MOVE_LINE_END_WITH_SELECTION, modifierKeyCode, KeyEvent.VK_END);
    }

    //noinspection AssignmentToStaticFieldFromInstanceMethod
    ourKeymapManagerInitialized = true;
  }

  /** Returns all user-visible keymaps; internal keymaps (names starting with "$") are filtered out. */
  @Override
  public Keymap[] getAllKeymaps() {
    List<Keymap> answer = new ArrayList<Keymap>();
    for (Keymap keymap : mySchemesManager.getAllSchemes()) {
      if (!keymap.getPresentableName().startsWith("$")) {
        answer.add(keymap);
      }
    }
    return answer.toArray(new Keymap[answer.size()]);
  }

  /** Returns every loaded keymap, including internal/default ones (used for parent resolution). */
  public Keymap[] getAllIncludingDefaultsKeymaps() {
    Collection<Keymap> keymaps = mySchemesManager.getAllSchemes();
    return keymaps.toArray(new Keymap[keymaps.size()]);
  }

  @Override
  @Nullable
  public Keymap getKeymap(@NotNull String name) {
    return mySchemesManager.findSchemeByName(name);
  }

  @Override
  public Keymap getActiveKeymap() {
    return mySchemesManager.getCurrentScheme();
  }

  @Override
  public void setActiveKeymap(Keymap activeKeymap) {
    mySchemesManager.setCurrentSchemeName(activeKeymap == null ? null : activeKeymap.getName());
    fireActiveKeymapChanged();
  }

  @Override
  public void bindShortcuts(String sourceActionId, String targetActionId) {
    myBoundShortcuts.put(targetActionId, sourceActionId);
  }

  @Override
  public void unbindShortcuts(String targetActionId) {
    myBoundShortcuts.remove(targetActionId);
  }

  @Override
  public Set<String> getBoundActions() {
    return myBoundShortcuts.keySet();
  }

  /**
   * Resolves the transitive shortcut binding for {@code actionId}, guarding against cycles.
   * Returns null when the action is not bound (or the chain loops back to itself).
   */
  @Override
  public String getActionBinding(String actionId) {
    Set<String> visited = null;
    String id = actionId, next;
    while ((next = myBoundShortcuts.get(id)) != null) {
      if (visited == null) visited = ContainerUtil.newHashSet();
      // Stop once a binding repeats; otherwise a cyclic chain would never terminate.
      if (!visited.add(id = next)) break;
    }
    return Comparing.equal(id, actionId) ? null : id;
  }

  @Override
  public SchemesManager<Keymap, KeymapImpl> getSchemesManager() {
    return mySchemesManager;
  }

  public void addKeymap(Keymap keymap) {
    mySchemesManager.addNewScheme(keymap, true);
  }

  /** Removes every modifiable keymap and re-selects some remaining (bundled) keymap, if any. */
  public void removeAllKeymapsExceptUnmodifiable() {
    // Iterate backwards so removal does not shift the indices still to be visited.
    List<Keymap> schemes = mySchemesManager.getAllSchemes();
    for (int i = schemes.size() - 1; i >= 0; i--) {
      Keymap keymap = schemes.get(i);
      if (keymap.canModify()) {
        mySchemesManager.removeScheme(keymap);
      }
    }

    mySchemesManager.setCurrentSchemeName(null);
    Collection<Keymap> keymaps = mySchemesManager.getAllSchemes();
    if (!keymaps.isEmpty()) {
      mySchemesManager.setCurrentSchemeName(keymaps.iterator().next().getName());
    }
  }

  /** Serializes only the active keymap name; the keymaps themselves are stored as schemes. */
  @Override
  public Element getState() {
    Element result = new Element("component");
    // Capture the current scheme once: the previous implementation re-queried it after the
    // null check and could write an ACTIVE_KEYMAP element without a name attribute if the
    // scheme changed between the two calls.
    Keymap currentScheme = mySchemesManager.getCurrentScheme();
    if (currentScheme != null) {
      Element e = new Element(ACTIVE_KEYMAP);
      e.setAttribute(NAME_ATTRIBUTE, currentScheme.getName());
      result.addContent(e);
    }
    return result;
  }

  /** Restores the active keymap by name, if that keymap is (already) loaded. */
  @Override
  public void loadState(final Element state) {
    Element child = state.getChild(ACTIVE_KEYMAP);
    if (child != null) {
      myActiveKeymapName = child.getAttributeValue(NAME_ATTRIBUTE);
    }

    if (myActiveKeymapName != null) {
      Keymap keymap = getKeymap(myActiveKeymapName);
      if (keymap != null) {
        setActiveKeymap(keymap);
      }
    }
  }

  private void fireActiveKeymapChanged() {
    for (KeymapManagerListener listener : myListeners) {
      listener.activeKeymapChanged(mySchemesManager.getCurrentScheme());
    }
  }

  @Override
  public void addKeymapManagerListener(@NotNull KeymapManagerListener listener) {
    pollQueue();
    myListeners.add(listener);
  }

  /** Drops wrappers whose weakly-referenced delegate has been garbage-collected. */
  private void pollQueue() {
    // Removal during iteration is safe here: the COW list's iterator works on a snapshot.
    for (KeymapManagerListener listener : myListeners) {
      if (listener instanceof WeakKeymapManagerListener && ((WeakKeymapManagerListener)listener).isDead()) {
        myListeners.remove(listener);
      }
    }
  }

  @Override
  public void removeKeymapManagerListener(@NotNull KeymapManagerListener listener) {
    pollQueue();
    myListeners.remove(listener);
  }

  @Override
  public void addWeakListener(@NotNull KeymapManagerListener listener) {
    addKeymapManagerListener(new WeakKeymapManagerListener(this, listener));
  }

  @Override
  public void removeWeakListener(@NotNull KeymapManagerListener listenerToRemove) {
    // Removal during iteration is safe here: the COW list's iterator works on a snapshot.
    for (KeymapManagerListener listener : myListeners) {
      if (listener instanceof WeakKeymapManagerListener && ((WeakKeymapManagerListener)listener).isWrapped(listenerToRemove)) {
        myListeners.remove(listener);
      }
    }
  }

  @Override
  @NotNull
  public String getComponentName() {
    return "KeymapManager";
  }

  @Override
  public void initComponent() {
  }

  @Override
  public void disposeComponent() {
  }
}
| |
/*
* Copyright 2000-2017 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
*/
package com.intellij.codeInspection.ex;
import com.intellij.codeHighlighting.HighlightDisplayLevel;
import com.intellij.codeInsight.daemon.HighlightDisplayKey;
import com.intellij.codeInspection.InspectionEP;
import com.intellij.codeInspection.InspectionProfileEntry;
import com.intellij.configurationStore.SchemeDataHolder;
import com.intellij.lang.annotation.HighlightSeverity;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.options.SchemeState;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.*;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.profile.codeInspection.BaseInspectionProfileManager;
import com.intellij.profile.codeInspection.InspectionProfileManager;
import com.intellij.profile.codeInspection.ProjectInspectionProfileManager;
import com.intellij.project.ProjectKt;
import com.intellij.psi.PsiElement;
import com.intellij.psi.search.scope.packageSet.NamedScope;
import com.intellij.util.ArrayUtil;
import com.intellij.util.Consumer;
import com.intellij.util.containers.NotNullList;
import com.intellij.util.graph.DFSTBuilder;
import com.intellij.util.graph.GraphGenerator;
import com.intellij.util.graph.InboundSemiGraph;
import com.intellij.util.xmlb.annotations.Attribute;
import com.intellij.util.xmlb.annotations.Tag;
import com.intellij.util.xmlb.annotations.Transient;
import gnu.trove.THashMap;
import gnu.trove.THashSet;
import org.jdom.Element;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import java.util.*;
import java.util.function.Supplier;
public class InspectionProfileImpl extends NewInspectionProfile {
  // XML tag/attribute names used by profile (de)serialization.
  @NonNls static final String INSPECTION_TOOL_TAG = "inspection_tool";
  @NonNls static final String CLASS_TAG = "class";
  protected static final Logger LOG = Logger.getInstance(InspectionProfileImpl.class);
  @NonNls private static final String VALID_VERSION = "1.0";
  @NonNls private static final String VERSION_TAG = "version";
  @NonNls private static final String USED_LEVELS = "used_levels";
  // Forces eager inspection-tool initialization in tests.
  @TestOnly
  public static boolean INIT_INSPECTIONS;
  // Supplies the registered inspection tool wrappers for this profile.
  @NotNull protected final Supplier<List<InspectionToolWrapper>> myToolSupplier;
  // Raw tool settings read from XML but not yet applied to instantiated tools,
  // keyed by tool short name.
  protected final Map<String, Element> myUninitializedSettings = new TreeMap<>(); // accessed in EDT
  // Initialized tools keyed by short name.
  protected Map<String, ToolsImpl> myTools = new THashMap<>();
  // Names of tools whose settings differ from the base profile; computed lazily.
  protected volatile Set<String> myChangedToolNames;
  @Attribute("is_locked")
  protected boolean myLockedProfile;
  // Profile this one inherits defaults from; may be null for standalone profiles.
  protected final InspectionProfileImpl myBaseProfile;
  // Set when the profile exists to run exactly one inspection (see getSingleTool()).
  private volatile String myToolShortName;
  private String[] myScopesOrder;
  private String myDescription;
  // Holds the original serialized form; non-null until the profile is first modified.
  private SchemeDataHolder<? super InspectionProfileImpl> myDataHolder;
  /** Creates a profile inheriting from the application base profile. */
  public InspectionProfileImpl(@NotNull String profileName,
                               @NotNull Supplier<List<InspectionToolWrapper>> toolSupplier,
                               @NotNull BaseInspectionProfileManager profileManager) {
    this(profileName, toolSupplier, profileManager, InspectionProfileKt.getBASE_PROFILE(), null);
  }

  /** Creates a standalone application-level profile with the default tool registrar and no base profile. */
  public InspectionProfileImpl(@NotNull String profileName) {
    this(profileName, InspectionToolRegistrar.getInstance(), (BaseInspectionProfileManager)InspectionProfileManager.getInstance(), null, null);
  }

  /** Creates an application-level profile with an explicit (possibly null) base profile. */
  public InspectionProfileImpl(@NotNull String profileName,
                               @NotNull Supplier<List<InspectionToolWrapper>> toolSupplier,
                               @Nullable InspectionProfileImpl baseProfile) {
    this(profileName, toolSupplier, (BaseInspectionProfileManager)InspectionProfileManager.getInstance(), baseProfile, null);
  }

  /**
   * Master constructor. When a data holder is supplied the profile is considered unchanged,
   * so writeScheme() can return the original bytes without re-serializing.
   */
  protected InspectionProfileImpl(@NotNull String profileName,
                                  @NotNull Supplier<List<InspectionToolWrapper>> toolSupplier,
                                  @NotNull BaseInspectionProfileManager profileManager,
                                  @Nullable InspectionProfileImpl baseProfile,
                                  @Nullable SchemeDataHolder<? super InspectionProfileImpl> dataHolder) {
    super(profileName, profileManager);
    myToolSupplier = toolSupplier;
    myBaseProfile = baseProfile;
    myDataHolder = dataHolder;
    if (dataHolder != null) {
      schemeState = SchemeState.UNCHANGED;
    }
  }

  /** Creates a profile from serialized data, inheriting from the application base profile. */
  public InspectionProfileImpl(@NotNull String profileName,
                               @NotNull Supplier<List<InspectionToolWrapper>> toolSupplier,
                               @NotNull BaseInspectionProfileManager profileManager,
                               @Nullable SchemeDataHolder<? super InspectionProfileImpl> dataHolder) {
    this(profileName, toolSupplier, profileManager, InspectionProfileKt.getBASE_PROFILE(), dataHolder);
  }
private static boolean toolSettingsAreEqual(@NotNull String toolName, @NotNull InspectionProfileImpl profile1, @NotNull InspectionProfileImpl profile2) {
final Tools toolList1 = profile1.myTools.get(toolName);
final Tools toolList2 = profile2.myTools.get(toolName);
return Comparing.equal(toolList1, toolList2);
}
@NotNull
protected static InspectionToolWrapper copyToolSettings(@NotNull InspectionToolWrapper toolWrapper) {
final InspectionToolWrapper inspectionTool = toolWrapper.createCopy();
if (toolWrapper.isInitialized()) {
Element config = new Element("config");
ScopeToolState.tryWriteSettings(toolWrapper.getTool(), config);
ScopeToolState.tryReadSettings(inspectionTool.getTool(), config);
}
return inspectionTool;
}
  /**
   * Returns the highlight level configured for the given inspection at the given element.
   * Falls back to WARNING when the tool is unknown, and also repairs (and persists) the
   * level if the stored severity no longer exists in the severity registrar.
   */
  @Override
  public HighlightDisplayLevel getErrorLevel(@NotNull HighlightDisplayKey inspectionToolKey, PsiElement element) {
    Project project = element == null ? null : element.getProject();
    final ToolsImpl tools = getToolsOrNull(inspectionToolKey.toString(), project);
    HighlightDisplayLevel level = tools != null ? tools.getLevel(element) : HighlightDisplayLevel.WARNING;
    if (!getProfileManager().getSeverityRegistrar().isSeverityValid(level.getSeverity().getName())) {
      // The stored severity was removed (e.g. a deleted custom severity):
      // downgrade to WARNING and write the correction back to the profile.
      level = HighlightDisplayLevel.WARNING;
      setErrorLevel(inspectionToolKey, level, project);
    }
    return level;
  }
@Override
public void readExternal(@NotNull Element element) {
mySerializer.readExternal(this, element);
final Element highlightElement = element.getChild(USED_LEVELS);
if (highlightElement != null) {
// from old profiles
getProfileManager().getSeverityRegistrar().readExternal(highlightElement);
}
String version = element.getAttributeValue(VERSION_TAG);
if (!VALID_VERSION.equals(version)) {
InspectionToolWrapper[] tools = getInspectionTools(null);
for (Element toolElement : element.getChildren("inspection_tool")) {
String toolClassName = toolElement.getAttributeValue(CLASS_TAG);
String shortName = convertToShortName(toolClassName, tools);
if (shortName == null) {
continue;
}
toolElement.setAttribute(CLASS_TAG, shortName);
myUninitializedSettings.put(shortName, JDOMUtil.internElement(toolElement));
}
}
else {
List<Element> children = element.getChildren(INSPECTION_TOOL_TAG);
for (Element toolElement : children) {
myUninitializedSettings.put(toolElement.getAttributeValue(CLASS_TAG), JDOMUtil.internElement(toolElement));
}
}
}
@Nullable
private static String convertToShortName(@Nullable String displayName, InspectionToolWrapper[] tools) {
if (displayName == null) return null;
for (InspectionToolWrapper tool : tools) {
if (displayName.equals(tool.getDisplayName())) {
return tool.getShortName();
}
}
return null;
}
@NotNull
public Set<HighlightSeverity> getUsedSeverities() {
LOG.assertTrue(wasInitialized());
Set<HighlightSeverity> result = new THashSet<>();
for (Tools tools : myTools.values()) {
for (ScopeToolState state : tools.getTools()) {
result.add(state.getLevel().getSeverity());
}
}
return result;
}
  /** Serializes the profile and marks the scheme state as unchanged. */
  @Override
  @NotNull
  public Element writeScheme() {
    return writeScheme(true);
  }
@NotNull
public Element writeScheme(boolean setSchemeStateToUnchanged) {
if (myDataHolder != null) {
return myDataHolder.read();
}
Element element = new Element(PROFILE);
writeExternal(element);
if (isProjectLevel()) {
element.setAttribute("version", "1.0");
}
if (isProjectLevel() && ProjectKt.isDirectoryBased(((ProjectInspectionProfileManager)getProfileManager()).getProject())) {
return new Element("component").setAttribute("name", "InspectionProjectProfileManager").addContent(element);
}
if (setSchemeStateToUnchanged) {
schemeState = SchemeState.UNCHANGED;
}
return element;
}
  /**
   * Serializes the profile into {@code element}: version attribute first (compatibility),
   * then serializer-managed attributes, then one child element per tool.
   * Tools whose raw settings were never applied are written back verbatim; unchanged tools
   * (unless the profile is locked) are represented by a merge marker instead of full settings.
   */
  public void writeExternal(@NotNull Element element) {
    // must be first - compatibility
    writeVersion(element);

    mySerializer.writeExternal(this, element);

    synchronized (myLock) {
      if (!wasInitialized()) {
        // Tools were never instantiated: round-trip the parked raw settings untouched.
        for (Element el : myUninitializedSettings.values()) {
          element.addContent(el.clone());
        }
        return;
      }
    }

    Set<String> changedToolNames = getChangedToolNames();
    if (changedToolNames == null) {
      return;
    }

    // Deterministic output: write tools in sorted name order.
    List<String> allToolNames = new ArrayList<>(myTools.keySet());
    allToolNames.addAll(myUninitializedSettings.keySet());
    allToolNames.sort(null);
    for (String toolName : allToolNames) {
      Element toolElement = myUninitializedSettings.get(toolName);
      if (toolElement != null) {
        // Raw settings were never applied - preserve them byte-for-byte.
        element.addContent(toolElement.clone());
        continue;
      }

      if (!myLockedProfile && !changedToolNames.contains(toolName)) {
        // Unchanged tool: emit only a merge marker (if applicable) instead of full settings.
        markSettingsMerged(toolName, element);
        continue;
      }

      ToolsImpl toolList = myTools.get(toolName);
      LOG.assertTrue(toolList != null);
      Element inspectionElement = new Element(INSPECTION_TOOL_TAG);
      inspectionElement.setAttribute(CLASS_TAG, toolName);
      try {
        toolList.writeExternal(inspectionElement);
      }
      catch (WriteExternalException e) {
        LOG.error(e);
        continue;
      }

      if (!areSettingsMerged(toolName, inspectionElement)) {
        element.addContent(inspectionElement);
      }
    }
    getPathMacroManager().collapsePaths(element);
  }
  /** Stamps the current profile-format version attribute on the root element. */
  protected static void writeVersion(@NotNull Element element) {
    element.setAttribute(VERSION_TAG, VALID_VERSION);
  }
  /**
   * Writes a merge-marker element for a tool whose settings were merged but are now back to
   * defaults (and would otherwise serialize to nothing), so the merge is not re-applied on
   * the next load.
   */
  private void markSettingsMerged(@NotNull String toolName, @NotNull Element element) {
    //add marker if already merged but result is now default (-> empty node)
    String mergedName = InspectionElementsMergerBase.getMergedMarkerName(toolName);
    if (!myUninitializedSettings.containsKey(mergedName)) {
      InspectionElementsMergerBase merger = getMerger(toolName);
      if (merger != null && merger.markSettingsMerged(myUninitializedSettings)) {
        element.addContent(new Element(INSPECTION_TOOL_TAG).setAttribute(CLASS_TAG, mergedName));
      }
    }
  }
private boolean areSettingsMerged(String toolName, Element inspectionElement) {
//skip merged settings as they could be restored from already provided data
final InspectionElementsMergerBase merger = getMerger(toolName);
return merger != null && merger.areSettingsMerged(myUninitializedSettings, inspectionElement);
}
  /**
   * Walks the "main tool" chain of {@code toolWrapper} and accumulates the wrappers it
   * depends on into {@code dependentEntries}.
   */
  public void collectDependentInspections(@NotNull InspectionToolWrapper toolWrapper,
                                          @NotNull Set<InspectionToolWrapper<?, ?>> dependentEntries,
                                          Project project) {
    String mainToolId = toolWrapper.getMainToolId();

    if (mainToolId != null) {
      InspectionToolWrapper dependentEntryWrapper = getInspectionTool(mainToolId, project);

      if (dependentEntryWrapper == null) {
        LOG.error("Can't find main tool: '" + mainToolId+"' which was specified in "+toolWrapper);
        return;
      }

      // NOTE(review): recursion happens only when add() returns false, i.e. the wrapper was
      // ALREADY in the set - this looks inverted (one would expect to recurse on newly added
      // entries). Confirm against the intended dependency-collection semantics before changing.
      if (!dependentEntries.add(dependentEntryWrapper)) {
        collectDependentInspections(dependentEntryWrapper, dependentEntries, project);
      }
    }
  }
@Override
@Nullable
public InspectionToolWrapper getInspectionTool(@NotNull String shortName, @Nullable PsiElement element) {
final Tools toolList = getToolsOrNull(shortName, element == null ? null : element.getProject());
return toolList == null ? null : toolList.getInspectionTool(element);
}
  /** Returns the raw inspection (unwrapped from its wrapper) for the given short name, or null. */
  @Nullable
  @Override
  public InspectionProfileEntry getUnwrappedTool(@NotNull String shortName, @NotNull PsiElement element) {
    InspectionToolWrapper tool = getInspectionTool(shortName, element);
    return tool == null ? null : tool.getTool();
  }

  /** Typed variant: the key's string form is the tool's short name. */
  @Override
  public <T extends InspectionProfileEntry> T getUnwrappedTool(@NotNull Key<T> shortNameKey, @NotNull PsiElement element) {
    //noinspection unchecked
    return (T) getUnwrappedTool(shortNameKey.toString(), element);
  }
  /** Applies {@code modelConsumer} to a modifiable copy of this profile and commits the result. */
  public void modifyProfile(@NotNull Consumer<InspectionProfileModifiableModel> modelConsumer) {
    InspectionProfileModifiableModelKt.edit(this, it -> {
      modelConsumer.consume(it);
      return null;
    });
  }

  /**
   * Convenience wrapper around {@link #modifyProfile}: looks up the tool identified by
   * {@code shortNameKey} in the modifiable model and hands it to {@code toolConsumer}.
   */
  @Override
  public <T extends InspectionProfileEntry> void modifyToolSettings(@NotNull final Key<T> shortNameKey,
                                                                    @NotNull final PsiElement psiElement,
                                                                    @NotNull final Consumer<T> toolConsumer) {
    modifyProfile(model -> {
      InspectionProfileEntry tool = model.getUnwrappedTool(shortNameKey.toString(), psiElement);
      //noinspection unchecked
      toolConsumer.consume((T) tool);
    });
  }
/**
 * Warning: Usage of this method is discouraged because, if separate tool options are defined for
 * different scopes, it just returns the options for the first scope, which may lead to unexpected
 * results. Consider using {@link #getInspectionTool(String, PsiElement)} instead.
 *
 * @param shortName an inspection short name
 * @param project a project
 * @return an InspectionToolWrapper associated with this tool, or null if the tool is unknown.
 */
@Override
@Nullable
public InspectionToolWrapper getInspectionTool(@NotNull String shortName, Project project) {
  final ToolsImpl tools = getToolsOrNull(shortName, project);
  return tools != null ? tools.getTool() : null;
}
/** Returns the first registered tool whose ID equals {@code id}, resolved for {@code element}; null if none. */
public InspectionToolWrapper getToolById(@NotNull String id, @NotNull PsiElement element) {
  initInspectionTools(element.getProject());
  for (Tools toolList : myTools.values()) {
    final InspectionToolWrapper candidate = toolList.getInspectionTool(element);
    if (id.equals(candidate.getID())) {
      return candidate;
    }
  }
  return null;
}
/** Returns every registered tool whose ID equals {@code id}, resolved for {@code element}; null when none match. */
@Nullable
public List<InspectionToolWrapper> findToolsById(@NotNull String id, @NotNull PsiElement element) {
  List<InspectionToolWrapper> matches = null;
  initInspectionTools(element.getProject());
  for (Tools toolList : myTools.values()) {
    final InspectionToolWrapper candidate = toolList.getInspectionTool(element);
    if (!id.equals(candidate.getID())) {
      continue;
    }
    if (matches == null) {
      // Allocate lazily: the common case is no match at all.
      matches = new ArrayList<>();
    }
    matches.add(candidate);
  }
  return matches;
}
/** Short name of the single tool this profile is dedicated to, or null for a regular multi-tool profile. */
@Nullable
@Override
public String getSingleTool() {
  return myToolShortName;
}
/** Marks this profile as dedicated to the given tool (see {@link #getSingleTool()}). */
public void setSingleTool(@NotNull final String toolShortName) {
  myToolShortName = toolShortName;
}
/** The user-visible name of this profile; identical to its scheme name. */
@Override
@NotNull
public String getDisplayName() {
  return getName();
}
/** Propagates a change in scope configuration to every tool and notifies profile listeners. */
public void scopesChanged() {
  // An uninitialized profile has nothing to refresh; tools pick up scopes on initialization.
  if (!wasInitialized()) {
    return;
  }
  myTools.values().forEach(ToolsImpl::scopesChanged);
  getProfileManager().fireProfileChanged(this);
}
/** Whether the profile is locked; marked {@code @Transient} so the flag itself is not serialized here. */
@Transient
public boolean isProfileLocked() {
  return myLockedProfile;
}
/** Locks or unlocks the profile and marks the scheme as possibly changed so it gets re-saved. */
public void lockProfile(boolean isLocked) {
  myLockedProfile = isLocked;
  schemeState = SchemeState.POSSIBLY_CHANGED;
}
/** Returns one wrapper per registered tool, each resolved for {@code element} (which may be null). */
@Override
@NotNull
public InspectionToolWrapper[] getInspectionTools(@Nullable PsiElement element) {
  initInspectionTools(element == null ? null : element.getProject());
  return myTools.values().stream()
    .map(toolList -> toolList.getInspectionTool(element))
    .toArray(InspectionToolWrapper[]::new);
}
/** Returns every tools entry that is currently enabled in this profile. */
@Override
@NotNull
public List<Tools> getAllEnabledInspectionTools(Project project) {
  initInspectionTools(project);
  List<Tools> enabled = new ArrayList<>();
  for (final ToolsImpl toolList : myTools.values()) {
    if (!toolList.isEnabled()) {
      continue;
    }
    enabled.add(toolList);
  }
  return enabled;
}
/** Turns off the default-scope state of each named tool and marks the scheme dirty. */
public void disableToolByDefault(@NotNull Collection<String> toolShortNames, @Nullable Project project) {
  for (String shortName : toolShortNames) {
    getTools(shortName, project).setDefaultEnabled(false);
  }
  schemeState = SchemeState.POSSIBLY_CHANGED;
}
/** Returns the default-scope state of the given tool; getTools is expected to fail for unknown tools. */
@NotNull
public ScopeToolState getToolDefaultState(@NotNull String toolShortName, @Nullable Project project) {
  return getTools(toolShortName, project).getDefaultState();
}
/** Turns on the default-scope state of each named tool and marks the scheme dirty. */
public void enableToolsByDefault(@NotNull List<String> toolShortNames, Project project) {
  toolShortNames.forEach(shortName -> getTools(shortName, project).setDefaultEnabled(true));
  schemeState = SchemeState.POSSIBLY_CHANGED;
}
/**
 * Instantiates the tool wrappers for this profile from the supplier; overridable by subclasses.
 * The {@code project} parameter is unused in this base implementation.
 */
@NotNull
protected List<InspectionToolWrapper> createTools(@Nullable Project project) {
  return myToolSupplier.get();
}
/**
 * Lazily initializes the profile: reads any deferred scheme data, initializes the base profile,
 * instantiates and registers the tools, then computes a scope order from the inter-tool
 * dependency graph. Statement order matters: readExternal must run before tools are added, and
 * the digest is updated only after {@code initialized} is set.
 */
@Override
protected void initialize(@Nullable Project project) {
  // Consume deferred scheme data exactly once; clear the holder before parsing.
  SchemeDataHolder<? super InspectionProfileImpl> dataHolder = myDataHolder;
  if (dataHolder != null) {
    myDataHolder = null;
    Element element = dataHolder.read();
    if (element.getName().equals("component")) {
      // profile data may be wrapped in a <component> envelope
      element = element.getChild("profile");
    }
    assert element != null;
    readExternal(element);
  }
  // The base profile is consulted by addTool() for levels/enabled state, so init it first.
  if (myBaseProfile != null) {
    myBaseProfile.initInspectionTools(project);
  }
  final List<InspectionToolWrapper> tools;
  try {
    tools = createTools(project);
  }
  catch (ProcessCanceledException ignored) {
    // Initialization was cancelled; leave the profile uninitialized for a later retry.
    return;
  }
  // shortName -> names of the tools it depends on; filled while reading tool settings.
  final Map<String, List<String>> dependencies = new THashMap<>();
  for (InspectionToolWrapper toolWrapper : tools) {
    addTool(project, toolWrapper, dependencies);
  }
  // Topologically sort the dependency graph to obtain a deterministic scope order.
  DFSTBuilder<String> builder = new DFSTBuilder<>(GraphGenerator.generate(new InboundSemiGraph<String>() {
    @NotNull
    @Override
    public Collection<String> getNodes() {
      return dependencies.keySet();
    }
    @NotNull
    @Override
    public Iterator<String> getIn(String n) {
      return dependencies.get(n).iterator();
    }
  }));
  if (builder.isAcyclic()) {
    // A cyclic graph yields no usable order; myScopesOrder stays as it was.
    myScopesOrder = ArrayUtil.toStringArray(builder.getSortedNodes());
  }
  copyToolsConfigurations(project);
  initialized = true;
  if (dataHolder != null) {
    // should be only after set myInitialized
    dataHolder.updateDigest(this);
  }
}
/** Hook for subclasses to copy per-tool configuration after tools are registered; no-op by default. */
protected void copyToolsConfigurations(@Nullable Project project) {
}
/**
 * Registers one tool in the profile: ensures a {@link HighlightDisplayKey} exists, derives the
 * initial level/enabled state from the base profile (or the tool's defaults), restores any
 * previously parked settings, and stores the resulting {@code ToolsImpl} in {@code myTools}.
 *
 * @param dependencies out-parameter: shortName -> dependency names, filled while reading settings
 */
public void addTool(@Nullable Project project, @NotNull InspectionToolWrapper toolWrapper, @NotNull Map<String, List<String>> dependencies) {
  final String shortName = toolWrapper.getShortName();
  HighlightDisplayKey key = HighlightDisplayKey.find(shortName);
  if (key == null) {
    // First encounter: register a key; the display name is computed lazily via the extension when available.
    final InspectionEP extension = toolWrapper.getExtension();
    Computable<String> computable = extension == null ? new Computable.PredefinedValueComputable<>(toolWrapper.getDisplayName()) : extension::getDisplayName;
    if (toolWrapper instanceof LocalInspectionToolWrapper) {
      key = HighlightDisplayKey.register(shortName, computable, toolWrapper.getID(),
                                         ((LocalInspectionToolWrapper)toolWrapper).getAlternativeID());
    }
    else {
      key = HighlightDisplayKey.register(shortName, computable);
    }
  }
  if (key == null) {
    // Registration can fail; skip this tool rather than crash the whole initialization.
    LOG.error(shortName + " ; number of initialized tools: " + myTools.size());
    return;
  }
  // Initial level: the stricter of the base profile's level and the tool's own default.
  HighlightDisplayLevel baseLevel = myBaseProfile != null && myBaseProfile.getToolsOrNull(shortName, project) != null
                                    ? myBaseProfile.getErrorLevel(key, project)
                                    : HighlightDisplayLevel.DO_NOT_SHOW;
  HighlightDisplayLevel defaultLevel = toolWrapper.getDefaultLevel();
  HighlightDisplayLevel level = baseLevel.getSeverity().compareTo(defaultLevel.getSeverity()) > 0 ? baseLevel : defaultLevel;
  boolean enabled = myBaseProfile != null ? myBaseProfile.isToolEnabled(key) : toolWrapper.isEnabledByDefault();
  final ToolsImpl toolsList = new ToolsImpl(toolWrapper, level, !myLockedProfile && enabled, enabled);
  // Settings read before the tool existed are parked in myUninitializedSettings under its short name.
  Element element = myUninitializedSettings.remove(shortName);
  try {
    if (element != null) {
      // Clone before expandPaths: macro expansion mutates the element in place.
      element = element.clone();
      getPathMacroManager().expandPaths(element);
      toolsList.readExternal(element, getProfileManager(), dependencies);
    }
    else if (!myUninitializedSettings.containsKey(InspectionElementsMergerBase.getMergedMarkerName(shortName))) {
      // No direct settings and no merged marker: try to reconstruct settings from legacy source tools.
      final InspectionElementsMergerBase merger = getMerger(shortName);
      Element merged = merger == null ? null : merger.merge(myUninitializedSettings);
      if (merged != null) {
        getPathMacroManager().expandPaths(merged);
        toolsList.readExternal(merged, getProfileManager(), dependencies);
      }
      else if (isProfileLocked()) {
        // https://youtrack.jetbrains.com/issue/IDEA-158936
        // A locked profile must not auto-enable tools that have no stored settings.
        toolsList.setEnabled(false);
        if (toolsList.getNonDefaultTools() == null) {
          toolsList.getDefaultState().setEnabled(false);
        }
      }
    }
  }
  catch (InvalidDataException e) {
    LOG.error("Can't read settings for " + toolWrapper, e);
  }
  myTools.put(shortName, toolsList);
}
/**
 * Returns an {@link InspectionElementsMergerBase} registered for the tool, or null when none is.
 * A plain {@link InspectionElementsMerger} is adapted into the Base interface so callers can rely
 * on its element-level merge API.
 */
@Nullable
private static InspectionElementsMergerBase getMerger(String shortName) {
  final InspectionElementsMerger merger = InspectionElementsMerger.getMerger(shortName);
  if (merger instanceof InspectionElementsMergerBase) {
    return (InspectionElementsMergerBase)merger;
  }
  // Adapt a non-Base merger by delegating the two identifying methods.
  return merger != null ? new InspectionElementsMergerBase() {
    @NotNull
    @Override
    public String getMergedToolName() {
      return merger.getMergedToolName();
    }
    @NotNull
    @Override
    public String[] getSourceToolNames() {
      return merger.getSourceToolNames();
    }
  } : null;
}
/**
 * Scope order computed from the tool dependency graph, or null when unavailable.
 * NOTE(review): returns the internal array without copying — callers must not mutate it; confirm.
 */
@Nullable
@Transient
public String[] getScopesOrder() {
  return myScopesOrder;
}
/**
 * Overrides the computed scope order and marks the scheme dirty.
 * NOTE(review): stores the caller's array without copying — confirm callers do not reuse it.
 */
public void setScopesOrder(String[] scopesOrder) {
  myScopesOrder = scopesOrder;
  schemeState = SchemeState.POSSIBLY_CHANGED;
}
/** Default-state severity level of the tool identified by {@code key}; getTools is expected to fail for unknown tools. */
private HighlightDisplayLevel getErrorLevel(@NotNull HighlightDisplayKey key, @Nullable Project project) {
  return getTools(key.toString(), project).getLevel();
}
/** Test-only: wraps this profile in a fresh modifiable model. */
@NotNull
@TestOnly
public InspectionProfileModifiableModel getModifiableModel() {
  return new InspectionProfileModifiableModel(this);
}
/** Releases per-project resources of every enabled tool; no-op if the profile was never initialized. */
public void cleanup(@NotNull Project project) {
  if (!wasInitialized()) {
    return;
  }
  for (ToolsImpl toolList : myTools.values()) {
    if (!toolList.isEnabled()) {
      continue;
    }
    toolList.cleanupTools(project);
  }
}
/** Enables the tool across all scopes (delegates to setToolEnabled). */
public void enableTool(@NotNull String toolShortName, @Nullable Project project) {
  setToolEnabled(toolShortName, true, project);
}
/** Enables the tool only within {@code namedScope} and marks the scheme dirty. */
public void enableTool(@NotNull String inspectionTool, @NotNull NamedScope namedScope, Project project) {
  getTools(inspectionTool, project).enableTool(namedScope, project);
  schemeState = SchemeState.POSSIBLY_CHANGED;
}
/** Enables each listed tool within {@code namedScope}. */
public void enableTools(@NotNull List<String> inspectionTools, @NotNull NamedScope namedScope, Project project) {
  inspectionTools.forEach(shortName -> enableTool(shortName, namedScope, project));
}
/** Disables each listed tool within {@code namedScope} and marks the scheme dirty. */
public void disableTools(@NotNull List<String> inspectionTools, NamedScope namedScope, @NotNull Project project) {
  inspectionTools.forEach(shortName -> getTools(shortName, project).disableTool(namedScope, project));
  schemeState = SchemeState.POSSIBLY_CHANGED;
}
/** Sets the default-scope severity level for the tool identified by {@code key} and marks the scheme dirty. */
public void setErrorLevel(@NotNull HighlightDisplayKey key, @NotNull HighlightDisplayLevel level, Project project) {
  getTools(key.toString(), project).setLevel(level);
  schemeState = SchemeState.POSSIBLY_CHANGED;
}
/** True when the tool identified by {@code key} is known and enabled for {@code element}; a null key is treated as disabled. */
@Override
public boolean isToolEnabled(@Nullable HighlightDisplayKey key, @Nullable PsiElement element) {
  if (key == null) {
    return false;
  }
  Project project = element == null ? null : element.getProject();
  Tools toolState = getToolsOrNull(key.toString(), project);
  if (toolState == null) {
    return false;
  }
  return toolState.isEnabled(element);
}
/** True when at least one tool is enabled, i.e. running this profile would actually do something. */
@Override
public boolean isExecutable(@Nullable Project project) {
  initInspectionTools(project);
  return myTools.values().stream().anyMatch(ToolsImpl::isEnabled);
}
/** Optional user-visible description; serialized as a tag. May be null. */
@Tag
public String getDescription() {
  return myDescription;
}
/** Sets the description, normalizing blank/empty strings to null, and marks the scheme dirty. */
public void setDescription(@Nullable String description) {
  myDescription = StringUtil.nullize(description);
  schemeState = SchemeState.POSSIBLY_CHANGED;
}
/**
 * Imports legacy scope-based settings from {@code element}: for each &lt;scope&gt; entry that
 * references another profile, copies that profile's tool settings into this profile under the
 * resolved scope, then collapses scopes that ended up identical to the default state.
 */
public void convert(@NotNull Element element, @NotNull Project project) {
  final Element scopes = element.getChild("scopes");
  if (scopes == null) {
    return;
  }
  initInspectionTools(project);
  for (Element scopeElement : scopes.getChildren(SCOPE)) {
    final String profile = scopeElement.getAttributeValue(PROFILE);
    InspectionProfileImpl inspectionProfile = profile == null ? null : getProfileManager().getProfile(profile);
    NamedScope scope = inspectionProfile == null ? null : getProfileManager().getScopesManager().getScope(scopeElement.getAttributeValue(NAME));
    if (scope == null) {
      // Unresolved profile or scope: nothing to import for this entry.
      continue;
    }
    for (InspectionToolWrapper toolWrapper : inspectionProfile.getInspectionTools(null)) {
      final HighlightDisplayKey key = HighlightDisplayKey.find(toolWrapper.getShortName());
      try {
        // Copy the tool settings so the source profile stays independent of this one.
        InspectionToolWrapper toolWrapperCopy = copyToolSettings(toolWrapper);
        HighlightDisplayLevel errorLevel = inspectionProfile.getErrorLevel(key, null, project);
        getTools(toolWrapper.getShortName(), project)
          .addTool(scope, toolWrapperCopy, inspectionProfile.isToolEnabled(key), errorLevel);
      }
      catch (Exception e) {
        LOG.error(e);
      }
    }
  }
  reduceConvertedScopes();
}
/**
 * Post-conversion cleanup: a tool is enabled if any of its scope states is enabled, and when all
 * non-default states are equal to the default one the redundant scopes are removed entirely.
 */
private void reduceConvertedScopes() {
  for (ToolsImpl tools : myTools.values()) {
    final ScopeToolState toolState = tools.getDefaultState();
    final List<ScopeToolState> nonDefaultTools = tools.getNonDefaultTools();
    if (nonDefaultTools != null) {
      boolean equal = true;
      boolean isEnabled = toolState.isEnabled();
      for (ScopeToolState state : nonDefaultTools) {
        // Tool counts as enabled if any scope state (default or not) is enabled.
        isEnabled |= state.isEnabled();
        if (!state.equalTo(toolState)) {
          equal = false;
        }
      }
      tools.setEnabled(isEnabled);
      if (equal) {
        // All scope states match the default one: drop them as redundant.
        tools.removeAllScopes();
      }
    }
  }
}
/** Collects every scope state of every registered tool into a single list. */
@NotNull
public List<ScopeToolState> getAllTools() {
  initInspectionTools();
  List<ScopeToolState> states = new NotNullList<>();
  myTools.values().forEach(tools -> tools.collectTools(states));
  return states;
}
/** Returns the default-scope state of every registered tool. */
@NotNull
public List<ScopeToolState> getDefaultStates(@Nullable Project project) {
  initInspectionTools(project);
  List<ScopeToolState> defaults = new ArrayList<>(myTools.size());
  for (Tools tools : myTools.values()) {
    defaults.add(tools.getDefaultState());
  }
  return defaults;
}
/** Returns a fresh copy of the tool's non-default scope states (empty when there are none). */
@NotNull
public List<ScopeToolState> getNonDefaultTools(@NotNull String shortName, Project project) {
  // Copy into a new list so callers never see (or mutate) the tool's internal one.
  final List<ScopeToolState> copy = new ArrayList<>();
  final List<ScopeToolState> nonDefault = getTools(shortName, project).getNonDefaultTools();
  if (nonDefault != null) {
    copy.addAll(nonDefault);
  }
  return copy;
}
/** True if the tool identified by {@code key} is enabled within {@code namedScope}; getTools fails for unknown tools. */
public boolean isToolEnabled(@NotNull HighlightDisplayKey key, NamedScope namedScope, Project project) {
  return getTools(key.toString(), project).isEnabled(namedScope,project);
}
/** Removes the named scope from the given tool and marks the scheme dirty. */
public void removeScope(@NotNull String toolShortName, @NotNull String scopeName, Project project) {
  getTools(toolShortName, project).removeScope(scopeName);
  schemeState = SchemeState.POSSIBLY_CHANGED;
}
/** Removes the named scope from each listed tool. */
public void removeScopes(@NotNull List<String> shortNames, @NotNull String scopeName, Project project) {
  shortNames.forEach(shortName -> removeScope(shortName, scopeName, project));
}
/**
 * Names of the tools whose settings differ from the base profile, computed lazily with
 * double-checked locking on {@code myLock} and cached until {@link #profileChanged()} resets it.
 *
 * @return null if it has no base profile
 */
@Nullable
private Set<String> getChangedToolNames() {
  if (myBaseProfile == null) return null;
  if (myChangedToolNames == null) {
    synchronized (myLock) {
      // Re-check under the lock: another thread may have filled the cache meanwhile.
      if (myChangedToolNames == null) {
        initInspectionTools(null);
        Set<String> names = myTools.keySet();
        Set<String> map = new THashSet<>(names.size());
        for (String toolId : names) {
          // Record only tools whose settings deviate from the base profile.
          if (!toolSettingsAreEqual(toolId, myBaseProfile, this)) {
            map.add(toolId);
          }
        }
        myChangedToolNames = map;
        return map;
      }
    }
  }
  return myChangedToolNames;
}
/** Invalidates the cached changed-tool names (see {@link #getChangedToolNames()}) and marks the scheme dirty. */
public void profileChanged() {
  myChangedToolNames = null;
  schemeState = SchemeState.POSSIBLY_CHANGED;
}
/** Severity level of the tool within {@code scope}; falls back to WARNING when the tool is unknown. */
@NotNull
@Transient
public HighlightDisplayLevel getErrorLevel(@NotNull HighlightDisplayKey key, NamedScope scope, Project project) {
  final ToolsImpl tools = getToolsOrNull(key.toString(), project);
  return tools != null ? tools.getLevel(scope, project) : HighlightDisplayLevel.WARNING;
}
/** Prepends a scope-specific state for the tool, so the new scope takes precedence over existing ones. */
public ScopeToolState addScope(@NotNull InspectionToolWrapper toolWrapper,
                               NamedScope scope,
                               @NotNull HighlightDisplayLevel level,
                               boolean enabled,
                               Project project) {
  return getTools(toolWrapper.getShortName(), project).prependTool(scope, toolWrapper, enabled, level);
}
/** Sets the severity level for the tool within the named scope and marks the scheme dirty. */
public void setErrorLevel(@NotNull HighlightDisplayKey key, @NotNull HighlightDisplayLevel level, String scopeName, Project project) {
  getTools(key.toString(), project).setLevel(level, scopeName, project);
  schemeState = SchemeState.POSSIBLY_CHANGED;
}
/** Applies the severity level to every listed key within the named scope. */
public void setErrorLevel(@NotNull List<HighlightDisplayKey> keys, @NotNull HighlightDisplayLevel level, String scopeName, Project project) {
  keys.forEach(key -> setErrorLevel(key, level, scopeName, project));
}
/** Returns the tools entry registered under {@code name}, initializing the profile first; null if unknown. */
@Override
@Nullable
public ToolsImpl getToolsOrNull(@NotNull String name, @Nullable Project project) {
  initInspectionTools(project);
  return myTools.get(name);
}
/** Enables every tool registered in the profile. */
public void enableAllTools(Project project) {
  for (InspectionToolWrapper wrapper : getInspectionTools(null)) {
    enableTool(wrapper.getShortName(), project);
  }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.builder.component.dsl;
import javax.annotation.Generated;
import org.apache.camel.Component;
import org.apache.camel.builder.component.AbstractComponentBuilder;
import org.apache.camel.builder.component.ComponentBuilder;
import org.apache.camel.component.debezium.DebeziumSqlserverComponent;
/**
* Represents a Debezium SQL Server endpoint which is used to capture changes in
 * SQL Server database so that applications can see those changes and
* respond to them.
*
* Generated by camel-package-maven-plugin - do not edit this file!
*/
@Generated("org.apache.camel.maven.packaging.ComponentDslMojo")
public interface DebeziumSqlserverComponentBuilderFactory {
/**
* Debezium SQL Server Connector (camel-debezium-sqlserver)
* Represents a Debezium SQL Server endpoint which is used to capture
 * changes in SQL Server database so that applications can see those
* changes and respond to them.
*
* Category: database,sql,sqlserver
* Since: 3.0
* Maven coordinates: org.apache.camel:camel-debezium-sqlserver
*/
static DebeziumSqlserverComponentBuilder debeziumSqlserver() {
return new DebeziumSqlserverComponentBuilderImpl();
}
/**
* Builder for the Debezium SQL Server Connector component.
*/
interface DebeziumSqlserverComponentBuilder
extends
ComponentBuilder<DebeziumSqlserverComponent> {
/**
* Additional properties for debezium components in case they can't be
* set directly on the camel configurations (e.g: setting Kafka Connect
* properties needed by Debezium engine, for example setting
* KafkaOffsetBackingStore), the properties have to be prefixed with
* additionalProperties.. E.g:
* additionalProperties.transactional.id=12345&additionalProperties.schema.registry.url=http://localhost:8811/avro.
*
* The option is a: <code>java.util.Map<java.lang.String,
* java.lang.Object></code> type.
*
* Group: common
*/
default DebeziumSqlserverComponentBuilder additionalProperties(
java.util.Map<java.lang.String, java.lang.Object> additionalProperties) {
doSetProperty("additionalProperties", additionalProperties);
return this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions occurred while the consumer is trying to
* pickup incoming messages, or the likes, will now be processed as a
* message and handled by the routing Error Handler. By default the
* consumer will use the org.apache.camel.spi.ExceptionHandler to deal
* with exceptions, that will be logged at WARN or ERROR level and
* ignored.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*/
default DebeziumSqlserverComponentBuilder bridgeErrorHandler(
boolean bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* Allow pre-configured Configurations to be set.
*
* The option is a:
* <code>org.apache.camel.component.debezium.configuration.SqlServerConnectorEmbeddedDebeziumConfiguration</code> type.
*
* Group: consumer
*/
default DebeziumSqlserverComponentBuilder configuration(
org.apache.camel.component.debezium.configuration.SqlServerConnectorEmbeddedDebeziumConfiguration configuration) {
doSetProperty("configuration", configuration);
return this;
}
/**
* The Converter class that should be used to serialize and deserialize
* key data for offsets. The default is JSON converter.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: org.apache.kafka.connect.json.JsonConverter
* Group: consumer
*/
default DebeziumSqlserverComponentBuilder internalKeyConverter(
java.lang.String internalKeyConverter) {
doSetProperty("internalKeyConverter", internalKeyConverter);
return this;
}
/**
* The Converter class that should be used to serialize and deserialize
* value data for offsets. The default is JSON converter.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: org.apache.kafka.connect.json.JsonConverter
* Group: consumer
*/
default DebeziumSqlserverComponentBuilder internalValueConverter(
java.lang.String internalValueConverter) {
doSetProperty("internalValueConverter", internalValueConverter);
return this;
}
/**
* The name of the Java class of the commit policy. It defines when
* offsets commit has to be triggered based on the number of events
* processed and the time elapsed since the last commit. This class must
* implement the interface 'OffsetCommitPolicy'. The default is a
* periodic commit policy based upon time intervals.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default:
* io.debezium.embedded.spi.OffsetCommitPolicy.PeriodicCommitOffsetPolicy
* Group: consumer
*/
default DebeziumSqlserverComponentBuilder offsetCommitPolicy(
java.lang.String offsetCommitPolicy) {
doSetProperty("offsetCommitPolicy", offsetCommitPolicy);
return this;
}
/**
* Maximum number of milliseconds to wait for records to flush and
* partition offset data to be committed to offset storage before
* cancelling the process and restoring the offset data to be committed
* in a future attempt. The default is 5 seconds.
*
* The option is a: <code>long</code> type.
*
* Default: 5000
* Group: consumer
*/
default DebeziumSqlserverComponentBuilder offsetCommitTimeoutMs(
long offsetCommitTimeoutMs) {
doSetProperty("offsetCommitTimeoutMs", offsetCommitTimeoutMs);
return this;
}
/**
* Interval at which to try committing offsets. The default is 1 minute.
*
* The option is a: <code>long</code> type.
*
* Default: 60000
* Group: consumer
*/
default DebeziumSqlserverComponentBuilder offsetFlushIntervalMs(
long offsetFlushIntervalMs) {
doSetProperty("offsetFlushIntervalMs", offsetFlushIntervalMs);
return this;
}
/**
* The name of the Java class that is responsible for persistence of
* connector offsets.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: org.apache.kafka.connect.storage.FileOffsetBackingStore
* Group: consumer
*/
default DebeziumSqlserverComponentBuilder offsetStorage(
java.lang.String offsetStorage) {
doSetProperty("offsetStorage", offsetStorage);
return this;
}
/**
* Path to file where offsets are to be stored. Required when
* offset.storage is set to the FileOffsetBackingStore.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*/
default DebeziumSqlserverComponentBuilder offsetStorageFileName(
java.lang.String offsetStorageFileName) {
doSetProperty("offsetStorageFileName", offsetStorageFileName);
return this;
}
/**
* The number of partitions used when creating the offset storage topic.
* Required when offset.storage is set to the 'KafkaOffsetBackingStore'.
*
* The option is a: <code>int</code> type.
*
* Group: consumer
*/
default DebeziumSqlserverComponentBuilder offsetStoragePartitions(
int offsetStoragePartitions) {
doSetProperty("offsetStoragePartitions", offsetStoragePartitions);
return this;
}
/**
* Replication factor used when creating the offset storage topic.
* Required when offset.storage is set to the KafkaOffsetBackingStore.
*
* The option is a: <code>int</code> type.
*
* Group: consumer
*/
default DebeziumSqlserverComponentBuilder offsetStorageReplicationFactor(
int offsetStorageReplicationFactor) {
doSetProperty("offsetStorageReplicationFactor", offsetStorageReplicationFactor);
return this;
}
/**
* The name of the Kafka topic where offsets are to be stored. Required
* when offset.storage is set to the KafkaOffsetBackingStore.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*/
default DebeziumSqlserverComponentBuilder offsetStorageTopic(
java.lang.String offsetStorageTopic) {
doSetProperty("offsetStorageTopic", offsetStorageTopic);
return this;
}
/**
* Whether the component should use basic property binding (Camel 2.x)
* or the newer property binding with additional capabilities.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: advanced
*/
default DebeziumSqlserverComponentBuilder basicPropertyBinding(
boolean basicPropertyBinding) {
doSetProperty("basicPropertyBinding", basicPropertyBinding);
return this;
}
/**
* Description is not available here, please check Debezium website for
* corresponding key 'column.blacklist' description.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: sqlserver
*/
default DebeziumSqlserverComponentBuilder columnBlacklist(
java.lang.String columnBlacklist) {
doSetProperty("columnBlacklist", columnBlacklist);
return this;
}
/**
* The name of the database the connector should be monitoring. When
* working with a multi-tenant set-up, must be set to the CDB name.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: sqlserver
*/
default DebeziumSqlserverComponentBuilder databaseDbname(
java.lang.String databaseDbname) {
doSetProperty("databaseDbname", databaseDbname);
return this;
}
/**
* The name of the DatabaseHistory class that should be used to store
* and recover database schema changes. The configuration properties for
* the history are prefixed with the 'database.history.' string.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: io.debezium.relational.history.FileDatabaseHistory
* Group: sqlserver
*/
default DebeziumSqlserverComponentBuilder databaseHistory(
java.lang.String databaseHistory) {
doSetProperty("databaseHistory", databaseHistory);
return this;
}
/**
* The path to the file that will be used to record the database
* history.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: sqlserver
*/
default DebeziumSqlserverComponentBuilder databaseHistoryFileFilename(
java.lang.String databaseHistoryFileFilename) {
doSetProperty("databaseHistoryFileFilename", databaseHistoryFileFilename);
return this;
}
/**
* A list of host/port pairs that the connector will use for
* establishing the initial connection to the Kafka cluster for
* retrieving database schema history previously stored by the
* connector. This should point to the same Kafka cluster used by the
* Kafka Connect process.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: sqlserver
*/
default DebeziumSqlserverComponentBuilder databaseHistoryKafkaBootstrapServers(
java.lang.String databaseHistoryKafkaBootstrapServers) {
doSetProperty("databaseHistoryKafkaBootstrapServers", databaseHistoryKafkaBootstrapServers);
return this;
}
/**
* The number of attempts in a row that no data are returned from Kafka
* before recover completes. The maximum amount of time to wait after
* receiving no data is (recovery.attempts) x
* (recovery.poll.interval.ms).
*
* The option is a: <code>int</code> type.
*
* Default: 100
* Group: sqlserver
*/
default DebeziumSqlserverComponentBuilder databaseHistoryKafkaRecoveryAttempts(
int databaseHistoryKafkaRecoveryAttempts) {
doSetProperty("databaseHistoryKafkaRecoveryAttempts", databaseHistoryKafkaRecoveryAttempts);
return this;
}
/**
* The number of milliseconds to wait while polling for persisted data
* during recovery.
*
* The option is a: <code>int</code> type.
*
* Default: 100
* Group: sqlserver
*/
default DebeziumSqlserverComponentBuilder databaseHistoryKafkaRecoveryPollIntervalMs(
int databaseHistoryKafkaRecoveryPollIntervalMs) {
doSetProperty("databaseHistoryKafkaRecoveryPollIntervalMs", databaseHistoryKafkaRecoveryPollIntervalMs);
return this;
}
/**
* The name of the topic for the database schema history.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: sqlserver
*/
default DebeziumSqlserverComponentBuilder databaseHistoryKafkaTopic(
java.lang.String databaseHistoryKafkaTopic) {
doSetProperty("databaseHistoryKafkaTopic", databaseHistoryKafkaTopic);
return this;
}
/**
* Resolvable hostname or IP address of the SQL Server database server.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: sqlserver
*/
default DebeziumSqlserverComponentBuilder databaseHostname(
java.lang.String databaseHostname) {
doSetProperty("databaseHostname", databaseHostname);
return this;
}
/**
* Password of the SQL Server database user to be used when connecting
* to the database.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: sqlserver
*/
default DebeziumSqlserverComponentBuilder databasePassword(
java.lang.String databasePassword) {
doSetProperty("databasePassword", databasePassword);
return this;
}
/**
* Port of the SQL Server database server.
*
* The option is a: <code>int</code> type.
*
* Default: 1433
* Group: sqlserver
*/
default DebeziumSqlserverComponentBuilder databasePort(int databasePort) {
doSetProperty("databasePort", databasePort);
return this;
}
/**
* Unique name that identifies the database server and all recorded
* offsets, and that is used as a prefix for all schemas and topics.
* Each distinct installation should have a separate namespace and be
* monitored by at most one Debezium connector.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: sqlserver
*/
default DebeziumSqlserverComponentBuilder databaseServerName(
java.lang.String databaseServerName) {
doSetProperty("databaseServerName", databaseServerName);
return this;
}
/**
* The timezone of the server used to correctly shift the commit
* transaction timestamp on the client sideOptions include: Any valid
* Java ZoneId.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: sqlserver
*/
default DebeziumSqlserverComponentBuilder databaseServerTimezone(
java.lang.String databaseServerTimezone) {
doSetProperty("databaseServerTimezone", databaseServerTimezone);
return this;
}
/**
* Name of the SQL Server database user to be used when connecting to
* the database.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: sqlserver
*/
default DebeziumSqlserverComponentBuilder databaseUser(
java.lang.String databaseUser) {
doSetProperty("databaseUser", databaseUser);
return this;
}
/**
* Specify how DECIMAL and NUMERIC columns should be represented in
* change events, including:'precise' (the default) uses
* java.math.BigDecimal to represent values, which are encoded in the
* change events using a binary representation and Kafka Connect's
* 'org.apache.kafka.connect.data.Decimal' type; 'string' uses string to
* represent values; 'double' represents values using Java's 'double',
* which may not offer the precision but will be far easier to use in
* consumers.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: precise
* Group: sqlserver
*/
default DebeziumSqlserverComponentBuilder decimalHandlingMode(
java.lang.String decimalHandlingMode) {
doSetProperty("decimalHandlingMode", decimalHandlingMode);
return this;
}
/**
* Specify how failures during processing of events (i.e. when
* encountering a corrupted event) should be handled, including:'fail'
* (the default) an exception indicating the problematic event and its
* position is raised, causing the connector to be stopped; 'warn' the
* problematic event and its position will be logged and the event will
* be skipped;'ignore' the problematic event will be skipped.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: fail
* Group: sqlserver
*/
default DebeziumSqlserverComponentBuilder eventProcessingFailureHandlingMode(
java.lang.String eventProcessingFailureHandlingMode) {
doSetProperty("eventProcessingFailureHandlingMode", eventProcessingFailureHandlingMode);
return this;
}
/**
* Length of an interval in milli-seconds in in which the connector
* periodically sends heartbeat messages to a heartbeat topic. Use 0 to
* disable heartbeat messages. Disabled by default.
*
* The option is a: <code>int</code> type.
*
* Default: 0
* Group: sqlserver
*/
default DebeziumSqlserverComponentBuilder heartbeatIntervalMs(
int heartbeatIntervalMs) {
doSetProperty("heartbeatIntervalMs", heartbeatIntervalMs);
return this;
}
/**
* The prefix that is used to name heartbeat topics.Defaults to
* __debezium-heartbeat.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: __debezium-heartbeat
* Group: sqlserver
*/
default DebeziumSqlserverComponentBuilder heartbeatTopicsPrefix(
java.lang.String heartbeatTopicsPrefix) {
doSetProperty("heartbeatTopicsPrefix", heartbeatTopicsPrefix);
return this;
}
/**
* Maximum size of each batch of source records. Defaults to 2048.
*
* The option is a: <code>int</code> type.
*
* Default: 2048
* Group: sqlserver
*/
default DebeziumSqlserverComponentBuilder maxBatchSize(int maxBatchSize) {
doSetProperty("maxBatchSize", maxBatchSize);
return this;
}
/**
* Maximum size of the queue for change events read from the database
* log but not yet recorded or forwarded. Defaults to 8192, and should
* always be larger than the maximum batch size.
*
* The option is a: <code>int</code> type.
*
* Default: 8192
* Group: sqlserver
*/
default DebeziumSqlserverComponentBuilder maxQueueSize(int maxQueueSize) {
doSetProperty("maxQueueSize", maxQueueSize);
return this;
}
/**
* A semicolon-separated list of expressions that match fully-qualified
* tables and column(s) to be used as message key. Each expression must
* match the pattern ':',where the table names could be defined as
* (DB_NAME.TABLE_NAME) or (SCHEMA_NAME.TABLE_NAME), depending on the
* specific connector,and the key columns are a comma-separated list of
* columns representing the custom key. For any table without an
* explicit key configuration the table's primary key column(s) will be
* used as message key.Example:
* dbserver1.inventory.orderlines:orderId,orderLineId;dbserver1.inventory.orders:id.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: sqlserver
*/
default DebeziumSqlserverComponentBuilder messageKeyColumns(
java.lang.String messageKeyColumns) {
doSetProperty("messageKeyColumns", messageKeyColumns);
return this;
}
/**
* Frequency in milliseconds to wait for new change events to appear
* after receiving no events. Defaults to 500ms.
*
* The option is a: <code>long</code> type.
*
* Default: 500
* Group: sqlserver
*/
default DebeziumSqlserverComponentBuilder pollIntervalMs(
long pollIntervalMs) {
doSetProperty("pollIntervalMs", pollIntervalMs);
return this;
}
/**
* Enables transaction metadata extraction together with event counting.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: sqlserver
*/
default DebeziumSqlserverComponentBuilder provideTransactionMetadata(
boolean provideTransactionMetadata) {
doSetProperty("provideTransactionMetadata", provideTransactionMetadata);
return this;
}
/**
* The number of milliseconds to delay before a snapshot will begin.
*
* The option is a: <code>long</code> type.
*
* Default: 0
* Group: sqlserver
*/
default DebeziumSqlserverComponentBuilder snapshotDelayMs(
long snapshotDelayMs) {
doSetProperty("snapshotDelayMs", snapshotDelayMs);
return this;
}
/**
* The maximum number of records that should be loaded into memory while
* performing a snapshot.
*
* The option is a: <code>int</code> type.
*
* Group: sqlserver
*/
default DebeziumSqlserverComponentBuilder snapshotFetchSize(
int snapshotFetchSize) {
doSetProperty("snapshotFetchSize", snapshotFetchSize);
return this;
}
/**
* The maximum number of millis to wait for table locks at the beginning
* of a snapshot. If locks cannot be acquired in this time frame, the
* snapshot will be aborted. Defaults to 10 seconds.
*
* The option is a: <code>long</code> type.
*
* Default: 10000
* Group: sqlserver
*/
default DebeziumSqlserverComponentBuilder snapshotLockTimeoutMs(
long snapshotLockTimeoutMs) {
doSetProperty("snapshotLockTimeoutMs", snapshotLockTimeoutMs);
return this;
}
/**
* The criteria for running a snapshot upon startup of the connector.
* Options include: 'initial' (the default) to specify the connector
* should run a snapshot only when no offsets are available for the
* logical server name; 'schema_only' to specify the connector should
* run a snapshot of the schema when no offsets are available for the
* logical server name.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: initial
* Group: sqlserver
*/
default DebeziumSqlserverComponentBuilder snapshotMode(
java.lang.String snapshotMode) {
doSetProperty("snapshotMode", snapshotMode);
return this;
}
/**
* This property contains a comma-separated list of fully-qualified
* tables (DB_NAME.TABLE_NAME) or (SCHEMA_NAME.TABLE_NAME), depending on
* thespecific connectors . Select statements for the individual tables
* are specified in further configuration properties, one for each
* table, identified by the id
* 'snapshot.select.statement.overrides.DB_NAME.TABLE_NAME' or
* 'snapshot.select.statement.overrides.SCHEMA_NAME.TABLE_NAME',
* respectively. The value of those properties is the select statement
* to use when retrieving data from the specific table during
* snapshotting. A possible use case for large append-only tables is
* setting a specific point where to start (resume) snapshotting, in
* case a previous snapshotting was interrupted.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: sqlserver
*/
default DebeziumSqlserverComponentBuilder snapshotSelectStatementOverrides(
java.lang.String snapshotSelectStatementOverrides) {
doSetProperty("snapshotSelectStatementOverrides", snapshotSelectStatementOverrides);
return this;
}
/**
* A version of the format of the publicly visible source part in the
* message.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: v2
* Group: sqlserver
*/
default DebeziumSqlserverComponentBuilder sourceStructVersion(
java.lang.String sourceStructVersion) {
doSetProperty("sourceStructVersion", sourceStructVersion);
return this;
}
/**
* Description is not available here, please check Debezium website for
* corresponding key 'table.blacklist' description.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: sqlserver
*/
default DebeziumSqlserverComponentBuilder tableBlacklist(
java.lang.String tableBlacklist) {
doSetProperty("tableBlacklist", tableBlacklist);
return this;
}
/**
* Flag specifying whether built-in tables should be ignored.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: sqlserver
*/
default DebeziumSqlserverComponentBuilder tableIgnoreBuiltin(
boolean tableIgnoreBuiltin) {
doSetProperty("tableIgnoreBuiltin", tableIgnoreBuiltin);
return this;
}
/**
* The tables for which changes are to be captured.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: sqlserver
*/
default DebeziumSqlserverComponentBuilder tableWhitelist(
java.lang.String tableWhitelist) {
doSetProperty("tableWhitelist", tableWhitelist);
return this;
}
/**
* Time, date, and timestamps can be represented with different kinds of
* precisions, including:'adaptive' (the default) bases the precision of
* time, date, and timestamp values on the database column's precision;
* 'adaptive_time_microseconds' like 'adaptive' mode, but TIME fields
* always use microseconds precision;'connect' always represents time,
* date, and timestamp values using Kafka Connect's built-in
* representations for Time, Date, and Timestamp, which uses millisecond
* precision regardless of the database columns' precision .
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: adaptive
* Group: sqlserver
*/
default DebeziumSqlserverComponentBuilder timePrecisionMode(
java.lang.String timePrecisionMode) {
doSetProperty("timePrecisionMode", timePrecisionMode);
return this;
}
/**
* Whether delete operations should be represented by a delete event and
* a subsquenttombstone event (true) or only by a delete event (false).
* Emitting the tombstone event (the default behavior) allows Kafka to
* completely delete all events pertaining to the given key once the
* source record got deleted.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: sqlserver
*/
default DebeziumSqlserverComponentBuilder tombstonesOnDelete(
boolean tombstonesOnDelete) {
doSetProperty("tombstonesOnDelete", tombstonesOnDelete);
return this;
}
}
    /**
     * Concrete builder: instantiates the DebeziumSqlserverComponent and maps
     * DSL property names onto either the component itself or its embedded
     * SQL Server connector Debezium configuration.
     */
    class DebeziumSqlserverComponentBuilderImpl
            extends
                AbstractComponentBuilder<DebeziumSqlserverComponent>
            implements
                DebeziumSqlserverComponentBuilder {
        /** Creates the component instance that this builder configures. */
        @Override
        protected DebeziumSqlserverComponent buildConcreteComponent() {
            return new DebeziumSqlserverComponent();
        }
        /**
         * Returns the component's embedded Debezium configuration, lazily
         * creating and attaching one when the component has none yet.
         */
        private org.apache.camel.component.debezium.configuration.SqlServerConnectorEmbeddedDebeziumConfiguration getOrCreateConfiguration(
                org.apache.camel.component.debezium.DebeziumSqlserverComponent component) {
            if (component.getConfiguration() == null) {
                component.setConfiguration(new org.apache.camel.component.debezium.configuration.SqlServerConnectorEmbeddedDebeziumConfiguration());
            }
            return component.getConfiguration();
        }
        /**
         * Dispatches a DSL property to the matching setter; properties in the
         * switch that are not component-level settings are forwarded to the
         * embedded Debezium configuration. Returns false for unknown names.
         */
        @Override
        protected boolean setPropertyOnComponent(
                Component component,
                String name,
                Object value) {
            switch (name) {
            case "additionalProperties": getOrCreateConfiguration((DebeziumSqlserverComponent) component).setAdditionalProperties((java.util.Map) value); return true;
            case "bridgeErrorHandler": ((DebeziumSqlserverComponent) component).setBridgeErrorHandler((boolean) value); return true;
            case "configuration": ((DebeziumSqlserverComponent) component).setConfiguration((org.apache.camel.component.debezium.configuration.SqlServerConnectorEmbeddedDebeziumConfiguration) value); return true;
            case "internalKeyConverter": getOrCreateConfiguration((DebeziumSqlserverComponent) component).setInternalKeyConverter((java.lang.String) value); return true;
            case "internalValueConverter": getOrCreateConfiguration((DebeziumSqlserverComponent) component).setInternalValueConverter((java.lang.String) value); return true;
            case "offsetCommitPolicy": getOrCreateConfiguration((DebeziumSqlserverComponent) component).setOffsetCommitPolicy((java.lang.String) value); return true;
            case "offsetCommitTimeoutMs": getOrCreateConfiguration((DebeziumSqlserverComponent) component).setOffsetCommitTimeoutMs((long) value); return true;
            case "offsetFlushIntervalMs": getOrCreateConfiguration((DebeziumSqlserverComponent) component).setOffsetFlushIntervalMs((long) value); return true;
            case "offsetStorage": getOrCreateConfiguration((DebeziumSqlserverComponent) component).setOffsetStorage((java.lang.String) value); return true;
            case "offsetStorageFileName": getOrCreateConfiguration((DebeziumSqlserverComponent) component).setOffsetStorageFileName((java.lang.String) value); return true;
            case "offsetStoragePartitions": getOrCreateConfiguration((DebeziumSqlserverComponent) component).setOffsetStoragePartitions((int) value); return true;
            case "offsetStorageReplicationFactor": getOrCreateConfiguration((DebeziumSqlserverComponent) component).setOffsetStorageReplicationFactor((int) value); return true;
            case "offsetStorageTopic": getOrCreateConfiguration((DebeziumSqlserverComponent) component).setOffsetStorageTopic((java.lang.String) value); return true;
            case "basicPropertyBinding": ((DebeziumSqlserverComponent) component).setBasicPropertyBinding((boolean) value); return true;
            case "columnBlacklist": getOrCreateConfiguration((DebeziumSqlserverComponent) component).setColumnBlacklist((java.lang.String) value); return true;
            case "databaseDbname": getOrCreateConfiguration((DebeziumSqlserverComponent) component).setDatabaseDbname((java.lang.String) value); return true;
            case "databaseHistory": getOrCreateConfiguration((DebeziumSqlserverComponent) component).setDatabaseHistory((java.lang.String) value); return true;
            case "databaseHistoryFileFilename": getOrCreateConfiguration((DebeziumSqlserverComponent) component).setDatabaseHistoryFileFilename((java.lang.String) value); return true;
            case "databaseHistoryKafkaBootstrapServers": getOrCreateConfiguration((DebeziumSqlserverComponent) component).setDatabaseHistoryKafkaBootstrapServers((java.lang.String) value); return true;
            case "databaseHistoryKafkaRecoveryAttempts": getOrCreateConfiguration((DebeziumSqlserverComponent) component).setDatabaseHistoryKafkaRecoveryAttempts((int) value); return true;
            case "databaseHistoryKafkaRecoveryPollIntervalMs": getOrCreateConfiguration((DebeziumSqlserverComponent) component).setDatabaseHistoryKafkaRecoveryPollIntervalMs((int) value); return true;
            case "databaseHistoryKafkaTopic": getOrCreateConfiguration((DebeziumSqlserverComponent) component).setDatabaseHistoryKafkaTopic((java.lang.String) value); return true;
            case "databaseHostname": getOrCreateConfiguration((DebeziumSqlserverComponent) component).setDatabaseHostname((java.lang.String) value); return true;
            case "databasePassword": getOrCreateConfiguration((DebeziumSqlserverComponent) component).setDatabasePassword((java.lang.String) value); return true;
            case "databasePort": getOrCreateConfiguration((DebeziumSqlserverComponent) component).setDatabasePort((int) value); return true;
            case "databaseServerName": getOrCreateConfiguration((DebeziumSqlserverComponent) component).setDatabaseServerName((java.lang.String) value); return true;
            case "databaseServerTimezone": getOrCreateConfiguration((DebeziumSqlserverComponent) component).setDatabaseServerTimezone((java.lang.String) value); return true;
            case "databaseUser": getOrCreateConfiguration((DebeziumSqlserverComponent) component).setDatabaseUser((java.lang.String) value); return true;
            case "decimalHandlingMode": getOrCreateConfiguration((DebeziumSqlserverComponent) component).setDecimalHandlingMode((java.lang.String) value); return true;
            case "eventProcessingFailureHandlingMode": getOrCreateConfiguration((DebeziumSqlserverComponent) component).setEventProcessingFailureHandlingMode((java.lang.String) value); return true;
            case "heartbeatIntervalMs": getOrCreateConfiguration((DebeziumSqlserverComponent) component).setHeartbeatIntervalMs((int) value); return true;
            case "heartbeatTopicsPrefix": getOrCreateConfiguration((DebeziumSqlserverComponent) component).setHeartbeatTopicsPrefix((java.lang.String) value); return true;
            case "maxBatchSize": getOrCreateConfiguration((DebeziumSqlserverComponent) component).setMaxBatchSize((int) value); return true;
            case "maxQueueSize": getOrCreateConfiguration((DebeziumSqlserverComponent) component).setMaxQueueSize((int) value); return true;
            case "messageKeyColumns": getOrCreateConfiguration((DebeziumSqlserverComponent) component).setMessageKeyColumns((java.lang.String) value); return true;
            case "pollIntervalMs": getOrCreateConfiguration((DebeziumSqlserverComponent) component).setPollIntervalMs((long) value); return true;
            case "provideTransactionMetadata": getOrCreateConfiguration((DebeziumSqlserverComponent) component).setProvideTransactionMetadata((boolean) value); return true;
            case "snapshotDelayMs": getOrCreateConfiguration((DebeziumSqlserverComponent) component).setSnapshotDelayMs((long) value); return true;
            case "snapshotFetchSize": getOrCreateConfiguration((DebeziumSqlserverComponent) component).setSnapshotFetchSize((int) value); return true;
            case "snapshotLockTimeoutMs": getOrCreateConfiguration((DebeziumSqlserverComponent) component).setSnapshotLockTimeoutMs((long) value); return true;
            case "snapshotMode": getOrCreateConfiguration((DebeziumSqlserverComponent) component).setSnapshotMode((java.lang.String) value); return true;
            case "snapshotSelectStatementOverrides": getOrCreateConfiguration((DebeziumSqlserverComponent) component).setSnapshotSelectStatementOverrides((java.lang.String) value); return true;
            case "sourceStructVersion": getOrCreateConfiguration((DebeziumSqlserverComponent) component).setSourceStructVersion((java.lang.String) value); return true;
            case "tableBlacklist": getOrCreateConfiguration((DebeziumSqlserverComponent) component).setTableBlacklist((java.lang.String) value); return true;
            case "tableIgnoreBuiltin": getOrCreateConfiguration((DebeziumSqlserverComponent) component).setTableIgnoreBuiltin((boolean) value); return true;
            case "tableWhitelist": getOrCreateConfiguration((DebeziumSqlserverComponent) component).setTableWhitelist((java.lang.String) value); return true;
            case "timePrecisionMode": getOrCreateConfiguration((DebeziumSqlserverComponent) component).setTimePrecisionMode((java.lang.String) value); return true;
            case "tombstonesOnDelete": getOrCreateConfiguration((DebeziumSqlserverComponent) component).setTombstonesOnDelete((boolean) value); return true;
            default: return false;
            }
        }
    }
}
| |
/*
* Copyright 2015 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.uberfire.ext.wires.bpmn.client.commands.impl;
import org.junit.Before;
import org.junit.Test;
import org.uberfire.ext.wires.bpmn.api.model.impl.edges.BpmnEdgeImpl;
import org.uberfire.ext.wires.bpmn.api.model.impl.nodes.EndProcessNode;
import org.uberfire.ext.wires.bpmn.api.model.impl.nodes.ProcessNode;
import org.uberfire.ext.wires.bpmn.api.model.impl.nodes.StartProcessNode;
import org.uberfire.ext.wires.bpmn.api.model.impl.roles.DefaultRoleImpl;
import org.uberfire.ext.wires.bpmn.api.model.BpmnEdge;
import org.uberfire.ext.wires.bpmn.api.model.rules.Rule;
import org.uberfire.ext.wires.bpmn.client.AbstractBaseRuleTest;
import org.uberfire.ext.wires.bpmn.client.TestDummyNode;
import org.uberfire.ext.wires.bpmn.client.commands.CommandManager;
import org.uberfire.ext.wires.bpmn.client.commands.ResultType;
import org.uberfire.ext.wires.bpmn.client.commands.Results;
import org.uberfire.ext.wires.bpmn.client.rules.RuleManager;
import org.uberfire.ext.wires.bpmn.client.rules.impl.DefaultRuleManagerImpl;
import static junit.framework.Assert.*;
public class AddEdgeCommandTest extends AbstractBaseRuleTest {

    private ProcessNode process;
    private RuleManager ruleManager;
    private StartProcessNode node1;
    private TestDummyNode node2;
    private EndProcessNode node3;
    private CommandManager commandManager;

    /**
     * Each test starts from a dummy process holding three unconnected nodes:
     *
     *   [StartNode]   [DummyNode]   [EndNode]
     */
    @Before
    public void setupNodes() {
        process = new ProcessNode();
        ruleManager = new DefaultRuleManagerImpl();
        node1 = new StartProcessNode();
        node2 = new TestDummyNode();
        node3 = new EndProcessNode();
        commandManager = new DefaultCommandManagerImpl();

        for (Rule rule : getConnectionRules()) {
            ruleManager.addRule(rule);
        }
        for (Rule rule : getCardinalityRules()) {
            ruleManager.addRule(rule);
        }

        //Adding the StartProcessNode must succeed without messages
        final Results startNodeResults = commandManager.execute(ruleManager, new AddGraphNodeCommand(process, node1));
        assertNotNull(startNodeResults);
        assertEquals(0, startNodeResults.getMessages().size());

        //Adding the TestDummyNode must succeed without messages
        final Results dummyNodeResults = commandManager.execute(ruleManager, new AddGraphNodeCommand(process, node2));
        assertNotNull(dummyNodeResults);
        assertEquals(0, dummyNodeResults.getMessages().size());

        //Adding the EndProcessNode must succeed without messages
        final Results endNodeResults = commandManager.execute(ruleManager, new AddGraphNodeCommand(process, node3));
        assertNotNull(endNodeResults);
        assertEquals(0, endNodeResults.getMessages().size());
    }

    @Test
    public void testAddEdgeBetweenStartNodeAndDummyNode() {
        final BpmnEdge edge = new BpmnEdgeImpl(new DefaultRoleImpl("general_edge"));

        //An Edge with role "general_edge" is permitted between StartNode and DummyNode
        final Results results = commandManager.execute(ruleManager, new AddEdgeCommand(node1, node2, edge));
        assertNotNull(results);
        assertEquals(0, results.getMessages().size());
        assertEquals(0, node1.getInEdges().size());
        assertEquals(1, node1.getOutEdges().size());
        assertEquals(1, node2.getInEdges().size());
        assertEquals(0, node2.getOutEdges().size());
        assertEquals(edge, node1.getOutEdges().toArray()[0]);
        assertEquals(edge, node2.getInEdges().toArray()[0]);
    }

    @Test
    public void testAddEdgeBetweenDummyNodeAndEndNode() {
        final BpmnEdge edge = new BpmnEdgeImpl(new DefaultRoleImpl("general_edge"));

        //An Edge with role "general_edge" is permitted between DummyNode and EndNode
        final Results results = commandManager.execute(ruleManager, new AddEdgeCommand(node2, node3, edge));
        assertNotNull(results);
        assertEquals(0, results.getMessages().size());
        assertEquals(0, node2.getInEdges().size());
        assertEquals(1, node2.getOutEdges().size());
        assertEquals(1, node3.getInEdges().size());
        assertEquals(0, node3.getOutEdges().size());
        assertEquals(edge, node2.getOutEdges().toArray()[0]);
        assertEquals(edge, node3.getInEdges().toArray()[0]);
    }

    @Test
    public void testAddEdgeBetweenStartNodeAndEndNode() {
        final BpmnEdge edge = new BpmnEdgeImpl(new DefaultRoleImpl("general_edge"));

        //An Edge with role "general_edge" is NOT permitted between StartNode and EndNode
        final Results results = commandManager.execute(ruleManager, new AddEdgeCommand(node1, node3, edge));
        assertNotNull(results);
        assertEquals(1, results.getMessages().size());
        assertEquals(1, results.getMessages(ResultType.ERROR).size());
        assertEquals(0, node1.getInEdges().size());
        assertEquals(0, node1.getOutEdges().size());
        assertEquals(0, node3.getInEdges().size());
        assertEquals(0, node3.getOutEdges().size());
    }

    @Test
    public void testStartNodeOutgoingCardinalityAndDummyNode() {
        final BpmnEdge edge = new BpmnEdgeImpl(new DefaultRoleImpl("general_edge"));

        //An Edge with role "general_edge" is permitted between StartNode and DummyNode
        final Results firstAddResults = commandManager.execute(ruleManager, new AddEdgeCommand(node1, node2, edge));
        assertNotNull(firstAddResults);
        assertEquals(0, firstAddResults.getMessages().size());
        assertEquals(0, node1.getInEdges().size());
        assertEquals(1, node1.getOutEdges().size());
        assertEquals(1, node2.getInEdges().size());
        assertEquals(0, node2.getOutEdges().size());
        assertEquals(edge, node1.getOutEdges().toArray()[0]);
        assertEquals(edge, node2.getInEdges().toArray()[0]);

        //A second "general_edge" Edge between StartNode and DummyNode must be rejected,
        //leaving the graph unchanged
        final Results secondAddResults = commandManager.execute(ruleManager, new AddEdgeCommand(node1, node2, edge));
        assertNotNull(secondAddResults);
        assertEquals(1, secondAddResults.getMessages().size());
        assertEquals(1, secondAddResults.getMessages(ResultType.ERROR).size());
        assertEquals(0, node1.getInEdges().size());
        assertEquals(1, node1.getOutEdges().size());
        assertEquals(1, node2.getInEdges().size());
        assertEquals(0, node2.getOutEdges().size());
        assertEquals(edge, node1.getOutEdges().toArray()[0]);
        assertEquals(edge, node2.getInEdges().toArray()[0]);
    }

    @Test
    public void testDummyNodeAndEndNodeIncomingCardinality() {
        final BpmnEdge edge = new BpmnEdgeImpl(new DefaultRoleImpl("general_edge"));

        //An Edge with role "general_edge" is permitted between DummyNode and EndNode
        final Results firstAddResults = commandManager.execute(ruleManager, new AddEdgeCommand(node2, node3, edge));
        assertNotNull(firstAddResults);
        assertEquals(0, firstAddResults.getMessages().size());
        assertEquals(0, node2.getInEdges().size());
        assertEquals(1, node2.getOutEdges().size());
        assertEquals(1, node3.getInEdges().size());
        assertEquals(0, node3.getOutEdges().size());
        assertEquals(edge, node2.getOutEdges().toArray()[0]);
        assertEquals(edge, node3.getInEdges().toArray()[0]);

        //A second "general_edge" Edge between DummyNode and EndNode must be rejected,
        //leaving the graph unchanged
        final Results secondAddResults = commandManager.execute(ruleManager, new AddEdgeCommand(node2, node3, edge));
        assertNotNull(secondAddResults);
        assertEquals(1, secondAddResults.getMessages().size());
        assertEquals(1, secondAddResults.getMessages(ResultType.ERROR).size());
        assertEquals(0, node2.getInEdges().size());
        assertEquals(1, node2.getOutEdges().size());
        assertEquals(1, node3.getInEdges().size());
        assertEquals(0, node3.getOutEdges().size());
        assertEquals(edge, node2.getOutEdges().toArray()[0]);
        assertEquals(edge, node3.getInEdges().toArray()[0]);
    }
}
| |
package com.cloud.consoleproxy;
import com.cloud.agent.AgentManager;
import com.cloud.configuration.Config;
import com.cloud.framework.config.dao.ConfigurationDao;
import com.cloud.framework.security.keys.KeysManager;
import com.cloud.framework.security.keystore.KeystoreManager;
import com.cloud.host.HostVO;
import com.cloud.host.dao.HostDao;
import com.cloud.legacymodel.communication.answer.AgentControlAnswer;
import com.cloud.legacymodel.communication.answer.Answer;
import com.cloud.legacymodel.communication.answer.ConsoleAccessAuthenticationAnswer;
import com.cloud.legacymodel.communication.answer.GetVncPortAnswer;
import com.cloud.legacymodel.communication.command.GetVncPortCommand;
import com.cloud.legacymodel.communication.command.StartConsoleProxyAgentHttpHandlerCommand;
import com.cloud.legacymodel.communication.command.agentcontrol.ConsoleAccessAuthenticationCommand;
import com.cloud.legacymodel.communication.command.agentcontrol.ConsoleProxyLoadReportCommand;
import com.cloud.legacymodel.communication.command.startup.StartupCommand;
import com.cloud.legacymodel.communication.command.startup.StartupProxyCommand;
import com.cloud.legacymodel.dc.Host;
import com.cloud.legacymodel.dc.HostStatus;
import com.cloud.legacymodel.exceptions.AgentUnavailableException;
import com.cloud.legacymodel.exceptions.OperationTimedoutException;
import com.cloud.legacymodel.utils.Ternary;
import com.cloud.legacymodel.vm.VirtualMachine;
import com.cloud.servlet.ConsoleProxyPasswordBasedEncryptor;
import com.cloud.servlet.ConsoleProxyServlet;
import com.cloud.vm.dao.VMInstanceDao;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import org.apache.commons.codec.binary.Base64;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
import java.util.Date;
/**
 * Utility class to manage interactions with agent-based console access.
 * Extracted from ConsoleProxyManagerImpl so that other console proxy managers
 * can reuse it.
 */
public abstract class AgentHookBase implements AgentHook {
    private static final Logger s_logger = LoggerFactory.getLogger(AgentHookBase.class);
    // Collaborators supplied via the constructor; package-private so subclasses can use them.
    VMInstanceDao _instanceDao;
    HostDao _hostDao;
    ConfigurationDao _configDao;
    AgentManager _agentMgr;
    KeystoreManager _ksMgr;
    KeysManager _keysMgr;
    /**
     * Wires in the DAOs and managers this hook needs.
     *
     * @param instanceDao lookup of VM instances by uuid/id
     * @param hostDao     lookup of hosts by id
     * @param cfgDao      access to global configuration values
     * @param ksMgr       provider of keystore bits for SSL
     * @param agentMgr    channel for sending commands to agents
     * @param keysMgr     provider of authentication/encryption keys
     */
    public AgentHookBase(final VMInstanceDao instanceDao, final HostDao hostDao, final ConfigurationDao cfgDao, final KeystoreManager ksMgr, final AgentManager agentMgr, final
    KeysManager keysMgr) {
        _instanceDao = instanceDao;
        _hostDao = hostDao;
        _agentMgr = agentMgr;
        _configDao = cfgDao;
        _ksMgr = ksMgr;
        _keysMgr = keysMgr;
    }
    /** Load reports are ignored here: this base implementation does not auto-scale. */
    @Override
    public void onLoadReport(final ConsoleProxyLoadReportCommand cmd) {
        // no-op since we do not auto-scale
    }
@Override
public AgentControlAnswer onConsoleAccessAuthentication(final ConsoleAccessAuthenticationCommand cmd) {
final Long vmId = null;
final String ticketInUrl = cmd.getTicket();
if (ticketInUrl == null) {
s_logger.error("Access ticket could not be found, you could be running an old version of console proxy. vmId: " + cmd.getVmId());
return new ConsoleAccessAuthenticationAnswer(cmd, false);
}
if (s_logger.isDebugEnabled()) {
s_logger.debug("Console authentication. Ticket in url for " + cmd.getHost() + ":" + cmd.getPort() + "-" + cmd.getVmId() + " is " + ticketInUrl);
}
if (!cmd.isReauthenticating()) {
final String ticket = ConsoleProxyServlet.genAccessTicket(cmd.getHost(), cmd.getPort(), cmd.getSid(), cmd.getVmId());
if (s_logger.isDebugEnabled()) {
s_logger.debug("Console authentication. Ticket in 1 minute boundary for " + cmd.getHost() + ":" + cmd.getPort() + "-" + cmd.getVmId() + " is " + ticket);
}
if (!ticket.equals(ticketInUrl)) {
final Date now = new Date();
// considering of minute round-up
final String minuteEarlyTicket =
ConsoleProxyServlet.genAccessTicket(cmd.getHost(), cmd.getPort(), cmd.getSid(), cmd.getVmId(), new Date(now.getTime() - 60 * 1000));
if (s_logger.isDebugEnabled()) {
s_logger.debug("Console authentication. Ticket in 2-minute boundary for " + cmd.getHost() + ":" + cmd.getPort() + "-" + cmd.getVmId() + " is " +
minuteEarlyTicket);
}
if (!minuteEarlyTicket.equals(ticketInUrl)) {
s_logger.error("Access ticket expired or has been modified. vmId: " + cmd.getVmId() + "ticket in URL: " + ticketInUrl +
", tickets to check against: " + ticket + "," + minuteEarlyTicket);
return new ConsoleAccessAuthenticationAnswer(cmd, false);
}
}
}
if (cmd.getVmId() != null && cmd.getVmId().isEmpty()) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Invalid vm id sent from proxy(happens when proxy session has terminated)");
}
return new ConsoleAccessAuthenticationAnswer(cmd, false);
}
VirtualMachine vm = _instanceDao.findByUuid(cmd.getVmId());
if (vm == null) {
vm = _instanceDao.findById(Long.parseLong(cmd.getVmId()));
}
if (vm == null) {
s_logger.error("Invalid vm id " + cmd.getVmId() + " sent from console access authentication");
return new ConsoleAccessAuthenticationAnswer(cmd, false);
}
if (vm.getHostId() == null) {
s_logger.warn("VM " + vmId + " lost host info, failed authentication request");
return new ConsoleAccessAuthenticationAnswer(cmd, false);
}
final HostVO host = _hostDao.findById(vm.getHostId());
if (host == null) {
s_logger.warn("VM " + vmId + "'s host does not exist, fail authentication request");
return new ConsoleAccessAuthenticationAnswer(cmd, false);
}
final String sid = cmd.getSid();
if (sid == null || !sid.equals(vm.getVncPassword())) {
s_logger.warn("sid " + sid + " in url does not match stored sid.");
return new ConsoleAccessAuthenticationAnswer(cmd, false);
}
if (cmd.isReauthenticating()) {
final ConsoleAccessAuthenticationAnswer authenticationAnswer = new ConsoleAccessAuthenticationAnswer(cmd, true);
authenticationAnswer.setReauthenticating(true);
s_logger.info("Re-authentication request, ask host " + vm.getHostId() + " for new console info");
final GetVncPortAnswer answer = (GetVncPortAnswer) _agentMgr.easySend(vm.getHostId(), new GetVncPortCommand(vm.getId(), vm.getInstanceName()));
if (answer != null && answer.getResult()) {
final Ternary<String, String, String> parsedHostInfo = ConsoleProxyServlet.parseHostInfo(answer.getAddress());
if (parsedHostInfo.second() != null && parsedHostInfo.third() != null) {
s_logger.info("Re-authentication result. vm: " + vm.getId() + ", tunnel url: " + parsedHostInfo.second() + ", tunnel session: " +
parsedHostInfo.third());
authenticationAnswer.setTunnelUrl(parsedHostInfo.second());
authenticationAnswer.setTunnelSession(parsedHostInfo.third());
} else {
s_logger.info("Re-authentication result. vm: " + vm.getId() + ", host address: " + parsedHostInfo.first() + ", port: " + answer.getPort());
authenticationAnswer.setHost(parsedHostInfo.first());
authenticationAnswer.setPort(answer.getPort());
}
} else {
s_logger.warn("Re-authentication request failed");
authenticationAnswer.setSuccess(false);
}
return authenticationAnswer;
}
return new ConsoleAccessAuthenticationAnswer(cmd, true);
}
    /** Nothing to do when an agent connects; subclasses may override. */
    @Override
    public void onAgentConnect(final Host host, final StartupCommand cmd) {
        // no-op
    }
    /** Nothing to do when an agent disconnects; subclasses may override. */
    @Override
    public void onAgentDisconnect(final long agentId, final HostStatus state) {
        // no-op
    }
    /**
     * Tells the console proxy agent to start its HTTP handler: generates a random
     * keystore password, loads the SSL keystore bits when SSL is enabled via
     * global config, and sends a StartConsoleProxyAgentHttpHandlerCommand to the
     * proxy's host. Failures are logged; an OutOfMemoryError exits the process so
     * it can be re-launched.
     *
     * @param startupCmd the startup command identifying the console proxy VM
     */
    @Override
    public void startAgentHttpHandlerInVM(final StartupProxyCommand startupCmd) {
        final StartConsoleProxyAgentHttpHandlerCommand cmd;
        try {
            // Fresh random password protecting the keystore shipped to the agent.
            final SecureRandom random = SecureRandom.getInstance("SHA1PRNG");
            final byte[] randomBytes = new byte[16];
            random.nextBytes(randomBytes);
            final String storePassword = Base64.encodeBase64String(randomBytes);
            byte[] ksBits = null;
            // An empty/unset console proxy URL domain means SSL is disabled.
            final String consoleProxyUrlDomain = _configDao.getValue(Config.ConsoleProxyUrlDomain.key());
            if (consoleProxyUrlDomain == null || consoleProxyUrlDomain.isEmpty()) {
                s_logger.debug("SSL is disabled for console proxy based on global config, skip loading certificates");
            } else {
                ksBits = _ksMgr.getKeystoreBits(ConsoleProxyManager.CERTIFICATE_NAME, ConsoleProxyManager.CERTIFICATE_NAME, storePassword);
                //ks manager raises exception if ksBits are null, hence no need to explicitly handle the condition
            }
            cmd = new StartConsoleProxyAgentHttpHandlerCommand(ksBits, storePassword, _keysMgr.getAuthenticationKey());
            cmd.setEncryptorPassword(getEncryptorPassword());
            final HostVO consoleProxyHost = findConsoleProxyHost(startupCmd);
            assert (consoleProxyHost != null);
            if (consoleProxyHost != null) {
                final Answer answer = _agentMgr.send(consoleProxyHost.getId(), cmd);
                if (answer == null || !answer.getResult()) {
                    s_logger.error("Console proxy agent reported that it failed to execute http handling startup command");
                } else {
                    s_logger.info("Successfully sent out command to start HTTP handling in console proxy agent");
                }
            }
        } catch (final NoSuchAlgorithmException e) {
            s_logger.error("Unexpected exception in SecureRandom Algorithm selection ", e);
        } catch (final AgentUnavailableException e) {
            s_logger.error("Unable to send http handling startup command to the console proxy resource for proxy:" + startupCmd.getProxyVmId(), e);
        } catch (final OperationTimedoutException e) {
            s_logger.error("Unable to send http handling startup command(time out) to the console proxy resource for proxy:" + startupCmd.getProxyVmId(), e);
        } catch (final OutOfMemoryError e) {
            // Deliberate fail-fast: the management server is expected to be restarted by its supervisor.
            s_logger.error("Unrecoverable OutOfMemory Error, exit and let it be re-launched");
            System.exit(1);
        } catch (final Exception e) {
            s_logger.error(
                "Unexpected exception when sending http handling startup command(time out) to the console proxy resource for proxy:" + startupCmd.getProxyVmId(), e);
        }
    }
private String getEncryptorPassword() {
String key;
String iv;
ConsoleProxyPasswordBasedEncryptor.KeyIVPair keyIvPair = null;
// if we failed after reset, something is definitely wrong
for (int i = 0; i < 2; i++) {
key = _keysMgr.getEncryptionKey();
iv = _keysMgr.getEncryptionIV();
keyIvPair = new ConsoleProxyPasswordBasedEncryptor.KeyIVPair(key, iv);
if (keyIvPair.getIvBytes() == null || keyIvPair.getIvBytes().length != 16 || keyIvPair.getKeyBytes() == null || keyIvPair.getKeyBytes().length != 16) {
s_logger.warn("Console access AES KeyIV sanity check failed, reset and regenerate");
_keysMgr.resetEncryptionKeyIV();
} else {
break;
}
}
final Gson gson = new GsonBuilder().create();
return gson.toJson(keyIvPair);
}
    // Resolves the HostVO record for the console proxy VM that sent the given startup
    // command; the implementation is supplied by the concrete subclass.
    protected abstract HostVO findConsoleProxyHost(StartupProxyCommand cmd);
}
| |
package com.lb.multi_touch_placeholder_view;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.Rect;
import android.support.v4.view.GestureDetectorCompat;
import android.view.GestureDetector.SimpleOnGestureListener;
import android.view.MotionEvent;
import android.view.ScaleGestureDetector;
import android.view.inputmethod.InputMethodManager;
import android.widget.ImageView;
import com.almeros.android.multitouch.BaseGestureDetector;
import com.almeros.android.multitouch.MoveGestureDetector;
import com.almeros.android.multitouch.RotateGestureDetector;
import com.almeros.android.multitouch.ShoveGestureDetector;
/**
 * An {@link ImageView} that draws an additional, user-manipulable "moving" bitmap on top of
 * its regular content. Supported multi-touch gestures: drag (translate), pinch (scale,
 * clamped to [minScale, maxScale]), two-finger rotate, and a two-finger vertical "shove"
 * which adjusts the bitmap's alpha.
 * <p>
 * Touch events must be forwarded to {@link #onTouch(MotionEvent)}; the accumulated
 * transform is baked into a {@link Matrix} and applied in {@link #onDraw(Canvas)}.
 */
public class MultiTouchImageView extends ImageView
{
    private final Paint _paint=new Paint();
    private final Matrix _movingBitmapMatrix=new Matrix();
    // Current transform state of the moving bitmap.
    private float _scaleFactorX=1f, _scaleFactorY=1f, _rotationDegrees=0.f;
    private float _translateX=0.f, _translateY=0.f;
    private int _alpha=255, _movingImageHeight, _movingImageWidth;
    private Bitmap _movingBitmap;
    // Clamp range applied to every scale update.
    private float _minScale=0.1f, _maxScale=10f;
    private ScaleGestureDetector _scaleDetector;
    private BaseGestureDetector _rotateDetector, _moveDetector, _shoveDetector;
    // Flags tracking which gesture is active within a touch stream, used to keep
    // shove/scale/move gestures mutually exclusive.
    private boolean _isScaling;
    private boolean _isRotating;
    private boolean _isMoving;
    private boolean _isShoving;
    // Set once the view got a size and the initial center-crop transform was applied.
    private boolean _initialized;
    private boolean mEnableGestures=true;
    private GestureDetectorCompat mGestureDetector;
    public MultiTouchImageView(final Context context)
    {
        super(context);
    }
    public MultiTouchImageView(final Context context,final android.util.AttributeSet attrs)
    {
        super(context,attrs);
    }
    public MultiTouchImageView(final Context context,final android.util.AttributeSet attrs,final int defStyleAttr)
    {
        super(context,attrs,defStyleAttr);
    }
    @android.annotation.TargetApi(android.os.Build.VERSION_CODES.LOLLIPOP)
    public MultiTouchImageView(final Context context,final android.util.AttributeSet attrs,final int defStyleAttr,final int defStyleRes)
    {
        super(context,attrs,defStyleAttr,defStyleRes);
    }
    /**
     * Sets the lower scale bound, re-clamps the current scale factors, and redraws
     * if the clamp changed anything.
     */
    public void setMinScale(final float minScale)
    {
        if(_minScale==minScale)
            return;
        _minScale=minScale;
        final float oldScaleFactorX=_scaleFactorX, oldScaleFactorY=_scaleFactorY;
        _scaleFactorX=Math.max(_minScale,Math.min(_scaleFactorX,_maxScale));
        _scaleFactorY=Math.max(_minScale,Math.min(_scaleFactorY,_maxScale));
        if(oldScaleFactorX!=_scaleFactorX||oldScaleFactorY!=_scaleFactorY)
            onTouch(null);
    }
    /**
     * Sets the upper scale bound, re-clamps the current scale factors, and redraws
     * if the clamp changed anything.
     */
    public void setMaxScale(final float maxScale)
    {
        if(_maxScale==maxScale)
            return;
        _maxScale=maxScale;
        final float oldScaleFactorX=_scaleFactorX, oldScaleFactorY=_scaleFactorY;
        _scaleFactorX=Math.max(_minScale,Math.min(_scaleFactorX,_maxScale));
        _scaleFactorY=Math.max(_minScale,Math.min(_scaleFactorY,_maxScale));
        if(oldScaleFactorX!=_scaleFactorX||oldScaleFactorY!=_scaleFactorY)
            onTouch(null);
    }
    /** Sets both scale factors (clamped to [minScale, maxScale]) and redraws. */
    public void setScaleFactor(float scaleFactorX,float scaleFactorY)
    {
        _scaleFactorX=Math.max(_minScale,Math.min(scaleFactorX,_maxScale));
        _scaleFactorY=Math.max(_minScale,Math.min(scaleFactorY,_maxScale));
        onTouch(null);
    }
    public float getScaleFactorX()
    {
        return _scaleFactorX;
    }
    public float getScaleFactorY()
    {
        return _scaleFactorY;
    }
    /** Moves the (scaled, unrotated) bitmap center to the given view coordinates and redraws. */
    public void setTranslate(float translateX,float translateY)
    {
        _translateX=translateX;
        _translateY=translateY;
        onTouch(null);
    }
    public float getTranslateX()
    {
        return _translateX;
    }
    public float getTranslateY()
    {
        return _translateY;
    }
    @Override
    protected void onAttachedToWindow()
    {
        super.onAttachedToWindow();
        // Use the application context so the detectors don't retain the activity.
        Context appContext=getContext().getApplicationContext();
        initGestures(appContext);
    }
    /**
     * (Re)creates all gesture detectors and resets the per-gesture flags.
     * Tap → performClick; pinch → scale; twist → rotate; pan → translate;
     * vertical two-finger shove → alpha.
     */
    public void initGestures(final Context appContext)
    {
        _isMoving=_isRotating=_isScaling=_isShoving=false;
        mGestureDetector=new GestureDetectorCompat(appContext,new SimpleOnGestureListener()
        {
            @Override
            public boolean onSingleTapConfirmed(final MotionEvent e)
            {
                // Only treat the tap as a click if it landed inside the view's hit rect.
                Rect rect=new Rect();
                getHitRect(rect);
                if(rect.contains((int)e.getX(),(int)e.getY()))
                    return performClick();
                return false;
            }
        });
        _scaleDetector=new ScaleGestureDetector(appContext,new ScaleGestureDetector.SimpleOnScaleGestureListener()
        {
            @Override
            public boolean onScale(final ScaleGestureDetector detector)
            {
                if(_isShoving||!mEnableGestures)
                    return true;
                _scaleFactorX=Math.max(_minScale,Math.min(_scaleFactorX*detector.getScaleFactor(),_maxScale));
                _scaleFactorY=Math.max(_minScale,Math.min(_scaleFactorY*detector.getScaleFactor(),_maxScale));
                _isScaling=true;
                return true;
            }
        });
        _rotateDetector=new RotateGestureDetector(appContext,new RotateGestureDetector.SimpleOnRotateGestureListener()
        {
            @Override
            public boolean onRotate(RotateGestureDetector detector)
            {
                if(_isShoving||!mEnableGestures)
                    return true;
                _rotationDegrees-=detector.getRotationDegreesDelta();
                _isRotating=true;
                return true;
            }
        });
        _moveDetector=new MoveGestureDetector(appContext,new MoveGestureDetector.SimpleOnMoveGestureListener()
        {
            @Override
            public boolean onMove(MoveGestureDetector detector)
            {
                if(_isShoving||_isScaling||!mEnableGestures)
                    return true;
                android.graphics.PointF d=detector.getFocusDelta();
                _translateX+=d.x;
                _translateY+=d.y;
                _isMoving=true;
                return true;
            }
        });
        _shoveDetector=new ShoveGestureDetector(appContext,new ShoveGestureDetector.SimpleOnShoveGestureListener()
        {
            @Override
            public boolean onShove(ShoveGestureDetector detector)
            {
                if(!mEnableGestures)
                    return true;
                // Shove distance maps directly onto the alpha channel, clamped to [0, 255].
                _alpha=Math.min(255,Math.max(0,_alpha+(int)detector.getShovePixelsDelta()));
                _isShoving=true;
                return true;
            }
        });
    }
    /** Sets the bitmap rotation (degrees) and redraws. */
    public void setRotationDegrees(final float rotationDegrees)
    {
        this._rotationDegrees=rotationDegrees;
        onTouch(null);
    }
    @Override
    protected void onSizeChanged(final int w,final int h,final int oldW,final int oldH)
    {
        super.onSizeChanged(w,h,oldW,oldH);
        // First layout pass: initialize the transform to a center-crop of the view.
        if(!_initialized)
        {
            _initialized=true;
            initCenterCrop(w,h);
            onTouch(null);
        }
    }
    /**
     * Positions and scales the moving bitmap so that it center-crops a w×h area.
     * No-op while either the view or the bitmap has no known dimensions.
     */
    public void initCenterCrop(int w,int h)
    {
        if(w==0||h==0)
            return;
        // Fix: guard against an unset bitmap — dividing by a zero dimension would
        // produce an infinite scale factor and corrupt the transform state.
        if(_movingImageWidth==0||_movingImageHeight==0)
            return;
        _scaleFactorX=_scaleFactorY=(float)Math.max(w,h)/Math.min(_movingImageHeight,_movingImageWidth);
        // Fix: use float division — integer w/2 and h/2 lost half a pixel for odd sizes.
        _translateX=w/2f; // equals (_movingImageWidth*_scaleFactorX)/2 + (w - _movingImageWidth*_scaleFactorX)/2
        _translateY=h/2f; // equals (_movingImageHeight*_scaleFactorY)/2 + (h - _movingImageHeight*_scaleFactorY)/2
    }
    /**
     * Feeds a touch event to all gesture detectors and rebuilds the bitmap transform.
     * Call with {@code null} to just recompute the matrix and redraw (used by the setters).
     *
     * @return always true (the event is considered consumed)
     */
    public boolean onTouch(final MotionEvent event)
    {
        if(!_initialized)
            return true;
        if(event!=null)
        {
            requestFocus();
            final boolean handledUsingNormalGestureDetector=mGestureDetector.onTouchEvent(event);
            if(!handledUsingNormalGestureDetector)
            {
                _scaleDetector.onTouchEvent(event);
                _rotateDetector.onTouchEvent(event);
                _shoveDetector.onTouchEvent(event);
                _moveDetector.onTouchEvent(event);
            }
            final int action=event.getAction();
            if(action==MotionEvent.ACTION_CANCEL||action==MotionEvent.ACTION_UP)
                _isShoving=false;
            else if(action==MotionEvent.ACTION_DOWN)
            {
                requestFocus();
                // Hide the soft keyboard as soon as the user starts interacting with the view.
                final InputMethodManager imm=(InputMethodManager)getContext().getSystemService(Context.INPUT_METHOD_SERVICE);
                if(imm!=null) // fix: the service can be null; avoid a potential NPE
                    imm.hideSoftInputFromWindow(getWindowToken(),0);
            }
            _isMoving=_isRotating=_isScaling=false;
            if(handledUsingNormalGestureDetector)
                return true;
        }
        // Rebuild the transform: scale about the origin, rotate about the scaled image
        // center, then translate so that center lands at (_translateX, _translateY).
        final float scaledImageCenterX=(_movingImageWidth*_scaleFactorX)/2;
        final float scaledImageCenterY=(_movingImageHeight*_scaleFactorY)/2;
        _movingBitmapMatrix.reset();
        _movingBitmapMatrix.postScale(_scaleFactorX,_scaleFactorY);
        _movingBitmapMatrix.postRotate(_rotationDegrees,scaledImageCenterX,scaledImageCenterY);
        _movingBitmapMatrix.postTranslate(_translateX-scaledImageCenterX,_translateY-scaledImageCenterY);
        _paint.setAlpha(_alpha);
        invalidate();
        return true;
    }
    /**
     * Sets the bitmap to be used for moving, scaling, rotating and alpha changing.
     * If the bitmap's dimensions differ from the previous one, the transform is
     * re-initialized (center-crop when the view already has a size).
     */
    public void setMovingBitmap(Bitmap movingBitmap)
    {
        _movingBitmap=movingBitmap;
        boolean bitmapSizeChanged=false;
        // The inner assignments update the cached dimensions while comparing
        // against their previous values.
        if(_movingImageHeight!=(_movingImageHeight=movingBitmap.getHeight()))
            bitmapSizeChanged=true;
        if(_movingImageWidth!=(_movingImageWidth=movingBitmap.getWidth()))
            bitmapSizeChanged=true;
        if(bitmapSizeChanged)
            if(getWidth()!=0&&getHeight()!=0)
                initCenterCrop(getWidth(),getHeight());
            else onTouch(null);
    }
    @android.annotation.TargetApi(android.os.Build.VERSION_CODES.HONEYCOMB)
    @Override
    protected void onDraw(final Canvas canvas)
    {
        // Draw the manipulable bitmap beneath the ImageView's own content.
        if(_movingBitmap!=null)
            canvas.drawBitmap(_movingBitmap,_movingBitmapMatrix,_paint);
        super.onDraw(canvas);
    }
    /** Enables/disables all transform gestures (taps still work). */
    public void setEnableGestures(final boolean enableGestures)
    {
        mEnableGestures=enableGestures;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.transport;
import java.io.IOException;
import java.net.InetAddress;
import java.net.ServerSocket;
import java.nio.ByteBuffer;
import java.security.SecureRandom;
import java.util.*;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Function;
import java.util.function.LongConsumer;
import java.util.function.Predicate;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import io.netty.bootstrap.Bootstrap;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.*;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.handler.codec.MessageToMessageDecoder;
import org.apache.cassandra.auth.AllowAllAuthenticator;
import org.apache.cassandra.auth.AllowAllAuthorizer;
import org.apache.cassandra.auth.AllowAllNetworkAuthorizer;
import org.apache.cassandra.concurrent.NamedThreadFactory;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.config.EncryptionOptions;
import org.apache.cassandra.cql3.QueryProcessor;
import org.apache.cassandra.exceptions.ExceptionCode;
import org.apache.cassandra.exceptions.RequestExecutionException;
import org.apache.cassandra.net.*;
import org.apache.cassandra.net.proxy.InboundProxyHandler;
import org.apache.cassandra.service.NativeTransportService;
import org.apache.cassandra.transport.CQLMessageHandler.MessageConsumer;
import org.apache.cassandra.transport.messages.*;
import org.apache.cassandra.utils.FBUtilities;
import org.apache.cassandra.utils.concurrent.SimpleCondition;
import static org.apache.cassandra.config.EncryptionOptions.TlsEncryptionPolicy.UNENCRYPTED;
import static org.apache.cassandra.net.FramingTest.randomishBytes;
import static org.apache.cassandra.transport.Flusher.MAX_FRAMED_PAYLOAD_SIZE;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
public class CQLConnectionTest
{
private static final Logger logger = LoggerFactory.getLogger(CQLConnectionTest.class);
private Random random;
private InetAddress address;
private int port;
private BufferPoolAllocator alloc;
    /**
     * Initializes DatabaseDescriptor with permissive auth, seeds the test RNG
     * (the seed is logged so failures can be reproduced), and reserves a free
     * TCP port for the server under test.
     */
    @Before
    public void setup()
    {
        DatabaseDescriptor.toolInitialization();
        DatabaseDescriptor.setAuthenticator(new AllowAllAuthenticator());
        DatabaseDescriptor.setAuthorizer(new AllowAllAuthorizer());
        DatabaseDescriptor.setNetworkAuthorizer(new AllowAllNetworkAuthorizer());
        long seed = new SecureRandom().nextLong();
        logger.info("seed: {}", seed);
        random = new Random(seed);
        address = InetAddress.getLoopbackAddress();
        try
        {
            // Grab an ephemeral port by opening and immediately closing a socket.
            // NOTE(review): the port could in principle be re-taken by another process
            // before the server binds it; the sleep presumably gives the closed socket
            // time to be fully released — TODO confirm.
            try (ServerSocket serverSocket = new ServerSocket(0))
            {
                port = serverSocket.getLocalPort();
            }
            Thread.sleep(250);
        }
        catch (Exception e)
        {
            throw new RuntimeException(e);
        }
        alloc = GlobalBufferPoolAllocator.instance;
        // set connection-local queue size to 0 so that all capacity is allocated from reserves
        DatabaseDescriptor.setNativeTransportReceiveQueueCapacityInBytes(0);
    }
    /**
     * Forces the client's protocol-version byte to an unsupported value (99) via the
     * inbound proxy and verifies the server rejects the connection with the expected
     * error message before any reserve capacity is allocated.
     */
    @Test
    public void handleErrorDuringNegotiation() throws Throwable
    {
        int messageCount = 0;
        Codec codec = Codec.crc(alloc);
        AllocationObserver observer = new AllocationObserver();
        InboundProxyHandler.Controller controller = new InboundProxyHandler.Controller();
        // Force protocol version to an unsupported version
        controller.withPayloadTransform(msg -> {
            ByteBuf bb = (ByteBuf)msg;
            bb.setByte(0, 99 & Envelope.PROTOCOL_VERSION_MASK);
            return msg;
        });
        ServerConfigurator configurator = ServerConfigurator.builder()
                                          .withAllocationObserver(observer)
                                          .withProxyController(controller)
                                          .build();
        Server server = server(configurator);
        Client client = new Client(codec, messageCount);
        server.start();
        client.connect(address, port);
        assertFalse(client.isConnected());
        assertThat(client.getConnectionError())
            .isNotNull()
            .matches(message ->
                     message.error.getMessage()
                            .equals("Invalid or unsupported protocol version (99); " +
                                    "supported versions are (3/v3, 4/v4, 5/v5-beta)"));
        server.stop();
        // the failure happens before any capacity is allocated
        observer.verifier().accept(0);
    }
@Test
public void handleCorruptionAfterNegotiation() throws Throwable
{
// A corrupt messaging frame should terminate the connection as clients
// generally don't track which stream IDs are present in the frame, and the
// server has no way to signal which streams are affected.
// Before closing, the server should send an ErrorMessage to inform the
// client of the corrupt message.
int messageCount = 10;
Codec codec = Codec.crc(alloc);
AllocationObserver observer = new AllocationObserver();
InboundProxyHandler.Controller controller = new InboundProxyHandler.Controller();
ServerConfigurator configurator = ServerConfigurator.builder()
.withAllocationObserver(observer)
.withProxyController(controller)
.build();
Server server = server(configurator);
Client client = new Client(codec, messageCount);
server.start();
client.connect(address, port);
assertTrue(client.isConnected());
// Only install the transform after protocol negotiation is complete
controller.withPayloadTransform(msg -> {
// Corrupt frame
ByteBuf bb = (ByteBuf) msg;
bb.setByte(bb.readableBytes() / 2, 0xffff);
return msg;
});
for (int i=0; i < messageCount; i++)
client.send(randomEnvelope(i, Message.Type.OPTIONS));
client.awaitResponses();
// Client has disconnected
assertFalse(client.isConnected());
// But before it did, it sent an error response
Envelope receieved = client.inboundMessages.poll();
assertNotNull(receieved);
Message.Response response = Message.responseDecoder().decode(client.channel, receieved);
assertEquals(Message.Type.ERROR, response.type);
assertTrue(((ErrorMessage)response).error.getMessage().contains("unrecoverable CRC mismatch detected in frame body"));
// the failure happens before any capacity is allocated
observer.verifier().accept(0);
server.stop();
}
    /**
     * Corrupts the second frame of a multi-frame (large) message so that capacity for the
     * whole message is acquired before the corruption is detected; verifies the server
     * sends an ERROR, disconnects, and that the acquired capacity is fully released.
     */
    @Test
    public void handleCorruptionOfLargeMessage() throws Throwable
    {
        // A corrupt messaging frame should terminate the connection as clients
        // generally don't track which stream IDs are present in the frame, and the
        // server has no way to signal which streams are affected.
        // Before closing, the server should send an ErrorMessage to inform the
        // client of the corrupt message.
        // Client needs to expect multiple responses or else awaitResponses returns
        // after the error is first received and we race between handling the exception
        // caused by remote disconnection and checking the connection status.
        int messageCount = 2;
        Codec codec = Codec.crc(alloc);
        AllocationObserver observer = new AllocationObserver();
        InboundProxyHandler.Controller controller = new InboundProxyHandler.Controller();
        ServerConfigurator configurator = ServerConfigurator.builder()
                                          .withAllocationObserver(observer)
                                          .withProxyController(controller)
                                          .build();
        Server server = server(configurator);
        Client client = new Client(codec, messageCount);
        server.start();
        client.connect(address, port);
        assertTrue(client.isConnected());
        // Only install the corrupting transform after protocol negotiation is complete
        controller.withPayloadTransform(new Function<Object, Object>()
        {
            // Don't corrupt the first frame as this would fail early and bypass capacity allocation.
            // Instead, allow enough bytes to fill the first frame through untouched. Then, corrupt
            // a byte which will be in the second frame of the large message.
            int seenBytes = 0;
            int corruptedByte = 0;
            public Object apply(Object o)
            {
                // If we've already injected some corruption, pass through
                if (corruptedByte > 0)
                    return o;
                // Will the current buffer size take us into the second frame? If so, corrupt it
                ByteBuf bb = (ByteBuf)o;
                if (seenBytes + bb.readableBytes() > MAX_FRAMED_PAYLOAD_SIZE + 100)
                {
                    int frameBoundary = MAX_FRAMED_PAYLOAD_SIZE - seenBytes;
                    corruptedByte = bb.readerIndex() + frameBoundary + 100;
                    bb.setByte(corruptedByte, 0xffff);
                }
                else
                {
                    seenBytes += bb.readableBytes();
                }
                return bb;
            }
        });
        int totalBytes = MAX_FRAMED_PAYLOAD_SIZE * 2;
        client.send(randomEnvelope(0, Message.Type.OPTIONS, totalBytes, totalBytes));
        client.awaitResponses();
        // Client has disconnected
        assertFalse(client.isConnected());
        // But before it did, it received an error response
        Envelope received = client.inboundMessages.poll();
        assertNotNull(received);
        Message.Response response = Message.responseDecoder().decode(client.channel, received);
        assertEquals(Message.Type.ERROR, response.type);
        assertTrue(((ErrorMessage)response).error.getMessage().contains("unrecoverable CRC mismatch detected in frame"));
        // total capacity is acquired when the first frame is read
        observer.verifier().accept(totalBytes);
    }
@Test
public void testAquireAndRelease()
{
acquireAndRelease(10, 100, Codec.crc(alloc));
acquireAndRelease(10, 100, Codec.lz4(alloc));
acquireAndRelease(100, 1000, Codec.crc(alloc));
acquireAndRelease(100, 1000, Codec.lz4(alloc));
acquireAndRelease(1000, 10000, Codec.crc(alloc));
acquireAndRelease(1000, 10000, Codec.lz4(alloc));
}
private void acquireAndRelease(int minMessages, int maxMessages, Codec codec)
{
final int messageCount = minMessages + random.nextInt(maxMessages - minMessages);
logger.info("Sending total of {} messages", messageCount);
TestConsumer consumer = new TestConsumer(new ResultMessage.Void(), codec.encoder);
AllocationObserver observer = new AllocationObserver();
Message.Decoder<Message.Request> decoder = new FixedDecoder();
Predicate<Envelope.Header> responseMatcher = h -> h.type == Message.Type.RESULT;
ServerConfigurator configurator = ServerConfigurator.builder()
.withConsumer(consumer)
.withAllocationObserver(observer)
.withDecoder(decoder)
.build();
runTest(configurator, codec, messageCount, responseMatcher, observer.verifier());
}
@Test
public void testMessageDecodingErrorEncounteredMidFrame()
{
messageDecodingErrorEncounteredMidFrame(10, Codec.crc(alloc));
messageDecodingErrorEncounteredMidFrame(10, Codec.lz4(alloc));
messageDecodingErrorEncounteredMidFrame(100, Codec.crc(alloc));
messageDecodingErrorEncounteredMidFrame(100, Codec.lz4(alloc));
messageDecodingErrorEncounteredMidFrame(1000, Codec.crc(alloc));
messageDecodingErrorEncounteredMidFrame(1000, Codec.lz4(alloc));
}
    /**
     * Sends {@code messageCount} messages through a decoder that throws for exactly one
     * stream id (the middle one); verifies that stream yields an ERROR response while all
     * other streams complete with RESULT, and that all reserve capacity is released.
     */
    private void messageDecodingErrorEncounteredMidFrame(int messageCount, Codec codec)
    {
        final int streamWithError = messageCount / 2;
        TestConsumer consumer = new TestConsumer(new ResultMessage.Void(), codec.encoder);
        AllocationObserver observer = new AllocationObserver();
        Message.Decoder<Message.Request> decoder = new FixedDecoder()
        {
            Message.Request decode(Channel channel, Envelope source)
            {
                if (source.header.streamId != streamWithError)
                    return super.decode(channel, source);
                // Anonymous subclass because RequestExecutionException is abstract here.
                throw new RequestExecutionException(ExceptionCode.SYNTAX_ERROR,
                                                    "Error decoding message " + source.header.streamId)
                {/*test exception*/};
            }
        };
        Predicate<Envelope.Header> responseMatcher =
            h -> (h.streamId == streamWithError && h.type == Message.Type.ERROR) || h.type == Message.Type.RESULT;
        ServerConfigurator configurator = ServerConfigurator.builder()
                                          .withConsumer(consumer)
                                          .withAllocationObserver(observer)
                                          .withDecoder(decoder)
                                          .build();
        runTest(configurator, codec, messageCount, responseMatcher, observer.verifier());
    }
    /**
     * Shared test driver: starts a server with the given configurator, connects a client,
     * sends {@code messageCount} randomized OPTIONS envelopes, then verifies every
     * response header satisfies {@code responseMatcher} and that resource accounting
     * matches the total bytes sent. Server and client are always stopped afterwards.
     */
    private void runTest(ServerConfigurator configurator,
                         Codec codec,
                         int messageCount,
                         Predicate<Envelope.Header> responseMatcher,
                         LongConsumer allocationVerifier)
    {
        Server server = server(configurator);
        Client client = new Client(codec, messageCount);
        try
        {
            server.start();
            client.connect(address, port);
            assertTrue(configurator.waitUntilReady());
            for (int i = 0; i < messageCount; i++)
                client.send(randomEnvelope(i, Message.Type.OPTIONS));
            long totalBytes = client.sendSize;
            // verify that all messages went through the pipeline & our test message consumer
            client.awaitResponses();
            Envelope response;
            while ((response = client.pollResponses()) != null)
            {
                // NOTE(review): the envelope is released before its header is inspected;
                // the header appears to remain readable after release — confirm.
                response.release();
                assertThat(response.header).matches(responseMatcher);
            }
            // verify that we did have to acquire some resources from the global/endpoint reserves
            allocationVerifier.accept(totalBytes);
        }
        catch (Throwable t)
        {
            logger.error("Unexpected error", t);
            throw new RuntimeException(t);
        }
        finally
        {
            client.stop();
            server.stop();
        }
    }
private Server server(ServerConfigurator configurator)
{
return new Server.Builder().withHost(address)
.withPort(port)
.withPipelineConfigurator(configurator)
.build();
}
    // Convenience overload: random envelope with a payload between 100 and 1024 bytes.
    private Envelope randomEnvelope(int streamId, Message.Type type)
    {
        return randomEnvelope(streamId, type, 100, 1024);
    }
    /**
     * Creates a v5 envelope of the given type and stream id whose body is random bytes
     * (drawn from the seeded test RNG) sized between minSize and maxSize.
     */
    private Envelope randomEnvelope(int streamId, Message.Type type, int minSize, int maxSize)
    {
        byte[] bytes = randomishBytes(random, minSize, maxSize);
        return Envelope.create(type,
                               streamId,
                               ProtocolVersion.V5,
                               EnumSet.of(Envelope.Header.Flag.USE_BETA),
                               Unpooled.wrappedBuffer(bytes));
    }
// Every CQL Envelope received will be parsed as an OptionsMessage, which is trivial to execute
// on the server. This means we can randomise the actual content of the CQL messages to test
// resource allocation/release (which is based purely on request size), without having to
// worry about processing of the actual messages.
static class FixedDecoder extends Message.Decoder<Message.Request>
{
Message.Request decode(Channel channel, Envelope source)
{
Message.Request request = new OptionsMessage();
request.setSource(source);
request.setStreamId(source.header.streamId);
return request;
}
}
// A simple consumer which "serves" a static response and employs a naive flusher
    static class TestConsumer implements MessageConsumer<Message.Request>
    {
        final Message.Response fixedResponse;
        // fixedResponse pre-encoded once at construction; per-request responses are
        // cloned from it with the request's stream id substituted.
        final Envelope responseTemplate;
        final FrameEncoder frameEncoder;
        SimpleClient.SimpleFlusher flusher;
        TestConsumer(Message.Response fixedResponse, FrameEncoder frameEncoder)
        {
            this.fixedResponse = fixedResponse;
            this.responseTemplate = fixedResponse.encode(ProtocolVersion.V5);
            this.frameEncoder = frameEncoder;
        }
        public void accept(Channel channel, Message.Request message, Dispatcher.FlushItemConverter toFlushItem)
        {
            // Lazily create the flusher on first use.
            // NOTE(review): this check-then-create is not thread-safe if accept() can run
            // concurrently — TODO confirm dispatch is single-threaded per connection.
            if (flusher == null)
                flusher = new SimpleClient.SimpleFlusher(frameEncoder);
            // The converter's flush item is not used for writing; release it so its
            // resources are returned.
            Flusher.FlushItem.Framed item = (Flusher.FlushItem.Framed)toFlushItem.toFlushItem(channel, message, fixedResponse);
            Envelope response = Envelope.create(responseTemplate.header.type,
                                                message.getStreamId(),
                                                ProtocolVersion.V5,
                                                responseTemplate.header.flags,
                                                responseTemplate.body.copy());
            item.release();
            flusher.enqueue(response);
            // Schedule the proto-flusher to collate any messages to be served
            // and flush them to the outbound pipeline
            flusher.schedule(channel.pipeline().lastContext());
        }
    }
    // PipelineConfigurator specialization that lets tests inject a message consumer,
    // message decoder, allocation observer and inbound proxy handler, and that signals
    // when the pipeline has completed protocol negotiation.
    static class ServerConfigurator extends PipelineConfigurator
    {
        // Signalled by onNegotiationComplete(); awaited by waitUntilReady().
        private final SimpleCondition pipelineReady = new SimpleCondition();
        private final MessageConsumer<Message.Request> consumer;
        private final AllocationObserver allocationObserver;
        private final Message.Decoder<Message.Request> decoder;
        private final InboundProxyHandler.Controller proxyController;
        public ServerConfigurator(Builder builder)
        {
            super(NativeTransportService.useEpoll(), false, false, UNENCRYPTED);
            this.consumer = builder.consumer;
            this.decoder = builder.decoder;
            this.allocationObserver = builder.observer;
            this.proxyController = builder.proxyController;
        }
        static Builder builder()
        {
            return new Builder();
        }
        // Fluent builder; every part is optional and falls back to superclass behavior.
        static class Builder
        {
            MessageConsumer<Message.Request> consumer;
            AllocationObserver observer;
            Message.Decoder<Message.Request> decoder;
            InboundProxyHandler.Controller proxyController;
            Builder withConsumer(MessageConsumer<Message.Request> consumer)
            {
                this.consumer = consumer;
                return this;
            }
            Builder withDecoder(Message.Decoder<Message.Request> decoder)
            {
                this.decoder = decoder;
                return this;
            }
            Builder withAllocationObserver(AllocationObserver observer)
            {
                this.observer = observer;
                return this;
            }
            Builder withProxyController(InboundProxyHandler.Controller proxyController)
            {
                this.proxyController = proxyController;
                return this;
            }
            ServerConfigurator build()
            {
                return new ServerConfigurator(this);
            }
        }
        // Use the injected decoder when present, otherwise the default one.
        protected Message.Decoder<Message.Request> messageDecoder()
        {
            return decoder == null ? super.messageDecoder() : decoder;
        }
        // Install the proxy handler (if configured) at the very front of the pipeline
        // so it can transform raw inbound bytes before any protocol handling.
        protected void onInitialPipelineReady(ChannelPipeline pipeline)
        {
            if (proxyController != null)
            {
                InboundProxyHandler proxy = new InboundProxyHandler(proxyController);
                pipeline.addFirst("PROXY", proxy);
            }
        }
        protected void onNegotiationComplete(ChannelPipeline pipeline)
        {
            pipelineReady.signalAll();
        }
        // Returns true if negotiation completed within 10 seconds.
        private boolean waitUntilReady() throws InterruptedException
        {
            return pipelineReady.await(10, TimeUnit.SECONDS);
        }
        // Wraps the default resource provider so the observer can instrument the global
        // and endpoint limits; wait queues and release() are passed straight through.
        protected ClientResourceLimits.ResourceProvider resourceProvider(ClientResourceLimits.Allocator limits)
        {
            final ClientResourceLimits.ResourceProvider.Default delegate =
                new ClientResourceLimits.ResourceProvider.Default(limits);
            if (null == allocationObserver)
                return delegate;
            return new ClientResourceLimits.ResourceProvider()
            {
                public ResourceLimits.Limit globalLimit()
                {
                    return allocationObserver.global(delegate.globalLimit());
                }
                public AbstractMessageHandler.WaitQueue globalWaitQueue()
                {
                    return delegate.globalWaitQueue();
                }
                public ResourceLimits.Limit endpointLimit()
                {
                    return allocationObserver.endpoint(delegate.endpointLimit());
                }
                public AbstractMessageHandler.WaitQueue endpointWaitQueue()
                {
                    return delegate.endpointWaitQueue();
                }
                public void release()
                {
                    delegate.release();
                }
            };
        }
        // Use the injected consumer when present, otherwise the default one.
        protected MessageConsumer<Message.Request> messageConsumer()
        {
            return consumer == null ? super.messageConsumer() : consumer;
        }
    }
    // Records cumulative allocation/release totals against the endpoint and global
    // resource limits, and provides a verifier asserting full acquire/release symmetry.
    static class AllocationObserver
    {
        volatile InstrumentedLimit endpoint;
        volatile InstrumentedLimit global;
        long endpointAllocationTotal()
        {
            return endpoint == null ? 0 : endpoint.totalAllocated.get();
        }
        long endpointReleaseTotal()
        {
            return endpoint == null ? 0 : endpoint.totalReleased.get();
        }
        long globalAllocationTotal()
        {
            return global == null ? 0 : global.totalAllocated.get();
        }
        long globalReleaseTotal()
        {
            return global == null ? 0 : global.totalReleased.get();
        }
        // Lazily wrap the real limit exactly once; synchronized to guard the
        // check-then-create against concurrent callers.
        synchronized InstrumentedLimit endpoint(ResourceLimits.Limit delegate)
        {
            if (endpoint == null)
                endpoint = new InstrumentedLimit(delegate);
            return endpoint;
        }
        synchronized InstrumentedLimit global(ResourceLimits.Limit delegate)
        {
            if (global == null)
                global = new InstrumentedLimit(delegate);
            return global;
        }
        // Returns a consumer that asserts exactly totalBytes was acquired and released
        // against both reserves, and that no capacity remains outstanding locally.
        LongConsumer verifier()
        {
            return totalBytes -> {
                // verify that we did have to acquire some resources from the global/endpoint reserves
                assertThat(endpointAllocationTotal()).isEqualTo(totalBytes);
                assertThat(globalAllocationTotal()).isEqualTo(totalBytes);
                // and that we released it all
                assertThat(endpointReleaseTotal()).isEqualTo(totalBytes);
                assertThat(globalReleaseTotal()).isEqualTo(totalBytes);
                // assert that we definitely have no outstanding resources acquired from the reserves
                ClientResourceLimits.Allocator tracker =
                    ClientResourceLimits.getAllocatorForEndpoint(FBUtilities.getJustLocalAddress());
                assertThat(tracker.endpointUsing()).isEqualTo(0);
                assertThat(tracker.globallyUsing()).isEqualTo(0);
            };
        }
    }
    // A ResourceLimits.Limit wrapper that records running totals of bytes passed to
    // tryAllocate() and release() so tests can verify capacity accounting.
    static class InstrumentedLimit extends DelegatingLimit
    {
        // NOTE(review): totalAllocated is incremented even when super.tryAllocate()
        // returns false; the assertions only line up if allocation never fails in
        // these tests — confirm before reusing where allocation can fail.
        AtomicLong totalAllocated = new AtomicLong(0);
        AtomicLong totalReleased = new AtomicLong(0);
        InstrumentedLimit(ResourceLimits.Limit wrapped)
        {
            super(wrapped);
        }
        public boolean tryAllocate(long amount)
        {
            totalAllocated.addAndGet(amount);
            return super.tryAllocate(amount);
        }
        public ResourceLimits.Outcome release(long amount)
        {
            totalReleased.addAndGet(amount);
            return super.release(amount);
        }
    }
static class DelegatingLimit implements ResourceLimits.Limit
{
private final ResourceLimits.Limit wrapped;
DelegatingLimit(ResourceLimits.Limit wrapped)
{
this.wrapped = wrapped;
}
public long limit()
{
return wrapped.limit();
}
public long setLimit(long newLimit)
{
return wrapped.setLimit(newLimit);
}
public long remaining()
{
return wrapped.remaining();
}
public long using()
{
return wrapped.using();
}
public boolean tryAllocate(long amount)
{
return wrapped.tryAllocate(amount);
}
public void allocate(long amount)
{
wrapped.allocate(amount);
}
public ResourceLimits.Outcome release(long amount)
{
return wrapped.release(amount);
}
}
/** Pairs an outbound frame encoder with its matching inbound frame decoder. */
static class Codec
{
    final FrameEncoder encoder;
    final FrameDecoder decoder;

    Codec(FrameEncoder encoder, FrameDecoder decoder)
    {
        this.decoder = decoder;
        this.encoder = encoder;
    }

    /** LZ4-compressed framing, using the fast compressor variant. */
    static Codec lz4(BufferPoolAllocator alloc)
    {
        return new Codec(FrameEncoderLZ4.fastInstance, FrameDecoderLZ4.fast(alloc));
    }

    /** Uncompressed CRC-protected framing. */
    static Codec crc(BufferPoolAllocator alloc)
    {
        return new Codec(FrameEncoderCrc.instance, new FrameDecoderCrc(alloc));
    }
}
/**
 * Minimal test client: performs the native-protocol STARTUP/READY handshake,
 * then rewires its Netty pipeline so the test can exchange raw messaging
 * frames with the server directly, bypassing CQL message encoding.
 */
static class Client
{
    private final Codec codec;
    private Channel channel;
    final int expectedResponses;
    // Counted down once per decoded inbound envelope; awaitResponses() blocks on it.
    final CountDownLatch responsesReceived;
    private volatile boolean connected = false;
    final Queue<Envelope> inboundMessages = new LinkedBlockingQueue<>();
    // Running total of body bytes enqueued via send().
    long sendSize = 0;
    SimpleClient.SimpleFlusher flusher;
    // Set if the server answers the handshake with an ERROR message.
    ErrorMessage connectionError;
    // Set if the connection drops with an IOException after being established.
    Throwable disconnectionError;

    Client(Codec codec, int expectedResponses)
    {
        this.codec = codec;
        this.expectedResponses = expectedResponses;
        this.responsesReceived = new CountDownLatch(expectedResponses);
        flusher = new SimpleClient.SimpleFlusher(codec.encoder);
    }

    /**
     * Connects to the server, sends STARTUP, and on READY replaces the CQL
     * codec handlers with the raw frame encoder/decoder from {@link Codec}.
     * Throws if the connection or handshake does not complete within 10s.
     */
    private void connect(InetAddress address, int port) throws IOException, InterruptedException
    {
        // Released either when the pipeline rewire completes or on handshake error.
        final CountDownLatch ready = new CountDownLatch(1);
        Bootstrap bootstrap = new Bootstrap()
                              .group(new NioEventLoopGroup(0, new NamedThreadFactory("TEST-CLIENT")))
                              .channel(io.netty.channel.socket.nio.NioSocketChannel.class)
                              .option(ChannelOption.TCP_NODELAY, true);
        bootstrap.handler(new ChannelInitializer<Channel>()
        {
            protected void initChannel(Channel channel) throws Exception
            {
                BufferPoolAllocator allocator = GlobalBufferPoolAllocator.instance;
                channel.config().setOption(ChannelOption.ALLOCATOR, allocator);
                ChannelPipeline pipeline = channel.pipeline();
                // Outbound handlers to enable us to send the initial STARTUP
                pipeline.addLast("envelopeEncoder", Envelope.Encoder.instance);
                pipeline.addLast("messageEncoder", PreV5Handlers.ProtocolEncoder.instance);
                pipeline.addLast("envelopeDecoder", new Envelope.Decoder());
                // Inbound handler to perform the handshake & modify the pipeline on receipt of a READY
                pipeline.addLast("handshake", new MessageToMessageDecoder<Envelope>()
                {
                    final Envelope.Decoder decoder = new Envelope.Decoder();

                    protected void decode(ChannelHandlerContext ctx, Envelope msg, List<Object> out) throws Exception
                    {
                        // Handle ERROR responses during initial connection and protocol negotiation
                        if (msg.header.type == Message.Type.ERROR)
                        {
                            connectionError = (ErrorMessage) Message.responseDecoder()
                                                                    .decode(ctx.channel(), msg);
                            msg.release();
                            logger.info("ERROR");
                            stop();
                            ready.countDown();
                            return;
                        }

                        // As soon as we receive a READY message, modify the pipeline
                        assert msg.header.type == Message.Type.READY;
                        msg.release();

                        // just split the messaging into cql messages and stash them for verification
                        FrameDecoder.FrameProcessor processor = frame -> {
                            if (frame instanceof FrameDecoder.IntactFrame)
                            {
                                ByteBuffer bytes = ((FrameDecoder.IntactFrame) frame).contents.get();
                                while (bytes.hasRemaining())
                                {
                                    ByteBuf buffer = Unpooled.wrappedBuffer(bytes);
                                    try
                                    {
                                        inboundMessages.add(decoder.decode(buffer));
                                        responsesReceived.countDown();
                                    }
                                    catch (Exception e)
                                    {
                                        throw new IOException(e);
                                    }
                                    // Advance past however many bytes the decoder consumed.
                                    bytes.position(bytes.position() + buffer.readerIndex());
                                }
                            }
                            return true;
                        };

                        // for testing purposes, don't actually encode CQL messages,
                        // we supply messaging frames directly to this client
                        channel.pipeline().remove("envelopeEncoder");
                        channel.pipeline().remove("messageEncoder");
                        channel.pipeline().remove("envelopeDecoder");
                        // replace this handshake handler with an inbound message frame decoder
                        channel.pipeline().replace(this, "frameDecoder", codec.decoder);
                        // add an outbound message frame encoder
                        channel.pipeline().addLast("frameEncoder", codec.encoder);
                        channel.pipeline().addLast("errorHandler", new ChannelInboundHandlerAdapter()
                        {
                            @Override
                            public void exceptionCaught(final ChannelHandlerContext ctx, Throwable cause) throws Exception
                            {
                                // if the connection is closed finish early as
                                // we don't want to wait for expected responses
                                if (cause instanceof IOException)
                                {
                                    connected = false;
                                    disconnectionError = cause;
                                    // Drain the latch so awaitResponses() unblocks immediately.
                                    int remaining = (int) responsesReceived.getCount();
                                    for (int i = 0; i < remaining; i++)
                                        responsesReceived.countDown();
                                }
                            }
                        });
                        codec.decoder.activate(processor);
                        connected = true;
                        // Schedule the proto-flusher to collate any messages that have been
                        // written, via enqueue(Envelope message), and flush them to the outbound pipeline
                        flusher.schedule(channel.pipeline().lastContext());
                        ready.countDown();
                    }
                });
            }
        });

        ChannelFuture future = bootstrap.connect(address, port);
        // Wait until the connection attempt succeeds or fails.
        channel = future.awaitUninterruptibly().channel();
        if (!future.isSuccess())
        {
            bootstrap.group().shutdownGracefully();
            throw new IOException("Connection Error", future.cause());
        }

        // Send an initial STARTUP message to kick off the handshake with the server
        Map<String, String> options = new HashMap<>();
        options.put(StartupMessage.CQL_VERSION, QueryProcessor.CQL_VERSION.toString());
        if (codec.encoder instanceof FrameEncoderLZ4)
            options.put(StartupMessage.COMPRESSION, "LZ4");
        Connection connection = new Connection(channel, ProtocolVersion.V5, (ch, connection1) -> {});
        channel.attr(Connection.attributeKey).set(connection);
        channel.writeAndFlush(new StartupMessage(options)).sync();
        if (!ready.await(10, TimeUnit.SECONDS))
            throw new RuntimeException("Failed to establish client connection in 10s");
    }

    /** Enqueues a pre-built envelope for flushing and tracks its body size. */
    void send(Envelope request)
    {
        flusher.enqueue(request);
        sendSize += request.header.bodySizeInBytes;
    }

    /** Blocks until all expected responses arrive, failing the test after 10s. */
    private void awaitResponses() throws InterruptedException
    {
        if (!responsesReceived.await(10, TimeUnit.SECONDS))
        {
            fail(String.format("Didn't receive all responses, expected %d, actual %d",
                               expectedResponses,
                               inboundMessages.size()));
        }
    }

    private boolean isConnected()
    {
        return connected;
    }

    private ErrorMessage getConnectionError()
    {
        return connectionError;
    }

    private Envelope pollResponses()
    {
        return inboundMessages.poll();
    }

    /** Closes the channel and releases every buffered outbound/inbound envelope. */
    private void stop()
    {
        if (channel != null && channel.isOpen())
            channel.close().awaitUninterruptibly();
        flusher.releaseAll();
        Envelope f;
        while ((f = inboundMessages.poll()) != null)
            f.release();
    }
}
}
| |
package edu.berkeley.MetadataRepo;
import com.mongodb.BasicDBObject;
import com.mongodb.DBCursor;
import com.mongodb.MongoClient;
import com.mongodb.client.FindIterable;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoCursor;
import com.mongodb.client.MongoDatabase;
import org.bson.Document;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.DBObject;
import com.mongodb.Mongo;
import com.mongodb.MongoException;
import java.net.UnknownHostException;
import java.util.Calendar;
import java.util.List;
/**
* A class that connects to a MongoDB database and handles all interactions with the database
*/
/**
 * A class that connects to a MongoDB database and handles all interactions
 * with the database. Each "namespace" maps to a MongoDB collection; each file
 * is stored as one document holding a "metadata" array of timestamped entries.
 */
public class MetadataRepo
{
    /** Reserved key injected into every metadata entry to record its commit time. */
    private static final String TIMESTAMP = "__timestamp__";

    /** One day in milliseconds, used to widen a MM/dd/yy date into a query window. */
    private static final long ONE_DAY_MS = 24L * 60 * 60 * 1000;

    private final MongoClient mongoClient;
    private final MongoDatabase database;

    public MetadataRepo(String address)
    {
        mongoClient = new MongoClient(address);
        database = mongoClient.getDatabase("MetadataRepo");
    }

    /**
     * Parses and dispatches one space-separated command string.
     * Supported: commit, dump, show, find, clear. Malformed input is reported
     * on stdout rather than thrown, keeping the caller's loop alive.
     */
    public void execute(String command)
    {
        if (command.length() == 0)
            return;

        String[] cmds = command.split(" ");
        String act = cmds[0];
        try
        {
            if (act.equals("commit"))
            {
                commit(cmds[1], cmds[2], cmds[3], Long.parseLong(cmds[4]));
            }
            else if (act.equals("dump"))
            {
                dump();
            }
            else if (act.equals("show"))
            {
                // TODO: accept an optional time argument, as 'find' does, so users
                // can view a file's metadata as far back in time as they wish.
                show(cmds[1], cmds[2]);
            }
            else if (act.equals("find"))
            {
                // The time filter is optional; "None" means no time restriction.
                if (cmds.length == 4)
                    find(cmds[1], cmds[2], cmds[3]);
                else
                    find(cmds[1], cmds[2], "None");
            }
            else if (act.equals("clear"))
            {
                clear(cmds[1]);
            }
            else
            {
                System.out.println("Error: Unrecognized command");
            }
        }
        catch (Exception e)
        {
            System.out.println("Error: Syntax error in command");
            System.out.println(e.toString());
        }
    }

    /** Prints every document in every namespace (collection) of the repository. */
    public void dump()
    {
        for (String namespace : database.listCollectionNames()) {
            // Skip MongoDB's internal index bookkeeping pseudo-collection.
            if (namespace.equals("system.indexes"))
                continue;
            MongoCollection<Document> collection = database.getCollection(namespace);
            System.out.println("=======================================================================");
            System.out.println("Namespace: " + namespace);
            System.out.println("-----------------------------------------------------------------------");
            for (Document d : collection.find())
                System.out.println(d.toJson());
            System.out.println("=======================================================================");
        }
    }

    /**
     * Appends a timestamped metadata entry to the named file's document in the
     * given namespace, creating the document on first commit.
     *
     * @param jsonMetadata the metadata entry as a JSON object string
     * @param timestamp    commit time in epoch milliseconds
     */
    public void commit(String namespace, String file, String jsonMetadata, long timestamp)
    {
        MongoCollection<Document> collection = database.getCollection(namespace);

        Document metadata = Document.parse(jsonMetadata);
        metadata.append(TIMESTAMP, new Date(timestamp));

        // 'file' is treated as a unique key, so at most one document matches.
        // first() avoids opening two cursors the way repeated iterator() calls would.
        Document fdoc = new Document("file", file);
        Document existing = collection.find(fdoc).first();
        if (existing != null)
        {
            // Append the new entry to the existing history and write it back.
            ArrayList<Document> metadataList = (ArrayList<Document>) existing.get("metadata");
            metadataList.add(metadata);
            collection.updateOne(fdoc, new Document("$set", new Document("metadata", metadataList)));
        }
        else
        {
            // First commit for this file: create its document with a one-entry history.
            ArrayList<Document> metadataList = new ArrayList<Document>();
            metadataList.add(metadata);
            Document doc = new Document();
            doc.append("file", file);
            doc.append("metadata", metadataList);
            collection.insertOne(doc);
        }
        System.out.println("Committed '" + file + "' to namespace '" + namespace + "'");
    }

    /**
     * Prints the stored document for the given file in the given namespace.
     * TODO: support a time parameter (as 'find' does) so users can query the
     * file's metadata as it existed at an earlier point in time.
     */
    public void show(String namespace, String file)
    {
        MongoCollection<Document> collection = database.getCollection(namespace);
        System.out.println("=======================================================================");
        System.out.println("Namespace: " + namespace);
        System.out.println("-----------------------------------------------------------------------");
        // Fetch at most one document; 'file' is treated as a unique key, and
        // first() avoids the double-cursor of calling iterator() twice.
        Document found = collection.find(new Document("file", file)).first();
        if (found != null) {
            System.out.println(found);
        }
        // TODO: ask MongoDB for only the most recent metadata entry (e.g. via a
        // projection) instead of fetching and printing the whole history.
        System.out.println("=======================================================================");
    }

    /**
     * Searches a namespace for metadata entries matching keyword "key=value".
     * A value of "*" matches any entry containing the key. If {@code time} is
     * not "None" it must be in MM/dd/yy format and restricts matches to
     * entries committed within that day.
     * Assumes the metadata is at most one degree nested.
     */
    public void find(String namespace, String keyword, String time)
    {
        boolean checkTime = false;
        long startTime = 0;
        long endTime = 0;
        if (!time.equals("None")) {
            checkTime = true;
            // SimpleDateFormat is not thread-safe, so a local instance is used.
            SimpleDateFormat sdf = new SimpleDateFormat("MM/dd/yy");
            Date date;
            try {
                date = sdf.parse(time);
            } catch (ParseException e) {
                System.out.println("Time should be in MM/dd/yy format.");
                return;
            }
            startTime = date.getTime();
            endTime = startTime + ONE_DAY_MS;
        }

        String[] temp = keyword.split("=");
        String key = temp[0];
        String value = temp[1];
        boolean wildcard = value.equals("*");

        // Build a single $elemMatch query over the metadata array, optionally
        // bounded by the (startTime, endTime] commit-time window.
        BasicDBObject elemMatch = new BasicDBObject();
        if (checkTime)
            elemMatch.append(TIMESTAMP, new BasicDBObject("$gt", new Date(startTime))
                                            .append("$lte", new Date(endTime)));
        if (wildcard)
            elemMatch.append(key, new BasicDBObject("$exists", true));
        else
            elemMatch.append(key, value);
        BasicDBObject query = new BasicDBObject("metadata", new BasicDBObject("$elemMatch", elemMatch));

        MongoCollection<Document> collection = database.getCollection(namespace);
        int count = 0;
        System.out.println("=======================================================================");
        System.out.println("Namespace: " + namespace);
        System.out.println("-----------------------------------------------------------------------");
        // try-with-resources ensures the server-side cursor is always closed.
        try (MongoCursor<Document> cursor = collection.find(query).iterator()) {
            while (cursor.hasNext()) {
                Document d = cursor.next();
                ArrayList<Document> metadataList = (ArrayList<Document>) d.get("metadata");
                String fileName = (String) d.get("file");
                boolean printedFileHeader = false;
                for (Document entry : metadataList) {
                    Object v = entry.get(key);
                    if (v == null)
                        continue;
                    // Match on wildcard, exact value, or membership in a list-valued field.
                    boolean matches = wildcard
                                   || v.equals(value)
                                   || (v instanceof ArrayList && ((ArrayList<String>) v).contains(value));
                    if (!matches)
                        continue;
                    // Re-check the timestamp per entry: $elemMatch only guarantees
                    // SOME entry in the document matched, not this particular one.
                    // (Only read the timestamp when filtering, avoiding an NPE on
                    // entries that lack it when no time filter was requested.)
                    if (checkTime) {
                        long compTime = ((Date) entry.get(TIMESTAMP)).getTime();
                        if (compTime <= startTime || compTime > endTime)
                            continue;
                    }
                    if (!printedFileHeader) {
                        printedFileHeader = true;
                        System.out.println("In file " + fileName + ":");
                    }
                    System.out.println(entry.toJson());
                    count++;
                }
            }
        }
        System.out.println(count + " records found.");
        System.out.println("=======================================================================");
    }

    /** Drops the entire collection backing the given namespace. */
    public void clear(String namespace)
    {
        MongoCollection<Document> collection = database.getCollection(namespace);
        collection.drop();
        System.out.println("Repo " + namespace + " has been cleared");
    }
}
| |
/*
* Swaggy Jenkins
* Jenkins API clients generated from Swagger / Open API specification
*
* The version of the OpenAPI document: 1.1.2-pre.0
* Contact: blah@cliffano.com
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
package com.cliffano.swaggyjenkins.model;
import java.util.Objects;
import java.util.Arrays;
import com.cliffano.swaggyjenkins.model.CauseAction;
import com.cliffano.swaggyjenkins.model.FreeStyleBuild;
import com.cliffano.swaggyjenkins.model.FreeStyleProject;
import com.google.gson.TypeAdapter;
import com.google.gson.annotations.JsonAdapter;
import com.google.gson.annotations.SerializedName;
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonWriter;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
* QueueLeftItem
*/
// NOTE: auto-generated by OpenAPI Generator ("Do not edit the class manually");
// code left untouched, comments only. Wire names come from @SerializedName.
@javax.annotation.Generated(value = "org.openapitools.codegen.languages.JavaClientCodegen", date = "2022-02-13T02:15:08.056011Z[Etc/UTC]")
public class QueueLeftItem {
  public static final String SERIALIZED_NAME_PROPERTY_CLASS = "_class";
  @SerializedName(SERIALIZED_NAME_PROPERTY_CLASS)
  private String propertyClass;

  public static final String SERIALIZED_NAME_ACTIONS = "actions";
  @SerializedName(SERIALIZED_NAME_ACTIONS)
  private List<CauseAction> actions = null;

  public static final String SERIALIZED_NAME_BLOCKED = "blocked";
  @SerializedName(SERIALIZED_NAME_BLOCKED)
  private Boolean blocked;

  public static final String SERIALIZED_NAME_BUILDABLE = "buildable";
  @SerializedName(SERIALIZED_NAME_BUILDABLE)
  private Boolean buildable;

  public static final String SERIALIZED_NAME_ID = "id";
  @SerializedName(SERIALIZED_NAME_ID)
  private Integer id;

  public static final String SERIALIZED_NAME_IN_QUEUE_SINCE = "inQueueSince";
  @SerializedName(SERIALIZED_NAME_IN_QUEUE_SINCE)
  private Integer inQueueSince;

  public static final String SERIALIZED_NAME_PARAMS = "params";
  @SerializedName(SERIALIZED_NAME_PARAMS)
  private String params;

  public static final String SERIALIZED_NAME_STUCK = "stuck";
  @SerializedName(SERIALIZED_NAME_STUCK)
  private Boolean stuck;

  public static final String SERIALIZED_NAME_TASK = "task";
  @SerializedName(SERIALIZED_NAME_TASK)
  private FreeStyleProject task;

  public static final String SERIALIZED_NAME_URL = "url";
  @SerializedName(SERIALIZED_NAME_URL)
  private String url;

  public static final String SERIALIZED_NAME_WHY = "why";
  @SerializedName(SERIALIZED_NAME_WHY)
  private String why;

  public static final String SERIALIZED_NAME_CANCELLED = "cancelled";
  @SerializedName(SERIALIZED_NAME_CANCELLED)
  private Boolean cancelled;

  public static final String SERIALIZED_NAME_EXECUTABLE = "executable";
  @SerializedName(SERIALIZED_NAME_EXECUTABLE)
  private FreeStyleBuild executable;

  public QueueLeftItem() {
  }

  // Fluent setter: assigns and returns this, allowing chained construction.
  public QueueLeftItem propertyClass(String propertyClass) {
    this.propertyClass = propertyClass;
    return this;
  }

  /**
   * Get propertyClass
   * @return propertyClass
   **/
  @javax.annotation.Nullable
  @ApiModelProperty(value = "")
  public String getPropertyClass() {
    return propertyClass;
  }

  public void setPropertyClass(String propertyClass) {
    this.propertyClass = propertyClass;
  }

  public QueueLeftItem actions(List<CauseAction> actions) {
    this.actions = actions;
    return this;
  }

  // Appends one action, lazily creating the backing list on first use.
  public QueueLeftItem addActionsItem(CauseAction actionsItem) {
    if (this.actions == null) {
      this.actions = new ArrayList<CauseAction>();
    }
    this.actions.add(actionsItem);
    return this;
  }

  /**
   * Get actions
   * @return actions
   **/
  @javax.annotation.Nullable
  @ApiModelProperty(value = "")
  public List<CauseAction> getActions() {
    return actions;
  }

  public void setActions(List<CauseAction> actions) {
    this.actions = actions;
  }

  public QueueLeftItem blocked(Boolean blocked) {
    this.blocked = blocked;
    return this;
  }

  /**
   * Get blocked
   * @return blocked
   **/
  @javax.annotation.Nullable
  @ApiModelProperty(value = "")
  public Boolean getBlocked() {
    return blocked;
  }

  public void setBlocked(Boolean blocked) {
    this.blocked = blocked;
  }

  public QueueLeftItem buildable(Boolean buildable) {
    this.buildable = buildable;
    return this;
  }

  /**
   * Get buildable
   * @return buildable
   **/
  @javax.annotation.Nullable
  @ApiModelProperty(value = "")
  public Boolean getBuildable() {
    return buildable;
  }

  public void setBuildable(Boolean buildable) {
    this.buildable = buildable;
  }

  public QueueLeftItem id(Integer id) {
    this.id = id;
    return this;
  }

  /**
   * Get id
   * @return id
   **/
  @javax.annotation.Nullable
  @ApiModelProperty(value = "")
  public Integer getId() {
    return id;
  }

  public void setId(Integer id) {
    this.id = id;
  }

  public QueueLeftItem inQueueSince(Integer inQueueSince) {
    this.inQueueSince = inQueueSince;
    return this;
  }

  /**
   * Get inQueueSince
   * @return inQueueSince
   **/
  @javax.annotation.Nullable
  @ApiModelProperty(value = "")
  public Integer getInQueueSince() {
    return inQueueSince;
  }

  public void setInQueueSince(Integer inQueueSince) {
    this.inQueueSince = inQueueSince;
  }

  public QueueLeftItem params(String params) {
    this.params = params;
    return this;
  }

  /**
   * Get params
   * @return params
   **/
  @javax.annotation.Nullable
  @ApiModelProperty(value = "")
  public String getParams() {
    return params;
  }

  public void setParams(String params) {
    this.params = params;
  }

  public QueueLeftItem stuck(Boolean stuck) {
    this.stuck = stuck;
    return this;
  }

  /**
   * Get stuck
   * @return stuck
   **/
  @javax.annotation.Nullable
  @ApiModelProperty(value = "")
  public Boolean getStuck() {
    return stuck;
  }

  public void setStuck(Boolean stuck) {
    this.stuck = stuck;
  }

  public QueueLeftItem task(FreeStyleProject task) {
    this.task = task;
    return this;
  }

  /**
   * Get task
   * @return task
   **/
  @javax.annotation.Nullable
  @ApiModelProperty(value = "")
  public FreeStyleProject getTask() {
    return task;
  }

  public void setTask(FreeStyleProject task) {
    this.task = task;
  }

  public QueueLeftItem url(String url) {
    this.url = url;
    return this;
  }

  /**
   * Get url
   * @return url
   **/
  @javax.annotation.Nullable
  @ApiModelProperty(value = "")
  public String getUrl() {
    return url;
  }

  public void setUrl(String url) {
    this.url = url;
  }

  public QueueLeftItem why(String why) {
    this.why = why;
    return this;
  }

  /**
   * Get why
   * @return why
   **/
  @javax.annotation.Nullable
  @ApiModelProperty(value = "")
  public String getWhy() {
    return why;
  }

  public void setWhy(String why) {
    this.why = why;
  }

  public QueueLeftItem cancelled(Boolean cancelled) {
    this.cancelled = cancelled;
    return this;
  }

  /**
   * Get cancelled
   * @return cancelled
   **/
  @javax.annotation.Nullable
  @ApiModelProperty(value = "")
  public Boolean getCancelled() {
    return cancelled;
  }

  public void setCancelled(Boolean cancelled) {
    this.cancelled = cancelled;
  }

  public QueueLeftItem executable(FreeStyleBuild executable) {
    this.executable = executable;
    return this;
  }

  /**
   * Get executable
   * @return executable
   **/
  @javax.annotation.Nullable
  @ApiModelProperty(value = "")
  public FreeStyleBuild getExecutable() {
    return executable;
  }

  public void setExecutable(FreeStyleBuild executable) {
    this.executable = executable;
  }

  // Field-by-field equality over all serialized properties.
  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    QueueLeftItem queueLeftItem = (QueueLeftItem) o;
    return Objects.equals(this.propertyClass, queueLeftItem.propertyClass) &&
        Objects.equals(this.actions, queueLeftItem.actions) &&
        Objects.equals(this.blocked, queueLeftItem.blocked) &&
        Objects.equals(this.buildable, queueLeftItem.buildable) &&
        Objects.equals(this.id, queueLeftItem.id) &&
        Objects.equals(this.inQueueSince, queueLeftItem.inQueueSince) &&
        Objects.equals(this.params, queueLeftItem.params) &&
        Objects.equals(this.stuck, queueLeftItem.stuck) &&
        Objects.equals(this.task, queueLeftItem.task) &&
        Objects.equals(this.url, queueLeftItem.url) &&
        Objects.equals(this.why, queueLeftItem.why) &&
        Objects.equals(this.cancelled, queueLeftItem.cancelled) &&
        Objects.equals(this.executable, queueLeftItem.executable);
  }

  // Kept consistent with equals(): same fields, same order.
  @Override
  public int hashCode() {
    return Objects.hash(propertyClass, actions, blocked, buildable, id, inQueueSince, params, stuck, task, url, why, cancelled, executable);
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("class QueueLeftItem {\n");
    sb.append("    propertyClass: ").append(toIndentedString(propertyClass)).append("\n");
    sb.append("    actions: ").append(toIndentedString(actions)).append("\n");
    sb.append("    blocked: ").append(toIndentedString(blocked)).append("\n");
    sb.append("    buildable: ").append(toIndentedString(buildable)).append("\n");
    sb.append("    id: ").append(toIndentedString(id)).append("\n");
    sb.append("    inQueueSince: ").append(toIndentedString(inQueueSince)).append("\n");
    sb.append("    params: ").append(toIndentedString(params)).append("\n");
    sb.append("    stuck: ").append(toIndentedString(stuck)).append("\n");
    sb.append("    task: ").append(toIndentedString(task)).append("\n");
    sb.append("    url: ").append(toIndentedString(url)).append("\n");
    sb.append("    why: ").append(toIndentedString(why)).append("\n");
    sb.append("    cancelled: ").append(toIndentedString(cancelled)).append("\n");
    sb.append("    executable: ").append(toIndentedString(executable)).append("\n");
    sb.append("}");
    return sb.toString();
  }

  /**
   * Convert the given object to string with each line indented by 4 spaces
   * (except the first line).
   */
  private String toIndentedString(Object o) {
    if (o == null) {
      return "null";
    }
    return o.toString().replace("\n", "\n    ");
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.server;
import org.apache.curator.framework.api.ACLProvider;
import org.apache.drill.common.AutoCloseables;
import org.apache.drill.common.StackTrace;
import org.apache.drill.common.concurrent.ExtendedLatch;
import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.common.map.CaseInsensitiveMap;
import org.apache.drill.common.scanner.ClassPathScanner;
import org.apache.drill.common.scanner.persistence.ScanResult;
import org.apache.drill.exec.ExecConstants;
import org.apache.drill.exec.coord.ClusterCoordinator;
import org.apache.drill.exec.coord.ClusterCoordinator.RegistrationHandle;
import org.apache.drill.exec.coord.zk.ZKACLProviderFactory;
import org.apache.drill.exec.coord.zk.ZKClusterCoordinator;
import org.apache.drill.exec.exception.DrillbitStartupException;
import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint;
import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint.State;
import org.apache.drill.exec.server.DrillbitStateManager.DrillbitState;
import org.apache.drill.exec.server.options.OptionDefinition;
import org.apache.drill.exec.server.options.OptionValue;
import org.apache.drill.exec.server.options.OptionValue.OptionScope;
import org.apache.drill.exec.server.options.SystemOptionManager;
import org.apache.drill.exec.server.rest.WebServer;
import org.apache.drill.exec.service.ServiceEngine;
import org.apache.drill.exec.store.StoragePluginRegistry;
import org.apache.drill.exec.store.sys.PersistentStoreProvider;
import org.apache.drill.exec.store.sys.PersistentStoreRegistry;
import org.apache.drill.exec.store.sys.store.provider.CachingPersistentStoreProvider;
import org.apache.drill.exec.store.sys.store.provider.InMemoryStoreProvider;
import org.apache.drill.exec.store.sys.store.provider.LocalPersistentStoreProvider;
import org.apache.drill.common.util.GuavaPatcher;
import org.apache.drill.common.util.ProtobufPatcher;
import org.apache.drill.exec.work.WorkManager;
import org.apache.drill.shaded.guava.com.google.common.annotations.VisibleForTesting;
import org.apache.drill.shaded.guava.com.google.common.base.Stopwatch;
import org.apache.zookeeper.Environment;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.bridge.SLF4JBridgeHandler;
import javax.tools.ToolProvider;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.InvalidPathException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardWatchEventKinds;
import java.nio.file.WatchEvent;
import java.nio.file.WatchKey;
import java.nio.file.WatchService;
import java.util.Objects;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
/**
* Starts, tracks and stops all the required services for a Drillbit daemon to work.
*/
public class Drillbit implements AutoCloseable {
private static final Logger logger = LoggerFactory.getLogger(Drillbit.class);
// One-time JVM-wide setup that must run before any other Drillbit code:
// bytecode patching for dependency conflicts and the JUL->SLF4J logging bridge.
static {
    /*
     * HBase and MapR-DB clients use older version of protobuf,
     * and override some methods that became final in recent versions.
     * This code removes these final modifiers.
     */
    ProtobufPatcher.patch();
    /*
     * HBase client uses an older version of Guava's Stopwatch API,
     * while Drill ships with 18.x which changed the scope of
     * these APIs to 'package'; this code makes them accessible.
     */
    GuavaPatcher.patch();
    Environment.logEnv("Drillbit environment: ", logger);
    // Jersey uses java.util.logging - create bridge: jul to slf4j
    SLF4JBridgeHandler.removeHandlersForRootLogger();
    SLF4JBridgeHandler.install();
}
public final static String SYSTEM_OPTIONS_NAME = "org.apache.drill.exec.server.Drillbit.system_options";
private final ClusterCoordinator coord;
private final ServiceEngine engine;
private final PersistentStoreProvider storeProvider;
private final WorkManager manager;
private final BootStrapContext context;
private final WebServer webServer;
private final int gracePeriod;
private DrillbitStateManager stateManager;
private GracefulShutdownThread gracefulShutdownThread;
private Thread shutdownHook;
private volatile boolean quiescentMode;
private volatile boolean forcefulShutdown;
private volatile boolean interruptPollShutdown = true;
/** Sets the quiescent-mode flag; volatile field, read by shutdown handling. */
public void setQuiescentMode(boolean quiescentMode) {
    this.quiescentMode = quiescentMode;
}
/** Sets the forceful-shutdown flag; volatile field, read by shutdown handling. */
public void setForcefulShutdown(boolean forcefulShutdown) {
    this.forcefulShutdown = forcefulShutdown;
}
/**
 * Returns this drillbit's cluster-coordinator registration handle.
 * NOTE(review): presumably null until registration completes — confirm against
 * the code that assigns {@code registrationHandle}.
 */
public RegistrationHandle getRegistrationHandle() {
    return registrationHandle;
}
private RegistrationHandle registrationHandle;
private volatile StoragePluginRegistry storageRegistry;
private final PersistentStoreProvider profileStoreProvider;
/**
 * Test-only convenience constructor: delegates with the default system option
 * definitions and a classpath pre-scan derived from the config.
 */
@VisibleForTesting
public Drillbit(
    final DrillConfig config,
    final RemoteServiceSet serviceSet) throws Exception {
  this(config, SystemOptionManager.createDefaultOptionDefinitions(), serviceSet, ClassPathScanner.fromPrescan(config));
}
/**
 * Test-only convenience constructor: caller supplies the option definitions;
 * the classpath scan is derived from the config via a pre-scan.
 */
@VisibleForTesting
public Drillbit(
    final DrillConfig config,
    final CaseInsensitiveMap<OptionDefinition> definitions,
    final RemoteServiceSet serviceSet) throws Exception {
  this(config, definitions, serviceSet, ClassPathScanner.fromPrescan(config));
}
  /**
   * Constructs a Drillbit with the default option definitions and a
   * caller-supplied classpath scan result.
   *
   * @param config drill configuration
   * @param serviceSet local service set, or {@code null} for distributed mode
   * @param classpathScan pre-computed classpath scan result
   * @throws Exception if any subsystem fails to construct
   */
  public Drillbit(
    final DrillConfig config,
    final RemoteServiceSet serviceSet,
    final ScanResult classpathScan) throws Exception {
    this(config, SystemOptionManager.createDefaultOptionDefinitions(), serviceSet, classpathScan);
  }
  /**
   * Core constructor: wires up every Drillbit subsystem (bootstrap context,
   * work manager, web server, cluster coordinator, persistent stores, service
   * engine). Nothing is started here — see {@link #run()}.
   *
   * @param config drill configuration
   * @param definitions system option definitions to register
   * @param serviceSet when non-null, an embedded/test service set is used in
   *                   place of ZooKeeper coordination
   * @param classpathScan pre-computed classpath scan result
   * @throws Exception if any subsystem fails to construct
   */
  @VisibleForTesting
  public Drillbit(
    final DrillConfig config,
    final CaseInsensitiveMap<OptionDefinition> definitions,
    final RemoteServiceSet serviceSet,
    final ScanResult classpathScan) throws Exception {
    //Must start up with access to JDK Compiler
    if (ToolProvider.getSystemJavaCompiler() == null) {
      throw new DrillbitStartupException("JDK Java compiler not available. Ensure Drill is running with the java executable from a JDK and not a JRE");
    }
    gracePeriod = config.getInt(ExecConstants.GRACE_PERIOD);
    final Stopwatch w = Stopwatch.createStarted();
    logger.debug("Construction started.");
    boolean drillPortHunt = config.getBoolean(ExecConstants.DRILL_PORT_HUNT);
    boolean bindToLoopbackAddress = config.getBoolean(ExecConstants.ALLOW_LOOPBACK_ADDRESS_BINDING);
    // Port hunting is always allowed in embedded mode, otherwise only when configured.
    final boolean allowPortHunting = (serviceSet != null) || drillPortHunt;
    context = new BootStrapContext(config, definitions, classpathScan);
    manager = new WorkManager(context);
    webServer = new WebServer(context, manager, this);
    boolean isDistributedMode = (serviceSet == null) && !bindToLoopbackAddress;
    if (serviceSet != null) {
      // Embedded/test mode: use the supplied coordinator and a local disk-backed store.
      coord = serviceSet.getCoordinator();
      storeProvider = new CachingPersistentStoreProvider(new LocalPersistentStoreProvider(config));
    } else {
      // Distributed mode: coordinate through ZooKeeper under /<zkRoot>/<clusterId>.
      String clusterId = config.getString(ExecConstants.SERVICE_NAME);
      String zkRoot = config.getString(ExecConstants.ZK_ROOT);
      String drillClusterPath = "/" + zkRoot + "/" + clusterId;
      ACLProvider aclProvider = ZKACLProviderFactory.getACLProvider(config, drillClusterPath, context);
      coord = new ZKClusterCoordinator(config, aclProvider);
      storeProvider = new PersistentStoreRegistry<>(this.coord, config).newPStoreProvider();
    }
    //Check if InMemory Profile Store, else use Default Store Provider
    if (config.getBoolean(ExecConstants.PROFILES_STORE_INMEMORY)) {
      profileStoreProvider = new InMemoryStoreProvider(config.getInt(ExecConstants.PROFILES_STORE_CAPACITY));
      logger.info("Upto {} latest query profiles will be retained in-memory", config.getInt(ExecConstants.PROFILES_STORE_CAPACITY));
    } else {
      profileStoreProvider = storeProvider;
    }
    engine = new ServiceEngine(manager, context, allowPortHunting, isDistributedMode);
    stateManager = new DrillbitStateManager(DrillbitState.STARTUP);
    logger.info("Construction completed ({} ms).", w.elapsed(TimeUnit.MILLISECONDS));
  }
  /** @return the port the user (client) RPC endpoint is bound to */
  public int getUserPort() {
    return engine.getUserPort();
  }
  /** @return the port the embedded HTTP (web UI) server is bound to */
  public int getWebServerPort() {
    return webServer.getPort();
  }
  /**
   * Starts every subsystem constructed by the constructor, in dependency
   * order: coordinator, stores, RPC engine, work manager, storage registry,
   * options, web server — then registers this bit with the coordinator and
   * finally installs the JVM shutdown hook and graceful-shutdown poller.
   * The ordering below is significant; do not reorder casually.
   *
   * @throws Exception if any subsystem fails to start
   */
  public void run() throws Exception {
    final Stopwatch w = Stopwatch.createStarted();
    logger.debug("Startup begun.");
    gracefulShutdownThread = new GracefulShutdownThread(this, new StackTrace());
    coord.start(10000);
    stateManager.setState(DrillbitState.ONLINE);
    storeProvider.start();
    // The profile store is only started separately when it is a distinct provider.
    if (profileStoreProvider != storeProvider) {
      profileStoreProvider.start();
    }
    DrillbitEndpoint md = engine.start();
    manager.start(md, engine.getController(), engine.getDataConnectionCreator(), coord, storeProvider, profileStoreProvider);
    final DrillbitContext drillbitContext = manager.getContext();
    storageRegistry = drillbitContext.getStorage();
    storageRegistry.init();
    drillbitContext.getOptionManager().init();
    javaPropertiesToSystemOptions();
    manager.getContext().getRemoteFunctionRegistry().init(context.getConfig(), storeProvider, coord);
    webServer.start();
    //Discovering HTTP port (in case of port hunting)
    if (webServer.isRunning()) {
      int httpPort = getWebServerPort();
      md = md.toBuilder().setHttpPort(httpPort).build();
    }
    // Register with the coordinator only after the endpoint is fully known.
    registrationHandle = coord.register(md);
    // Must start the RM after the above since it needs to read system options.
    drillbitContext.startRM();
    shutdownHook = new ShutdownThread(this, new StackTrace());
    Runtime.getRuntime().addShutdownHook(shutdownHook);
    gracefulShutdownThread.start();
    logger.info("Startup completed ({} ms).", w.elapsed(TimeUnit.MILLISECONDS));
  }
  /**
   * Blocks for the configured grace period without reacting to interrupts
   * (units per {@code ExtendedLatch#awaitUninterruptibly} — presumably
   * milliseconds), giving clients time to observe the QUIESCENT state.
   */
  private void waitForGracePeriod() {
    ExtendedLatch exitLatch = new ExtendedLatch();
    exitLatch.awaitUninterruptibly(gracePeriod);
  }
  /**
   * Publishes the given state to the cluster coordinator; a no-op when this
   * bit never registered (e.g. startup failed before registration).
   */
  private void updateState(State state) {
    if (registrationHandle != null) {
      coord.update(registrationHandle, state);
    }
  }
  /**
   * Public entry point for shutting this Drillbit down; delegates to
   * {@link #close()}.
   */
  public void shutdown() {
    this.close();
  }
  /*
  The drillbit is moved into Quiescent state and the drillbit waits for grace period amount of time.
  Then drillbit moves into draining state and waits for all the queries and fragments to complete.
  Finally it goes offline, unregisters from the coordinator and closes all resources
  (unless quiescent mode was requested). Only an ONLINE bit does any work here,
  so repeated calls are safe no-ops.
  */
  @Override
  public synchronized void close() {
    // Repeated or premature calls are no-ops: only an ONLINE bit can shut down.
    if (!stateManager.getState().equals(DrillbitState.ONLINE)) {
      return;
    }
    final Stopwatch w = Stopwatch.createStarted();
    logger.debug("Shutdown begun.");
    // We don't really want for Drillbits to pile up in memory, so the hook should be removed
    // It might be better to use PhantomReferences to cleanup as soon as Drillbit becomes
    // unreachable, however current approach seems to be good enough.
    Thread shutdownHook = this.shutdownHook;
    if (shutdownHook != null && Thread.currentThread() != shutdownHook) {
      try {
        Runtime.getRuntime().removeShutdownHook(shutdownHook);
      } catch (IllegalArgumentException e) {
        // If shutdown is in progress, just ignore the removal
      }
    }
    // Phase 1: advertise QUIESCENT and wait out the grace period.
    updateState(State.QUIESCENT);
    stateManager.setState(DrillbitState.GRACE);
    waitForGracePeriod();
    // Phase 2: drain in-flight work.
    stateManager.setState(DrillbitState.DRAINING);
    // wait for all the in-flight queries to finish
    manager.waitToExit(forcefulShutdown);
    //safe to exit
    updateState(State.OFFLINE);
    stateManager.setState(DrillbitState.OFFLINE);
    // Quiescent mode stops here: stay registered and keep resources open.
    if (quiescentMode) {
      return;
    }
    if (coord != null && registrationHandle != null) {
      coord.unregister(registrationHandle);
    }
    // Give ZooKeeper clients time to observe the deregistration before
    // tearing the services down.
    try {
      Thread.sleep(context.getConfig().getInt(ExecConstants.ZK_REFRESH) * 2);
    } catch (final InterruptedException e) {
      logger.warn("Interrupted while sleeping during coordination deregistration.");
      // Preserve evidence that the interruption occurred so that code higher up on the call stack can learn of the
      // interruption and respond to it if it wants to.
      Thread.currentThread().interrupt();
    }
    // Phase 3: close everything; AutoCloseables.close aggregates failures.
    try {
      AutoCloseables.close(
        webServer,
        engine,
        storeProvider,
        coord,
        manager,
        storageRegistry,
        context);
      //Closing the profile store provider if distinct
      if (storeProvider != profileStoreProvider) {
        AutoCloseables.close(profileStoreProvider);
      }
    } catch(Exception e) {
      logger.warn("Failure on close()", e);
    }
    logger.info("Shutdown completed ({} ms).", w.elapsed(TimeUnit.MILLISECONDS) );
    stateManager.setState(DrillbitState.SHUTDOWN);
    // Interrupt GracefulShutdownThread since Drillbit close is not called from it.
    if (interruptPollShutdown) {
      gracefulShutdownThread.interrupt();
    }
  }
  /**
   * Applies system options passed via the {@link #SYSTEM_OPTIONS_NAME} JVM
   * property: a comma-separated list of {@code key=value} pairs, each naming
   * an existing SYSTEM-scoped option. Note: because parsing is split-based,
   * values containing ',' or '=' are not supported.
   *
   * @throws IllegalStateException (via throwInvalidSystemOption) on any
   *         malformed or unknown entry
   */
  private void javaPropertiesToSystemOptions() {
    // get the system options property
    final String allSystemProps = System.getProperty(SYSTEM_OPTIONS_NAME);
    if ((allSystemProps == null) || allSystemProps.isEmpty()) {
      return;
    }
    final SystemOptionManager optionManager = getContext().getOptionManager();
    // parse out the properties, validate, and then set them
    final String[] systemProps = allSystemProps.split(",");
    for (final String systemProp : systemProps) {
      final String[] keyValue = systemProp.split("=");
      if (keyValue.length != 2) {
        throwInvalidSystemOption(systemProp, "does not contain a key=value assignment");
      }
      final String optionName = keyValue[0].trim();
      if (optionName.isEmpty()) {
        throwInvalidSystemOption(systemProp, "does not contain a key before the assignment");
      }
      final String optionString = stripQuotes(keyValue[1].trim(), systemProp);
      if (optionString.isEmpty()) {
        throwInvalidSystemOption(systemProp, "does not contain a value after the assignment");
      }
      // The option must already exist and be settable at SYSTEM scope.
      final OptionValue defaultValue = optionManager.getOption(optionName);
      if (defaultValue == null) {
        throwInvalidSystemOption(systemProp, "does not specify a valid option name");
      }
      if (!defaultValue.accessibleScopes.inScopeOf(OptionScope.SYSTEM)) {
        throwInvalidSystemOption(systemProp, "does not specify a SYSTEM option ");
      }
      optionManager.setLocalOption(defaultValue.kind, optionName, optionString);
    }
  }
/**
* Polls for graceful file to check if graceful shutdown is triggered from the script.
*/
private static class GracefulShutdownThread extends Thread {
private static final String DRILL_HOME = "DRILL_HOME";
private static final String GRACEFUL_SIGFILE = "GRACEFUL_SIGFILE";
private static final String NOT_SUPPORTED_MESSAGE = "Graceful shutdown from command line will not be supported.";
private final Drillbit drillbit;
private final StackTrace stackTrace;
GracefulShutdownThread(Drillbit drillbit, StackTrace stackTrace) {
this.drillbit = drillbit;
this.stackTrace = stackTrace;
setName("Drillbit-Graceful-Shutdown#" + getName());
}
@Override
public void run () {
try {
pollShutdown();
} catch (InterruptedException e) {
drillbit.interruptPollShutdown = false;
logger.debug("Graceful Shutdown thread was interrupted", e);
} catch (IOException e) {
throw new RuntimeException("Exception while polling for graceful shutdown\n" + stackTrace, e);
}
}
/**
* Poll for the graceful file, if the file is found or modified, close the Drillbit.
* In case if the {@link #DRILL_HOME} or {@link #GRACEFUL_SIGFILE} environment variables are not set,
* graceful shutdown will not be supported from the command line.
*/
private void pollShutdown() throws IOException, InterruptedException {
Path drillHomePath = getDrillHomePath();
String gracefulFile = System.getenv(GRACEFUL_SIGFILE);
if (!areShutdownArgsValid(drillHomePath, gracefulFile)) {
return;
}
while (true) {
try (WatchService watchService = Objects.requireNonNull(drillHomePath).getFileSystem().newWatchService()) {
drillHomePath.register(watchService, StandardWatchEventKinds.ENTRY_CREATE, StandardWatchEventKinds.ENTRY_MODIFY);
while (true) {
WatchKey watchKey = watchService.take();
for (WatchEvent<?> event : watchKey.pollEvents()) {
if (StandardWatchEventKinds.OVERFLOW != event.kind()) {
Path changedPath = (Path) event.context();
if (changedPath != null && changedPath.endsWith(gracefulFile)) {
drillbit.interruptPollShutdown = false;
drillbit.close();
return;
}
}
}
if (!watchKey.reset()) {
logger.debug("Watch key is no longer valid, attempting to re-register Drill home path in Watch Service");
break;
}
}
}
}
}
private Path getDrillHomePath() {
String drillHome = System.getenv(DRILL_HOME);
try {
return drillHome == null ? null : Paths.get(drillHome);
} catch (InvalidPathException e) {
logger.warn("Unable to construct {} path [{}]: {}.", DRILL_HOME, drillHome, e.getMessage());
logger.debug("Invalid {} path", DRILL_HOME, e);
return null;
}
}
private boolean areShutdownArgsValid(Path drillHomePath, String gracefulFile) {
StringBuilder builder = new StringBuilder();
if (drillHomePath == null) {
builder.append(DRILL_HOME).append(" is unset or invalid.");
} else {
if (!Files.exists(drillHomePath)) {
builder.append(DRILL_HOME).append("path [").append(drillHomePath).append("] does not exist.");
}
}
if (gracefulFile == null) {
if (builder.length() != 0) {
builder.append(" ");
}
builder.append(GRACEFUL_SIGFILE).append(" is unset.");
}
if (builder.length() != 0) {
logger.warn("{}. {}", NOT_SUPPORTED_MESSAGE, builder.toString());
return false;
}
return true;
}
}
/**
* Shutdown hook for Drillbit. Closes the drillbit, and reports on errors that
* occur during closure, as well as the location the drillbit was started from.
*/
private static class ShutdownThread extends Thread {
private final static AtomicInteger idCounter = new AtomicInteger(0);
private final Drillbit drillbit;
private final StackTrace stackTrace;
/**
* Constructor.
*
* @param drillbit the drillbit to close down
* @param stackTrace the stack trace from where the Drillbit was started;
* use new StackTrace() to generate this
*/
ShutdownThread(final Drillbit drillbit, final StackTrace stackTrace) {
this.drillbit = drillbit;
this.stackTrace = stackTrace;
/*
* TODO should we try to determine a test class name?
* See https://blogs.oracle.com/tor/entry/how_to_determine_the_junit
*/
setName("Drillbit-ShutdownHook#" + idCounter.getAndIncrement());
}
@Override
public void run() {
if (FailureUtils.hadUnrecoverableFailure()) {
// We cannot close drill cleanly in this case.
return;
}
logger.info("Received shutdown request.");
try {
/*
* We can avoid metrics deregistration concurrency issues by only closing
* one drillbit at a time. To enforce that, we synchronize on a convenient
* singleton object.
*/
synchronized(idCounter) {
drillbit.close();
}
} catch(final Exception e) {
throw new RuntimeException("Caught exception closing Drillbit started from\n" + stackTrace, e);
}
}
}
  /** @return the {@code DrillbitContext} owned by this bit's work manager */
  public DrillbitContext getContext() {
    return manager.getContext();
  }
  /** @return the graceful-shutdown poller thread; exposed for tests only */
  @VisibleForTesting
  public GracefulShutdownThread getGracefulShutdownThread() {
    return gracefulShutdownThread;
  }
  /**
   * Command-line entry point: parses startup options and starts a Drillbit.
   *
   * @param cli command-line arguments
   * @throws DrillbitStartupException if startup fails
   */
  public static void main(final String[] cli) throws DrillbitStartupException {
    final StartupOptions options = StartupOptions.parse(cli);
    start(options);
  }
  /**
   * Starts a Drillbit from parsed startup options, with default option
   * definitions and no embedded service set (distributed mode).
   */
  public static Drillbit start(final StartupOptions options) throws DrillbitStartupException {
    return start(DrillConfig.create(options.getConfigLocation()), SystemOptionManager.createDefaultOptionDefinitions(), null);
  }
  /**
   * Starts a Drillbit from a config, with default option definitions and no
   * embedded service set (distributed mode).
   */
  public static Drillbit start(final DrillConfig config) throws DrillbitStartupException {
    return start(config, SystemOptionManager.createDefaultOptionDefinitions(), null);
  }
  /**
   * Starts a Drillbit from a config and an (optionally null) embedded service
   * set, using the default option definitions.
   */
  public static Drillbit start(final DrillConfig config, final RemoteServiceSet remoteServiceSet) throws DrillbitStartupException {
    return start(config, SystemOptionManager.createDefaultOptionDefinitions(), remoteServiceSet);
  }
@VisibleForTesting
public static Drillbit start(final DrillConfig config, final CaseInsensitiveMap<OptionDefinition> validators,
final RemoteServiceSet remoteServiceSet)
throws DrillbitStartupException {
logger.debug("Starting new Drillbit.");
// TODO: allow passing as a parameter
ScanResult classpathScan = ClassPathScanner.fromPrescan(config);
Drillbit bit;
try {
bit = new Drillbit(config, validators, remoteServiceSet, classpathScan);
} catch (final Exception ex) {
if (ex instanceof DrillbitStartupException) {
throw (DrillbitStartupException) ex;
} else {
throw new DrillbitStartupException("Failure while initializing values in Drillbit.", ex);
}
}
try {
bit.run();
} catch (final Exception e) {
logger.error("Failure during initial startup of Drillbit.", e);
bit.close();
throw new DrillbitStartupException("Failure during initial startup of Drillbit.", e);
}
logger.debug("Started new Drillbit.");
return bit;
}
private static void throwInvalidSystemOption(final String systemProp, final String errorMessage) {
throw new IllegalStateException("Property \"" + SYSTEM_OPTIONS_NAME + "\" part \"" + systemProp
+ "\" " + errorMessage + ".");
}
private static String stripQuotes(final String s, final String systemProp) {
if (s.isEmpty()) {
return s;
}
final char cFirst = s.charAt(0);
final char cLast = s.charAt(s.length() - 1);
if ((cFirst == '"') || (cFirst == '\'')) {
if (cLast != cFirst) {
throwInvalidSystemOption(systemProp, "quoted value does not have closing quote");
}
return s.substring(1, s.length() - 2); // strip the quotes
}
if ((cLast == '"') || (cLast == '\'')) {
throwInvalidSystemOption(systemProp, "value has unbalanced closing quote");
}
// return as-is
return s;
}
}
| |
package nl.tudelft.pl2016gr2.model.graph.nodes;
import javafx.beans.property.IntegerProperty;
import javafx.beans.property.SimpleIntegerProperty;
import nl.tudelft.pl2016gr2.model.Annotation;
import nl.tudelft.pl2016gr2.model.GenomeMap;
import nl.tudelft.pl2016gr2.model.graph.data.GraphNodeGuiData;
import nl.tudelft.pl2016gr2.model.metadata.LineageColor;
import nl.tudelft.pl2016gr2.thirdparty.testing.utility.TestId;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.PriorityQueue;
import java.util.function.Consumer;
/**
 * Aides in implementing the <code>GraphNode</code> interface by implementing methods that should
 * have the same behaviour for all nodes.
 * <p>
 * Several genome operations below use merge-style two-pointer walks, which
 * only work when the genome lists are sorted in ascending order (assumption —
 * confirm against the <code>getGenomes()</code> implementations).
 *
 * @author Wouter Smit
 */
public abstract class AbstractGraphNode implements GraphNode {
  @TestId(id = "id_field")
  private int identifier;
  // Per-node GUI state; a single shared instance handed out by getGuiData().
  private final GraphNodeGuiData guiData = new GraphNodeGuiData();
  // Annotations kept in priority (natural) order.
  private PriorityQueue<Annotation> annotations;
  // Cache for getMostFrequentLineage(); computed lazily on first access.
  private LineageColor lineage;
  private HashSet<GraphNode> inEdges;
  private HashSet<GraphNode> outEdges;
  /**
   * Construct a bare abstract node with an ID.
   *
   * @param identifier the id of the node.
   */
  public AbstractGraphNode(int identifier) {
    this.identifier = identifier;
    this.inEdges = new HashSet<>();
    this.outEdges = new HashSet<>();
    this.annotations = new PriorityQueue<>();
  }
  /**
   * Construct an abstract node with an ID and the in/out edges.
   *
   * @param identifier the id of the node.
   * @param inEdges the in edges of the node.
   * @param outEdges the out edges of the node.
   */
  public AbstractGraphNode(int identifier, Collection<GraphNode> inEdges,
      Collection<GraphNode> outEdges) {
    this.identifier = identifier;
    this.inEdges = new HashSet<>(inEdges);
    this.outEdges = new HashSet<>(outEdges);
    this.annotations = new PriorityQueue<>();
  }
  /**
   * Constructor to copy an AbstractGraphNode.
   * <p>
   * Note: this is a shallow copy — the edge sets and annotation queue are
   * shared with the original node, not duplicated.
   *
   * @param abstractGraphNode the abstractGraphNode to copy.
   */
  protected AbstractGraphNode(AbstractGraphNode abstractGraphNode) {
    this.identifier = abstractGraphNode.identifier;
    this.inEdges = abstractGraphNode.inEdges;
    this.outEdges = abstractGraphNode.outEdges;
    this.annotations = abstractGraphNode.annotations;
  }
  @Override
  public int getId() {
    return identifier;
  }
  // NOTE(review): returns null rather than an empty collection; callers must
  // null-check. Presumably overridden by composite node types — confirm.
  @Override
  public Collection<GraphNode> getChildren() {
    return null;
  }
  /**
   * Determines the genomes flowing over the edge from this node to the given
   * node: the genomes present in both nodes, minus those already claimed by an
   * out edge of this node at a lower level than the target.
   */
  @Override
  public Collection<Integer> getGenomesOverEdge(GraphNode node) {
    Collection<Integer> genomes = new ArrayList<>();
    Collection<Integer> otherGenomes = new HashSet<>();
    // Mark genomes that are seen in other out edges which appear before the node.
    this.getOutEdges().forEach(outEdge -> {
      if (outEdge.getLevel() < node.getLevel() && !outEdge.equals(node)) {
        otherGenomes.addAll(outEdge.getGenomes());
      }
    });
    List<Integer> thisGenomes = getGenomes();
    node.forEachContainedGenome(thisGenomes, genome -> {
      if (!otherGenomes.contains(genome)) {
        genomes.add(genome);
      }
    });
    return genomes;
  }
  /**
   * Feeds every genome present in both this node and the given list to the
   * consumer, via a two-pointer merge walk (assumes ascending sort order).
   */
  @Override
  public void forEachContainedGenome(List<Integer> genomes, Consumer<Integer> genomeConsumer) {
    List<Integer> thisGenomes = getGenomes();
    int thisIndex = 0;
    int otherIndex = 0;
    while (thisIndex < thisGenomes.size() && otherIndex < genomes.size()) {
      int thisValue = thisGenomes.get(thisIndex);
      int otherValue = genomes.get(otherIndex);
      if (thisValue < otherValue) {
        thisIndex++;
      } else if (thisValue > otherValue) {
        otherIndex++;
      } else {
        // Present in both lists: consume once, advance both pointers.
        thisIndex++;
        otherIndex++;
        genomeConsumer.accept(thisValue);
      }
    }
  }
  // Cheap upper bound on the genome count over the edge to the given node.
  @Override
  public int approximateGenomesOverEdge(GraphNode node) {
    return Math.min(getGenomeSize(), node.getGenomeSize());
  }
  @Override
  public Collection<GraphNode> getInEdges() {
    return inEdges;
  }
  @Override
  public void setInEdges(Collection<GraphNode> edges) {
    inEdges = new HashSet<>(edges);
    //inEdges.trimToSize();
  }
  @Override
  public void addAllInEdges(Collection<GraphNode> nodes) {
    inEdges.addAll(nodes);
  }
  @Override
  public void addInEdge(GraphNode node) {
    inEdges.add(node);
  }
  @Override
  public void removeInEdge(GraphNode node) {
    inEdges.remove(node);
  }
  @Override
  public Collection<GraphNode> getOutEdges() {
    return outEdges;
  }
  // Accepts null as "no out edges" and installs an empty set instead.
  @Override
  public void setOutEdges(Collection<GraphNode> edges) {
    if (edges == null) {
      outEdges = new HashSet<>(0);
    } else {
      outEdges = new HashSet<>(edges);
    }
    //outEdges.trimToSize();
  }
  @Override
  public void addAllOutEdges(Collection<GraphNode> nodes) {
    outEdges.addAll(nodes);
  }
  @Override
  public void addOutEdge(GraphNode node) {
    outEdges.add(node);
  }
  @Override
  public void removeOutEdge(GraphNode node) {
    outEdges.remove(node);
  }
  // No-op since the edge collections became HashSets; kept for the interface.
  @Override
  public void trimToSize() {
    //inEdges.trimToSize();
    //outEdges.trimToSize();
  }
  @Override
  public String toString() {
    return "id: " + getId();
  }
  // Consistent with equals(): equal identifiers yield equal hashes.
  @Override
  public int hashCode() {
    return identifier * 37;
  }
  // Equality requires the exact same runtime class and the same identifier.
  @Override
  public boolean equals(Object obj) {
    if (obj == null || !getClass().equals(obj.getClass())) {
      return false;
    }
    return identifier == ((AbstractGraphNode) obj).identifier;
  }
  @Override
  public GraphNodeGuiData getGuiData() {
    return guiData;
  }
  // Shares (does not copy) the annotation queue of the given node.
  protected void setAnnotations(AbstractGraphNode node) {
    this.annotations = node.annotations;
  }
  @Override
  public void addAnnotation(Annotation annotation) {
    annotations.add(annotation);
  }
  @Override
  public boolean hasAnnotations() {
    return !annotations.isEmpty();
  }
  @Override
  public PriorityQueue<Annotation> getAnnotations() {
    return annotations;
  }
  /**
   * Determines the lineage color occurring most often among this node's
   * genomes. Computed once and cached in {@link #lineage}; defaults to
   * {@code LineageColor.NONE} when no color wins.
   */
  @Override
  public LineageColor getMostFrequentLineage() {
    if (lineage == null) {
      HashMap<LineageColor, Integer> lineageFrequency = new HashMap<>();
      GenomeMap genomeMap = GenomeMap.getInstance();
      for (Integer genome : getGenomes()) {
        LineageColor color = LineageColor.toLineage(genomeMap.getMetadata(genome));
        lineageFrequency.put(color, lineageFrequency.getOrDefault(color, 0) + 1);
      }
      // IntegerProperty serves as a mutable int holder usable from the lambda.
      final IntegerProperty maxFreq = new SimpleIntegerProperty(0);
      lineage = LineageColor.NONE;
      lineageFrequency.forEach((LineageColor color, Integer freq) -> {
        if (freq > maxFreq.get()) {
          maxFreq.set(freq);
          lineage = color;
        }
      });
    }
    return lineage;
  }
  /**
   * Checks whether every genome in the given list also occurs in this node,
   * by walking both lists in step (assumes ascending sort order).
   */
  @Override
  public boolean containsAllGenomes(List<Integer> genomes) {
    List<Integer> thisGenomes = getGenomes();
    int index = 0;
    for (int i = 0; i < thisGenomes.size() && index < genomes.size(); i++) {
      if (thisGenomes.get(i).intValue() == genomes.get(index).intValue()) {
        index++;
      } else if (thisGenomes.get(i) > genomes.get(index)) {
        // The sorted walk has passed the sought value; it cannot occur later.
        return false;
      }
    }
    return index == genomes.size();
  }
  /**
   * Checks whether at least one genome in the given list occurs in this node,
   * via a two-pointer merge walk (assumes ascending sort order).
   */
  @Override
  public boolean containsAnyGenome(List<Integer> genomes) {
    List<Integer> thisGenomes = getGenomes();
    int thisIndex = 0;
    int otherIndex = 0;
    while (thisIndex < thisGenomes.size() && otherIndex < genomes.size()) {
      int thisValue = thisGenomes.get(thisIndex);
      int otherValue = genomes.get(otherIndex);
      if (thisValue < otherValue) {
        thisIndex++;
      } else if (thisValue > otherValue) {
        otherIndex++;
      } else {
        return true;
      }
    }
    return false;
  }
  /**
   * Checks whether this node's genome list equals the given list exactly:
   * same size, same elements, same order.
   */
  @Override
  public boolean hasSameGenomes(List<Integer> genomes) {
    List<Integer> thisGenomes = getGenomes();
    if (thisGenomes.size() != genomes.size()) {
      return false;
    }
    for (int i = 0; i < genomes.size(); i++) {
      if (genomes.get(i).intValue() != thisGenomes.get(i).intValue()) {
        return false;
      }
    }
    return true;
  }
}
| |
/*
* Copyright 2015, The Sporting Exchange Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.betfair.cougar.modules.zipkin.impl;
import com.betfair.cougar.modules.zipkin.api.ZipkinData;
import com.google.common.collect.Lists;
import com.twitter.zipkin.gen.*;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.Objects;
/**
 * A container used for storing Zipkin annotations relative to a specific Zipkin span, to be sent once the span has been
 * fully populated.
 * <p>
 * Not thread-safe; intended to be used by the single party populating the span.
 */
public final class ZipkinAnnotationsStore {
    // Byte widths of the fixed-size binary annotation payloads.
    private static final int SHORT_SIZE_B = Short.SIZE / 8;
    private static final int INT_SIZE_B = Integer.SIZE / 8;
    private static final int LONG_SIZE_B = Long.SIZE / 8;
    private static final int DOUBLE_SIZE_B = Double.SIZE / 8;
    // Shared single-byte payloads for boolean annotations; never mutated.
    private static final ByteBuffer TRUE_BB = ByteBuffer.wrap(new byte[]{1});
    private static final ByteBuffer FALSE_BB = ByteBuffer.wrap(new byte[]{0});
    // Endpoint attached to annotations added through the public overloads.
    private Endpoint defaultEndpoint;
    // Span being accumulated; handed out as-is by generate().
    private final Span underlyingSpan;
    /**
     * Creates a new annotations store for a specific span, to be created from the passed in ZipkinData.
     *
     * @param zipkinData The ZipkinData to be used for creating the underlying span.
     */
    ZipkinAnnotationsStore(@Nonnull ZipkinData zipkinData) {
        this.underlyingSpan = new Span(zipkinData.getTraceId(), zipkinData.getSpanName(), zipkinData.getSpanId(),
                Lists.<Annotation>newArrayList(), Lists.<BinaryAnnotation>newArrayList());
        // The parent span id is optional (absent for root spans).
        if (zipkinData.getParentSpanId() != null) {
            underlyingSpan.setParent_id(zipkinData.getParentSpanId());
        }
    }
    // PUBLIC METHODS
    /**
     * Adds an annotation for an event that happened on a specific timestamp.
     *
     * @param timestamp The timestamp of the annotation, in microseconds
     * @param s         The annotation value to emit
     * @return this object
     */
    @Nonnull
    public ZipkinAnnotationsStore addAnnotation(long timestamp, @Nonnull String s) {
        return addAnnotation(timestamp, s, defaultEndpoint);
    }
    /**
     * Adds a (binary) string annotation for an event.
     *
     * @param key   The key of the annotation
     * @param value The value of the annotation
     * @return this object
     */
    @Nonnull
    public ZipkinAnnotationsStore addAnnotation(@Nonnull String key, @Nonnull String value) {
        return addBinaryAnnotation(key, value, defaultEndpoint);
    }
    /**
     * Adds a (binary) short annotation for an event.
     *
     * @param key   The key of the annotation
     * @param value The value of the annotation
     * @return this object
     */
    @Nonnull
    public ZipkinAnnotationsStore addAnnotation(@Nonnull String key, short value) {
        return addBinaryAnnotation(key, value, defaultEndpoint);
    }
    /**
     * Adds a (binary) int annotation for an event.
     *
     * @param key   The key of the annotation
     * @param value The value of the annotation
     * @return this object
     */
    @Nonnull
    public ZipkinAnnotationsStore addAnnotation(@Nonnull String key, int value) {
        return addBinaryAnnotation(key, value, defaultEndpoint);
    }
    /**
     * Adds a (binary) long annotation for an event.
     *
     * @param key   The key of the annotation
     * @param value The value of the annotation
     * @return this object
     */
    @Nonnull
    public ZipkinAnnotationsStore addAnnotation(@Nonnull String key, long value) {
        return addBinaryAnnotation(key, value, defaultEndpoint);
    }
    /**
     * Adds a (binary) double annotation for an event.
     *
     * @param key   The key of the annotation
     * @param value The value of the annotation
     * @return this object
     */
    @Nonnull
    public ZipkinAnnotationsStore addAnnotation(@Nonnull String key, double value) {
        return addBinaryAnnotation(key, value, defaultEndpoint);
    }
    /**
     * Adds a (binary) boolean annotation for an event.
     *
     * @param key   The key of the annotation
     * @param value The value of the annotation
     * @return this object
     */
    @Nonnull
    public ZipkinAnnotationsStore addAnnotation(@Nonnull String key, boolean value) {
        return addBinaryAnnotation(key, value, defaultEndpoint);
    }
    /**
     * Adds a (binary) byte array annotation for an event.
     *
     * @param key   The key of the annotation
     * @param value The value of the annotation
     * @return this object
     */
    @Nonnull
    public ZipkinAnnotationsStore addAnnotation(@Nonnull String key, byte[] value) {
        return addBinaryAnnotation(key, value, defaultEndpoint);
    }
    // PACKAGE-PRIVATE METHODS
    /** Sets the endpoint used by the public overloads above. */
    @Nonnull
    ZipkinAnnotationsStore defaultEndpoint(@Nonnull Endpoint defaultEndpoint) {
        this.defaultEndpoint = defaultEndpoint;
        return this;
    }
    @Nonnull
    ZipkinAnnotationsStore addAnnotation(long timestampMicro, @Nonnull String s, @Nullable Endpoint endpoint) {
        Objects.requireNonNull(s);
        Annotation annotation = new Annotation(timestampMicro, s);
        if (endpoint != null) {
            // endpoint is optional - current version of zipkin web doesn't show spans without host though
            annotation.setHost(endpoint);
        }
        underlyingSpan.addToAnnotations(annotation);
        return this;
    }
    @Nonnull
    ZipkinAnnotationsStore addBinaryAnnotation(@Nonnull String key, @Nonnull String value, @Nonnull Endpoint endpoint) {
        // Encode explicitly as UTF-8: the previous value.getBytes() used the
        // platform default charset, producing JVM-dependent bytes for the
        // same annotation value.
        ByteBuffer wrappedValue = ByteBuffer.wrap(value.getBytes(StandardCharsets.UTF_8));
        return addBinaryAnnotation(key, wrappedValue, AnnotationType.STRING, endpoint);
    }
    @Nonnull
    ZipkinAnnotationsStore addBinaryAnnotation(@Nonnull String key, short value, @Nonnull Endpoint endpoint) {
        ByteBuffer wrappedValue = ByteBuffer.allocate(SHORT_SIZE_B).putShort(value);
        wrappedValue.flip();
        return addBinaryAnnotation(key, wrappedValue, AnnotationType.I16, endpoint);
    }
    @Nonnull
    ZipkinAnnotationsStore addBinaryAnnotation(@Nonnull String key, int value, @Nonnull Endpoint endpoint) {
        ByteBuffer wrappedValue = ByteBuffer.allocate(INT_SIZE_B).putInt(value);
        wrappedValue.flip();
        return addBinaryAnnotation(key, wrappedValue, AnnotationType.I32, endpoint);
    }
    @Nonnull
    ZipkinAnnotationsStore addBinaryAnnotation(@Nonnull String key, long value, @Nonnull Endpoint endpoint) {
        ByteBuffer wrappedValue = ByteBuffer.allocate(LONG_SIZE_B).putLong(value);
        wrappedValue.flip();
        return addBinaryAnnotation(key, wrappedValue, AnnotationType.I64, endpoint);
    }
    @Nonnull
    ZipkinAnnotationsStore addBinaryAnnotation(@Nonnull String key, double value, @Nonnull Endpoint endpoint) {
        ByteBuffer wrappedValue = ByteBuffer.allocate(DOUBLE_SIZE_B).putDouble(value);
        wrappedValue.flip();
        return addBinaryAnnotation(key, wrappedValue, AnnotationType.DOUBLE, endpoint);
    }
    @Nonnull
    ZipkinAnnotationsStore addBinaryAnnotation(@Nonnull String key, boolean value, @Nonnull Endpoint endpoint) {
        ByteBuffer wrappedValue = value ? TRUE_BB : FALSE_BB;
        return addBinaryAnnotation(key, wrappedValue, AnnotationType.BOOL, endpoint);
    }
    @Nonnull
    ZipkinAnnotationsStore addBinaryAnnotation(@Nonnull String key, byte[] value, @Nonnull Endpoint endpoint) {
        ByteBuffer wrappedValue = ByteBuffer.wrap(value);
        return addBinaryAnnotation(key, wrappedValue, AnnotationType.BYTES, endpoint);
    }
    /** @return the populated span; note this exposes the mutable internal instance. */
    @Nonnull
    Span generate() {
        return underlyingSpan;
    }
    // PRIVATE METHODS
    @Nonnull
    private ZipkinAnnotationsStore addBinaryAnnotation(@Nonnull String key, @Nonnull ByteBuffer byteBuffer,
                                                       @Nonnull AnnotationType annotationType, @Nullable Endpoint endpoint) {
        BinaryAnnotation binaryAnnotation = new BinaryAnnotation(key, byteBuffer, annotationType);
        if (endpoint != null) {
            // endpoint is optional - current version of zipkin web doesn't show spans without host though
            binaryAnnotation.setHost(endpoint);
        }
        underlyingSpan.addToBinary_annotations(binaryAnnotation);
        return this;
    }
}
| |
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.squareup.okhttp.internal;
import java.io.BufferedWriter;
import java.io.Closeable;
import java.io.EOFException;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FilterOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* A cache that uses a bounded amount of space on a filesystem. Each cache
* entry has a string key and a fixed number of values. Each key must match
* the regex <strong>[a-z0-9_-]{1,64}</strong>. Values are byte sequences,
* accessible as streams or files. Each value must be between {@code 0} and
* {@code Integer.MAX_VALUE} bytes in length.
*
* <p>The cache stores its data in a directory on the filesystem. This
* directory must be exclusive to the cache; the cache may delete or overwrite
* files from its directory. It is an error for multiple processes to use the
* same cache directory at the same time.
*
* <p>This cache limits the number of bytes that it will store on the
* filesystem. When the number of stored bytes exceeds the limit, the cache will
* remove entries in the background until the limit is satisfied. The limit is
* not strict: the cache may temporarily exceed it while waiting for files to be
* deleted. The limit does not include filesystem overhead or the cache
* journal so space-sensitive applications should set a conservative limit.
*
* <p>Clients call {@link #edit} to create or update the values of an entry. An
* entry may have only one editor at one time; if a value is not available to be
* edited then {@link #edit} will return null.
* <ul>
* <li>When an entry is being <strong>created</strong> it is necessary to
* supply a full set of values; the empty value should be used as a
* placeholder if necessary.
* <li>When an entry is being <strong>edited</strong>, it is not necessary
* to supply data for every value; values default to their previous
* value.
* </ul>
* Every {@link #edit} call must be matched by a call to {@link Editor#commit}
* or {@link Editor#abort}. Committing is atomic: a read observes the full set
* of values as they were before or after the commit, but never a mix of values.
*
* <p>Clients call {@link #get} to read a snapshot of an entry. The read will
* observe the value at the time that {@link #get} was called. Updates and
* removals after the call do not impact ongoing reads.
*
* <p>This class is tolerant of some I/O errors. If files are missing from the
* filesystem, the corresponding entries will be dropped from the cache. If
* an error occurs while writing a cache value, the edit will fail silently.
* Callers should handle other problems by catching {@code IOException} and
* responding appropriately.
*/
public final class DiskLruCache implements Closeable {
  static final String JOURNAL_FILE = "journal";
  static final String JOURNAL_FILE_TEMP = "journal.tmp";
  static final String JOURNAL_FILE_BACKUP = "journal.bkp";
  static final String MAGIC = "libcore.io.DiskLruCache";
  static final String VERSION_1 = "1";
  static final long ANY_SEQUENCE_NUMBER = -1;
  static final Pattern LEGAL_KEY_PATTERN = Pattern.compile("[a-z0-9_-]{1,64}");
  private static final String CLEAN = "CLEAN";
  private static final String DIRTY = "DIRTY";
  private static final String REMOVE = "REMOVE";
  private static final String READ = "READ";

  /*
   * This cache uses a journal file named "journal". A typical journal file
   * looks like this:
   *     libcore.io.DiskLruCache
   *     1
   *     100
   *     2
   *
   *     CLEAN 3400330d1dfc7f3f7f4b8d4d803dfcf6 832 21054
   *     DIRTY 335c4c6028171cfddfbaae1a9c313c52
   *     CLEAN 335c4c6028171cfddfbaae1a9c313c52 3934 2342
   *     REMOVE 335c4c6028171cfddfbaae1a9c313c52
   *     DIRTY 1ab96a171faeeee38496d8b330771a7a
   *     CLEAN 1ab96a171faeeee38496d8b330771a7a 1600 234
   *     READ 335c4c6028171cfddfbaae1a9c313c52
   *     READ 3400330d1dfc7f3f7f4b8d4d803dfcf6
   *
   * The first five lines of the journal form its header. They are the
   * constant string "libcore.io.DiskLruCache", the disk cache's version,
   * the application's version, the value count, and a blank line.
   *
   * Each of the subsequent lines in the file is a record of the state of a
   * cache entry. Each line contains space-separated values: a state, a key,
   * and optional state-specific values.
   *   o DIRTY lines track that an entry is actively being created or updated.
   *     Every successful DIRTY action should be followed by a CLEAN or REMOVE
   *     action. DIRTY lines without a matching CLEAN or REMOVE indicate that
   *     temporary files may need to be deleted.
   *   o CLEAN lines track a cache entry that has been successfully published
   *     and may be read. A publish line is followed by the lengths of each of
   *     its values.
   *   o READ lines track accesses for LRU.
   *   o REMOVE lines track entries that have been deleted.
   *
   * The journal file is appended to as cache operations occur. The journal may
   * occasionally be compacted by dropping redundant lines. A temporary file named
   * "journal.tmp" will be used during compaction; that file should be deleted if
   * it exists when the cache is opened.
   */

  private final File directory;
  private final File journalFile;
  private final File journalFileTmp;
  private final File journalFileBackup;
  private final int appVersion;
  private long maxSize;
  private final int valueCount;
  private long size = 0;
  private Writer journalWriter;
  // Access-ordered so that iteration order is least-recently-used first; this
  // is what trimToSize() relies on for eviction order.
  private final LinkedHashMap<String, Entry> lruEntries =
      new LinkedHashMap<String, Entry>(0, 0.75f, true);
  private int redundantOpCount;

  /**
   * To differentiate between old and current snapshots, each entry is given
   * a sequence number each time an edit is committed. A snapshot is stale if
   * its sequence number is not equal to its entry's sequence number.
   */
  private long nextSequenceNumber = 0;

  /** This cache uses a single background thread to evict entries. */
  final ThreadPoolExecutor executorService =
      new ThreadPoolExecutor(0, 1, 60L, TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>());

  /** Trims the cache to size and compacts the journal when it has grown redundant. */
  private final Callable<Void> cleanupCallable = new Callable<Void>() {
    public Void call() throws Exception {
      synchronized (DiskLruCache.this) {
        if (journalWriter == null) {
          return null; // Closed.
        }
        trimToSize();
        if (journalRebuildRequired()) {
          rebuildJournal();
          redundantOpCount = 0;
        }
      }
      return null;
    }
  };

  private DiskLruCache(File directory, int appVersion, int valueCount, long maxSize) {
    this.directory = directory;
    this.appVersion = appVersion;
    this.journalFile = new File(directory, JOURNAL_FILE);
    this.journalFileTmp = new File(directory, JOURNAL_FILE_TEMP);
    this.journalFileBackup = new File(directory, JOURNAL_FILE_BACKUP);
    this.valueCount = valueCount;
    this.maxSize = maxSize;
  }

  /**
   * Opens the cache in {@code directory}, creating a cache if none exists
   * there.
   *
   * @param directory a writable directory
   * @param appVersion the caller's version number; a mismatch with the value
   *     recorded in the journal invalidates the cache
   * @param valueCount the number of values per cache entry. Must be positive.
   * @param maxSize the maximum number of bytes this cache should use to store
   * @throws IOException if reading or writing the cache directory fails
   */
  public static DiskLruCache open(File directory, int appVersion, int valueCount, long maxSize)
      throws IOException {
    if (maxSize <= 0) {
      throw new IllegalArgumentException("maxSize <= 0");
    }
    if (valueCount <= 0) {
      throw new IllegalArgumentException("valueCount <= 0");
    }

    // If a bkp file exists, use it instead.
    File backupFile = new File(directory, JOURNAL_FILE_BACKUP);
    if (backupFile.exists()) {
      File journalFile = new File(directory, JOURNAL_FILE);
      // If journal file also exists just delete backup file.
      if (journalFile.exists()) {
        backupFile.delete();
      } else {
        renameTo(backupFile, journalFile, false);
      }
    }

    // Prefer to pick up where we left off.
    DiskLruCache cache = new DiskLruCache(directory, appVersion, valueCount, maxSize);
    if (cache.journalFile.exists()) {
      try {
        cache.readJournal();
        cache.processJournal();
        cache.journalWriter = new BufferedWriter(
            new OutputStreamWriter(new FileOutputStream(cache.journalFile, true), Util.US_ASCII));
        return cache;
      } catch (IOException journalIsCorrupt) {
        Platform.get().logW("DiskLruCache " + directory + " is corrupt: "
            + journalIsCorrupt.getMessage() + ", removing");
        cache.delete();
      }
    }

    // Create a new empty cache.
    directory.mkdirs();
    cache = new DiskLruCache(directory, appVersion, valueCount, maxSize);
    cache.rebuildJournal();
    return cache;
  }

  /**
   * Validates the journal header and replays every journal record into
   * {@link #lruEntries}.
   *
   * @throws IOException if the header does not match this cache's
   *     configuration, or if any record line is malformed
   */
  private void readJournal() throws IOException {
    StrictLineReader reader = new StrictLineReader(new FileInputStream(journalFile), Util.US_ASCII);
    try {
      String magic = reader.readLine();
      String version = reader.readLine();
      String appVersionString = reader.readLine();
      String valueCountString = reader.readLine();
      String blank = reader.readLine();
      if (!MAGIC.equals(magic)
          || !VERSION_1.equals(version)
          || !Integer.toString(appVersion).equals(appVersionString)
          || !Integer.toString(valueCount).equals(valueCountString)
          || !"".equals(blank)) {
        // BUGFIX: previously appVersionString was checked above but omitted
        // from this message, yielding a misleading diagnostic on mismatch.
        throw new IOException("unexpected journal header: [" + magic + ", " + version + ", "
            + appVersionString + ", " + valueCountString + ", " + blank + "]");
      }

      int lineCount = 0;
      while (true) {
        try {
          readJournalLine(reader.readLine());
          lineCount++;
        } catch (EOFException endOfJournal) {
          break;
        }
      }
      // Every line beyond one-per-entry is redundant and counts toward
      // triggering a journal rebuild.
      redundantOpCount = lineCount - lruEntries.size();
    } finally {
      Util.closeQuietly(reader);
    }
  }

  /**
   * Applies a single journal record ("STATE key [lengths...]") to the
   * in-memory entry map.
   *
   * @throws IOException if the line does not match any known record shape
   */
  private void readJournalLine(String line) throws IOException {
    int firstSpace = line.indexOf(' ');
    if (firstSpace == -1) {
      throw new IOException("unexpected journal line: " + line);
    }

    int keyBegin = firstSpace + 1;
    int secondSpace = line.indexOf(' ', keyBegin);
    final String key;
    if (secondSpace == -1) {
      key = line.substring(keyBegin);
      if (firstSpace == REMOVE.length() && line.startsWith(REMOVE)) {
        lruEntries.remove(key);
        return;
      }
    } else {
      key = line.substring(keyBegin, secondSpace);
    }

    Entry entry = lruEntries.get(key);
    if (entry == null) {
      entry = new Entry(key);
      lruEntries.put(key, entry);
    }

    if (secondSpace != -1 && firstSpace == CLEAN.length() && line.startsWith(CLEAN)) {
      String[] parts = line.substring(secondSpace + 1).split(" ");
      entry.readable = true;
      entry.currentEditor = null;
      entry.setLengths(parts);
    } else if (secondSpace == -1 && firstSpace == DIRTY.length() && line.startsWith(DIRTY)) {
      entry.currentEditor = new Editor(entry);
    } else if (secondSpace == -1 && firstSpace == READ.length() && line.startsWith(READ)) {
      // This work was already done by calling lruEntries.get().
    } else {
      throw new IOException("unexpected journal line: " + line);
    }
  }

  /**
   * Computes the initial size and collects garbage as a part of opening the
   * cache. Dirty entries are assumed to be inconsistent and will be deleted.
   */
  private void processJournal() throws IOException {
    deleteIfExists(journalFileTmp);
    for (Iterator<Entry> i = lruEntries.values().iterator(); i.hasNext(); ) {
      Entry entry = i.next();
      if (entry.currentEditor == null) {
        for (int t = 0; t < valueCount; t++) {
          size += entry.lengths[t];
        }
      } else {
        entry.currentEditor = null;
        for (int t = 0; t < valueCount; t++) {
          deleteIfExists(entry.getCleanFile(t));
          deleteIfExists(entry.getDirtyFile(t));
        }
        i.remove();
      }
    }
  }

  /**
   * Creates a new journal that omits redundant information. This replaces the
   * current journal if it exists.
   */
  private synchronized void rebuildJournal() throws IOException {
    if (journalWriter != null) {
      journalWriter.close();
    }

    Writer writer = new BufferedWriter(
        new OutputStreamWriter(new FileOutputStream(journalFileTmp), Util.US_ASCII));
    try {
      writer.write(MAGIC);
      writer.write("\n");
      writer.write(VERSION_1);
      writer.write("\n");
      writer.write(Integer.toString(appVersion));
      writer.write("\n");
      writer.write(Integer.toString(valueCount));
      writer.write("\n");
      writer.write("\n");

      for (Entry entry : lruEntries.values()) {
        if (entry.currentEditor != null) {
          writer.write(DIRTY + ' ' + entry.key + '\n');
        } else {
          writer.write(CLEAN + ' ' + entry.key + entry.getLengths() + '\n');
        }
      }
    } finally {
      writer.close();
    }

    // Swap the new journal in atomically-enough: keep a backup of the old one
    // until the rename of the temp file has succeeded.
    if (journalFile.exists()) {
      renameTo(journalFile, journalFileBackup, true);
    }
    renameTo(journalFileTmp, journalFile, false);
    journalFileBackup.delete();

    journalWriter = new BufferedWriter(
        new OutputStreamWriter(new FileOutputStream(journalFile, true), Util.US_ASCII));
  }

  /** Deletes {@code file}, throwing if it exists but cannot be deleted. */
  private static void deleteIfExists(File file) throws IOException {
    if (file.exists() && !file.delete()) {
      throw new IOException();
    }
  }

  /**
   * Renames {@code from} to {@code to}, optionally deleting {@code to} first;
   * throws if the rename fails.
   */
  private static void renameTo(File from, File to, boolean deleteDestination) throws IOException {
    if (deleteDestination) {
      deleteIfExists(to);
    }
    if (!from.renameTo(to)) {
      throw new IOException();
    }
  }

  /**
   * Returns a snapshot of the entry named {@code key}, or null if it doesn't
   * exist or is not currently readable. If a value is returned, it is moved to
   * the head of the LRU queue.
   */
  public synchronized Snapshot get(String key) throws IOException {
    checkNotClosed();
    validateKey(key);
    Entry entry = lruEntries.get(key);
    if (entry == null) {
      return null;
    }

    if (!entry.readable) {
      return null;
    }

    // Open all streams eagerly to guarantee that we see a single published
    // snapshot. If we opened streams lazily then the streams could come
    // from different edits.
    InputStream[] ins = new InputStream[valueCount];
    try {
      for (int i = 0; i < valueCount; i++) {
        ins[i] = new FileInputStream(entry.getCleanFile(i));
      }
    } catch (FileNotFoundException e) {
      // A file must have been deleted manually!
      for (int i = 0; i < valueCount; i++) {
        if (ins[i] != null) {
          Util.closeQuietly(ins[i]);
        } else {
          break;
        }
      }
      return null;
    }

    redundantOpCount++;
    journalWriter.append(READ + ' ' + key + '\n');
    if (journalRebuildRequired()) {
      executorService.submit(cleanupCallable);
    }

    return new Snapshot(key, entry.sequenceNumber, ins, entry.lengths);
  }

  /**
   * Returns an editor for the entry named {@code key}, or null if another
   * edit is in progress.
   */
  public Editor edit(String key) throws IOException {
    return edit(key, ANY_SEQUENCE_NUMBER);
  }

  private synchronized Editor edit(String key, long expectedSequenceNumber) throws IOException {
    checkNotClosed();
    validateKey(key);
    Entry entry = lruEntries.get(key);
    if (expectedSequenceNumber != ANY_SEQUENCE_NUMBER && (entry == null
        || entry.sequenceNumber != expectedSequenceNumber)) {
      return null; // Snapshot is stale.
    }
    if (entry == null) {
      entry = new Entry(key);
      lruEntries.put(key, entry);
    } else if (entry.currentEditor != null) {
      return null; // Another edit is in progress.
    }

    Editor editor = new Editor(entry);
    entry.currentEditor = editor;

    // Flush the journal before creating files to prevent file leaks.
    journalWriter.write(DIRTY + ' ' + key + '\n');
    journalWriter.flush();
    return editor;
  }

  /** Returns the directory where this cache stores its data. */
  public File getDirectory() {
    return directory;
  }

  /**
   * Returns the maximum number of bytes that this cache should use to store
   * its data.
   */
  public long getMaxSize() {
    return maxSize;
  }

  /**
   * Changes the maximum number of bytes the cache can store and queues a job
   * to trim the existing store, if necessary.
   */
  public synchronized void setMaxSize(long maxSize) {
    this.maxSize = maxSize;
    executorService.submit(cleanupCallable);
  }

  /**
   * Returns the number of bytes currently being used to store the values in
   * this cache. This may be greater than the max size if a background
   * deletion is pending.
   */
  public synchronized long size() {
    return size;
  }

  /**
   * Finishes {@code editor}: on success, publishes its dirty files as the
   * entry's clean files and logs a CLEAN record; on failure (or an abort),
   * discards the dirty files and, for a never-published entry, removes it.
   */
  private synchronized void completeEdit(Editor editor, boolean success) throws IOException {
    Entry entry = editor.entry;
    if (entry.currentEditor != editor) {
      throw new IllegalStateException();
    }

    // If this edit is creating the entry for the first time, every index must have a value.
    if (success && !entry.readable) {
      for (int i = 0; i < valueCount; i++) {
        if (!editor.written[i]) {
          editor.abort();
          throw new IllegalStateException("Newly created entry didn't create value for index " + i);
        }
        if (!entry.getDirtyFile(i).exists()) {
          editor.abort();
          return;
        }
      }
    }

    for (int i = 0; i < valueCount; i++) {
      File dirty = entry.getDirtyFile(i);
      if (success) {
        if (dirty.exists()) {
          File clean = entry.getCleanFile(i);
          dirty.renameTo(clean);
          long oldLength = entry.lengths[i];
          long newLength = clean.length();
          entry.lengths[i] = newLength;
          size = size - oldLength + newLength;
        }
      } else {
        deleteIfExists(dirty);
      }
    }

    redundantOpCount++;
    entry.currentEditor = null;
    // Short-circuit || (was bitwise '|'); behavior is identical for booleans
    // but this is the conventional form.
    if (entry.readable || success) {
      entry.readable = true;
      journalWriter.write(CLEAN + ' ' + entry.key + entry.getLengths() + '\n');
      if (success) {
        entry.sequenceNumber = nextSequenceNumber++;
      }
    } else {
      lruEntries.remove(entry.key);
      journalWriter.write(REMOVE + ' ' + entry.key + '\n');
    }
    journalWriter.flush();

    if (size > maxSize || journalRebuildRequired()) {
      executorService.submit(cleanupCallable);
    }
  }

  /**
   * We only rebuild the journal when it will halve the size of the journal
   * and eliminate at least 2000 ops.
   */
  private boolean journalRebuildRequired() {
    final int redundantOpCompactThreshold = 2000;
    return redundantOpCount >= redundantOpCompactThreshold //
        && redundantOpCount >= lruEntries.size();
  }

  /**
   * Drops the entry for {@code key} if it exists and can be removed. Entries
   * actively being edited cannot be removed.
   *
   * @return true if an entry was removed.
   */
  public synchronized boolean remove(String key) throws IOException {
    checkNotClosed();
    validateKey(key);
    Entry entry = lruEntries.get(key);
    if (entry == null || entry.currentEditor != null) {
      return false;
    }

    for (int i = 0; i < valueCount; i++) {
      File file = entry.getCleanFile(i);
      if (!file.delete()) {
        throw new IOException("failed to delete " + file);
      }
      size -= entry.lengths[i];
      entry.lengths[i] = 0;
    }

    redundantOpCount++;
    journalWriter.append(REMOVE + ' ' + key + '\n');
    lruEntries.remove(key);

    if (journalRebuildRequired()) {
      executorService.submit(cleanupCallable);
    }

    return true;
  }

  /** Returns true if this cache has been closed. */
  public boolean isClosed() {
    return journalWriter == null;
  }

  /** Throws {@link IllegalStateException} if this cache has been closed. */
  private void checkNotClosed() {
    if (journalWriter == null) {
      throw new IllegalStateException("cache is closed");
    }
  }

  /** Force buffered operations to the filesystem. */
  public synchronized void flush() throws IOException {
    checkNotClosed();
    trimToSize();
    journalWriter.flush();
  }

  /** Closes this cache. Stored values will remain on the filesystem. */
  public synchronized void close() throws IOException {
    if (journalWriter == null) {
      return; // Already closed.
    }
    // Copy the entries first: aborting an edit mutates lruEntries.
    for (Entry entry : new ArrayList<Entry>(lruEntries.values())) {
      if (entry.currentEditor != null) {
        entry.currentEditor.abort();
      }
    }
    trimToSize();
    journalWriter.close();
    journalWriter = null;
  }

  /** Evicts least-recently-used entries until {@code size <= maxSize}. */
  private void trimToSize() throws IOException {
    while (size > maxSize) {
      Map.Entry<String, Entry> toEvict = lruEntries.entrySet().iterator().next();
      remove(toEvict.getKey());
    }
  }

  /**
   * Closes the cache and deletes all of its stored values. This will delete
   * all files in the cache directory including files that weren't created by
   * the cache.
   */
  public void delete() throws IOException {
    close();
    Util.deleteContents(directory);
  }

  /** Throws {@link IllegalArgumentException} unless {@code key} matches {@link #LEGAL_KEY_PATTERN}. */
  private void validateKey(String key) {
    Matcher matcher = LEGAL_KEY_PATTERN.matcher(key);
    if (!matcher.matches()) {
      throw new IllegalArgumentException("keys must match regex [a-z0-9_-]{1,64}: \"" + key + "\"");
    }
  }

  /** Reads {@code in} fully as UTF-8 text. */
  private static String inputStreamToString(InputStream in) throws IOException {
    return Util.readFully(new InputStreamReader(in, Util.UTF_8));
  }

  /** A snapshot of the values for an entry. */
  public final class Snapshot implements Closeable {
    private final String key;
    private final long sequenceNumber;
    private final InputStream[] ins;
    private final long[] lengths;

    private Snapshot(String key, long sequenceNumber, InputStream[] ins, long[] lengths) {
      this.key = key;
      this.sequenceNumber = sequenceNumber;
      this.ins = ins;
      this.lengths = lengths;
    }

    /**
     * Returns an editor for this snapshot's entry, or null if either the
     * entry has changed since this snapshot was created or if another edit
     * is in progress.
     */
    public Editor edit() throws IOException {
      return DiskLruCache.this.edit(key, sequenceNumber);
    }

    /** Returns the unbuffered stream with the value for {@code index}. */
    public InputStream getInputStream(int index) {
      return ins[index];
    }

    /** Returns the string value for {@code index}. */
    public String getString(int index) throws IOException {
      return inputStreamToString(getInputStream(index));
    }

    /** Returns the byte length of the value for {@code index}. */
    public long getLength(int index) {
      return lengths[index];
    }

    public void close() {
      for (InputStream in : ins) {
        Util.closeQuietly(in);
      }
    }
  }

  /** Sink used when the filesystem is unrecoverable; discards all writes. */
  private static final OutputStream NULL_OUTPUT_STREAM = new OutputStream() {
    @Override
    public void write(int b) throws IOException {
      // Eat all writes silently. Nom nom.
    }
  };

  /** Edits the values for an entry. */
  public final class Editor {
    private final Entry entry;
    // Per-index "value supplied" flags; only allocated (and only consulted)
    // for entries that have never been published.
    private final boolean[] written;
    private boolean hasErrors;
    private boolean committed;

    private Editor(Entry entry) {
      this.entry = entry;
      this.written = (entry.readable) ? null : new boolean[valueCount];
    }

    /**
     * Returns an unbuffered input stream to read the last committed value,
     * or null if no value has been committed.
     */
    public InputStream newInputStream(int index) throws IOException {
      synchronized (DiskLruCache.this) {
        if (entry.currentEditor != this) {
          throw new IllegalStateException();
        }
        if (!entry.readable) {
          return null;
        }
        try {
          return new FileInputStream(entry.getCleanFile(index));
        } catch (FileNotFoundException e) {
          return null;
        }
      }
    }

    /**
     * Returns the last committed value as a string, or null if no value
     * has been committed.
     */
    public String getString(int index) throws IOException {
      InputStream in = newInputStream(index);
      return in != null ? inputStreamToString(in) : null;
    }

    /**
     * Returns a new unbuffered output stream to write the value at
     * {@code index}. If the underlying output stream encounters errors
     * when writing to the filesystem, this edit will be aborted when
     * {@link #commit} is called. The returned output stream does not throw
     * IOExceptions.
     */
    public OutputStream newOutputStream(int index) throws IOException {
      synchronized (DiskLruCache.this) {
        if (entry.currentEditor != this) {
          throw new IllegalStateException();
        }
        if (!entry.readable) {
          written[index] = true;
        }
        File dirtyFile = entry.getDirtyFile(index);
        FileOutputStream outputStream;
        try {
          outputStream = new FileOutputStream(dirtyFile);
        } catch (FileNotFoundException e) {
          // Attempt to recreate the cache directory.
          directory.mkdirs();
          try {
            outputStream = new FileOutputStream(dirtyFile);
          } catch (FileNotFoundException e2) {
            // We are unable to recover. Silently eat the writes.
            return NULL_OUTPUT_STREAM;
          }
        }
        return new FaultHidingOutputStream(outputStream);
      }
    }

    /** Sets the value at {@code index} to {@code value}. */
    public void set(int index, String value) throws IOException {
      Writer writer = null;
      try {
        writer = new OutputStreamWriter(newOutputStream(index), Util.UTF_8);
        writer.write(value);
      } finally {
        Util.closeQuietly(writer);
      }
    }

    /**
     * Commits this edit so it is visible to readers. This releases the
     * edit lock so another edit may be started on the same key.
     */
    public void commit() throws IOException {
      if (hasErrors) {
        completeEdit(this, false);
        remove(entry.key); // The previous entry is stale.
      } else {
        completeEdit(this, true);
      }
      committed = true;
    }

    /**
     * Aborts this edit. This releases the edit lock so another edit may be
     * started on the same key.
     */
    public void abort() throws IOException {
      completeEdit(this, false);
    }

    public void abortUnlessCommitted() {
      if (!committed) {
        try {
          abort();
        } catch (IOException ignored) {
        }
      }
    }

    /** Records I/O failures in {@link #hasErrors} instead of throwing. */
    private class FaultHidingOutputStream extends FilterOutputStream {
      private FaultHidingOutputStream(OutputStream out) {
        super(out);
      }

      @Override public void write(int oneByte) {
        try {
          out.write(oneByte);
        } catch (IOException e) {
          hasErrors = true;
        }
      }

      @Override public void write(byte[] buffer, int offset, int length) {
        try {
          out.write(buffer, offset, length);
        } catch (IOException e) {
          hasErrors = true;
        }
      }

      @Override public void close() {
        try {
          out.close();
        } catch (IOException e) {
          hasErrors = true;
        }
      }

      @Override public void flush() {
        try {
          out.flush();
        } catch (IOException e) {
          hasErrors = true;
        }
      }
    }
  }

  /** Bookkeeping for one cache key and its {@code valueCount} value files. */
  private final class Entry {
    private final String key;

    /** Lengths of this entry's files. */
    private final long[] lengths;

    /** True if this entry has ever been published. */
    private boolean readable;

    /** The ongoing edit or null if this entry is not being edited. */
    private Editor currentEditor;

    /** The sequence number of the most recently committed edit to this entry. */
    private long sequenceNumber;

    private Entry(String key) {
      this.key = key;
      this.lengths = new long[valueCount];
    }

    /** Returns the journal suffix for this entry: one space-prefixed length per value. */
    public String getLengths() throws IOException {
      StringBuilder result = new StringBuilder();
      for (long length : lengths) {
        result.append(' ').append(length);
      }
      return result.toString();
    }

    /** Set lengths using decimal numbers like "10123". */
    private void setLengths(String[] strings) throws IOException {
      if (strings.length != valueCount) {
        throw invalidLengths(strings);
      }

      try {
        for (int i = 0; i < strings.length; i++) {
          lengths[i] = Long.parseLong(strings[i]);
        }
      } catch (NumberFormatException e) {
        throw invalidLengths(strings);
      }
    }

    private IOException invalidLengths(String[] strings) throws IOException {
      throw new IOException("unexpected journal line: " + java.util.Arrays.toString(strings));
    }

    public File getCleanFile(int i) {
      return new File(directory, key + "." + i);
    }

    public File getDirtyFile(int i) {
      return new File(directory, key + "." + i + ".tmp");
    }
  }
}
| |
/*
*
* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is Rhino code, released
* May 6, 1999.
*
* The Initial Developer of the Original Code is
* Netscape Communications Corporation.
* Portions created by the Initial Developer are Copyright (C) 1997-1999
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* Bob Jervis
* Google Inc.
*
* Alternatively, the contents of this file may be used under the terms of
* the GNU General Public License Version 2 or later (the "GPL"), in which
* case the provisions of the GPL are applicable instead of those above. If
* you wish to allow use of your version of this file only under the terms of
* the GPL and not to allow others to use your version of this file under the
* MPL, indicate your decision by deleting the provisions above and replacing
* them with the notice and other provisions required by the GPL. If you do
* not delete the provisions above, a recipient may use your version of this
* file under either the MPL or the GPL.
*
* ***** END LICENSE BLOCK ***** */
package com.google.javascript.rhino.jstype;
import static com.google.javascript.rhino.jstype.TernaryValue.FALSE;
import static com.google.javascript.rhino.jstype.TernaryValue.UNKNOWN;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.javascript.rhino.JSDocInfo;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.ObjectTypeI;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
/**
* Object type.
*
* In JavaScript, all object types have properties, and each of those
* properties has a type. Property types may be DECLARED, INFERRED, or
* UNKNOWN.
*
* DECLARED properties have an explicit type annotation, as in:
* <code>
* /xx @type {number} x/
* Foo.prototype.bar = 1;
* </code>
* This property may only hold number values, and an assignment to any
* other type of value is an error.
*
* INFERRED properties do not have an explicit type annotation. Rather,
* we try to find all the possible types that this property can hold.
* <code>
* Foo.prototype.bar = 1;
* </code>
* If the programmer assigns other types of values to this property,
* the property will take on the union of all these types.
*
* UNKNOWN properties are properties on the UNKNOWN type. The UNKNOWN
* type has all properties, but we do not know whether they are
* declared or inferred.
*
*/
public abstract class ObjectType
extends JSType
implements ObjectTypeI {
private boolean visited;
private JSDocInfo docInfo = null;
private boolean unknown = true;
/**
 * Creates an object type registered with {@code registry}, using the
 * superclass's default template type map.
 */
ObjectType(JSTypeRegistry registry) {
  super(registry);
}
/**
 * Creates an object type registered with {@code registry} and carrying the
 * given template type map.
 */
ObjectType(JSTypeRegistry registry, TemplateTypeMap templateTypeMap) {
  super(registry, templateTypeMap);
}
public Node getRootNode() { return null; }
/** Returns the enclosing scope for property lookup, i.e. the implicit prototype. */
public ObjectType getParentScope() {
  return getImplicitPrototype();
}
/**
 * Returns the property map that manages the set of properties for an object.
 * This base implementation has no properties; subclasses with real property
 * storage override it.
 */
PropertyMap getPropertyMap() {
  return PropertyMap.immutableEmptyMap();
}
/**
 * Default getSlot implementation, delegating to this type's property map.
 * FunctionType overrides this for lazily-resolved prototypes.
 */
public Property getSlot(String name) {
  PropertyMap properties = getPropertyMap();
  return properties.getSlot(name);
}
/** Returns the named property declared directly on this type (not inherited). */
public Property getOwnSlot(String name) {
  PropertyMap properties = getPropertyMap();
  return properties.getOwnProperty(name);
}
/** Returns the type of {@code this} for this object, or {@code null} here by default. */
public JSType getTypeOfThis() {
  return null;
}
/**
 * Gets the declared default element type, or {@code null} when this type is
 * not templatized.
 * @see TemplatizedType
 */
public ImmutableList<JSType> getTemplateTypes() {
  return null;
}
/**
 * Returns the {@link JSDocInfo} attached to this type, possibly {@code null}.
 */
@Override
public JSDocInfo getJSDocInfo() {
  return docInfo;
}
/**
 * Attaches {@code info} as this type's {@link JSDocInfo}; {@code null} is
 * permitted and clears the association.
 */
public void setJSDocInfo(JSDocInfo info) {
  this.docInfo = info;
}
/**
 * Detects a cycle in the implicit prototype chain. This method accesses
 * the {@link #getImplicitPrototype()} method and must therefore be
 * invoked only after the object is sufficiently initialized to respond to
 * calls to this method.<p>
 *
 * Implementation note: this marks each node's {@code visited} flag while
 * walking up the chain, then unmarks the whole chain before returning, so
 * the flags are clean for the next call. The two phases must stay in this
 * exact order.
 *
 * @return True iff an implicit prototype cycle was detected.
 */
final boolean detectImplicitPrototypeCycle() {
  // detecting cycle: walk the prototype chain, marking nodes as we go;
  // revisiting a marked node means the chain loops back on itself.
  this.visited = true;
  ObjectType p = getImplicitPrototype();
  while (p != null) {
    if (p.visited) {
      return true;
    } else {
      p.visited = true;
    }
    p = p.getImplicitPrototype();
  }
  // clean up: no cycle found, so the chain terminates; clear every mark we set.
  p = this;
  do {
    p.visited = false;
    p = p.getImplicitPrototype();
  } while (p != null);
  return false;
}
/**
 * Detects cycles in either the implicit prototype chain, or the implemented/extended
 * interfaces.<p>
 *
 * @return True iff a cycle was detected.
 */
final boolean detectInheritanceCycle() {
  // TODO(dimvar): This should get moved to preventing cycles in FunctionTypeBuilder
  // rather than removing them here after they have been created.
  // Also, this doesn't do the right thing for extended interfaces, though that is
  // masked by another bug.
  if (detectImplicitPrototypeCycle()) {
    return true;
  }
  if (Iterables.contains(getCtorImplementedInterfaces(), this)) {
    return true;
  }
  return Iterables.contains(getCtorExtendedInterfaces(), this);
}
/**
 * Gets the reference name for this object. This includes named types
 * like constructors, prototypes, and enums. It notably does not include
 * literal types like strings and booleans and structural types.
 *
 * @return the object's name or {@code null} if this is an anonymous
 *         object
 */
public abstract String getReferenceName();
/**
 * Due to the complexity of some of our internal type systems, sometimes
 * we have different types constructed by the same constructor.
 * In other parts of the type system, these are called delegates.
 * We construct these types by appending suffixes to the constructor name.
 *
 * The normalized reference name does not have these suffixes, and as such,
 * recollapses these implicit types back to their real type.
 */
public String getNormalizedReferenceName() {
  String name = getReferenceName();
  if (name == null) {
    return null;
  }
  // Delegate suffixes start at the first '(' — see createDelegateSuffix.
  int suffixStart = name.indexOf('(');
  return (suffixStart == -1) ? name : name.substring(0, suffixStart);
}
/** Returns the name shown to users: the normalized (suffix-free) reference name. */
@Override
public String getDisplayName() {
  return getNormalizedReferenceName();
}
/**
 * Creates a parenthesized suffix for a proxy delegate, e.g. {@code "(x)"}.
 * @see #getNormalizedReferenceName
 */
public static String createDelegateSuffix(String suffix) {
  StringBuilder delegate = new StringBuilder();
  delegate.append('(').append(suffix).append(')');
  return delegate.toString();
}
/**
 * Returns true if the object is named.
 * @return true if the object is named, false if it is anonymous
 */
public boolean hasReferenceName() {
  return false;
}
/**
 * Compares this type against {@code that} under JavaScript {@code ==}
 * semantics: defers to the superclass when it has a definite answer,
 * otherwise objects compare UNKNOWN against anything comparable and FALSE
 * against null/undefined.
 */
@Override
public TernaryValue testForEquality(JSType that) {
  // Let the superclass handle the cases it can decide outright.
  TernaryValue fromSuper = super.testForEquality(that);
  if (fromSuper != null) {
    return fromSuper;
  }
  // objects are comparable to everything but null/undefined
  JSType comparable = getNativeType(JSTypeNative.OBJECT_NUMBER_STRING_BOOLEAN);
  return that.isSubtype(comparable) ? UNKNOWN : FALSE;
}
  /**
   * Gets this object's constructor.
   * @return this object's constructor or {@code null} if it is a native
   *     object (constructed natively vs. by instantiation of a function)
   */
  @Override
  public abstract FunctionType getConstructor();
  /**
   * Gets the implicit prototype (a.k.a. the {@code [[Prototype]]} property),
   * i.e. the next object up this object's prototype chain.
   */
  public abstract ObjectType getImplicitPrototype();
  /**
   * Defines a property whose type is explicitly declared by the programmer.
   * @param propertyName the property's name
   * @param type the type
   * @param propertyNode the node corresponding to the declaration of property
   *        which might later be accessed using {@code getPropertyNode}.
   * @return whether the property was defined successfully
   */
  public final boolean defineDeclaredProperty(String propertyName,
      JSType type, Node propertyNode) {
    boolean result = defineProperty(propertyName, type, false, propertyNode);
    // All property definitions go through this method
    // or defineInferredProperty. Because the properties defined on an
    // object can affect subtyping, it's slightly more efficient
    // to register this after defining the property.
    registry.registerPropertyOnType(propertyName, this);
    return result;
  }
  /**
   * Defines a property whose type is on a synthesized object. These objects
   * don't actually exist in the user's program. They're just used for
   * bookkeeping in the type system.
   *
   * <p>Unlike {@link #defineDeclaredProperty}, this does not register the
   * property with the registry.
   */
  public final boolean defineSynthesizedProperty(String propertyName,
      JSType type, Node propertyNode) {
    return defineProperty(propertyName, type, false, propertyNode);
  }
  /**
   * Defines a property whose type is inferred.
   * @param propertyName the property's name
   * @param type the type
   * @param propertyNode the node corresponding to the inferred definition of
   *        property that might later be accessed using {@code getPropertyNode}.
   * @return whether the property was defined successfully
   */
  public final boolean defineInferredProperty(String propertyName,
      JSType type, Node propertyNode) {
    if (hasProperty(propertyName)) {
      if (isPropertyTypeDeclared(propertyName)) {
        // We never want to hide a declared property with an inferred property.
        return true;
      }
      // Widen the existing inferred type to cover the new inference as well.
      JSType originalType = getPropertyType(propertyName);
      type = originalType == null ? type :
          originalType.getLeastSupertype(type);
    }
    boolean result = defineProperty(propertyName, type, true,
        propertyNode);
    // All property definitions go through this method
    // or defineDeclaredProperty. Because the properties defined on an
    // object can affect subtyping, it's slightly more efficient
    // to register this after defining the property.
    registry.registerPropertyOnType(propertyName, this);
    return result;
  }
  /**
   * Defines a property.<p>
   *
   * For clarity, callers should prefer {@link #defineDeclaredProperty} and
   * {@link #defineInferredProperty}.
   *
   * @param propertyName the property's name
   * @param type the type
   * @param inferred {@code true} if this property's type is inferred
   * @param propertyNode the node that represents the definition of property.
   *     Depending on the actual sub-type the node type might be different.
   *     The general idea is to have an estimate of where in the source code
   *     this property is defined.
   * @return {@code true} if the property was registered successfully,
   *     {@code false} if this conflicts with a previous property type
   *     declaration.
   */
  abstract boolean defineProperty(String propertyName, JSType type,
      boolean inferred, Node propertyNode);
  /**
   * Removes the declared or inferred property from this ObjectType.
   *
   * @param propertyName the property's name
   * @return true if the property was removed successfully. False if the
   *     property did not exist, or could not be removed.
   */
  public boolean removeProperty(String propertyName) {
    // Base implementation: removal is unsupported unless a subclass overrides.
    return false;
  }
  /**
   * Gets the node corresponding to the definition of the specified property.
   * This could be the node corresponding to declaration of the property or the
   * node corresponding to the first reference to this property, e.g.,
   * "this.propertyName" in a constructor. Note this is mainly intended to be
   * an estimate of where in the source code a property is defined. Sometimes
   * the returned node is not even part of the global AST but in the AST of the
   * JsDoc that defines a type.
   *
   * @param propertyName the name of the property
   * @return the {@code Node} corresponding to the property or null.
   */
  public Node getPropertyNode(String propertyName) {
    Property p = getSlot(propertyName);
    return p == null ? null : p.getNode();
  }
  /**
   * Gets the docInfo on the specified property on this type. This should not
   * be implemented recursively, as you generally need to know exactly on
   * which type in the prototype chain the JSDocInfo exists.
   *
   * @return the property's JSDocInfo, or {@code null} if the property is not
   *     declared directly on this type or has no JSDocInfo
   */
  public JSDocInfo getOwnPropertyJSDocInfo(String propertyName) {
    Property p = getOwnSlot(propertyName);
    return p == null ? null : p.getJSDocInfo();
  }
  /**
   * Sets the docInfo for the specified property from the
   * {@link JSDocInfo} on its definition.
   * @param info {@code JSDocInfo} for the property definition. May be
   *     {@code null}.
   */
  public void setPropertyJSDocInfo(String propertyName, JSDocInfo info) {
    // Base implementation is a no-op; subclasses that store JSDoc override it.
  }
  /** Sets the node where the property was defined. Base implementation is a no-op. */
  public void setPropertyNode(String propertyName, Node defSite) {
    // by default, do nothing
  }
@Override
public JSType findPropertyType(String propertyName) {
return hasProperty(propertyName) ?
getPropertyType(propertyName) : null;
}
  /**
   * Gets the property type of the property whose name is given. If the
   * underlying object does not have this property, the Unknown type is
   * returned to indicate that no information is available on this property.
   *
   * This gets overridden by FunctionType for lazily-resolved call() and
   * bind() functions.
   *
   * @return the property's type or {@link UnknownType}. This method never
   *     returns {@code null}.
   */
  public JSType getPropertyType(String propertyName) {
    StaticTypedSlot<JSType> slot = getSlot(propertyName);
    if (slot == null) {
      // No slot: pick the most precise "don't know" answer available.
      if (isNoResolvedType() || isCheckedUnknownType()) {
        return getNativeType(JSTypeNative.CHECKED_UNKNOWN_TYPE);
      } else if (isEmptyType()) {
        // The bottom type has no properties at all.
        return getNativeType(JSTypeNative.NO_TYPE);
      }
      return getNativeType(JSTypeNative.UNKNOWN_TYPE);
    }
    return slot.getType();
  }
  /** Returns whether this object has the named property. */
  @Override
  public boolean hasProperty(String propertyName) {
    // Unknown types have all properties; so does the bottom (empty) type.
    return isEmptyType() || isUnknownType() || getSlot(propertyName) != null;
  }
  /**
   * Checks whether the property whose name is given is present directly on
   * the object. Returns false even if it is declared on a supertype.
   * @return whether the property is declared directly on this object
   */
  public boolean hasOwnProperty(String propertyName) {
    return getOwnSlot(propertyName) != null;
  }
  /**
   * Returns the names of all the properties directly on this type.
   *
   * Overridden by FunctionType to add "prototype".
   *
   * @return the set of directly-declared property names
   */
  public Set<String> getOwnPropertyNames() {
    return getPropertyMap().getOwnPropertyNames();
  }
/**
* Checks whether the property's type is inferred.
*/
public boolean isPropertyTypeInferred(String propertyName) {
StaticTypedSlot<JSType> slot = getSlot(propertyName);
return slot == null ? false : slot.isTypeInferred();
}
/**
* Checks whether the property's type is declared.
*/
public boolean isPropertyTypeDeclared(String propertyName) {
StaticTypedSlot<JSType> slot = getSlot(propertyName);
return slot == null ? false : !slot.isTypeInferred();
}
  @Override
  public boolean isStructuralType() {
    // An object is structural iff its constructor is a structural interface
    // (e.g. a @record).
    FunctionType constructor = this.getConstructor();
    return constructor != null && constructor.isStructuralInterface();
  }
  /**
   * Whether the given property is declared directly on this object
   * (not merely inherited, and not merely inferred).
   */
  final boolean hasOwnDeclaredProperty(String name) {
    return hasOwnProperty(name) && isPropertyTypeDeclared(name);
  }
/** Checks whether the property was defined in the externs. */
public boolean isPropertyInExterns(String propertyName) {
Property p = getSlot(propertyName);
return p == null ? false : p.isFromExterns();
}
  /**
   * Gets the number of properties of this object, as tracked by its
   * property map.
   */
  public int getPropertiesCount() {
    return getPropertyMap().getPropertiesCount();
  }
  /**
   * Check for structural equivalence with {@code that}.
   * (e.g. two @record types with the same prototype properties)
   */
  boolean checkStructuralEquivalenceHelper(
      ObjectType otherObject, EquivalenceMethod eqMethod, EqCache eqCache) {
    // Two templatizations of the same raw type only need their template maps
    // compared.
    if (this.isTemplatizedType() && this.toMaybeTemplatizedType().wrapsSameRawType(otherObject)) {
      return this.getTemplateTypeMap().checkEquivalenceHelper(
          otherObject.getTemplateTypeMap(), eqMethod, eqCache);
    }
    // Consult the cache first so this pair is not re-examined.
    MatchStatus result = eqCache.checkCache(this, otherObject);
    if (result != null) {
      return result.subtypeValue();
    }
    // Structurally equivalent objects must have exactly the same property
    // names...
    Set<String> keySet = getPropertyNames();
    Set<String> otherKeySet = otherObject.getPropertyNames();
    if (!otherKeySet.equals(keySet)) {
      eqCache.updateCache(this, otherObject, MatchStatus.NOT_MATCH);
      return false;
    }
    // ...and pairwise-equivalent property types.
    for (String key : keySet) {
      if (!otherObject.getPropertyType(key).checkEquivalenceHelper(
          getPropertyType(key), eqMethod, eqCache)) {
        eqCache.updateCache(this, otherObject, MatchStatus.NOT_MATCH);
        return false;
      }
    }
    eqCache.updateCache(this, otherObject, MatchStatus.MATCH);
    return true;
  }
  /**
   * Property-by-property check that {@code typeA} structurally satisfies
   * {@code typeB}.
   */
  private static boolean isStructuralSubtypeHelper(
      ObjectType typeA, ObjectType typeB, ImplCache implicitImplCache) {
    // typeA is a subtype of record type typeB iff:
    // 1) typeA has all the non-optional properties declared in typeB.
    // 2) And for each property of typeB, its type must be
    //    a super type of the corresponding property of typeA.
    for (String property : typeB.getPropertyNames()) {
      JSType propB = typeB.getPropertyType(property);
      if (!typeA.hasProperty(property)) {
        // Currently, any type that explicitly includes undefined (eg, `?|undefined`) is optional.
        if (propB.isExplicitlyVoidable()) {
          continue;
        }
        return false;
      }
      JSType propA = typeA.getPropertyType(property);
      if (!propA.isSubtype(propB, implicitImplCache)) {
        return false;
      }
    }
    return true;
  }
  /**
   * Determine if {@code this} is an implicit subtype of {@code superType}.
   */
  boolean isStructuralSubtype(ObjectType superType, ImplCache implicitImplCache) {
    // Union types should be handled by isSubtype already
    Preconditions.checkArgument(!this.isUnionType());
    Preconditions.checkArgument(!superType.isUnionType());
    Preconditions.checkArgument(superType.isStructuralType(),
        "isStructuralSubtype should be called with structural supertype. Found %s", superType);
    // Reuse a previously computed answer for this pair if there is one.
    MatchStatus cachedResult = implicitImplCache.checkCache(this, superType);
    if (cachedResult != null) {
      return cachedResult.subtypeValue();
    }
    boolean result = isStructuralSubtypeHelper(this, superType, implicitImplCache);
    // Record the answer so repeated queries for this pair are O(1).
    implicitImplCache.updateCache(
        this, superType, result ? MatchStatus.MATCH : MatchStatus.NOT_MATCH);
    return result;
  }
  /**
   * Returns a list of properties defined or inferred on this type and any of
   * its supertypes.
   */
  public Set<String> getPropertyNames() {
    // TreeSet gives a deterministic (sorted) iteration order.
    Set<String> props = new TreeSet<>();
    collectPropertyNames(props);
    return props;
  }
  /**
   * Adds any properties defined on this type or its supertypes to the given
   * mutable set.
   */
  final void collectPropertyNames(Set<String> props) {
    getPropertyMap().collectPropertyNames(props);
  }
  @Override
  public <T> T visit(Visitor<T> visitor) {
    // Standard visitor dispatch for object types.
    return visitor.caseObjectType(this);
  }
  // Relationship-visitor dispatch: visits this type paired with `that`.
  @Override <T> T visit(RelationshipVisitor<T> visitor, JSType that) {
    return visitor.caseObjectType(this, that);
  }
  /**
   * Checks that the prototype is an implicit prototype of this object. Since
   * each object has an implicit prototype, an implicit prototype's
   * implicit prototype is also this implicit prototype's.
   *
   * @param prototype any prototype based object
   *
   * @return {@code true} if {@code prototype} is {@code equal} to any
   *     object in this object's implicit prototype chain.
   */
  final boolean isImplicitPrototype(ObjectType prototype) {
    for (ObjectType current = this;
         current != null;
         current = current.getImplicitPrototype()) {
      // Compare against the raw referenced type rather than the templatized
      // wrapper; note this intentionally rebinds the loop variable before the
      // update expression runs.
      if (current.isTemplatizedType()) {
        current = current.toMaybeTemplatizedType().getReferencedType();
      }
      if (current.isEquivalentTo(prototype)) {
        return true;
      }
    }
    return false;
  }
  @Override
  public BooleanLiteralSet getPossibleToBooleanOutcomes() {
    // Objects always coerce to true in a boolean context.
    return BooleanLiteralSet.TRUE;
  }
  /**
   * We treat this as the unknown type if any of its implicit prototype
   * properties is unknown.
   */
  @Override
  public boolean isUnknownType() {
    // If the object is unknown now, check the supertype again,
    // because it might have been resolved since the last check.
    if (unknown) {
      ObjectType implicitProto = getImplicitPrototype();
      if (implicitProto == null ||
          implicitProto.isNativeObjectType()) {
        // No user-defined prototype left to consult; the answer now only
        // depends on the extended interfaces.
        unknown = false;
        for (ObjectType interfaceType : getCtorExtendedInterfaces()) {
          if (interfaceType.isUnknownType()) {
            unknown = true;
            break;
          }
        }
      } else {
        // Inherit unknown-ness from the (possibly now-resolved) prototype.
        unknown = implicitProto.isUnknownType();
      }
    }
    return unknown;
  }
  @Override
  public boolean isObject() {
    // Every ObjectType is, by definition, an object.
    return true;
  }
  /**
   * Returns true if any cached values have been set for this type. If true,
   * then the prototype chain should not be changed, as it might invalidate the
   * cached values.
   */
  public boolean hasCachedValues() {
    // The lazily-computed `unknown` flag is the cached state tracked here.
    return !unknown;
  }
  /**
   * Clear cached values. Should be called before making changes to a prototype
   * that may have been changed since creation.
   */
  public void clearCachedValues() {
    // Reset the lazily-computed flag; see isUnknownType().
    unknown = true;
  }
  /** Whether this is a built-in object. Base implementation: not built-in. */
  public boolean isNativeObjectType() {
    return false;
  }
/**
* A null-safe version of JSType#toObjectType.
*/
public static ObjectType cast(JSType type) {
return type == null ? null : type.toObjectType();
}
  @Override
  public final boolean isFunctionPrototypeType() {
    // A function prototype is exactly an object with an owner function.
    return getOwnerFunction() != null;
  }
  /** Gets the owner of this if it's a function prototype. Base implementation: none. */
  public FunctionType getOwnerFunction() {
    return null;
  }
  /** Sets the owner function. The base implementation is a no-op. */
  void setOwnerFunction(FunctionType type) {}
  /**
   * Gets the interfaces implemented by the ctor associated with this type.
   * Intended to be overridden by subclasses; the base implementation reports
   * no interfaces.
   */
  public Iterable<ObjectType> getCtorImplementedInterfaces() {
    return ImmutableSet.of();
  }
  /**
   * Gets the interfaces extended by the interface associated with this type.
   * Intended to be overridden by subclasses; the base implementation reports
   * no interfaces.
   */
  public Iterable<ObjectType> getCtorExtendedInterfaces() {
    return ImmutableSet.of();
  }
/**
* get the map of properties to types covered in an object type
* @return a Map that maps the property's name to the property's type */
public Map<String, JSType> getPropertyTypeMap() {
ImmutableMap.Builder<String, JSType> propTypeMap = ImmutableMap.builder();
for (String name : this.getPropertyNames()) {
propTypeMap.put(name, this.getPropertyType(name));
}
return propTypeMap.build();
}
}
| |
package com.fourspaces.featherdb.views;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.HashMap;
import java.util.Map;
import org.json.JSONObject;
import com.fourspaces.featherdb.FeatherDB;
import com.fourspaces.featherdb.document.Document;
import com.fourspaces.featherdb.document.JSONDocument;
import com.fourspaces.featherdb.utils.Logger;
/**
* This view manager is very simple... it reruns each view upon request (basically adhoc style)
* It also serializes each Java view object (named /basedir/_view_name/function_name/view.obj),
* which is how it persists whether or not a view exists...
*
*/
public class SimpleViewManager extends ViewManager {
    protected FeatherDB featherDB;
    protected Logger log = Logger.get(SimpleViewManager.class);
    protected File baseDir;
    // Keyed by "db/viewName/functionName"; see getView().
    protected Map<String, View> views = new HashMap<String, View>();
    protected final static String VIEW_INSTANCE_NAME = "view.obj";

    public SimpleViewManager() {
    }

    /**
     * Initializes the manager: resolves the base directory from the
     * "view.simple.path" property, creates it if needed, and loads all
     * serialized views for every known database.
     */
    public void init(FeatherDB featherDB) throws ViewException {
        this.featherDB = featherDB;
        String path = featherDB.getProperty("view.simple.path");
        // BUG FIX: new File(...) never returns null, and new File(null) would
        // throw an NPE — validate the property itself (the old message also
        // named the wrong property, "view.simple.dir").
        if (path == null) {
            throw new RuntimeException("Could not open SimpleViewManager path (view.simple.path)");
        }
        baseDir = new File(path);
        if (!baseDir.exists()) {
            baseDir.mkdirs();
        }
        for (String db : featherDB.getBackend().getDatabaseNames()) {
            log.debug("Loading views for: {}", db);
            loadViewsForDatabase(db);
        }
    }

    public void shutdown() {
        // Nothing to release: views are re-run ad hoc and state lives on disk.
    }

    /** Directory holding all views for the given database. */
    protected File viewDbDir(String db) {
        return new File(baseDir, db);
    }

    /** Directory holding all functions of one view of the given database. */
    protected File viewDir(String db, String viewName) {
        return new File(viewDbDir(db), viewName);
    }

    /**
     * Loads every serialized view instance found under the database's view
     * directory, or bootstraps the built-in views if the directory is missing.
     */
    protected void loadViewsForDatabase(String db) throws ViewException {
        File viewDbDir = viewDbDir(db);
        if (!viewDbDir.exists()) {
            initDatabaseViews(db);
            return;
        }
        // listFiles() returns null on I/O error or if the path is not a
        // directory — guard to avoid NPEs.
        File[] instanceDirs = viewDbDir.listFiles();
        if (instanceDirs == null) {
            return;
        }
        for (File instanceDir : instanceDirs) {
            if (!instanceDir.isDirectory()) {
                continue;
            }
            File[] functionDirs = instanceDir.listFiles();
            if (functionDirs == null) {
                continue;
            }
            for (File functionDir : functionDirs) {
                ObjectInputStream ois = null;
                try {
                    ois = new ObjectInputStream(new FileInputStream(new File(functionDir, VIEW_INSTANCE_NAME)));
                    log.debug("Loading view {}/{}/{} ", db, instanceDir.getName(), functionDir.getName());
                    views.put(db + "/" + instanceDir.getName() + "/" + functionDir.getName(), (View) ois.readObject());
                } catch (IOException e) {
                    // Covers FileNotFoundException as well.
                    throw new ViewException(e);
                } catch (ClassNotFoundException e) {
                    throw new ViewException(e);
                } finally {
                    if (ois != null) {
                        try {
                            ois.close();
                        } catch (IOException ignored) {
                            // Best-effort close; a more interesting exception
                            // may already be propagating.
                        }
                    }
                }
            }
        }
    }

    /**
     * Registers the views described by a design document: JavaScript views are
     * the string entries whose value starts with "function"; Java views name a
     * class via a "java:..." view_type.
     */
    public void addView(JSONDocument jsondoc) throws ViewException {
        String viewType = (String) jsondoc.get("view_type");
        if (viewType == null || viewType.equals("text/javascript")) {
            for (String key : jsondoc.keys()) {
                Object value = jsondoc.get(key);
                // Guard the cast: non-string entries are simply not view sources.
                if (value instanceof String && ((String) value).startsWith("function")) {
                    log.debug("Adding javascript view: {}/{}/{} => {}", jsondoc.getDatabase(), jsondoc.getId(), key, value);
                    addView(jsondoc.getDatabase(), jsondoc.getId(), key,
                            new JavaScriptView(jsondoc.getDatabase(), (String) value));
                }
            }
        } else if (viewType.startsWith("java:")) {
            log.debug("Adding java view: {}/{} => {}", jsondoc.getDatabase(), jsondoc.getId(), viewType);
            try {
                Class<?> clazz = Thread.currentThread().getContextClassLoader().loadClass(viewType.substring(5));
                addView(jsondoc.getDatabase(), jsondoc.getId(), DEFAULT_FUNCTION_NAME, (View) clazz.newInstance());
            } catch (ClassNotFoundException e) {
                throw new ViewException(e);
            } catch (InstantiationException e) {
                throw new ViewException(e);
            } catch (IllegalAccessException e) {
                throw new ViewException(e);
            }
            // Note: ViewException from addView now propagates directly instead
            // of being pointlessly re-wrapped in another ViewException.
        } else {
            log.warn("Don't know how to handle view type: {}\n{}", viewType, jsondoc.toString());
        }
    }

    /** Looks up a loaded view instance, or null if unknown. */
    protected View getView(String db, String view, String function) {
        return views.get(db + "/" + view + "/" + function);
    }

    public JSONObject getViewResults(String db, String viewName, String function) {
        return AdHocViewRunner.runView(featherDB, db, viewName, function, getView(db, viewName, function));
    }

    /**
     * Views are recalculated on the fly, so there is nothing to refresh here;
     * we only watch for new design documents (ids starting with "_").
     */
    public void recalculateDocument(Document doc) {
        if (doc.getId().startsWith("_") && doc instanceof JSONDocument) {
            try {
                addView((JSONDocument) doc);
            } catch (ViewException e) {
                log.error("Error adding new view: {}", doc.getId(), e);
            }
        }
    }

    /** Creates the on-disk layout for a new database and its built-in views. */
    public void initDatabaseViews(String db) throws ViewException {
        File viewDir = viewDbDir(db);
        viewDir.mkdirs();
        addView(db, "_all_docs", DEFAULT_FUNCTION_NAME, new AllDocuments(db));
    }

    /**
     * Serializes the view instance to disk (so it survives restarts) and makes
     * it available under db/view/function.
     */
    public void addView(String db, String view, String function, View instance) throws ViewException {
        File viewDir = new File(viewDir(db, view), function);
        if (!viewDir.exists()) {
            viewDir.mkdirs();
        }
        ObjectOutputStream oos = null;
        try {
            oos = new ObjectOutputStream(new FileOutputStream(new File(viewDir, VIEW_INSTANCE_NAME)));
            oos.writeObject(instance);
        } catch (IOException e) {
            // Covers FileNotFoundException as well.
            throw new ViewException(e);
        } finally {
            // Single close in finally (the old code closed twice).
            if (oos != null) {
                try {
                    oos.close();
                } catch (IOException ignored) {
                    // Best-effort close.
                }
            }
        }
        views.put(db + "/" + view + "/" + function, instance);
    }

    public void removeDatabaseViews(String db) {
        recursivelyDeleteFiles(viewDbDir(db));
    }

    private void recursivelyDeleteFiles(File file) {
        if (file.isDirectory()) {
            File[] children = file.listFiles();
            // listFiles() may return null on I/O error; skip children then.
            if (children != null) {
                for (File f : children) {
                    recursivelyDeleteFiles(f);
                }
            }
        }
        file.delete();
    }

    public boolean doesViewExist(String db, String view, String function) {
        return views.containsKey(db + "/" + view + "/" + function);
    }

    /** Compiles the JavaScript source into a view and runs it once, ad hoc. */
    public static JSONObject adHocView(FeatherDB featherDB, String db, String viewName, String functionName, String src) throws ViewException {
        View view = new JavaScriptView(db, src);
        return adHocView(featherDB, db, viewName, functionName, view);
    }

    /**
     * Runs the given view over every document in the database and packages the
     * results. Note: rows with duplicate keys overwrite each other because
     * results are collected in a JSONObject keyed by the row key.
     */
    public static JSONObject adHocView(FeatherDB featherDB, String db, String viewName, String functionName, View view) {
        view.setBackend(featherDB.getBackend());
        JSONObject results = new JSONObject();
        int total = 0;
        for (Document doc : featherDB.getBackend().allDocuments(db)) {
            JSONObject result = view.filter(doc);
            if (result != null) {
                results.put((String) result.get("key"), result.get("value"));
                total++;
            }
        }
        JSONObject out = new JSONObject();
        out.put("view", viewName + "/" + functionName);
        out.put("total_rows", total);
        out.put("rows", results);
        return out;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.transport;
import java.util.ArrayList;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import io.netty.buffer.ByteBuf;
import io.netty.channel.*;
import io.netty.handler.codec.MessageToMessageDecoder;
import io.netty.handler.codec.MessageToMessageEncoder;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableSet;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.service.ClientWarn;
import org.apache.cassandra.service.StorageService;
import org.apache.cassandra.tracing.Tracing;
import org.apache.cassandra.transport.messages.*;
import org.apache.cassandra.service.QueryState;
import org.apache.cassandra.utils.JVMStabilityInspector;
import org.apache.cassandra.utils.UUIDGen;
/**
* A message from the CQL binary protocol.
*/
public abstract class Message
{
    protected static final Logger logger = LoggerFactory.getLogger(Message.class);
    /**
     * When we encounter an unexpected IOException we look for these {@link Throwable#getMessage() messages}
     * (because we have no better way to distinguish) and log them at DEBUG rather than INFO, since they
     * are generally caused by unclean client disconnects rather than an actual problem.
     */
    private static final Set<String> ioExceptionsAtDebugLevel = ImmutableSet.<String>builder().
                                                                add("Connection reset by peer").
                                                                add("Broken pipe").
                                                                add("Connection timed out").
                                                                build();
    /** Marker interface tying a message type to its wire codec. */
    public interface Codec<M extends Message> extends CBCodec<M> {}
public enum Direction
{
REQUEST, RESPONSE;
public static Direction extractFromVersion(int versionWithDirection)
{
return (versionWithDirection & 0x80) == 0 ? REQUEST : RESPONSE;
}
public int addToVersion(int rawVersion)
{
return this == REQUEST ? (rawVersion & 0x7F) : (rawVersion | 0x80);
}
}
public enum Type
{
ERROR (0, Direction.RESPONSE, ErrorMessage.codec),
STARTUP (1, Direction.REQUEST, StartupMessage.codec),
READY (2, Direction.RESPONSE, ReadyMessage.codec),
AUTHENTICATE (3, Direction.RESPONSE, AuthenticateMessage.codec),
CREDENTIALS (4, Direction.REQUEST, UnsupportedMessageCodec.instance),
OPTIONS (5, Direction.REQUEST, OptionsMessage.codec),
SUPPORTED (6, Direction.RESPONSE, SupportedMessage.codec),
QUERY (7, Direction.REQUEST, QueryMessage.codec),
RESULT (8, Direction.RESPONSE, ResultMessage.codec),
PREPARE (9, Direction.REQUEST, PrepareMessage.codec),
EXECUTE (10, Direction.REQUEST, ExecuteMessage.codec),
REGISTER (11, Direction.REQUEST, RegisterMessage.codec),
EVENT (12, Direction.RESPONSE, EventMessage.codec),
BATCH (13, Direction.REQUEST, BatchMessage.codec),
AUTH_CHALLENGE (14, Direction.RESPONSE, AuthChallenge.codec),
AUTH_RESPONSE (15, Direction.REQUEST, AuthResponse.codec),
AUTH_SUCCESS (16, Direction.RESPONSE, AuthSuccess.codec);
public final int opcode;
public final Direction direction;
public final Codec<?> codec;
private static final Type[] opcodeIdx;
static
{
int maxOpcode = -1;
for (Type type : Type.values())
maxOpcode = Math.max(maxOpcode, type.opcode);
opcodeIdx = new Type[maxOpcode + 1];
for (Type type : Type.values())
{
if (opcodeIdx[type.opcode] != null)
throw new IllegalStateException("Duplicate opcode");
opcodeIdx[type.opcode] = type;
}
}
Type(int opcode, Direction direction, Codec<?> codec)
{
this.opcode = opcode;
this.direction = direction;
this.codec = codec;
}
public static Type fromOpcode(int opcode, Direction direction)
{
if (opcode >= opcodeIdx.length)
throw new ProtocolException(String.format("Unknown opcode %d", opcode));
Type t = opcodeIdx[opcode];
if (t == null)
throw new ProtocolException(String.format("Unknown opcode %d", opcode));
if (t.direction != direction)
throw new ProtocolException(String.format("Wrong protocol direction (expected %s, got %s) for opcode %d (%s)",
t.direction,
direction,
opcode,
t));
return t;
}
}
    /** The wire type of this message; fixes its opcode, direction, and codec. */
    public final Type type;
    protected Connection connection;
    // Stream id used to pair a response with its request.
    private int streamId;
    // Frame this message was decoded from, if any.
    private Frame sourceFrame;
    private Map<String, ByteBuffer> customPayload;
    // When non-null, forces the response onto this protocol version (used to
    // answer drivers that connected with an unsupported version; see the
    // encoder's responseVersion handling).
    protected ProtocolVersion forcedProtocolVersion = null;
    protected Message(Type type)
    {
        this.type = type;
    }
    /** Associates this message with a connection. */
    public void attach(Connection connection)
    {
        this.connection = connection;
    }
    /** The connection this message is attached to, or null if unattached. */
    public Connection connection()
    {
        return connection;
    }
    /** Sets the frame stream id; returns this for chaining. */
    public Message setStreamId(int streamId)
    {
        this.streamId = streamId;
        return this;
    }
    /** The frame stream id this message belongs to. */
    public int getStreamId()
    {
        return streamId;
    }
    /** Records the frame this message was decoded from. */
    public void setSourceFrame(Frame sourceFrame)
    {
        this.sourceFrame = sourceFrame;
    }
    /** The frame this message was decoded from, or null. */
    public Frame getSourceFrame()
    {
        return sourceFrame;
    }
    /** The custom payload attached to this message, or null if none. */
    public Map<String, ByteBuffer> getCustomPayload()
    {
        return customPayload;
    }
    /** Attaches a custom payload (may be null to clear it). */
    public void setCustomPayload(Map<String, ByteBuffer> customPayload)
    {
        this.customPayload = customPayload;
    }
    public static abstract class Request extends Message
    {
        private boolean tracingRequested;
        protected Request(Type type)
        {
            super(type);
            // A Request may only be built from a REQUEST-direction type.
            if (type.direction != Direction.REQUEST)
                throw new IllegalArgumentException();
        }
        /** Whether this request type supports tracing; subclasses opt in. */
        protected boolean isTraceable()
        {
            return false;
        }
        protected abstract Response execute(QueryState queryState, long queryStartNanoTime, boolean traceRequest);
        /**
         * Executes the request, wrapping it in a tracing session when tracing
         * was requested by the client or chosen probabilistically.
         */
        final Response execute(QueryState queryState, long queryStartNanoTime)
        {
            boolean shouldTrace = false;
            UUID tracingSessionId = null;
            if (isTraceable())
            {
                if (isTracingRequested())
                {
                    shouldTrace = true;
                    tracingSessionId = UUIDGen.getTimeUUID();
                    Tracing.instance.newSession(tracingSessionId, getCustomPayload());
                }
                else if (StorageService.instance.shouldTraceProbablistically())
                {
                    // Probabilistic tracing: no session id is echoed to the client.
                    shouldTrace = true;
                    Tracing.instance.newSession(getCustomPayload());
                }
            }
            Response response;
            try
            {
                response = execute(queryState, queryStartNanoTime, shouldTrace);
            }
            finally
            {
                // Always stop the session we started, even if execution threw.
                if (shouldTrace)
                    Tracing.instance.stopSession();
            }
            // Only client-requested tracing attaches the session id to the response.
            if (isTraceable() && isTracingRequested())
                response.setTracingId(tracingSessionId);
            return response;
        }
        void setTracingRequested()
        {
            tracingRequested = true;
        }
        boolean isTracingRequested()
        {
            return tracingRequested;
        }
    }
    public static abstract class Response extends Message
    {
        protected UUID tracingId;
        protected List<String> warnings;
        protected Response(Type type)
        {
            super(type);
            // A Response may only be built from a RESPONSE-direction type.
            if (type.direction != Direction.RESPONSE)
                throw new IllegalArgumentException();
        }
        /** Attaches the tracing session id; returns this for chaining. */
        Message setTracingId(UUID tracingId)
        {
            this.tracingId = tracingId;
            return this;
        }
        UUID getTracingId()
        {
            return tracingId;
        }
        /** Attaches client warnings; returns this for chaining. */
        Message setWarnings(List<String> warnings)
        {
            this.warnings = warnings;
            return this;
        }
        public List<String> getWarnings()
        {
            return warnings;
        }
    }
@ChannelHandler.Sharable
public static class ProtocolDecoder extends MessageToMessageDecoder<Frame>
{
public void decode(ChannelHandlerContext ctx, Frame frame, List results)
{
boolean isRequest = frame.header.type.direction == Direction.REQUEST;
boolean isTracing = frame.header.flags.contains(Frame.Header.Flag.TRACING);
boolean isCustomPayload = frame.header.flags.contains(Frame.Header.Flag.CUSTOM_PAYLOAD);
boolean hasWarning = frame.header.flags.contains(Frame.Header.Flag.WARNING);
UUID tracingId = isRequest || !isTracing ? null : CBUtil.readUUID(frame.body);
List<String> warnings = isRequest || !hasWarning ? null : CBUtil.readStringList(frame.body);
Map<String, ByteBuffer> customPayload = !isCustomPayload ? null : CBUtil.readBytesMap(frame.body);
try
{
if (isCustomPayload && frame.header.version.isSmallerThan(ProtocolVersion.V4))
throw new ProtocolException("Received frame with CUSTOM_PAYLOAD flag for native protocol version < 4");
Message message = frame.header.type.codec.decode(frame.body, frame.header.version);
message.setStreamId(frame.header.streamId);
message.setSourceFrame(frame);
message.setCustomPayload(customPayload);
if (isRequest)
{
assert message instanceof Request;
Request req = (Request)message;
Connection connection = ctx.channel().attr(Connection.attributeKey).get();
req.attach(connection);
if (isTracing)
req.setTracingRequested();
}
else
{
assert message instanceof Response;
if (isTracing)
((Response)message).setTracingId(tracingId);
if (hasWarning)
((Response)message).setWarnings(warnings);
}
results.add(message);
}
catch (Throwable ex)
{
frame.release();
// Remember the streamId
throw ErrorMessage.wrap(ex, frame.header.streamId);
}
}
}
@ChannelHandler.Sharable
public static class ProtocolEncoder extends MessageToMessageEncoder<Message>
{
    /**
     * Encodes a native-protocol {@link Message} into a {@code Frame} and appends it to
     * {@code results}. The frame body is sized up-front (message payload plus any optional
     * tracing id / warnings / custom payload sections), then the optional sections are
     * written in that fixed order before the message itself, mirroring the decode side.
     */
    public void encode(ChannelHandlerContext ctx, Message message, List results)
    {
        Connection connection = ctx.channel().attr(Connection.attributeKey).get();
        // The only case the connection can be null is when we send the initial STARTUP message (client side thus)
        ProtocolVersion version = connection == null ? ProtocolVersion.CURRENT : connection.getVersion();
        EnumSet<Frame.Header.Flag> flags = EnumSet.noneOf(Frame.Header.Flag.class);
        Codec<Message> codec = (Codec<Message>)message.type.codec;
        try
        {
            int messageSize = codec.encodedSize(message, version);
            ByteBuf body;
            if (message instanceof Response)
            {
                // Responses may carry a tracing id, warnings and/or a custom payload ahead of
                // the message body; account for their sizes before allocating the buffer.
                UUID tracingId = ((Response)message).getTracingId();
                Map<String, ByteBuffer> customPayload = message.getCustomPayload();
                if (tracingId != null)
                    messageSize += CBUtil.sizeOfUUID(tracingId);
                List<String> warnings = ((Response)message).getWarnings();
                if (warnings != null)
                {
                    // WARNING frames only exist from protocol v4 onwards.
                    if (version.isSmallerThan(ProtocolVersion.V4))
                        throw new ProtocolException("Must not send frame with WARNING flag for native protocol version < 4");
                    messageSize += CBUtil.sizeOfStringList(warnings);
                }
                if (customPayload != null)
                {
                    // CUSTOM_PAYLOAD frames only exist from protocol v4 onwards.
                    if (version.isSmallerThan(ProtocolVersion.V4))
                        throw new ProtocolException("Must not send frame with CUSTOM_PAYLOAD flag for native protocol version < 4");
                    messageSize += CBUtil.sizeOfBytesMap(customPayload);
                }
                body = CBUtil.allocator.buffer(messageSize);
                // Write optional sections in the same order the decoder reads them:
                // tracing id, then warnings, then custom payload.
                if (tracingId != null)
                {
                    CBUtil.writeUUID(tracingId, body);
                    flags.add(Frame.Header.Flag.TRACING);
                }
                if (warnings != null)
                {
                    CBUtil.writeStringList(warnings, body);
                    flags.add(Frame.Header.Flag.WARNING);
                }
                if (customPayload != null)
                {
                    CBUtil.writeBytesMap(customPayload, body);
                    flags.add(Frame.Header.Flag.CUSTOM_PAYLOAD);
                }
            }
            else
            {
                assert message instanceof Request;
                // Requests only flag tracing (no id is written; the server generates it) and
                // may prepend a custom payload.
                if (((Request)message).isTracingRequested())
                    flags.add(Frame.Header.Flag.TRACING);
                Map<String, ByteBuffer> payload = message.getCustomPayload();
                if (payload != null)
                    messageSize += CBUtil.sizeOfBytesMap(payload);
                body = CBUtil.allocator.buffer(messageSize);
                if (payload != null)
                {
                    CBUtil.writeBytesMap(payload, body);
                    flags.add(Frame.Header.Flag.CUSTOM_PAYLOAD);
                }
            }
            try
            {
                codec.encode(message, body, version);
            }
            catch (Throwable e)
            {
                // The buffer was allocated above; release it on failure to avoid a leak.
                body.release();
                throw e;
            }
            // if the driver attempted to connect with a protocol version lower than the minimum supported
            // version, respond with a protocol error message with the correct frame header for that version
            ProtocolVersion responseVersion = message.forcedProtocolVersion == null
                ? version
                : message.forcedProtocolVersion;
            if (responseVersion.isBeta())
                flags.add(Frame.Header.Flag.USE_BETA);
            results.add(Frame.create(message.type, message.getStreamId(), responseVersion, flags, body));
        }
        catch (Throwable e)
        {
            // Wrap so the error response is routed back on the originating stream id.
            throw ErrorMessage.wrap(e, message.getStreamId());
        }
    }
}
@ChannelHandler.Sharable
public static class Dispatcher extends SimpleChannelInboundHandler<Request>
{
    /** A response (or error) queued for write+flush, paired with the frame that produced it. */
    private static class FlushItem
    {
        final ChannelHandlerContext ctx;
        final Object response;
        // Kept so the request frame's buffer can be released once the response is flushed.
        final Frame sourceFrame;
        private FlushItem(ChannelHandlerContext ctx, Object response, Frame sourceFrame)
        {
            this.ctx = ctx;
            this.sourceFrame = sourceFrame;
            this.response = response;
        }
    }
    /**
     * Per-event-loop drain task. Items are enqueued from any thread; the flusher itself
     * always runs on its event loop, so {@code channels}/{@code flushed} need no locking.
     */
    private static abstract class Flusher implements Runnable
    {
        final EventLoop eventLoop;
        final ConcurrentLinkedQueue<FlushItem> queued = new ConcurrentLinkedQueue<>();
        // Guards against scheduling the same flusher twice; cleared by the subclasses.
        final AtomicBoolean scheduled = new AtomicBoolean(false);
        final HashSet<ChannelHandlerContext> channels = new HashSet<>();
        final List<FlushItem> flushed = new ArrayList<>();
        void start()
        {
            // Cheap read first; only the winner of the CAS submits the task.
            if (!scheduled.get() && scheduled.compareAndSet(false, true))
            {
                this.eventLoop.execute(this);
            }
        }
        public Flusher(EventLoop eventLoop)
        {
            this.eventLoop = eventLoop;
        }
    }
    /**
     * Batches writes and flushes only every few runs (or past a size threshold),
     * rescheduling itself at a fixed nanosecond interval while work keeps arriving.
     */
    private static final class LegacyFlusher extends Flusher
    {
        int runsSinceFlush = 0;
        int runsWithNoWork = 0;
        private LegacyFlusher(EventLoop eventLoop)
        {
            super(eventLoop);
        }
        public void run()
        {
            boolean doneWork = false;
            FlushItem flush;
            while ( null != (flush = queued.poll()) )
            {
                channels.add(flush.ctx);
                flush.ctx.write(flush.response, flush.ctx.voidPromise());
                flushed.add(flush);
                doneWork = true;
            }
            runsSinceFlush++;
            // Flush when idle, or after a couple of runs, or once enough writes have piled up.
            if (!doneWork || runsSinceFlush > 2 || flushed.size() > 50)
            {
                for (ChannelHandlerContext channel : channels)
                    channel.flush();
                // Source frames can be released now that their responses are on the wire.
                for (FlushItem item : flushed)
                    item.sourceFrame.release();
                channels.clear();
                flushed.clear();
                runsSinceFlush = 0;
            }
            if (doneWork)
            {
                runsWithNoWork = 0;
            }
            else
            {
                // either reschedule or cancel
                if (++runsWithNoWork > 5)
                {
                    scheduled.set(false);
                    // Re-check the queue: an item may have raced in after the poll loop;
                    // only continue if we win the flag back.
                    if (queued.isEmpty() || !scheduled.compareAndSet(false, true))
                        return;
                }
            }
            eventLoop.schedule(this, 10000, TimeUnit.NANOSECONDS);
        }
    }
    /** Drains and flushes everything in a single pass; never self-reschedules. */
    private static final class ImmediateFlusher extends Flusher
    {
        private ImmediateFlusher(EventLoop eventLoop)
        {
            super(eventLoop);
        }
        public void run()
        {
            boolean doneWork = false;
            FlushItem flush;
            // Clear the flag before draining so a concurrent start() can schedule a
            // follow-up run for items enqueued after our poll loop finishes.
            scheduled.set(false);
            while (null != (flush = queued.poll()))
            {
                channels.add(flush.ctx);
                flush.ctx.write(flush.response, flush.ctx.voidPromise());
                flushed.add(flush);
                doneWork = true;
            }
            if (doneWork)
            {
                for (ChannelHandlerContext channel : channels)
                    channel.flush();
                for (FlushItem item : flushed)
                    item.sourceFrame.release();
                channels.clear();
                flushed.clear();
            }
        }
    }
    // One flusher per Netty event loop, shared by all channels on that loop.
    private static final ConcurrentMap<EventLoop, Flusher> flusherLookup = new ConcurrentHashMap<>();
    private final boolean useLegacyFlusher;
    public Dispatcher(boolean useLegacyFlusher)
    {
        // false: we release the request frame ourselves (after flush), not via autoRelease.
        super(false);
        this.useLegacyFlusher = useLegacyFlusher;
    }
    /**
     * Executes a request on the event loop and queues its response for batched flushing.
     * Any failure is converted to an ErrorMessage carrying the request's stream id.
     */
    @Override
    public void channelRead0(ChannelHandlerContext ctx, Request request)
    {
        final Response response;
        final ServerConnection connection;
        long queryStartNanoTime = System.nanoTime();
        try
        {
            assert request.connection() instanceof ServerConnection;
            connection = (ServerConnection)request.connection();
            // Client warnings are a protocol v4+ feature.
            if (connection.getVersion().isGreaterOrEqualTo(ProtocolVersion.V4))
                ClientWarn.instance.captureWarnings();
            QueryState qstate = connection.validateNewMessage(request.type, connection.getVersion());
            logger.trace("Received: {}, v={}", request, connection.getVersion());
            connection.requests.inc();
            response = request.execute(qstate, queryStartNanoTime);
            response.setStreamId(request.getStreamId());
            response.setWarnings(ClientWarn.instance.getWarnings());
            response.attach(connection);
            connection.applyStateTransition(request.type, response.type);
        }
        catch (Throwable t)
        {
            JVMStabilityInspector.inspectThrowable(t);
            UnexpectedChannelExceptionHandler handler = new UnexpectedChannelExceptionHandler(ctx.channel(), true);
            flush(new FlushItem(ctx, ErrorMessage.fromException(t, handler).setStreamId(request.getStreamId()), request.getSourceFrame()));
            return;
        }
        finally
        {
            // Always reset captured warnings so they can't leak into the next request.
            ClientWarn.instance.resetWarnings();
        }
        logger.trace("Responding: {}, v={}", response, connection.getVersion());
        flush(new FlushItem(ctx, response, request.getSourceFrame()));
    }
    /** Hands the item to this channel's event-loop flusher, creating one on first use. */
    private void flush(FlushItem item)
    {
        EventLoop loop = item.ctx.channel().eventLoop();
        Flusher flusher = flusherLookup.get(loop);
        if (flusher == null)
        {
            Flusher created = useLegacyFlusher ? new LegacyFlusher(loop) : new ImmediateFlusher(loop);
            // putIfAbsent: keep whichever instance won the race for this loop.
            Flusher alt = flusherLookup.putIfAbsent(loop, flusher = created);
            if (alt != null)
                flusher = alt;
        }
        flusher.queued.add(item);
        flusher.start();
    }
}
@ChannelHandler.Sharable
public static final class ExceptionHandler extends ChannelInboundHandlerAdapter
{
    /**
     * Terminal pipeline handler: converts an unexpected channel failure into an
     * {@code ErrorMessage} for the client (the predicate also logs it at an
     * appropriate level), and closes the connection after a protocol violation.
     */
    @Override
    public void exceptionCaught(final ChannelHandlerContext ctx, Throwable cause)
    {
        // Build (and log) the error message unconditionally; delivery depends on the channel state.
        UnexpectedChannelExceptionHandler handler = new UnexpectedChannelExceptionHandler(ctx.channel(), false);
        ErrorMessage errorResponse = ErrorMessage.fromException(cause, handler);
        if (!ctx.channel().isOpen())
            return; // channel already gone; nothing can be delivered to the client

        ChannelFuture writeFuture = ctx.writeAndFlush(errorResponse);
        // A protocol violation leaves the stream in an undefined state, so tear the
        // connection down as soon as the error message has been sent.
        if (cause instanceof ProtocolException)
        {
            writeFuture.addListener(new ChannelFutureListener()
            {
                public void operationComplete(ChannelFuture completed)
                {
                    ctx.close();
                }
            });
        }
    }
}
/**
 * Include the channel info in the logged information for unexpected errors, and (if {@link #alwaysLogAtError}
 * is false) choose the log level based on the type of exception (some are clearly client issues and shouldn't
 * be logged at server ERROR level).
 */
static final class UnexpectedChannelExceptionHandler implements Predicate<Throwable>
{
    private final Channel channel;
    private final boolean alwaysLogAtError;

    UnexpectedChannelExceptionHandler(Channel channel, boolean alwaysLogAtError)
    {
        this.channel = channel;
        this.alwaysLogAtError = alwaysLogAtError;
    }

    /**
     * Logs the given exception (level depends on its nature) and reports it as handled.
     *
     * @param exception the unexpected throwable raised while servicing the channel
     * @return always {@code true}: the exception is considered handled once logged
     */
    @Override
    public boolean apply(Throwable exception)
    {
        String message;
        try
        {
            message = "Unexpected exception during request; channel = " + channel;
        }
        catch (Exception ignore)
        {
            // We don't want to make things worse if String.valueOf() throws an exception
            message = "Unexpected exception during request; channel = <unprintable>";
        }

        // netty wraps SSL errors in a CodecException
        boolean isIOException = exception instanceof IOException || (exception.getCause() instanceof IOException);
        if (!alwaysLogAtError && isIOException)
        {
            String errorMessage = exception.getMessage();
            boolean logAtTrace = false;
            // Some IOExceptions (notably from native transports) carry no message at all;
            // guard against NPE inside the exception handler itself.
            if (errorMessage != null)
            {
                for (String ioException : ioExceptionsAtDebugLevel)
                {
                    // exceptions thrown from the netty epoll transport add the name of the function that failed
                    // to the exception string (which is simply wrapping a JDK exception), so we can't do a
                    // simple/naive comparison
                    if (errorMessage.contains(ioException))
                    {
                        logAtTrace = true;
                        break;
                    }
                }
            }
            if (logAtTrace)
            {
                // Likely unclean client disconnects
                logger.trace(message, exception);
            }
            else
            {
                // Generally unhandled IO exceptions are network issues, not actual ERRORS
                logger.info(message, exception);
            }
        }
        else
        {
            // Anything else is probably a bug in server or client binary protocol handling
            logger.error(message, exception);
        }

        // We handled the exception.
        return true;
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*/
package org.apache.logging.log4j;
import java.util.Arrays;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
/**
 * Applications create Markers by using the Marker Manager. All Markers created by this Manager are
 * immutable.
 */
public final class MarkerManager {

    private static final ConcurrentMap<String, Marker> MARKERS = new ConcurrentHashMap<>();

    private MarkerManager() {
        // do nothing
    }

    /**
     * Clears all markers.
     */
    public static void clear() {
        MARKERS.clear();
    }

    /**
     * Tests existence of the given marker.
     *
     * @param key the marker name
     * @return true if the marker exists.
     * @since 2.4
     */
    public static boolean exists(final String key) {
        return MARKERS.containsKey(key);
    }

    /**
     * Retrieves a Marker or create a Marker that has no parent.
     *
     * @param name The name of the Marker.
     * @return The Marker with the specified name.
     * @throws IllegalArgumentException if the argument is {@code null}
     */
    public static Marker getMarker(final String name) {
        if (name == null) {
            // Preserve the documented IllegalArgumentException (the map would otherwise
            // throw NullPointerException on a null key).
            throw new IllegalArgumentException("Marker name cannot be null.");
        }
        // Fast path: avoid allocating a throwaway Log4jMarker and a second lookup when the
        // marker already exists (the previous putIfAbsent+get did both on every call).
        final Marker existing = MARKERS.get(name);
        if (existing != null) {
            return existing;
        }
        final Marker marker = new Log4jMarker(name);
        final Marker current = MARKERS.putIfAbsent(name, marker);
        // If another thread won the race, return its instance so markers stay canonical.
        return current == null ? marker : current;
    }

    /**
     * Retrieves or creates a Marker with the specified parent. The parent must have been previously created.
     *
     * @param name The name of the Marker.
     * @param parent The name of the parent Marker.
     * @return The Marker with the specified name.
     * @throws IllegalArgumentException if the parent Marker does not exist.
     * @deprecated Use the Marker add or set methods to add parent Markers. Will be removed by final GA release.
     */
    @Deprecated
    public static Marker getMarker(final String name, final String parent) {
        final Marker parentMarker = MARKERS.get(parent);
        if (parentMarker == null) {
            throw new IllegalArgumentException("Parent Marker " + parent + " has not been defined");
        }
        @SuppressWarnings("deprecation")
        final Marker marker = getMarker(name, parentMarker);
        return marker;
    }

    /**
     * Retrieves or creates a Marker with the specified parent.
     *
     * @param name The name of the Marker.
     * @param parent The parent Marker.
     * @return The Marker with the specified name.
     * @throws IllegalArgumentException if any argument is {@code null}
     * @deprecated Use the Marker add or set methods to add parent Markers. Will be removed by final GA release.
     */
    @Deprecated
    public static Marker getMarker(final String name, final Marker parent) {
        // Delegate to the single-argument factory so creation logic lives in one place.
        return getMarker(name).addParents(parent);
    }

    /**
     * <em>Consider this class private, it is only public to satisfy Jackson for XML and JSON IO.</em>
     * <p>
     * The actual Marker implementation.
     * </p>
     * <p>
     * <em>Internal note: We could make this class package private instead of public if the class
     * {@code org.apache.logging.log4j.core.jackson.MarkerMixIn}
     * is moved to this package and would of course stay in its current module.</em>
     * </p>
     */
    public static class Log4jMarker implements Marker {

        private static final long serialVersionUID = 100L;

        private final String name;

        // Written only under "this" lock (addParents/remove/setParents); volatile so
        // readers (isInstanceOf, toString, ...) see a consistent snapshot without locking.
        private volatile Marker[] parents;

        /**
         * Required by JAXB and Jackson for XML and JSON IO.
         */
        @SuppressWarnings("unused")
        private Log4jMarker() {
            this.name = null;
            this.parents = null;
        }

        /**
         * Constructs a new Marker.
         *
         * @param name the name of the Marker.
         * @throws IllegalArgumentException if the argument is {@code null}
         */
        public Log4jMarker(final String name) {
            if (name == null) {
                // we can't store null references in a ConcurrentHashMap as it is, not to mention that a null Marker
                // name seems rather pointless. To get an "anonymous" Marker, just use an empty string.
                throw new IllegalArgumentException("Marker name cannot be null.");
            }
            this.name = name;
            this.parents = null;
        }

        // TODO: use java.util.concurrent
        @Override
        public synchronized Marker addParents(final Marker... parents) {
            if (parents == null) {
                throw new IllegalArgumentException("A parent marker must be specified");
            }
            // It is not strictly necessary to copy the variable here but it should perform better than
            // Accessing a volatile variable multiple times.
            final Marker[] localParents = this.parents;
            // Don't add a parent that is already in the hierarchy.
            int count = 0;
            int size = parents.length;
            if (localParents != null) {
                for (final Marker parent : parents) {
                    if (!(contains(parent, localParents) || parent.isInstanceOf(this))) {
                        ++count;
                    }
                }
                if (count == 0) {
                    // Every candidate is already present (or would create a cycle): no-op.
                    return this;
                }
                size = localParents.length + count;
            }
            final Marker[] markers = new Marker[size];
            if (localParents != null) {
                // It's perfectly OK to call arraycopy in a synchronized context; it's still faster
                //noinspection CallToNativeMethodWhileLocked
                System.arraycopy(localParents, 0, markers, 0, localParents.length);
            }
            int index = localParents == null ? 0 : localParents.length;
            for (final Marker parent : parents) {
                if (localParents == null || !(contains(parent, localParents) || parent.isInstanceOf(this))) {
                    markers[index++] = parent;
                }
            }
            // Publish the new array in one volatile write so readers never see a partial update.
            this.parents = markers;
            return this;
        }

        @Override
        public synchronized boolean remove(final Marker parent) {
            if (parent == null) {
                throw new IllegalArgumentException("A parent marker must be specified");
            }
            final Marker[] localParents = this.parents;
            if (localParents == null) {
                return false;
            }
            final int localParentsLength = localParents.length;
            if (localParentsLength == 1) {
                if (localParents[0].equals(parent)) {
                    parents = null;
                    return true;
                }
                return false;
            }
            int index = 0;
            final Marker[] markers = new Marker[localParentsLength - 1];
            //noinspection ForLoopReplaceableByForEach
            for (int i = 0; i < localParentsLength; i++) {
                final Marker marker = localParents[i];
                if (!marker.equals(parent)) {
                    if (index == localParentsLength - 1) {
                        // no need to swap array
                        return false;
                    }
                    markers[index++] = marker;
                }
            }
            parents = markers;
            return true;
        }

        @Override
        public Marker setParents(final Marker... markers) {
            if (markers == null || markers.length == 0) {
                this.parents = null;
            } else {
                // Defensive copy so later mutation of the caller's array cannot leak in.
                final Marker[] array = new Marker[markers.length];
                System.arraycopy(markers, 0, array, 0, markers.length);
                this.parents = array;
            }
            return this;
        }

        @Override
        public String getName() {
            return this.name;
        }

        @Override
        public Marker[] getParents() {
            if (this.parents == null) {
                return null;
            }
            // Return a copy so callers cannot mutate our internal state.
            return Arrays.copyOf(this.parents, this.parents.length);
        }

        @Override
        public boolean hasParents() {
            return this.parents != null;
        }

        @Override
        public boolean isInstanceOf(final Marker marker) {
            if (marker == null) {
                throw new IllegalArgumentException("A marker parameter is required");
            }
            if (this == marker) {
                return true;
            }
            final Marker[] localParents = parents;
            if (localParents != null) {
                // With only one or two parents the for loop is slower.
                final int localParentsLength = localParents.length;
                if (localParentsLength == 1) {
                    return checkParent(localParents[0], marker);
                }
                if (localParentsLength == 2) {
                    return checkParent(localParents[0], marker) || checkParent(localParents[1], marker);
                }
                //noinspection ForLoopReplaceableByForEach
                for (int i = 0; i < localParentsLength; i++) {
                    final Marker localParent = localParents[i];
                    if (checkParent(localParent, marker)) {
                        return true;
                    }
                }
            }
            return false;
        }

        @Override
        public boolean isInstanceOf(final String markerName) {
            if (markerName == null) {
                throw new IllegalArgumentException("A marker name is required");
            }
            if (markerName.equals(this.getName())) {
                return true;
            }
            // Use a real marker for child comparisons. It is faster than comparing the names.
            final Marker marker = MARKERS.get(markerName);
            if (marker == null) {
                return false;
            }
            final Marker[] localParents = parents;
            if (localParents != null) {
                final int localParentsLength = localParents.length;
                if (localParentsLength == 1) {
                    return checkParent(localParents[0], marker);
                }
                if (localParentsLength == 2) {
                    return checkParent(localParents[0], marker) || checkParent(localParents[1], marker);
                }
                //noinspection ForLoopReplaceableByForEach
                for (int i = 0; i < localParentsLength; i++) {
                    final Marker localParent = localParents[i];
                    if (checkParent(localParent, marker)) {
                        return true;
                    }
                }
            }
            return false;
        }

        // Depth-first search of parent's hierarchy for marker (identity comparison).
        private static boolean checkParent(final Marker parent, final Marker marker) {
            if (parent == marker) {
                return true;
            }
            // Read the field directly for our own implementation to avoid getParents()'s defensive copy.
            final Marker[] localParents = parent instanceof Log4jMarker ? ((Log4jMarker)parent).parents : parent.getParents();
            if (localParents != null) {
                final int localParentsLength = localParents.length;
                if (localParentsLength == 1) {
                    return checkParent(localParents[0], marker);
                }
                if (localParentsLength == 2) {
                    return checkParent(localParents[0], marker) || checkParent(localParents[1], marker);
                }
                //noinspection ForLoopReplaceableByForEach
                for (int i = 0; i < localParentsLength; i++) {
                    final Marker localParent = localParents[i];
                    if (checkParent(localParent, marker)) {
                        return true;
                    }
                }
            }
            return false;
        }

        /*
         * Called from add while synchronized.
         */
        private static boolean contains(final Marker parent, final Marker... localParents) {
            //noinspection ForLoopReplaceableByForEach
            // performance tests showed a normal for loop is slightly faster than a for-each loop on some platforms
            for (int i = 0, localParentsLength = localParents.length; i < localParentsLength; i++) {
                final Marker marker = localParents[i];
                if (marker == parent) {
                    return true;
                }
            }
            return false;
        }

        @Override
        public boolean equals(final Object o) {
            if (this == o) {
                return true;
            }
            // instanceof is false for null, so no separate null check is needed.
            if (!(o instanceof Marker)) {
                return false;
            }
            final Marker marker = (Marker) o;
            return name.equals(marker.getName());
        }

        @Override
        public int hashCode() {
            return name.hashCode();
        }

        @Override
        public String toString() {
            // FIXME: might want to use an initial capacity; the default is 16 (or str.length() + 16)
            final StringBuilder sb = new StringBuilder(name);
            final Marker[] localParents = parents;
            if (localParents != null) {
                addParentInfo(sb, localParents);
            }
            return sb.toString();
        }

        // Recursively appends "[ parent, ... ]" hierarchy info for toString().
        private static void addParentInfo(final StringBuilder sb, final Marker... parents) {
            sb.append("[ ");
            boolean first = true;
            //noinspection ForLoopReplaceableByForEach
            for (int i = 0, parentsLength = parents.length; i < parentsLength; i++) {
                final Marker marker = parents[i];
                if (!first) {
                    sb.append(", ");
                }
                first = false;
                sb.append(marker.getName());
                final Marker[] p = marker instanceof Log4jMarker ? ((Log4jMarker) marker).parents : marker.getParents();
                if (p != null) {
                    addParentInfo(sb, p);
                }
            }
            sb.append(" ]");
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.jms.management.impl;
import javax.management.MBeanInfo;
import javax.management.NotCompliantMBeanException;
import javax.management.StandardMBean;
import org.apache.activemq.artemis.api.core.DiscoveryGroupConfiguration;
import org.apache.activemq.artemis.api.core.TransportConfiguration;
import org.apache.activemq.artemis.api.core.management.Parameter;
import org.apache.activemq.artemis.api.jms.management.ConnectionFactoryControl;
import org.apache.activemq.artemis.core.management.impl.MBeanInfoHelper;
import org.apache.activemq.artemis.jms.client.ActiveMQConnectionFactory;
import org.apache.activemq.artemis.jms.server.JMSServerManager;
import org.apache.activemq.artemis.jms.server.config.ConnectionFactoryConfiguration;
/**
 * JMX control for a JMS {@code ConnectionFactory}: getters read from the persisted
 * {@link ConnectionFactoryConfiguration}, and every setter updates that configuration and
 * then rebuilds the live {@link ActiveMQConnectionFactory} via {@link #recreateCF()} so the
 * change takes effect immediately.
 */
public class JMSConnectionFactoryControlImpl extends StandardMBean implements ConnectionFactoryControl {
   // Constants -----------------------------------------------------
   // Attributes ----------------------------------------------------
   // Persisted configuration; the source of truth for most attribute reads.
   private final ConnectionFactoryConfiguration cfConfig;
   // Live factory instance; replaced wholesale by recreateCF() after each config change.
   private ActiveMQConnectionFactory cf;
   private final String name;
   private final JMSServerManager jmsManager;
   // Static --------------------------------------------------------
   // Constructors --------------------------------------------------
   public JMSConnectionFactoryControlImpl(final ConnectionFactoryConfiguration cfConfig,
                                          final ActiveMQConnectionFactory cf,
                                          final JMSServerManager jmsManager,
                                          final String name) throws NotCompliantMBeanException {
      super(ConnectionFactoryControl.class);
      this.cfConfig = cfConfig;
      this.cf = cf;
      this.name = name;
      this.jmsManager = jmsManager;
   }
   // Public --------------------------------------------------------
   // ManagedConnectionFactoryMBean implementation ------------------
   @Override
   public String[] getRegistryBindings() {
      return jmsManager.getBindingsOnConnectionFactory(name);
   }
   @Override
   public boolean isCompressLargeMessages() {
      // Note: read from the live factory, not cfConfig, unlike most getters here.
      return cf.isCompressLargeMessage();
   }
   @Override
   public void setCompressLargeMessages(final boolean compress) {
      cfConfig.setCompressLargeMessages(compress);
      recreateCF();
   }
   @Override
   public boolean isHA() {
      return cfConfig.isHA();
   }
   @Override
   public int getFactoryType() {
      return cfConfig.getFactoryType().intValue();
   }
   @Override
   public String getClientID() {
      return cfConfig.getClientID();
   }
   @Override
   public long getClientFailureCheckPeriod() {
      return cfConfig.getClientFailureCheckPeriod();
   }
   @Override
   public void setClientID(String clientID) {
      cfConfig.setClientID(clientID);
      recreateCF();
   }
   @Override
   public void setDupsOKBatchSize(int dupsOKBatchSize) {
      cfConfig.setDupsOKBatchSize(dupsOKBatchSize);
      recreateCF();
   }
   @Override
   public void setTransactionBatchSize(int transactionBatchSize) {
      cfConfig.setTransactionBatchSize(transactionBatchSize);
      recreateCF();
   }
   @Override
   public void setClientFailureCheckPeriod(long clientFailureCheckPeriod) {
      cfConfig.setClientFailureCheckPeriod(clientFailureCheckPeriod);
      recreateCF();
   }
   @Override
   public void setConnectionTTL(long connectionTTL) {
      cfConfig.setConnectionTTL(connectionTTL);
      recreateCF();
   }
   @Override
   public void setCallTimeout(long callTimeout) {
      cfConfig.setCallTimeout(callTimeout);
      recreateCF();
   }
   @Override
   public void setCallFailoverTimeout(long callTimeout) {
      cfConfig.setCallFailoverTimeout(callTimeout);
      recreateCF();
   }
   @Override
   public void setConsumerWindowSize(int consumerWindowSize) {
      cfConfig.setConsumerWindowSize(consumerWindowSize);
      recreateCF();
   }
   @Override
   public void setConsumerMaxRate(int consumerMaxRate) {
      cfConfig.setConsumerMaxRate(consumerMaxRate);
      recreateCF();
   }
   @Override
   public void setConfirmationWindowSize(int confirmationWindowSize) {
      cfConfig.setConfirmationWindowSize(confirmationWindowSize);
      recreateCF();
   }
   @Override
   public void setProducerMaxRate(int producerMaxRate) {
      cfConfig.setProducerMaxRate(producerMaxRate);
      recreateCF();
   }
   @Override
   public int getProducerWindowSize() {
      return cfConfig.getProducerWindowSize();
   }
   @Override
   public void setProducerWindowSize(int producerWindowSize) {
      cfConfig.setProducerWindowSize(producerWindowSize);
      recreateCF();
   }
   @Override
   public void setCacheLargeMessagesClient(boolean cacheLargeMessagesClient) {
      cfConfig.setCacheLargeMessagesClient(cacheLargeMessagesClient);
      recreateCF();
   }
   @Override
   public boolean isCacheLargeMessagesClient() {
      return cfConfig.isCacheLargeMessagesClient();
   }
   @Override
   public void setMinLargeMessageSize(int minLargeMessageSize) {
      cfConfig.setMinLargeMessageSize(minLargeMessageSize);
      recreateCF();
   }
   @Override
   public void setBlockOnNonDurableSend(boolean blockOnNonDurableSend) {
      cfConfig.setBlockOnNonDurableSend(blockOnNonDurableSend);
      recreateCF();
   }
   @Override
   public void setBlockOnAcknowledge(boolean blockOnAcknowledge) {
      cfConfig.setBlockOnAcknowledge(blockOnAcknowledge);
      recreateCF();
   }
   @Override
   public void setBlockOnDurableSend(boolean blockOnDurableSend) {
      cfConfig.setBlockOnDurableSend(blockOnDurableSend);
      recreateCF();
   }
   @Override
   public void setAutoGroup(boolean autoGroup) {
      cfConfig.setAutoGroup(autoGroup);
      recreateCF();
   }
   @Override
   public void setPreAcknowledge(boolean preAcknowledge) {
      cfConfig.setPreAcknowledge(preAcknowledge);
      recreateCF();
   }
   @Override
   public void setMaxRetryInterval(long retryInterval) {
      cfConfig.setMaxRetryInterval(retryInterval);
      recreateCF();
   }
   @Override
   public void setRetryIntervalMultiplier(double retryIntervalMultiplier) {
      cfConfig.setRetryIntervalMultiplier(retryIntervalMultiplier);
      recreateCF();
   }
   @Override
   public void setReconnectAttempts(int reconnectAttempts) {
      cfConfig.setReconnectAttempts(reconnectAttempts);
      recreateCF();
   }
   @Override
   public void setFailoverOnInitialConnection(boolean failover) {
      cfConfig.setFailoverOnInitialConnection(failover);
      recreateCF();
   }
   @Override
   public boolean isUseGlobalPools() {
      return cfConfig.isUseGlobalPools();
   }
   @Override
   public void setScheduledThreadPoolMaxSize(int scheduledThreadPoolMaxSize) {
      cfConfig.setScheduledThreadPoolMaxSize(scheduledThreadPoolMaxSize);
      recreateCF();
   }
   @Override
   public int getThreadPoolMaxSize() {
      return cfConfig.getThreadPoolMaxSize();
   }
   @Override
   public void setThreadPoolMaxSize(int threadPoolMaxSize) {
      cfConfig.setThreadPoolMaxSize(threadPoolMaxSize);
      recreateCF();
   }
   @Override
   public int getInitialMessagePacketSize() {
      // Note: read from the live factory, not cfConfig.
      return cf.getInitialMessagePacketSize();
   }
   @Override
   public void setGroupID(String groupID) {
      cfConfig.setGroupID(groupID);
      recreateCF();
   }
   @Override
   public String getGroupID() {
      return cfConfig.getGroupID();
   }
   @Override
   public void setUseGlobalPools(boolean useGlobalPools) {
      cfConfig.setUseGlobalPools(useGlobalPools);
      recreateCF();
   }
   @Override
   public int getScheduledThreadPoolMaxSize() {
      return cfConfig.getScheduledThreadPoolMaxSize();
   }
   @Override
   public void setRetryInterval(long retryInterval) {
      cfConfig.setRetryInterval(retryInterval);
      recreateCF();
   }
   @Override
   public long getMaxRetryInterval() {
      return cfConfig.getMaxRetryInterval();
   }
   @Override
   public String getConnectionLoadBalancingPolicyClassName() {
      return cfConfig.getLoadBalancingPolicyClassName();
   }
   @Override
   public void setConnectionLoadBalancingPolicyClassName(String name) {
      cfConfig.setLoadBalancingPolicyClassName(name);
      recreateCF();
   }
   @Override
   public TransportConfiguration[] getStaticConnectors() {
      return cf.getStaticConnectors();
   }
   @Override
   public DiscoveryGroupConfiguration getDiscoveryGroupConfiguration() {
      return cf.getDiscoveryGroupConfiguration();
   }
   @Override
   public void addBinding(@Parameter(name = "binding", desc = "the name of the binding for the Registry") String binding) throws Exception {
      jmsManager.addConnectionFactoryToBindingRegistry(name, binding);
   }
   @Override
   public void removeBinding(@Parameter(name = "binding", desc = "the name of the binding for the Registry") String binding) throws Exception {
      jmsManager.removeConnectionFactoryFromBindingRegistry(name, binding);
   }
   @Override
   public long getCallTimeout() {
      return cfConfig.getCallTimeout();
   }
   @Override
   public long getCallFailoverTimeout() {
      return cfConfig.getCallFailoverTimeout();
   }
   @Override
   public int getConsumerMaxRate() {
      return cfConfig.getConsumerMaxRate();
   }
   @Override
   public int getConsumerWindowSize() {
      return cfConfig.getConsumerWindowSize();
   }
   @Override
   public int getProducerMaxRate() {
      return cfConfig.getProducerMaxRate();
   }
   @Override
   public int getConfirmationWindowSize() {
      return cfConfig.getConfirmationWindowSize();
   }
   @Override
   public int getDupsOKBatchSize() {
      return cfConfig.getDupsOKBatchSize();
   }
   @Override
   public boolean isBlockOnAcknowledge() {
      return cfConfig.isBlockOnAcknowledge();
   }
   @Override
   public boolean isBlockOnNonDurableSend() {
      return cfConfig.isBlockOnNonDurableSend();
   }
   @Override
   public boolean isBlockOnDurableSend() {
      return cfConfig.isBlockOnDurableSend();
   }
   @Override
   public boolean isPreAcknowledge() {
      return cfConfig.isPreAcknowledge();
   }
   @Override
   public String getName() {
      return name;
   }
   @Override
   public long getConnectionTTL() {
      return cfConfig.getConnectionTTL();
   }
   @Override
   public int getReconnectAttempts() {
      return cfConfig.getReconnectAttempts();
   }
   @Override
   public boolean isFailoverOnInitialConnection() {
      return cfConfig.isFailoverOnInitialConnection();
   }
   @Override
   public int getMinLargeMessageSize() {
      return cfConfig.getMinLargeMessageSize();
   }
   @Override
   public long getRetryInterval() {
      return cfConfig.getRetryInterval();
   }
   @Override
   public double getRetryIntervalMultiplier() {
      return cfConfig.getRetryIntervalMultiplier();
   }
   @Override
   public int getTransactionBatchSize() {
      return cfConfig.getTransactionBatchSize();
   }
   @Override
   public void setProtocolManagerFactoryStr(String protocolManagerFactoryStr) {
      cfConfig.setProtocolManagerFactoryStr(protocolManagerFactoryStr);
      recreateCF();
   }
   @Override
   public String getProtocolManagerFactoryStr() {
      return cfConfig.getProtocolManagerFactoryStr();
   }
   @Override
   public boolean isAutoGroup() {
      return cfConfig.isAutoGroup();
   }
   @Override
   public MBeanInfo getMBeanInfo() {
      // Rebuild the MBeanInfo so the operations carry the annotated descriptions from
      // ConnectionFactoryControl instead of the StandardMBean defaults.
      MBeanInfo info = super.getMBeanInfo();
      return new MBeanInfo(info.getClassName(), info.getDescription(), info.getAttributes(), info.getConstructors(), MBeanInfoHelper.getMBeanOperationsInfo(ConnectionFactoryControl.class), info.getNotifications());
   }
   // Package protected ---------------------------------------------
   // Protected -----------------------------------------------------
   // Rebuilds the live connection factory from the (just-updated) configuration; any
   // checked failure is surfaced as a RuntimeException to the JMX caller.
   private void recreateCF() {
      try {
         this.cf = jmsManager.recreateCF(this.name, this.cfConfig);
      } catch (Exception e) {
         throw new RuntimeException(e.getMessage(), e);
      }
   }
   // Private -------------------------------------------------------
   // Inner classes -------------------------------------------------
}
| |
package org.zstack.network.service.vip;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Transactional;
import org.zstack.core.Platform;
import org.zstack.core.cascade.CascadeConstant;
import org.zstack.core.cascade.CascadeFacade;
import org.zstack.core.cloudbus.CloudBus;
import org.zstack.core.cloudbus.CloudBusCallBack;
import org.zstack.core.cloudbus.MessageSafe;
import org.zstack.core.componentloader.PluginExtension;
import org.zstack.core.componentloader.PluginRegistry;
import org.zstack.core.db.DatabaseFacade;
import org.zstack.core.db.SimpleQuery;
import org.zstack.core.errorcode.ErrorFacade;
import org.zstack.core.workflow.FlowChainBuilder;
import org.zstack.core.workflow.ShareFlow;
import org.zstack.header.AbstractService;
import org.zstack.header.apimediator.ApiMessageInterceptionException;
import org.zstack.header.core.Completion;
import org.zstack.header.core.NopeCompletion;
import org.zstack.header.core.workflow.*;
import org.zstack.header.errorcode.ErrorCode;
import org.zstack.header.errorcode.OperationFailureException;
import org.zstack.header.errorcode.SysErrors;
import org.zstack.header.exception.CloudRuntimeException;
import org.zstack.header.identity.IdentityErrors;
import org.zstack.header.identity.Quota;
import org.zstack.header.identity.Quota.QuotaOperator;
import org.zstack.header.identity.Quota.QuotaPair;
import org.zstack.header.identity.ReportQuotaExtensionPoint;
import org.zstack.header.message.APIDeleteMessage.DeletionMode;
import org.zstack.header.message.APIMessage;
import org.zstack.header.message.Message;
import org.zstack.header.message.MessageReply;
import org.zstack.header.network.l3.*;
import org.zstack.identity.AccountManager;
import org.zstack.tag.TagManager;
import org.zstack.utils.DebugUtils;
import org.zstack.utils.Utils;
import org.zstack.utils.logging.CLogger;
import javax.persistence.TypedQuery;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.zstack.utils.CollectionDSL.list;
/**
*/
/**
 * Service managing virtual IPs (VIPs): creation/deletion/state-change via API
 * messages, locking a VIP to a network service, acquiring/releasing it on a
 * backend provider, and reporting the per-account VIP quota.
 */
public class VipManagerImpl extends AbstractService implements VipManager, ReportQuotaExtensionPoint {
    private static final CLogger logger = Utils.getLogger(VipManagerImpl.class);

    @Autowired
    private CloudBus bus;
    @Autowired
    private DatabaseFacade dbf;
    @Autowired
    private ErrorFacade errf;
    @Autowired
    private PluginRegistry pluginRgty;
    @Autowired
    private AccountManager acntMgr;
    @Autowired
    private CascadeFacade casf;
    @Autowired
    private TagManager tagMgr;

    // VIP use (network service type) -> extension that releases the VIP from that use
    private Map<String, VipReleaseExtensionPoint> vipReleaseExts = new HashMap<String, VipReleaseExtensionPoint>();
    // network service provider type -> backend that programs the VIP on that provider
    private Map<String, VipBackend> vipBackends = new HashMap<String, VipBackend>();
    private List<String> releaseVipByApiFlowNames;
    private FlowChainBuilder releaseVipByApiFlowChainBuilder;

    /**
     * Collects {@link VipReleaseExtensionPoint} and {@link VipBackend} plugins from
     * the registry, failing fast on duplicate registrations for the same key.
     */
    private void populateExtensions() {
        List<PluginExtension> exts = pluginRgty.getExtensionByInterfaceName(VipReleaseExtensionPoint.class.getName());
        for (PluginExtension ext : exts) {
            VipReleaseExtensionPoint extp = (VipReleaseExtensionPoint) ext.getInstance();
            VipReleaseExtensionPoint old = vipReleaseExts.get(extp.getVipUse());
            if (old != null) {
                // BUGFIX: the format arguments were (old class, new class, use), while the
                // template expects (use, old class, new class)
                throw new CloudRuntimeException(String.format("duplicate VirtualRouterVipReleaseExtensionPoint for %s, old[%s], new[%s]", extp.getVipUse(), old.getClass().getName(), extp.getClass().getName()));
            }
            vipReleaseExts.put(extp.getVipUse(), extp);
        }

        exts = pluginRgty.getExtensionByInterfaceName(VipBackend.class.getName());
        for (PluginExtension ext : exts) {
            VipBackend extp = (VipBackend) ext.getInstance();
            VipBackend old = vipBackends.get(extp.getServiceProviderTypeForVip());
            if (old != null) {
                throw new CloudRuntimeException(
                        String.format("duplicate VipBackend[%s, %s] for provider type[%s]", old.getClass().getName(), extp.getClass().getName(), extp.getServiceProviderTypeForVip())
                );
            }
            vipBackends.put(extp.getServiceProviderTypeForVip(), extp);
        }
    }

    /**
     * Returns the release extension registered for the given VIP use.
     *
     * @throws CloudRuntimeException if no extension is registered for the use
     */
    public VipReleaseExtensionPoint getVipReleaseExtensionPoint(String use) {
        VipReleaseExtensionPoint extp = vipReleaseExts.get(use);
        if (extp == null) {
            // BUGFIX: message was missing the verb "find"
            throw new CloudRuntimeException(String.format("cannot find VipReleaseExtensionPoint for use[%s]", use));
        }
        return extp;
    }

    @Override
    @MessageSafe
    public void handleMessage(Message msg) {
        if (msg instanceof APIMessage) {
            handleApiMessage((APIMessage) msg);
        } else {
            handleLocalMessage(msg);
        }
    }

    private void handleLocalMessage(Message msg) {
        if (msg instanceof VipDeletionMsg) {
            handle((VipDeletionMsg) msg);
        } else {
            bus.dealWithUnknownMessage(msg);
        }
    }

    /**
     * Deletes a VIP. If the VIP is not used by any service the IP is returned and
     * the record removed directly; otherwise the configured release flow chain runs
     * first to detach it from its consumer.
     */
    private void handle(final VipDeletionMsg msg) {
        final VipDeletionReply reply = new VipDeletionReply();
        final VipVO vip = dbf.findByUuid(msg.getVipUuid(), VipVO.class);
        if (vip == null) {
            // ROBUSTNESS: the VIP is already gone; treat deletion as idempotent
            // instead of throwing a NullPointerException below
            bus.reply(msg, reply);
            return;
        }
        if (vip.getUseFor() == null) {
            returnVip(VipInventory.valueOf(vip));
            dbf.removeByPrimaryKey(vip.getUuid(), VipVO.class);
            logger.debug(String.format("released vip[uuid:%s, ip:%s] on l3Network[uuid:%s]", vip.getUuid(), vip.getIp(), vip.getL3NetworkUuid()));
            bus.reply(msg, reply);
            return;
        }

        final VipInventory vipinv = VipInventory.valueOf(vip);
        FlowChain chain = releaseVipByApiFlowChainBuilder.build();
        chain.setName(String.format("api-release-vip-uuid-%s-ip-%s-name-%s", vipinv.getUuid(), vipinv.getIp(), vipinv.getName()));
        chain.getData().put(VipConstant.Params.VIP.toString(), vipinv);
        chain.done(new FlowDoneHandler(msg) {
            @Override
            public void handle(Map data) {
                returnVip(vipinv);
                dbf.removeByPrimaryKey(vip.getUuid(), VipVO.class);
                bus.reply(msg, reply);
            }
        }).error(new FlowErrorHandler(msg) {
            @Override
            public void handle(ErrorCode errCode, Map data) {
                reply.setError(errCode);
                bus.reply(msg, reply);
            }
        }).start();
    }

    private void handleApiMessage(APIMessage msg) {
        if (msg instanceof APICreateVipMsg) {
            handle((APICreateVipMsg) msg);
        } else if (msg instanceof APIDeleteVipMsg) {
            handle((APIDeleteVipMsg) msg);
        } else if (msg instanceof APIChangeVipStateMsg) {
            handle((APIChangeVipStateMsg) msg);
        } else if (msg instanceof APIUpdateVipMsg) {
            handle((APIUpdateVipMsg) msg);
        } else {
            bus.dealWithUnknownMessage(msg);
        }
    }

    /** Updates name/description of a VIP; writes to the DB only when something changed. */
    private void handle(APIUpdateVipMsg msg) {
        VipVO vo = dbf.findByUuid(msg.getUuid(), VipVO.class);
        boolean update = false;
        if (msg.getName() != null) {
            vo.setName(msg.getName());
            update = true;
        }
        if (msg.getDescription() != null) {
            vo.setDescription(msg.getDescription());
            update = true;
        }
        if (update) {
            vo = dbf.updateAndRefresh(vo);
        }

        APIUpdateVipEvent evt = new APIUpdateVipEvent(msg.getId());
        evt.setInventory(VipInventory.valueOf(vo));
        bus.publish(evt);
    }

    /** Transitions the VIP's state machine according to the requested state event. */
    private void handle(APIChangeVipStateMsg msg) {
        VipVO vip = dbf.findByUuid(msg.getUuid(), VipVO.class);
        VipStateEvent sevt = VipStateEvent.valueOf(msg.getStateEvent());
        vip.setState(vip.getState().nextState(sevt));
        vip = dbf.updateAndRefresh(vip);

        APIChangeVipStateEvent evt = new APIChangeVipStateEvent(msg.getId());
        evt.setInventory(VipInventory.valueOf(vip));
        bus.publish(evt);
    }

    /** Returns the VIP's IP address to its L3 network's IP allocator (fire-and-forget). */
    private void returnVip(VipInventory vip) {
        ReturnIpMsg msg = new ReturnIpMsg();
        msg.setL3NetworkUuid(vip.getL3NetworkUuid());
        msg.setUsedIpUuid(vip.getUsedIpUuid());
        bus.makeTargetServiceIdByResourceUuid(msg, L3NetworkConstant.SERVICE_ID, vip.getL3NetworkUuid());
        bus.send(msg);
    }

    /**
     * Deletes a VIP through the cascade framework. Permissive mode runs a deletion
     * check first; force mode skips the check.
     */
    private void handle(final APIDeleteVipMsg msg) {
        final VipVO vip = dbf.findByUuid(msg.getUuid(), VipVO.class);
        final APIDeleteVipEvent evt = new APIDeleteVipEvent(msg.getId());
        final String issuer = VipVO.class.getSimpleName();
        final List<VipInventory> ctx = Arrays.asList(VipInventory.valueOf(vip));
        FlowChain chain = FlowChainBuilder.newShareFlowChain();
        chain.setName(String.format("delete-vip-%s", vip.getUuid()));
        chain.then(new ShareFlow() {
            @Override
            public void setup() {
                if (msg.getDeletionMode() == DeletionMode.Permissive) {
                    flow(new NoRollbackFlow() {
                        String __name__ = String.format("delete-vip-permissive-check");

                        @Override
                        public void run(final FlowTrigger trigger, Map data) {
                            casf.asyncCascade(CascadeConstant.DELETION_CHECK_CODE, issuer, ctx, new Completion(trigger) {
                                @Override
                                public void success() {
                                    trigger.next();
                                }

                                @Override
                                public void fail(ErrorCode errorCode) {
                                    trigger.fail(errorCode);
                                }
                            });
                        }
                    });

                    flow(new NoRollbackFlow() {
                        String __name__ = String.format("delete-vip-permissive-delete");

                        @Override
                        public void run(final FlowTrigger trigger, Map data) {
                            casf.asyncCascade(CascadeConstant.DELETION_DELETE_CODE, issuer, ctx, new Completion(trigger) {
                                @Override
                                public void success() {
                                    trigger.next();
                                }

                                @Override
                                public void fail(ErrorCode errorCode) {
                                    trigger.fail(errorCode);
                                }
                            });
                        }
                    });
                } else {
                    flow(new NoRollbackFlow() {
                        String __name__ = String.format("delete-vip-force-delete");

                        @Override
                        public void run(final FlowTrigger trigger, Map data) {
                            casf.asyncCascade(CascadeConstant.DELETION_FORCE_DELETE_CODE, issuer, ctx, new Completion(trigger) {
                                @Override
                                public void success() {
                                    trigger.next();
                                }

                                @Override
                                public void fail(ErrorCode errorCode) {
                                    trigger.fail(errorCode);
                                }
                            });
                        }
                    });
                }

                done(new FlowDoneHandler(msg) {
                    @Override
                    public void handle(Map data) {
                        // best-effort cleanup; completion intentionally ignored
                        casf.asyncCascadeFull(CascadeConstant.DELETION_CLEANUP_CODE, issuer, ctx, new NopeCompletion());
                        bus.publish(evt);
                    }
                });

                error(new FlowErrorHandler(msg) {
                    @Override
                    public void handle(ErrorCode errCode, Map data) {
                        evt.setErrorCode(errf.instantiateErrorCode(SysErrors.DELETE_RESOURCE_ERROR, errCode));
                        bus.publish(evt);
                    }
                });
            }
        }).start();
    }

    /**
     * Creates a VIP: allocates an IP from the requested L3 network (rolled back on
     * failure), then persists the VIP record with ownership and tags.
     */
    private void handle(final APICreateVipMsg msg) {
        final APICreateVipEvent evt = new APICreateVipEvent(msg.getId());
        FlowChain chain = FlowChainBuilder.newShareFlowChain();
        chain.setName(String.format("create-vip-%s-from-l3-%s", msg.getName(), msg.getL3NetworkUuid()));
        chain.then(new ShareFlow() {
            UsedIpInventory ip;
            VipInventory vip;

            @Override
            public void setup() {
                flow(new Flow() {
                    String __name__ = String.format("allocate-ip-for-vip");

                    @Override
                    public void run(final FlowTrigger trigger, Map data) {
                        String strategyType = msg.getAllocatorStrategy() == null ? L3NetworkConstant.RANDOM_IP_ALLOCATOR_STRATEGY : msg.getAllocatorStrategy();
                        AllocateIpMsg amsg = new AllocateIpMsg();
                        amsg.setL3NetworkUuid(msg.getL3NetworkUuid());
                        amsg.setAllocateStrategy(strategyType);
                        amsg.setRequiredIp(msg.getRequiredIp());
                        bus.makeTargetServiceIdByResourceUuid(amsg, L3NetworkConstant.SERVICE_ID, msg.getL3NetworkUuid());
                        bus.send(amsg, new CloudBusCallBack(trigger) {
                            @Override
                            public void run(MessageReply reply) {
                                if (reply.isSuccess()) {
                                    AllocateIpReply re = reply.castReply();
                                    ip = re.getIpInventory();
                                    trigger.next();
                                } else {
                                    trigger.fail(reply.getError());
                                }
                            }
                        });
                    }

                    @Override
                    public void rollback(final FlowRollback trigger, Map data) {
                        if (ip == null) {
                            // allocation never happened; nothing to undo
                            trigger.rollback();
                            return;
                        }

                        ReturnIpMsg rmsg = new ReturnIpMsg();
                        rmsg.setL3NetworkUuid(ip.getL3NetworkUuid());
                        rmsg.setUsedIpUuid(ip.getUuid());
                        bus.makeTargetServiceIdByResourceUuid(rmsg, L3NetworkConstant.SERVICE_ID, rmsg.getL3NetworkUuid());
                        bus.send(rmsg, new CloudBusCallBack(trigger) {
                            @Override
                            public void run(MessageReply reply) {
                                if (!reply.isSuccess()) {
                                    logger.warn(String.format("failed to return ip[uuid:%s, ip:%s] to l3Network[uuid:%s], %s",
                                            ip.getUuid(), ip.getIp(), ip.getL3NetworkUuid(), reply.getError()));
                                }
                                trigger.rollback();
                            }
                        });
                    }
                });

                flow(new NoRollbackFlow() {
                    String __name__ = String.format("create-vip-in-db");

                    @Override
                    public void run(FlowTrigger trigger, Map data) {
                        VipVO vipvo = new VipVO();
                        if (msg.getResourceUuid() != null) {
                            vipvo.setUuid(msg.getResourceUuid());
                        } else {
                            vipvo.setUuid(Platform.getUuid());
                        }
                        vipvo.setName(msg.getName());
                        vipvo.setDescription(msg.getDescription());
                        vipvo.setState(VipState.Enabled);
                        vipvo.setGateway(ip.getGateway());
                        vipvo.setIp(ip.getIp());
                        vipvo.setIpRangeUuid(ip.getIpRangeUuid());
                        vipvo.setL3NetworkUuid(ip.getL3NetworkUuid());
                        vipvo.setNetmask(ip.getNetmask());
                        vipvo.setUsedIpUuid(ip.getUuid());
                        vipvo = dbf.persistAndRefresh(vipvo);
                        acntMgr.createAccountResourceRef(msg.getSession().getAccountUuid(), vipvo.getUuid(), VipVO.class);
                        tagMgr.createTagsFromAPICreateMessage(msg, vipvo.getUuid(), VipVO.class.getSimpleName());
                        vip = VipInventory.valueOf(vipvo);
                        trigger.next();
                    }
                });

                done(new FlowDoneHandler(msg) {
                    @Override
                    public void handle(Map data) {
                        logger.debug(String.format("successfully acquired vip[uuid:%s, address:%s] on l3NetworkUuid[uuid:%s]", vip.getUuid(), ip.getIp(), ip.getL3NetworkUuid()));
                        evt.setInventory(vip);
                        bus.publish(evt);
                    }
                });

                // CONSISTENCY FIX: pass msg like every other Flow*Handler in this class
                error(new FlowErrorHandler(msg) {
                    @Override
                    public void handle(ErrorCode errCode, Map data) {
                        evt.setErrorCode(errCode);
                        bus.publish(evt);
                    }
                });
            }
        }).start();
    }

    @Override
    public String getId() {
        return bus.makeLocalServiceId(VipConstant.SERVICE_ID);
    }

    /** Builds the release-VIP flow chain from the configured flow class names. */
    private void prepareFlows() {
        releaseVipByApiFlowChainBuilder = FlowChainBuilder.newBuilder().setFlowClassNames(releaseVipByApiFlowNames).construct();
    }

    @Override
    public boolean start() {
        populateExtensions();
        prepareFlows();
        return true;
    }

    @Override
    public boolean stop() {
        return true;
    }

    @Override
    public VipBackend getVipBackend(String providerType) {
        VipBackend backend = vipBackends.get(providerType);
        DebugUtils.Assert(backend != null, String.format("cannot find VipBackend for provider type[%s]", providerType));
        return backend;
    }

    /**
     * Records which service provider currently serves the VIP; the peer L3 network
     * is only set on first use and never overwritten here.
     */
    @Override
    public void saveVipInfo(String vipUuid, String networkServiceType, String peerL3NetworkUuid) {
        VipVO vo = dbf.findByUuid(vipUuid, VipVO.class);
        vo.setServiceProvider(networkServiceType);
        if (vo.getPeerL3NetworkUuid() == null) {
            vo.setPeerL3NetworkUuid(peerL3NetworkUuid);
        }
        dbf.update(vo);
    }

    @Override
    public void lockAndAcquireVip(VipInventory vip, L3NetworkInventory peerL3Network, String networkServiceType, String networkServiceProviderType, Completion completion) {
        lockVip(vip, networkServiceType);
        acquireVip(vip, peerL3Network, networkServiceProviderType, completion);
    }

    @Override
    public void releaseAndUnlockVip(VipInventory vip, Completion completion) {
        releaseAndUnlockVip(vip, true, completion);
    }

    @Override
    public void releaseAndUnlockVip(final VipInventory vip, boolean releasePeerL3Network, final Completion completion) {
        releaseVip(vip, releasePeerL3Network, new Completion(completion) {
            @Override
            public void success() {
                unlockVip(vip);
                completion.success();
            }

            @Override
            public void fail(ErrorCode errorCode) {
                completion.fail(errorCode);
            }
        });
    }

    /**
     * Programs the VIP on the given provider's backend. Fails if the VIP already
     * serves a different peer L3 network.
     */
    @Override
    public void acquireVip(final VipInventory vip, final L3NetworkInventory peerL3Network, final String networkServiceProviderType, final Completion completion) {
        if (vip.getPeerL3NetworkUuid() != null && !vip.getPeerL3NetworkUuid().equals(peerL3Network.getUuid())) {
            // NOTE(review): vip.getName() is passed twice below; the second occurrence
            // fills the serving l3Network's *name* slot — looks like a copy-paste slip,
            // but the serving network's name is not available here. Confirm intent.
            completion.fail(
                    errf.stringToOperationError(String.format("vip[uuid:%s, name:%s] has been serving l3Network[name:%s, uuid:%s], can't serve l3Network[name:%s, uuid:%s]",
                            vip.getUuid(), vip.getName(), vip.getName(), vip.getPeerL3NetworkUuid(), peerL3Network.getName(), peerL3Network.getUuid()))
            );
            return;
        }

        VipBackend bkd = getVipBackend(networkServiceProviderType);
        bkd.acquireVip(vip, peerL3Network, new Completion(completion) {
            @Override
            public void success() {
                saveVipInfo(vip.getUuid(), networkServiceProviderType, peerL3Network.getUuid());
                logger.debug(String.format("successfully acquired vip[uuid:%s, name:%s, ip:%s] on service[%s]",
                        vip.getUuid(), vip.getName(), vip.getIp(), networkServiceProviderType));
                completion.success();
            }

            @Override
            public void fail(ErrorCode errorCode) {
                completion.fail(errorCode);
            }
        });
    }

    /**
     * Releases the VIP from its current service provider. A no-op if no provider
     * is recorded (already released by another descendant network service).
     */
    @Override
    public void releaseVip(final VipInventory vip, final boolean releasePeerL3Network, final Completion completion) {
        if (vip.getServiceProvider() == null) {
            // the vip has been released by other descendant network service
            completion.success();
            return;
        }

        VipBackend bkd = getVipBackend(vip.getServiceProvider());
        // service provider should ensure vip always release successfully,
        // use its garbage collector on failure
        bkd.releaseVip(vip, new Completion() {
            @Override
            public void success() {
                logger.debug(String.format("successfully released vip[uuid:%s, name:%s, ip:%s] on service[%s]",
                        vip.getUuid(), vip.getName(), vip.getIp(), vip.getServiceProvider()));
                VipVO vo = dbf.findByUuid(vip.getUuid(), VipVO.class);
                vo.setServiceProvider(null);
                if (releasePeerL3Network) {
                    vo.setPeerL3NetworkUuid(null);
                }
                dbf.update(vo);
                completion.success();
            }

            @Override
            public void fail(ErrorCode errorCode) {
                logger.warn(String.format("failed to release vip[uuid:%s, name:%s, ip:%s] on service[%s], its garbage collector should" +
                        " handle this", vip.getUuid(), vip.getName(), vip.getIp(), vip.getServiceProvider()));
                completion.fail(errorCode);
            }
        });
    }

    @Override
    public void releaseVip(final VipInventory vip, Completion completion) {
        releaseVip(vip, true, completion);
    }

    /** Clears the VIP's use marker so another network service can lock it. */
    @Override
    public void unlockVip(VipInventory vip) {
        VipVO vo = dbf.findByUuid(vip.getUuid(), VipVO.class);
        vo.setUseFor(null);
        dbf.update(vo);
        logger.debug(String.format("successfully unlocked vip[uuid:%s, name:%s, ip:%s]",
                vip.getUuid(), vip.getName(), vip.getIp()));
    }

    /**
     * Marks the VIP as used by the given network service type. Idempotent when the
     * VIP is already locked to the same type; fails if locked to a different one.
     */
    @Override
    public void lockVip(VipInventory vip, String networkServiceType) {
        SimpleQuery<VipVO> q = dbf.createQuery(VipVO.class);
        q.add(VipVO_.uuid, SimpleQuery.Op.EQ, vip.getUuid());
        VipVO vipvo = q.find();
        if (vipvo == null) {
            throw new OperationFailureException(
                    errf.stringToOperationError(String.format("no vip[uuid:%s, name:%s, ip:%s] found for lock", vip.getUuid(), vip.getName(), vip.getIp()))
            );
        }

        if ((vipvo.getUseFor() != null && !vipvo.getUseFor().equals(networkServiceType))) {
            throw new OperationFailureException(
                    errf.stringToOperationError(String.format("vip[uuid:%s, name:%s, ip:%s] has been occupied by usage[%s]",
                            vipvo.getUuid(), vipvo.getName(), vipvo.getIp(), vipvo.getUseFor()))
            );
        }

        if (networkServiceType.equals(vipvo.getUseFor())) {
            // already locked for this service; nothing to do
            return;
        }

        vipvo.setUseFor(networkServiceType);
        dbf.update(vipvo);
        logger.debug(String.format("successfully locked vip[uuid:%s, name:%s, ip:%s] for %s",
                vip.getUuid(), vip.getName(), vip.getIp(), networkServiceType));
    }

    public void setReleaseVipByApiFlowNames(List<String> releaseVipByApiFlowNames) {
        this.releaseVipByApiFlowNames = releaseVipByApiFlowNames;
    }

    /**
     * Reports the per-account VIP-count quota (default 20) and wires the checker
     * that rejects {@link APICreateVipMsg} once the quota is exceeded.
     */
    @Override
    public List<Quota> reportQuota() {
        QuotaOperator checker = new QuotaOperator() {
            @Override
            public void checkQuota(APIMessage msg, Map<String, QuotaPair> pairs) {
                if (msg instanceof APICreateVipMsg) {
                    check((APICreateVipMsg) msg, pairs);
                }
            }

            @Override
            public List<Quota.QuotaUsage> getQuotaUsageByAccount(String accountUuid) {
                Quota.QuotaUsage usage = new Quota.QuotaUsage();
                usage.setUsed(getUsedVip(accountUuid));
                usage.setName(VipConstant.QUOTA_VIP_NUM);
                return list(usage);
            }

            // NOTE(review): @Transactional on a private method of an anonymous class is
            // not intercepted by proxy-based Spring AOP — verify this is intentional
            @Transactional(readOnly = true)
            private long getUsedVip(String accountUuid) {
                String sql = "select count(vip) from VipVO vip, AccountResourceRefVO ref where ref.resourceUuid = vip.uuid" +
                        " and ref.accountUuid = :auuid and ref.resourceType = :rtype";
                TypedQuery<Long> q = dbf.getEntityManager().createQuery(sql, Long.class);
                q.setParameter("auuid", accountUuid);
                q.setParameter("rtype", VipVO.class.getSimpleName());
                Long vn = q.getSingleResult();
                vn = vn == null ? 0 : vn;
                return vn;
            }

            private void check(APICreateVipMsg msg, Map<String, QuotaPair> pairs) {
                long vipNum = pairs.get(VipConstant.QUOTA_VIP_NUM).getValue();
                long vn = getUsedVip(msg.getSession().getAccountUuid());
                if (vn + 1 > vipNum) {
                    throw new ApiMessageInterceptionException(errf.instantiateErrorCode(IdentityErrors.QUOTA_EXCEEDING,
                            String.format("quota exceeding. The account[uuid: %s] exceeds a quota[name: %s, value: %s]",
                                    msg.getSession().getAccountUuid(), VipConstant.QUOTA_VIP_NUM, vipNum)
                    ));
                }
            }
        };

        Quota quota = new Quota();
        quota.addMessageNeedValidation(APICreateVipMsg.class);
        quota.setOperator(checker);

        QuotaPair p = new QuotaPair();
        p.setName(VipConstant.QUOTA_VIP_NUM);
        p.setValue(20);
        quota.addPair(p);

        return list(quota);
    }
}
| |
/*******************************************************************************
* Copyright (c) 2000, 2008 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
package org.eclipse.ui.texteditor;
import java.lang.reflect.InvocationTargetException;
import java.util.Map;
import java.util.MissingResourceException;
import java.util.ResourceBundle;
import org.eclipse.swt.custom.BusyIndicator;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.jface.dialogs.ProgressMonitorDialog;
import org.eclipse.jface.operation.IRunnableWithProgress;
import org.eclipse.jface.text.BadLocationException;
import org.eclipse.jface.text.IDocument;
import org.eclipse.jface.text.IRegion;
import org.eclipse.jface.text.IRewriteTarget;
import org.eclipse.jface.text.TextUtilities;
/**
* An action to convert line delimiters of a text editor document to a
* particular line delimiter.
*
* @since 2.0
* @deprecated since 3.1. Line delimiter conversion has been modified to work on groups of files rather than being editor specific
*/
public class ConvertLineDelimitersAction extends TextEditorAction {

    /** The target line delimiter ("\r\n", "\n" or "\r"). */
    private final String fLineDelimiter;

    /**
     * Creates a line delimiter conversion action.
     *
     * @param editor the editor
     * @param lineDelimiter the target line delimiter to convert the editor's document to
     */
    public ConvertLineDelimitersAction(ITextEditor editor, String lineDelimiter) {
        this(EditorMessages.getBundleForConstructedKeys(), "dummy", editor, lineDelimiter); //$NON-NLS-1$
    }

    /**
     * Creates a line delimiter conversion action.
     *
     * @param bundle the resource bundle
     * @param prefix the prefix for the resource bundle lookup
     * @param editor the editor
     * @param lineDelimiter the target line delimiter to convert the editor's document to
     */
    public ConvertLineDelimitersAction(ResourceBundle bundle, String prefix, ITextEditor editor, String lineDelimiter) {
        super(bundle, prefix, editor);
        fLineDelimiter= lineDelimiter;

        // label depends on whether the target delimiter equals the platform default
        // NOTE(review): getLabelKey returns null for an unrecognized delimiter, which
        // would make getString throw an NPE — callers presumably only pass \r\n, \n, \r
        String platformLineDelimiter= System.getProperty("line.separator"); //$NON-NLS-1$
        setText(getString(getLabelKey(fLineDelimiter, platformLineDelimiter)));

        update();
    }

    /*
     * Converts the document of the active editor, if it is editable. Small documents
     * (< 40 lines) convert inline under a busy cursor; larger ones run under a
     * cancelable progress dialog.
     *
     * @see org.eclipse.jface.action.Action#run()
     */
    public void run() {
        try {
            ITextEditor editor= getTextEditor();
            if (editor == null)
                return;

            if (!validateEditorInputState())
                return;

            Object adapter= editor.getAdapter(IRewriteTarget.class);
            if (adapter instanceof IRewriteTarget) {
                IRewriteTarget target= (IRewriteTarget) adapter;
                IDocument document= target.getDocument();
                if (document != null) {
                    Shell shell= getTextEditor().getSite().getShell();
                    ConvertRunnable runnable= new ConvertRunnable(target, fLineDelimiter);

                    if (document.getNumberOfLines() < 40) {
                        BusyIndicator.showWhile(shell.getDisplay(), runnable);
                    } else {
                        // fork=false: runs in the UI thread; cancelable=true
                        ProgressMonitorDialog dialog= new ProgressMonitorDialog(shell);
                        dialog.run(false, true, runnable);
                    }
                }
            }

        } catch (InterruptedException e) {
            // action canceled
        } catch (InvocationTargetException e) {
            // should not happen
        }
    }

    /**
     * A runnable that converts all line delimiters of a document to <code>lineDelimiter</code>.
     */
    private static class ConvertRunnable implements IRunnableWithProgress, Runnable {

        /** The rewrite target */
        private final IRewriteTarget fRewriteTarget;
        /** The line delimiter to which to convert to */
        private final String fLineDelimiter;

        /**
         * Returns a new runnable for converting all line delimiters in the
         * <code>rewriteTarget</code> to <code>lineDelimter</code>.
         *
         * @param rewriteTarget the rewrite target
         * @param lineDelimiter the line delimiter
         */
        public ConvertRunnable(IRewriteTarget rewriteTarget, String lineDelimiter) {
            fRewriteTarget= rewriteTarget;
            fLineDelimiter= lineDelimiter;
        }

        /*
         * Replaces every line delimiter that differs from the target, one line at a
         * time, reporting one unit of work per line and honoring cancellation.
         *
         * @see IRunnableWithProgress#run(org.eclipse.core.runtime.IProgressMonitor)
         */
        public void run(IProgressMonitor monitor) throws InvocationTargetException, InterruptedException {
            IDocument document= fRewriteTarget.getDocument();
            final int lineCount= document.getNumberOfLines();
            monitor.beginTask(EditorMessages.Editor_ConvertLineDelimiter_title, lineCount);

            // suppress widget redraws for larger documents to avoid flicker/slowdown
            final boolean isLargeUpdate= lineCount > 50;
            if (isLargeUpdate)
                fRewriteTarget.setRedraw(false);

            // group all replacements into one undoable compound change; detach the
            // document partitioners so they are not re-run on every single replace
            fRewriteTarget.beginCompoundChange();

            Map partitioners= TextUtilities.removeDocumentPartitioners(document);

            try {
                for (int i= 0; i < lineCount; i++) {
                    if (monitor.isCanceled())
                        throw new InterruptedException();

                    final String delimiter= document.getLineDelimiter(i);
                    if (delimiter != null && delimiter.length() > 0 && !delimiter.equals(fLineDelimiter)) {
                        IRegion region= document.getLineInformation(i);
                        // the delimiter sits immediately after the line's content
                        document.replace(region.getOffset() + region.getLength(), delimiter.length(), fLineDelimiter);
                    }

                    monitor.worked(1);
                }

            } catch (BadLocationException e) {
                throw new InvocationTargetException(e);

            } finally {
                // restore partitioners, close the compound change and re-enable
                // redraw in the reverse order of the setup above
                if (partitioners != null)
                    TextUtilities.addDocumentPartitioners(document, partitioners);

                fRewriteTarget.endCompoundChange();
                if (isLargeUpdate)
                    fRewriteTarget.setRedraw(true);

                monitor.done();
            }
        }

        /*
         * Synchronous variant used under the busy indicator; cancellation and
         * conversion errors are intentionally ignored here.
         *
         * @see Runnable#run()
         */
        public void run() {
            try {
                run(new NullProgressMonitor());
            } catch (InterruptedException e) {
                // should not happen
            } catch (InvocationTargetException e) {
                // should not happen
            }
        }
    }

//	/**
//	 * Returns whether the given document uses only the given line delimiter.
//	 * @param document the document to check
//	 * @param lineDelimiter the line delimiter to check for
//	 */
//	private static boolean usesLineDelimiterExclusively(IDocument document, String lineDelimiter) {
//
//		try {
//			final int lineCount= document.getNumberOfLines();
//			for (int i= 0; i < lineCount; i++) {
//				final String delimiter= document.getLineDelimiter(i);
//				if (delimiter != null && delimiter.length() > 0 && !delimiter.equals(lineDelimiter))
//					return false;
//			}
//
//		} catch (BadLocationException e) {
//			return false;
//		}
//
//		return true;
//	}

    /**
     * Computes and returns the key to be used to lookup the action's label in
     * its resource bundle.
     *
     * @param lineDelimiter the line delimiter
     * @param platformLineDelimiter the platform line delimiter
     * @return the key used to lookup the action's label, or <code>null</code> if
     *         the delimiter is not one of "\r\n", "\n" or "\r"
     */
    private static String getLabelKey(String lineDelimiter, String platformLineDelimiter) {
        if (lineDelimiter.equals(platformLineDelimiter)) {

            if (lineDelimiter.equals("\r\n")) //$NON-NLS-1$
                return "Editor.ConvertLineDelimiter.toWindows.default.label"; //$NON-NLS-1$

            if (lineDelimiter.equals("\n")) //$NON-NLS-1$
                return "Editor.ConvertLineDelimiter.toUNIX.default.label"; //$NON-NLS-1$

            if (lineDelimiter.equals("\r")) //$NON-NLS-1$
                return "Editor.ConvertLineDelimiter.toMac.default.label"; //$NON-NLS-1$

        } else {

            if (lineDelimiter.equals("\r\n")) //$NON-NLS-1$
                return "Editor.ConvertLineDelimiter.toWindows.label"; //$NON-NLS-1$

            if (lineDelimiter.equals("\n")) //$NON-NLS-1$
                return "Editor.ConvertLineDelimiter.toUNIX.label"; //$NON-NLS-1$

            if (lineDelimiter.equals("\r")) //$NON-NLS-1$
                return "Editor.ConvertLineDelimiter.toMac.label"; //$NON-NLS-1$
        }

        return null;
    }

    /*
     * Looks up the label in the constructed-keys bundle; falls back to "!key!"
     * when the key is missing, matching standard NLS behavior.
     *
     * @since 3.1
     */
    private static String getString(String key) {
        try {
            return EditorMessages.getBundleForConstructedKeys().getString(key);
        } catch (MissingResourceException e) {
            return "!" + key + "!";//$NON-NLS-2$ //$NON-NLS-1$
        }
    }

    /*
     * Enabled only when the editor's input can be modified.
     *
     * @see IUpdate#update()
     */
    public void update() {
        super.update();
        setEnabled(canModifyEditor());
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.streaming.api.functions.sink;
import org.apache.flink.annotation.PublicEvolving;
import org.apache.flink.api.common.state.ListState;
import org.apache.flink.api.common.state.ListStateDescriptor;
import org.apache.flink.api.common.typeinfo.TypeHint;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.runtime.state.CheckpointListener;
import org.apache.flink.runtime.state.FunctionInitializationContext;
import org.apache.flink.runtime.state.FunctionSnapshotContext;
import org.apache.flink.streaming.api.checkpoint.CheckpointedFunction;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import static java.util.Objects.requireNonNull;
import static org.apache.flink.util.Preconditions.checkState;
/**
* This is a recommended base class for all of the {@link SinkFunction} that intend to implement exactly-once semantic.
* It does that by implementing two phase commit algorithm on top of the {@link CheckpointedFunction} and
* {@link CheckpointListener}. User should provide custom {@code TXN} (transaction handle) and implement abstract
* methods handling this transaction handle.
*
* @param <IN> Input type for {@link SinkFunction}.
* @param <TXN> Transaction to store all of the information required to handle a transaction.
* @param <CONTEXT> Context that will be shared across all invocations for the given {@link TwoPhaseCommitSinkFunction}
* instance. Context is created once
*/
@PublicEvolving
public abstract class TwoPhaseCommitSinkFunction<IN, TXN, CONTEXT>
extends RichSinkFunction<IN>
implements CheckpointedFunction, CheckpointListener {
private static final Logger LOG = LoggerFactory.getLogger(TwoPhaseCommitSinkFunction.class);

/** Descriptor for the operator state holding the open and pending transactions. */
protected final ListStateDescriptor<State<TXN, CONTEXT>> stateDescriptor;

/** Pre-committed transactions awaiting commit, keyed by checkpoint id in creation order. */
protected final LinkedHashMap<Long, TXN> pendingCommitTransactions = new LinkedHashMap<>();

/** The transaction currently receiving writes; null before initialization. */
@Nullable
protected TXN currentTransaction;

/** User context shared across invocations; empty unless a subclass provides one. */
protected Optional<CONTEXT> userContext;

/** Handle to the checkpointed operator state. */
protected ListState<State<TXN, CONTEXT>> state;
/**
* Use default {@link ListStateDescriptor} for internal state serialization. Helpful utilities for using this
* constructor are {@link TypeInformation#of(Class)}, {@link org.apache.flink.api.common.typeinfo.TypeHint} and
* {@link TypeInformation#of(TypeHint)}. Example:
* <pre>
* {@code
* TwoPhaseCommitSinkFunction(TypeInformation.of(new TypeHint<State<TXN, CONTEXT>>() {}));
* }
* </pre>
* @param stateTypeInformation {@link TypeInformation} for POJO holding state of opened transactions.
*/
public TwoPhaseCommitSinkFunction(TypeInformation<State<TXN, CONTEXT>> stateTypeInformation) {
    // delegate to the descriptor-based constructor with a default-named descriptor
    this(new ListStateDescriptor<State<TXN, CONTEXT>>("state", stateTypeInformation));
}
/**
* Instantiate {@link TwoPhaseCommitSinkFunction} with custom state descriptors.
*
* @param stateDescriptor descriptor for transactions POJO.
*/
public TwoPhaseCommitSinkFunction(ListStateDescriptor<State<TXN, CONTEXT>> stateDescriptor) {
    // fail fast on a null descriptor; state cannot be initialized without it
    this.stateDescriptor = requireNonNull(stateDescriptor, "stateDescriptor is null");
}
/**
 * Creates the user context shared across invocations. The default provides no
 * context; subclasses may override.
 *
 * @return the initial user context, empty by default
 */
protected Optional<CONTEXT> initializeUserContext() {
    return Optional.empty();
}
/** Returns the current user context. */
protected Optional<CONTEXT> getUserContext() {
    return userContext;
}
// ------ methods that should be implemented in child class to support two phase commit algorithm ------
/**
 * Write {@code value} within the given open transaction.
 *
 * @param transaction the currently open transaction handle
 * @param value the record to write
 */
protected abstract void invoke(TXN transaction, IN value) throws Exception;
/**
 * Method that starts a new transaction.
 *
 * @return newly created transaction handle.
 */
protected abstract TXN beginTransaction() throws Exception;
/**
 * Pre commit previously created transaction. Pre commit must make all of the necessary steps to prepare the
 * transaction for a commit that might happen in the future. After this point the transaction might still be
 * aborted, but underlying implementation must ensure that commit calls on already pre committed transactions
 * will always succeed.
 *
 * <p>Usually implementation involves flushing the data.
 *
 * @param transaction the transaction to prepare for commit
 */
protected abstract void preCommit(TXN transaction) throws Exception;
/**
 * Commit a pre-committed transaction. If this method fail, Flink application will be
 * restarted and {@link TwoPhaseCommitSinkFunction#recoverAndCommit(Object)} will be called again for the
 * same transaction.
 *
 * @param transaction the pre-committed transaction to commit
 */
protected abstract void commit(TXN transaction);
/**
* Invoked on recovered transactions after a failure. User implementation must ensure that this call will eventually
* succeed. If it fails, Flink application will be restarted and it will be invoked again. If it does not succeed
* a data loss will occur. Transactions will be recovered in an order in which they were created.
*/
protected void recoverAndCommit(TXN transaction) {
commit(transaction);
}
/**
* Abort a transaction.
*/
protected abstract void abort(TXN transaction);
/**
* Abort a transaction that was rejected by a coordinator after a failure.
*/
protected void recoverAndAbort(TXN transaction) {
abort(transaction);
}
protected void finishRecoveringContext() {
}
	// ------ entry points for above methods implementing {@link CheckpointedFunction} and {@link CheckpointListener} ------
	@Override
	public final void invoke(IN value) throws Exception {
		// Route every record into the currently open transaction.
		invoke(currentTransaction, value);
	}
	/**
	 * Commits every pending transaction whose checkpoint id is less than or equal to the
	 * completed checkpoint id. Later pending transactions (from concurrent/overlapping
	 * checkpoints) are intentionally left in place.
	 */
	@Override
	public final void notifyCheckpointComplete(long checkpointId) throws Exception {
		// the following scenarios are possible here
		//
		// (1) there is exactly one transaction from the latest checkpoint that
		// was triggered and completed. That should be the common case.
		// Simply commit that transaction in that case.
		//
		// (2) there are multiple pending transactions because one previous
		// checkpoint was skipped. That is a rare case, but can happen
		// for example when:
		//
		// - the master cannot persist the metadata of the last
		// checkpoint (temporary outage in the storage system) but
		// could persist a successive checkpoint (the one notified here)
		//
		// - other tasks could not persist their status during
		// the previous checkpoint, but did not trigger a failure because they
		// could hold onto their state and could successfully persist it in
		// a successive checkpoint (the one notified here)
		//
		// In both cases, the prior checkpoint never reached a committed state, but
		// this checkpoint is always expected to subsume the prior one and cover all
		// changes since the last successful one. As a consequence, we need to commit
		// all pending transactions.
		//
		// (3) Multiple transactions are pending, but the checkpoint complete notification
		// relates not to the latest. That is possible, because notification messages
		// can be delayed (in an extreme case, arriving only after a succeeding checkpoint
		// was triggered) and because there can be concurrent overlapping checkpoints
		// (a new one is started before the previous fully finished).
		//
		// ==> There should never be a case where we have no pending transaction here
		//
		Iterator<Map.Entry<Long, TXN>> pendingTransactionIterator = pendingCommitTransactions.entrySet().iterator();
		checkState(pendingTransactionIterator.hasNext(), "checkpoint completed, but no transaction pending");
		while (pendingTransactionIterator.hasNext()) {
			Map.Entry<Long, TXN> entry = pendingTransactionIterator.next();
			Long pendingTransactionCheckpointId = entry.getKey();
			TXN pendingTransaction = entry.getValue();
			// Skip transactions belonging to checkpoints newer than the one notified here (case 3).
			if (pendingTransactionCheckpointId > checkpointId) {
				continue;
			}
			LOG.info("{} - checkpoint {} complete, committing transaction {} from checkpoint {}",
				name(), checkpointId, pendingTransaction, pendingTransactionCheckpointId);
			commit(pendingTransaction);
			LOG.debug("{} - committed checkpoint transaction {}", name(), pendingTransaction);
			// Remove via the iterator so the map is safely mutated while iterating.
			pendingTransactionIterator.remove();
		}
	}
@Override
public void snapshotState(FunctionSnapshotContext context) throws Exception {
// this is like the pre-commit of a 2-phase-commit transaction
// we are ready to commit and remember the transaction
checkState(currentTransaction != null, "bug: no transaction object when performing state snapshot");
long checkpointId = context.getCheckpointId();
LOG.debug("{} - checkpoint {} triggered, flushing transaction '{}'", name(), context.getCheckpointId(), currentTransaction);
preCommit(currentTransaction);
pendingCommitTransactions.put(checkpointId, currentTransaction);
LOG.debug("{} - stored pending transactions {}", name(), pendingCommitTransactions);
currentTransaction = beginTransaction();
LOG.debug("{} - started new transaction '{}'", name(), currentTransaction);
state.clear();
state.add(new State<>(
this.currentTransaction,
new ArrayList<>(pendingCommitTransactions.values()),
userContext));
}
	/**
	 * Restores operator state: commits transactions that were pre-committed before the failure,
	 * aborts the transaction that was open at failure time, restores (or initializes) the user
	 * context, and finally opens a new transaction for the fresh run.
	 */
	@Override
	public void initializeState(FunctionInitializationContext context) throws Exception {
		// when we are restoring state with pendingCommitTransactions, we don't really know whether the
		// transactions were already committed, or whether there was a failure between
		// completing the checkpoint on the master, and notifying the writer here.
		// (the common case is actually that it was already committed, the window
		// between the commit on the master and the notification here is very small)
		// it is possible to not have any transactions at all if there was a failure before
		// the first completed checkpoint, or in case of a scale-out event, where some of the
		// new tasks do not have any transactions assigned to check)
		// we can have more than one transaction to check in case of a scale-in event, or
		// for the reasons discussed in the 'notifyCheckpointComplete()' method.
		state = context.getOperatorStateStore().getListState(stateDescriptor);
		if (context.isRestored()) {
			LOG.info("{} - restoring state", name());
			for (State<TXN, CONTEXT> operatorState : state.get()) {
				userContext = operatorState.getContext();
				List<TXN> recoveredTransactions = operatorState.getPendingCommitTransactions();
				for (TXN recoveredTransaction : recoveredTransactions) {
					// If this fails, there is actually a data loss
					recoverAndCommit(recoveredTransaction);
					LOG.info("{} committed recovered transaction {}", name(), recoveredTransaction);
				}
				// The transaction that was open (not yet pre-committed) when the failure hit
				// can never be committed safely, so it is aborted.
				recoverAndAbort(operatorState.getPendingTransaction());
				LOG.info("{} aborted recovered transaction {}", name(), operatorState.getPendingTransaction());
				// NOTE(review): invoked once per restored state entry, not once after the whole
				// loop — confirm that is intended when multiple entries are restored (scale-in).
				if (userContext.isPresent()) {
					finishRecoveringContext();
				}
			}
		}
		// if in restore we didn't get any userContext or we are initializing from scratch
		if (userContext == null) {
			LOG.info("{} - no state to restore", name());
			userContext = initializeUserContext();
		}
		this.pendingCommitTransactions.clear();
		currentTransaction = beginTransaction();
		LOG.debug("{} - started new transaction '{}'", name(), currentTransaction);
	}
	/**
	 * Aborts the currently open (never pre-committed) transaction on shutdown.
	 * Pending-commit transactions are deliberately left untouched here; they are resolved
	 * by {@link #notifyCheckpointComplete(long)} or by recovery.
	 */
	@Override
	public void close() throws Exception {
		super.close();
		if (currentTransaction != null) {
			abort(currentTransaction);
			// Null out so a repeated close() does not abort twice.
			currentTransaction = null;
		}
	}
private String name() {
return String.format(
"%s %s/%s",
this.getClass().getSimpleName(),
getRuntimeContext().getIndexOfThisSubtask(),
getRuntimeContext().getNumberOfParallelSubtasks());
}
	/**
	 * State POJO class coupling pendingTransaction, context and pendingCommitTransactions.
	 *
	 * <p>Must remain a mutable JavaBean (public no-arg constructor, getters/setters) so it can be
	 * (de)serialized by Flink's state backend via the descriptor passed to the sink constructor.
	 */
	public static class State<TXN, CONTEXT> {
		// The transaction that was open (not yet pre-committed) when the snapshot was taken.
		protected TXN pendingTransaction;
		// Transactions that were pre-committed but not yet confirmed committed.
		protected List<TXN> pendingCommitTransactions = new ArrayList<>();
		// User-defined context carried across restarts; may be Optional.empty().
		protected Optional<CONTEXT> context;
		public State() {
		}
		public State(TXN pendingTransaction, List<TXN> pendingCommitTransactions, Optional<CONTEXT> context) {
			this.context = requireNonNull(context, "context is null");
			this.pendingTransaction = requireNonNull(pendingTransaction, "pendingTransaction is null");
			this.pendingCommitTransactions = requireNonNull(pendingCommitTransactions, "pendingCommitTransactions is null");
		}
		public TXN getPendingTransaction() {
			return pendingTransaction;
		}
		public void setPendingTransaction(TXN pendingTransaction) {
			this.pendingTransaction = pendingTransaction;
		}
		public List<TXN> getPendingCommitTransactions() {
			return pendingCommitTransactions;
		}
		public void setPendingCommitTransactions(List<TXN> pendingCommitTransactions) {
			this.pendingCommitTransactions = pendingCommitTransactions;
		}
		public Optional<CONTEXT> getContext() {
			return context;
		}
		public void setContext(Optional<CONTEXT> context) {
			this.context = context;
		}
	}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.statistics;
import org.apache.geode.SystemFailure;
import org.apache.geode.distributed.internal.DistributionConfig;
import org.apache.geode.internal.concurrent.ConcurrentHashSet;
import org.apache.geode.internal.logging.LogService;
import org.apache.geode.internal.logging.log4j.LogMarker;
import org.apache.logging.log4j.Logger;
import java.util.List;
import java.util.concurrent.SynchronousQueue;
/**
* @since GemFire 7.0
*/
/**
 * Fans statistics samples out to the registered {@link StatisticsMonitor}s. When the
 * {@code gemfire.stats.enableMonitorThread} system property is set, notification is handed off
 * to a dedicated daemon thread ({@link StatMonitorNotifier}); otherwise monitors are invoked
 * inline on the sampling thread.
 *
 * @since GemFire 7.0
 */
public class StatMonitorHandler implements SampleHandler {
  private static final Logger logger = LogService.getLogger();
  /** System property name that enables the dedicated monitor notification thread. */
  protected static final String ENABLE_MONITOR_THREAD =
      DistributionConfig.GEMFIRE_PREFIX + "stats.enableMonitorThread";
  // Read once at construction time; fixed for the lifetime of this handler.
  private final boolean enableMonitorThread;
  /** The registered monitors */
  private final ConcurrentHashSet<StatisticsMonitor> monitors =
      new ConcurrentHashSet<StatisticsMonitor>();
  /** Protected by synchronization on this handler instance */
  private volatile StatMonitorNotifier notifier;
  /** Constructs a new StatMonitorHandler instance */
  public StatMonitorHandler() {
    this.enableMonitorThread = Boolean.getBoolean(ENABLE_MONITOR_THREAD);
  }
  /**
   * Adds a monitor which will be notified of samples.
   *
   * @return true if the monitor was not already registered
   */
  public boolean addMonitor(StatisticsMonitor monitor) {
    synchronized (this) {
      boolean added = false;
      if (!this.monitors.contains(monitor)) {
        added = this.monitors.add(monitor);
      }
      // Lazily start the notifier thread once there is at least one monitor.
      if (!this.monitors.isEmpty()) {
        startNotifier_IfEnabledAndNotRunning();
      }
      return added;
    }
  }
  /**
   * Removes a monitor that will no longer be used.
   *
   * @return true if the monitor was registered and has been removed
   */
  public boolean removeMonitor(StatisticsMonitor monitor) {
    synchronized (this) {
      boolean removed = false;
      if (this.monitors.contains(monitor)) {
        removed = this.monitors.remove(monitor);
      }
      // Stop the notifier thread when the last monitor goes away.
      if (this.monitors.isEmpty()) {
        stopNotifier_IfEnabledAndRunning();
      }
      return removed;
    }
  }
  /**
   * Stops the notifier thread if one exists.
   */
  public void close() {
    synchronized (this) {
      stopNotifier_IfEnabledAndRunning();
    }
  }
  @Override
  public void sampled(long nanosTimeStamp, List<ResourceInstance> resourceInstances) {
    synchronized (this) {
      if (this.enableMonitorThread) {
        final StatMonitorNotifier thread = this.notifier;
        if (thread != null) {
          try {
            // NOTE(review): nanosTimeStamp is not forwarded; monitors receive wall-clock
            // millis instead — confirm this is intended.
            thread.monitor(new MonitorTask(System.currentTimeMillis(), resourceInstances));
          } catch (InterruptedException e) {
            // Restore the interrupt flag so callers can observe it.
            Thread.currentThread().interrupt();
          }
        }
      } else {
        // Inline notification on the sampling thread.
        monitor(System.currentTimeMillis(), resourceInstances);
      }
    }
  }
  // Invokes every registered monitor, isolating failures so one bad monitor
  // cannot prevent the others from being notified.
  private void monitor(final long sampleTimeMillis, final List<ResourceInstance> resourceInstance) {
    for (StatisticsMonitor monitor : StatMonitorHandler.this.monitors) {
      try {
        monitor.monitor(sampleTimeMillis, resourceInstance);
      } catch (VirtualMachineError e) {
        // Per SystemFailure contract: record the fatal error, then rethrow.
        SystemFailure.initiateFailure(e);
        throw e;
      } catch (Error e) {
        SystemFailure.checkFailure();
        logger.warn(LogMarker.STATISTICS, "StatisticsMonitor {} threw {}", monitor,
            e.getClass().getSimpleName(), e);
      } catch (RuntimeException e) {
        logger.warn(LogMarker.STATISTICS, "StatisticsMonitor {} threw {}", monitor,
            e.getClass().getSimpleName(), e);
      }
    }
  }
  @Override
  public void allocatedResourceType(ResourceType resourceType) {}
  @Override
  public void allocatedResourceInstance(ResourceInstance resourceInstance) {}
  @Override
  public void destroyedResourceInstance(ResourceInstance resourceInstance) {}
  /** For testing only */
  ConcurrentHashSet<StatisticsMonitor> getMonitorsSnapshot() {
    return this.monitors;
  }
  /** For testing only */
  StatMonitorNotifier getStatMonitorNotifier() {
    return this.notifier;
  }
  // Caller must hold the lock on this handler.
  private void startNotifier_IfEnabledAndNotRunning() {
    if (this.enableMonitorThread && this.notifier == null) {
      this.notifier = new StatMonitorNotifier();
      this.notifier.start();
    }
  }
  // Caller must hold the lock on this handler.
  private void stopNotifier_IfEnabledAndRunning() {
    if (this.enableMonitorThread && this.notifier != null) {
      this.notifier.stop();
      this.notifier = null;
    }
  }
  /**
   * Daemon thread that receives {@link MonitorTask}s from the sampling thread through a
   * {@link SynchronousQueue} (a zero-capacity hand-off) and notifies the registered monitors.
   *
   * @since GemFire 7.0
   */
  class StatMonitorNotifier implements Runnable {
    /** True while this notifier's thread is running */
    private volatile boolean alive;
    /** Protected by synchronization on this notifier instance */
    private Thread consumer;
    /** Protected by synchronization on this notifier instance */
    private boolean waiting;
    /** Protected by synchronization on this notifier instance */
    private Thread producer;
    /** Used to hand-off from producer to consumer */
    private final SynchronousQueue<MonitorTask> task = new SynchronousQueue<MonitorTask>();
    StatMonitorNotifier() {}
    @Override
    public void run() {
      final boolean isDebugEnabled_STATISTICS = logger.isTraceEnabled(LogMarker.STATISTICS);
      if (isDebugEnabled_STATISTICS) {
        logger.trace(LogMarker.STATISTICS, "StatMonitorNotifier is starting {}", this);
      }
      try {
        work();
      } finally {
        synchronized (this) {
          this.alive = false;
          // Unblock a producer that may be parked in task.put().
          if (this.producer != null) {
            this.producer.interrupt();
          }
        }
      }
      if (isDebugEnabled_STATISTICS) {
        logger.trace(LogMarker.STATISTICS, "StatMonitorNotifier is stopping {}", this);
      }
    }
    // Main consumer loop: take a task from the hand-off queue and notify all monitors.
    // Re-checks 'alive' around the blocking take() so stop() can terminate the loop.
    private void work() {
      boolean working = true;
      while (working) {
        try {
          MonitorTask latestTask = null;
          synchronized (this) {
            working = this.alive;
            if (working) {
              this.waiting = true;
            }
          }
          if (working) {
            try {
              latestTask = this.task.take(); // blocking
            } finally {
              synchronized (this) {
                this.waiting = false;
                working = this.alive;
              }
            }
          }
          if (working && latestTask != null) {
            for (StatisticsMonitor monitor : StatMonitorHandler.this.monitors) {
              try {
                monitor.monitor(latestTask.getSampleTimeMillis(),
                    latestTask.getResourceInstances());
              } catch (VirtualMachineError e) {
                SystemFailure.initiateFailure(e);
                throw e;
              } catch (Error e) {
                SystemFailure.checkFailure();
                logger.warn(LogMarker.STATISTICS, "StatisticsMonitor {} threw {}", monitor,
                    e.getClass().getSimpleName(), e);
              } catch (RuntimeException e) {
                logger.warn(LogMarker.STATISTICS, "StatisticsMonitor {} threw {}", monitor,
                    e.getClass().getSimpleName(), e);
              }
            }
          }
        } catch (InterruptedException e) {
          // Interrupt is the shutdown signal (see stop()); exit the loop.
          synchronized (this) {
            working = false;
          }
        }
      }
    }
    void start() {
      synchronized (this) {
        if (this.consumer == null) {
          this.consumer = new Thread(this, toString());
          this.consumer.setDaemon(true);
          // Set alive before starting so the consumer's first check sees true.
          this.alive = true;
          this.consumer.start();
        }
      }
    }
    void stop() {
      synchronized (this) {
        if (this.consumer != null) {
          this.alive = false;
          this.consumer.interrupt();
          this.consumer = null;
        }
      }
    }
    // Producer side of the hand-off. Because SynchronousQueue has no capacity, put()
    // blocks until the notifier thread takes the task (or the producer is interrupted
    // during shutdown, which is swallowed deliberately below).
    void monitor(MonitorTask task) throws InterruptedException {
      boolean isAlive = false;
      synchronized (this) {
        if (this.alive) {
          isAlive = true;
          // Remember the producer so run()'s finally block can interrupt it on shutdown.
          this.producer = Thread.currentThread();
        }
      }
      if (isAlive) {
        try {
          this.task.put(task);
        } catch (InterruptedException e) {
          // fall through and return
        } finally {
          synchronized (this) {
            this.producer = null;
          }
        }
      }
    }
    boolean isWaiting() {
      synchronized (this) {
        return this.waiting;
      }
    }
    boolean isAlive() {
      synchronized (this) {
        return this.alive;
      }
    }
    @Override
    public String toString() {
      final StringBuilder sb = new StringBuilder(getClass().getSimpleName());
      sb.append(" Thread").append(" #").append(System.identityHashCode(this));
      return sb.toString();
    }
  }
  /**
   * Immutable value object pairing a sample timestamp (millis) with the sampled resource
   * instances, handed from the sampling thread to the notifier thread.
   *
   * @since GemFire 7.0
   */
  static class MonitorTask {
    private final long sampleTimeMillis;
    private final List<ResourceInstance> resourceInstances;
    MonitorTask(long sampleTimeMillis, List<ResourceInstance> resourceInstances) {
      this.sampleTimeMillis = sampleTimeMillis;
      this.resourceInstances = resourceInstances;
    }
    long getSampleTimeMillis() {
      return this.sampleTimeMillis;
    }
    List<ResourceInstance> getResourceInstances() {
      return this.resourceInstances;
    }
  }
}
| |
/**********************************************************************************
* $URL: https://source.sakaiproject.org/svn/portal/tags/sakai-10.6/portal-impl/impl/src/java/org/sakaiproject/portal/charon/handlers/SiteHandler.java $
* $Id: SiteHandler.java 315492 2014-11-18 09:15:50Z jjmerono@um.es $
***********************************************************************************
*
* Copyright (c) 2003, 2004, 2005, 2006, 2007, 2008 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.portal.charon.handlers;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Locale;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.Cookie;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.sakaiproject.authz.api.Role;
import org.sakaiproject.authz.api.SecurityAdvisor;
import org.sakaiproject.authz.cover.AuthzGroupService;
import org.sakaiproject.authz.cover.SecurityService;
import org.sakaiproject.component.cover.ServerConfigurationService;
import org.sakaiproject.entity.api.ResourceProperties;
import org.sakaiproject.entity.api.ResourcePropertiesEdit;
import org.sakaiproject.entity.cover.EntityManager;
import org.sakaiproject.event.api.Event;
import org.sakaiproject.event.cover.EventTrackingService;
import org.sakaiproject.exception.IdUnusedException;
import org.sakaiproject.exception.PermissionException;
import org.sakaiproject.portal.api.Portal;
import org.sakaiproject.portal.api.PortalService;
import org.sakaiproject.portal.api.PortalHandlerException;
import org.sakaiproject.portal.api.PortalRenderContext;
import org.sakaiproject.portal.api.SiteView;
import org.sakaiproject.portal.api.StoredState;
import org.sakaiproject.portal.charon.site.AllSitesViewImpl;
import org.sakaiproject.tool.api.Tool;
import org.sakaiproject.site.api.Site;
import org.sakaiproject.site.api.SitePage;
import org.sakaiproject.site.api.ToolConfiguration;
import org.sakaiproject.site.cover.SiteService;
import org.sakaiproject.tool.api.Session;
import org.sakaiproject.tool.api.ToolException;
import org.sakaiproject.user.api.Preferences;
import org.sakaiproject.user.api.UserNotDefinedException;
import org.sakaiproject.user.cover.PreferencesService;
import org.sakaiproject.user.cover.UserDirectoryService;
import org.sakaiproject.tool.api.ActiveTool;
import org.sakaiproject.tool.cover.ActiveToolManager;
import org.sakaiproject.util.Web;
import org.sakaiproject.util.ResourceLoader;
import org.sakaiproject.portal.util.URLUtils;
import org.sakaiproject.portal.util.ToolUtils;
import org.sakaiproject.portal.util.PortalUtils;
import org.sakaiproject.portal.util.ByteArrayServletResponse;
import org.sakaiproject.util.Validator;
import org.sakaiproject.portal.charon.handlers.PDAHandler;
/**
* @author ieb
* @since Sakai 2.4
* @version $Rev: 315492 $
*/
public class SiteHandler extends WorksiteHandler
{
	// Render-context fragment names used when assembling the portal page.
	private static final String INCLUDE_SITE_NAV = "include-site-nav";
	private static final String INCLUDE_LOGO = "include-logo";
	private static final String INCLUDE_TABS = "include-tabs";
	private static final Log log = LogFactory.getLog(SiteHandler.class);
	// URL fragment this handler claims: /portal/site/...
	private static final String URL_FRAGMENT = "site";
	// Number of site tabs shown before the "more" dropdown; overridden from config in the constructor.
	private int configuredTabsToDisplay = 5;
	// Whether the DHTML-based "more sites" widget is enabled (portal.use.dhtml.more).
	private boolean useDHTMLMore = false;
	private static ResourceLoader rb = new ResourceLoader("sitenav");
	// When these strings appear in the URL they will be replaced by a calculated value based on the context.
	// This can be replaced by the users myworkspace.
	private String mutableSitename ="-";
	// This can be replaced by the page on which a tool appears.
	private String mutablePagename ="-";
public SiteHandler()
{
setUrlFragment(SiteHandler.URL_FRAGMENT);
configuredTabsToDisplay = ServerConfigurationService.getInt(
Portal.CONFIG_DEFAULT_TABS, 5);
useDHTMLMore = Boolean.valueOf(ServerConfigurationService.getBoolean(
"portal.use.dhtml.more", true));
mutableSitename = ServerConfigurationService.getString("portal.mutable.sitename", "-");
mutablePagename = ServerConfigurationService.getString("portal.mutable.pagename", "-");
}
	/**
	 * Dispatches GET requests of the form /portal/site/{siteId}[/page/{pageId}|/tool/{toolId}|
	 * /tool-reset/{toolId}|/{commonToolId}]. Returns NEXT when the URL is not a "site" URL so
	 * the next handler in the chain can try it.
	 */
	@Override
	public int doGet(String[] parts, HttpServletRequest req, HttpServletResponse res,
			Session session) throws PortalHandlerException
	{
		if ((parts.length >= 2) && (parts[1].equals(SiteHandler.URL_FRAGMENT)))
		{
			// This is part of the main portal so we simply remove the attribute
			session.setAttribute(PortalService.SAKAI_CONTROLLING_PORTAL, null);
			try
			{
				// site might be specified
				String siteId = null;
				if (parts.length >= 3)
				{
					siteId = parts[2];
				}
				// recognize an optional page/pageid
				String pageId = null;
				String toolId = null;
				// may also have the tool part, so check that length is 5 or greater.
				if ((parts.length >= 5) && (parts[3].equals("page")))
				{
					pageId = parts[4];
				}
				// Tool resetting URL - clear state and forward to the real tool
				// URL
				// /portal/site/site-id/tool-reset/toolId
				// 0 1 2 3 4
				if ((siteId != null) && (parts.length == 5) && (parts[3].equals("tool-reset")))
				{
					toolId = parts[4];
					// Rebuild the equivalent /tool/ URL, carrying the query string along,
					// and redirect there after flagging the reset in the portal service.
					String toolUrl = req.getContextPath() + "/site/" + siteId + "/tool"
							+ Web.makePath(parts, 4, parts.length);
					String queryString = Validator.generateQueryString(req);
					if (queryString != null)
					{
						toolUrl = toolUrl + "?" + queryString;
					}
					portalService.setResetState("true");
					res.sendRedirect(toolUrl);
					return RESET_DONE;
				}
				// may also have the tool part, so check that length is 5 or greater.
				if ((parts.length >= 5) && (parts[3].equals("tool")))
				{
					toolId = parts[4];
				}
				// /portal/site/{siteId}/{commonToolId} shorthand (e.g. .../sakai.announcements)
				String commonToolId = null;
				if(parts.length == 4)
				{
					commonToolId = parts[3];
				}
				doSite(req, res, session, siteId, pageId, toolId, commonToolId, parts,
						req.getContextPath() + req.getServletPath());
				return END;
			}
			catch (Exception ex)
			{
				throw new PortalHandlerException(ex);
			}
		}
		else
		{
			return NEXT;
		}
	}
	/**
	 * Resolves the requested site/page/tool, enforces access control, optionally buffers the
	 * tool's output inline, and renders the full portal site page. Redirects instead of
	 * rendering when the site is joinable-but-not-joined, when an inline-tool URL is
	 * incomplete, or when a buffered response turns out not to be parseable markup.
	 */
	public void doSite(HttpServletRequest req, HttpServletResponse res, Session session,
			String siteId, String pageId, String toolId,
			String commonToolId, String [] parts, String toolContextPath) throws ToolException,
			IOException
	{
		// default site if not set
		String userId = session.getUserId();
		if (siteId == null)
		{
			if (userId == null)
			{
				// Anonymous user: fall back to the gateway site.
				siteId = portal.getSiteHelper().getGatewaySiteId();
				if (siteId == null)
				{
					siteId = ServerConfigurationService.getGatewaySiteId();
				}
			}
			else
			{
				// TODO Should maybe switch to portal.getSiteHelper().getMyWorkspace()
				AllSitesViewImpl allSites = (AllSitesViewImpl)portal.getSiteHelper().getSitesView(SiteView.View.ALL_SITES_VIEW, req, session, siteId);
				List<Map> sites = (List<Map>)allSites.getRenderContextObject();
				if (sites.size() > 0) {
					siteId = (String)sites.get(0).get("siteId");
				}
				else
					siteId = SiteService.getUserSiteId(userId);
			}
		}
		// Can get a URL like /portal/site/-/page/-/tool/sakai.rwiki.
		// The "mutable" site and page can not be given specific values since the
		// final resolution depends on looking up the specific placement of the tool
		// with this common id in the my workspace for this user.
		// check for a mutable site to be resolved here
		if (mutableSitename.equalsIgnoreCase(siteId) && (session.getUserId() != null)) {
			siteId = SiteService.getUserSiteId(userId);
		}
		// find the site, for visiting
		boolean siteDenied = false;
		Site site = null;
		try
		{
			// Apply any security advisors stashed in the session before the visit check.
			Set<SecurityAdvisor> advisors = (Set<SecurityAdvisor>)session.getAttribute("sitevisit.security.advisor");
			if (advisors != null) {
				for (SecurityAdvisor advisor:advisors) {
					SecurityService.pushAdvisor(advisor);
					//session.removeAttribute("sitevisit.security.advisor");
				}
			}
			// This should understand aliases as well as IDs
			site = portal.getSiteHelper().getSiteVisit(siteId);
			// SAK-20509 remap the siteId from the Site object we now have, since it may have originally been an alias, but has since been translated.
			siteId = site.getId();
		}
		catch (IdUnusedException e)
		{
		}
		catch (PermissionException e)
		{
			// Denied but joinable: offer the join page instead of an error.
			if (ServerConfigurationService.getBoolean("portal.redirectJoin", true) &&
					userId != null && portal.getSiteHelper().isJoinable(siteId, userId))
			{
				String redirectUrl = Web.returnUrl(req, "/join/"+siteId);
				res.sendRedirect(redirectUrl);
				return;
			}
			siteDenied = true;
		}
		if (site == null)
		{
			// if not logged in, give them a chance
			if (userId == null)
			{
				StoredState ss = portalService.newStoredState("directtool", "tool");
				ss.setRequest(req);
				ss.setToolContextPath(toolContextPath);
				portalService.setStoredState(ss);
				portal.doLogin(req, res, session, URLUtils.getSafePathInfo(req), false);
			}
			else
			{
				// Post an event for denied site visits by known users.
				// This can be picked up to check the user state and refresh it if stale,
				// such as showing links to sites that are no longer accessible.
				// It is also helpful for event log analysis for user trouble or bad behavior.
				if (siteDenied)
				{
					Event event = EventTrackingService.newEvent(SiteService.EVENT_SITE_VISIT_DENIED, siteId, false);
					EventTrackingService.post(event);
				}
				portal.doError(req, res, session, Portal.ERROR_SITE);
			}
			return;
		}
		// Supports urls like: /portal/site/{SITEID}/sakai.announcements
		// (site is always non-null here thanks to the early return above)
		if(site != null && commonToolId != null)
		{
			ToolConfiguration tc = null;
			if(!commonToolId.startsWith("sakai."))
			{
				// Try the most likely case first, that of common tool ids starting with 'sakai.'
				tc = site.getToolForCommonId("sakai." + commonToolId);
				if(tc == null)
				{
					// That failed, try the supplied tool id
					tc = site.getToolForCommonId(commonToolId);
				}
			}
			if(tc != null)
			{
				pageId = tc.getPageId();
			}
		}
		// Find the pageId looking backwards through the toolId
		if(site != null && pageId == null && toolId != null ) {
			SitePage p = (SitePage) ToolUtils.getPageForTool(site, toolId);
			if ( p != null ) pageId = p.getId();
		}
		// if no page id, see if there was a last page visited for this site
		if (pageId == null)
		{
			pageId = (String) session.getAttribute(Portal.ATTR_SITE_PAGE + siteId);
		}
		// If the page is the mutable page name then look up the
		// real page id from the tool name.
		if (mutablePagename.equalsIgnoreCase(pageId)) {
			pageId = findPageIdFromToolId(pageId, URLUtils.getSafePathInfo(req), site);
		}
		// clear the last page visited
		session.removeAttribute(Portal.ATTR_SITE_PAGE + siteId);
		// form a context sensitive title
		String title = ServerConfigurationService.getString("ui.service","Sakai") + " : "
				+ site.getTitle();
		// Lookup the page in the site - enforcing access control
		// business rules
		SitePage page = portal.getSiteHelper().lookupSitePage(pageId, site);
		if (page != null)
		{
			// store the last page visited
			session.setAttribute(Portal.ATTR_SITE_PAGE + siteId, page.getId());
			title += " : " + page.getTitle();
		}
		// Check for incomplete URLs in the case of inlined tools
		String trinity = ServerConfigurationService.getString(ToolUtils.PORTAL_INLINE_EXPERIMENTAL, "false");
		if ( "true".equals(trinity) && toolId == null) {
			String pagerefUrl = ToolUtils.getPageUrl(req, site, page, getUrlFragment(),
					false, null, null);
			// http://localhost:8080/portal/site/963b28b/tool/0996adf
			String[] pieces = pagerefUrl.split("/");
			if ( pieces.length > 6 && "tool".equals(pieces[6]) ) {
				// SAK-25503 - This probably should be a log.debug later
				String queryString = req.getQueryString();
				if ( queryString != null ) pagerefUrl = pagerefUrl + '?' + queryString;
				log.warn("Redirecting tool inline url: "+pagerefUrl);
				res.sendRedirect(pagerefUrl);
				return;
			}
		}
		// Create and initialize a copy of the PDA Handler
		PDAHandler pdah = new PDAHandler();
		pdah.register(portal,portalService,servletContext);
		// See if we can buffer the content, if not, pass the request through
		String TCP = null;
		String toolPathInfo = null;
		boolean allowBuffer = false;
		Object BC = null;
		ToolConfiguration siteTool = null;
		if ( toolId != null ) {
			siteTool = SiteService.findTool(toolId);
			if ( siteTool != null && parts.length >= 5 ) {
				commonToolId = siteTool.getToolId();
				// Does the tool allow us to buffer?
				allowBuffer = pdah.allowBufferContent(req, siteTool);
				if ( allowBuffer ) {
					TCP = req.getContextPath() + req.getServletPath() + Web.makePath(parts, 1, 5);
					toolPathInfo = Web.makePath(parts, 5, parts.length);
					// Should we bypass buffering based on the request?
					boolean matched = pdah.checkBufferBypass(req, siteTool);
					if ( matched ) {
						ActiveTool tool = ActiveToolManager.getActiveTool(commonToolId);
						portal.forwardTool(tool, req, res, siteTool,
								siteTool.getSkin(), TCP, toolPathInfo);
						return;
					}
				}
			}
		}
		// start the response
		String siteType = portal.calcSiteType(siteId);
		PortalRenderContext rcontext = portal.startPageContext(siteType, title, site
				.getSkin(), req);
		if ( allowBuffer ) {
			BC = pdah.bufferContent(req, res, session, toolId,
					TCP, toolPathInfo, siteTool);
			// If the buffered response was not parseable
			if ( BC instanceof ByteArrayServletResponse ) {
				ByteArrayServletResponse bufferResponse = (ByteArrayServletResponse) BC;
				StringBuffer queryUrl = req.getRequestURL();
				String queryString = req.getQueryString();
				if ( queryString != null ) queryUrl.append('?').append(queryString);
				String msg = "Post buffer bypass CTI="+commonToolId+" URL="+queryUrl;
				String redir = bufferResponse.getRedirect();
				// We are concerned when we neither got output, nor a redirect
				if ( redir != null ) {
					msg = msg + " redirect to="+redir;
					log.debug(msg);
				} else {
					log.warn(msg);
				}
				bufferResponse.forwardResponse();
				return;
			}
		}
		// Include the buffered content if we have it
		if ( BC instanceof Map ) {
			if ( req.getMethod().equals("POST") ) {
				StringBuffer queryUrl = req.getRequestURL();
				String queryString = req.getQueryString();
				if ( queryString != null ) queryUrl.append('?').append(queryString);
				log.warn("It is tacky to return markup on a POST CTI="+commonToolId+" URL="+queryUrl);
			}
			rcontext.put("bufferedResponse", Boolean.TRUE);
			Map<String,String> bufferMap = (Map<String,String>) BC;
			rcontext.put("responseHead", (String) bufferMap.get("responseHead"));
			rcontext.put("responseBody", (String) bufferMap.get("responseBody"));
		}
		// Have we been requested to display minimized and are we logged in?
		if (session.getUserId() != null ) {
			Cookie c = portal.findCookie(req, portal.SAKAI_NAV_MINIMIZED);
			String reqParm = req.getParameter(portal.SAKAI_NAV_MINIMIZED);
			String minStr = ServerConfigurationService.getString("portal.allow.auto.minimize","true");
			if ( c != null && "true".equals(c.getValue()) ) {
				rcontext.put(portal.SAKAI_NAV_MINIMIZED, Boolean.TRUE);
			} else if ( reqParm != null && "true".equals(reqParm) && ! "false".equals(minStr) ) {
				rcontext.put(portal.SAKAI_NAV_MINIMIZED, Boolean.TRUE);
			}
		}
		rcontext.put("siteId", siteId);
		boolean showShortDescription = Boolean.valueOf(ServerConfigurationService.getBoolean("portal.title.shortdescription.show", false));
		if (showShortDescription) {
			rcontext.put("shortDescription", Web.escapeHtml(site.getShortDescription()));
		}
		if (SiteService.isUserSite(siteId)){
			rcontext.put("siteTitle", rb.getString("sit_mywor"));
		}else{
			rcontext.put("siteTitle", Web.escapeHtml(site.getTitle()));
		}
		addLocale(rcontext, site, session.getUserId());
		includeSiteNav(rcontext, req, session, siteId);
		includeWorksite(rcontext, res, req, session, site, page, toolContextPath,
				getUrlFragment());
		// Include sub-sites if appropriate
		// TODO: Think through whether we want reset tools or not
		portal.includeSubSites(rcontext, req, session, siteId, req.getContextPath()
				+ req.getServletPath(), getUrlFragment(),
				/* resetTools */false);
		portal.includeBottom(rcontext);
		//Log the visit into SAKAI_EVENT - begin
		// NOTE(review): failures here are deliberately swallowed so a presence-logging
		// problem never breaks page rendering.
		try{
			boolean presenceEvents = ServerConfigurationService.getBoolean("presence.events.log", true);
			if (presenceEvents)
				org.sakaiproject.presence.cover.PresenceService.setPresence(siteId + "-presence");
		}catch(Exception e){}
		//End - log the visit into SAKAI_EVENT
		rcontext.put("currentUrlPath", Web.serverUrl(req) + req.getContextPath()
				+ URLUtils.getSafePathInfo(req));
		doSendResponse(rcontext, res, null);
		StoredState ss = portalService.getStoredState();
		if (ss != null && toolContextPath.equals(ss.getToolContextPath()))
		{
			// This request is the destination of the request
			portalService.setStoredState(null);
		}
	}
/*
 * If the page id is the mutablePageId then see if can resolve it from the
 * the placement of the tool with a supplied tool id.
 */
private String findPageIdFromToolId(String pageId, String toolContextPath,
        Site site) {
    // A well-known tool id embedded in the URL ("/tool/<id>") can identify the
    // page: the first placement of that tool found in the site is assumed to
    // be the right one.
    final String toolSegment = "/tool/";
    String commonToolId = null;
    try
    {
        // does the URL contain a tool id?
        int segmentStart = toolContextPath.lastIndexOf(toolSegment);
        if (segmentStart >= 0) {
            commonToolId = toolContextPath.substring(segmentStart + toolSegment.length());
            ToolConfiguration placement = site.getToolForCommonId(commonToolId);
            if (log.isDebugEnabled()) {
                log.debug("trying to resolve page id from toolId: ["+commonToolId+"]");
            }
            if (placement != null) {
                pageId = placement.getPageId();
            }
        }
    }
    catch (Exception e) {
        log.error("exception resolving page id from toolid :["+commonToolId+"]",e);
    }
    return pageId;
}
/**
 * Does the final render response; classes that extend this class
 * may/will want to override this method to use their own template.
 *
 * @param rcontext the populated portal render context
 * @param res the servlet response to render into
 * @param contentType requested content type. NOTE(review): this parameter is
 *        currently ignored — the call below always passes {@code null} as the
 *        content type and always renders the "site" template; confirm whether
 *        subclasses depend on that before changing it.
 * @throws IOException if writing the response fails
 */
protected void doSendResponse(PortalRenderContext rcontext, HttpServletResponse res,
        String contentType) throws IOException
{
    portal.sendResponse(rcontext, res, "site", null);
}
/**
 * Populates the render context with the site navigation area (accessibility
 * link, login state, logo and site tabs) when the template uses the
 * INCLUDE_SITE_NAV fragment.
 *
 * @param rcontext the portal render context to populate
 * @param req the current request
 * @param session the current session (user id may be null for anonymous)
 * @param siteId the id of the site being rendered
 */
protected void includeSiteNav(PortalRenderContext rcontext, HttpServletRequest req,
        Session session, String siteId)
{
    if (rcontext.uses(INCLUDE_SITE_NAV))
    {
        boolean loggedIn = session.getUserId() != null;
        boolean topLogin = ServerConfigurationService.getBoolean("top.login", true);

        String accessibilityURL = ServerConfigurationService
                .getString("accessibility.url");
        rcontext.put("siteNavHasAccessibilityURL", Boolean
                .valueOf((accessibilityURL != null && !accessibilityURL.equals(""))));
        rcontext.put("siteNavAccessibilityURL", accessibilityURL);

        rcontext.put("siteNavTopLogin", Boolean.valueOf(topLogin));
        rcontext.put("siteNavLoggedIn", Boolean.valueOf(loggedIn));

        try
        {
            // The logo is always shown; tabs are shown for logged-in users,
            // and for anonymous users only when the gateway site list is
            // enabled (doGatewaySiteList is not consulted when logged in,
            // matching the original branch structure).
            includeLogo(rcontext, req, session, siteId);
            if (loggedIn || portal.getSiteHelper().doGatewaySiteList())
            {
                includeTabs(rcontext, req, session, siteId, getUrlFragment(), false);
            }
        }
        catch (Exception any)
        {
            // Fix: this exception used to be silently swallowed, hiding
            // rendering failures of the logo/tab area. Log it so problems
            // are diagnosable; rendering of the rest of the page continues.
            log.warn("Error including logo or tabs in site navigation for site " + siteId, any);
        }
    }
}
/**
 * Puts the logo/skin information for the given site into the render context
 * and delegates to the portal to include the login area. No-op unless the
 * template uses the INCLUDE_LOGO fragment.
 */
public void includeLogo(PortalRenderContext rcontext, HttpServletRequest req,
        Session session, String siteId) throws IOException
{
    // Guard clause: nothing to do when the template does not render a logo.
    if (!rcontext.uses(INCLUDE_LOGO))
    {
        return;
    }

    rcontext.put("logoSkin", getSiteSkin(siteId));
    rcontext.put("logoSkinRepo", ServerConfigurationService.getString("skin.repo"));

    String siteType = portal.calcSiteType(siteId);
    rcontext.put("logoSiteType", siteType);
    rcontext.put("logoSiteClass", (siteType != null) ? siteType : "undeterminedSiteType");

    portal.includeLogin(rcontext, req, session);
}
/**
 * Resolves the skin for a site, falling back for user sites whose id is
 * EID-based, then to the configured default, and finally applying the
 * neoskin prefix when appropriate.
 */
private String getSiteSkin(String siteId)
{
    // First, try to get the skin the default way.
    String skin = SiteService.getSiteSkin(siteId);

    // If that fails for a user site, the passed siteId may be based on the
    // user's EID; convert to the internal user id and retry with the
    // canonical user site id. Most lookups are EID-based, so internal-id
    // URLs would otherwise cause lots of cache misses.
    if (skin == null && SiteService.isUserSite(siteId))
    {
        try
        {
            String userId = SiteService.getSiteUserId(siteId);
            try
            {
                userId = UserDirectoryService.getUserId(userId);
            }
            catch (UserNotDefinedException unde)
            {
                // Ignore: the value was already an internal user id.
            }
            skin = SiteService.getSiteSkin(SiteService.getUserSiteId(userId));
        }
        catch (Exception e)
        {
            // Ignore: fall through to the configured default skin below.
        }
    }

    if (skin == null)
    {
        skin = ServerConfigurationService.getString("skin.default");
    }

    // For neoskin templates prepend the skin prefix — but never twice.
    String templates = ServerConfigurationService.getString("portal.templates", "neoskin");
    String prefix = portalService.getSkinPrefix();
    if ("neoskin".equals(templates) && !StringUtils.startsWith(skin, prefix))
    {
        skin = prefix + skin;
    }
    return skin;
}
/**
 * Populates the render context with the site tab bar: role-swap ("view as
 * student") controls, the number and labelling of tabs, the site list view,
 * and optional gallery logout link. No-op unless the template uses the
 * INCLUDE_TABS fragment.
 *
 * Fix: the original method contained an exact duplicate of the
 * tabsCssClass / tabsAddLogout / logout-URL block, executing the same puts
 * twice; the duplicate has been removed (same values were re-put, so
 * observable behavior is unchanged).
 *
 * @param rcontext the portal render context to populate
 * @param req the current request
 * @param session the current session
 * @param siteId the site being rendered
 * @param prefix URL fragment prefix for tab links (may get "-reset" appended)
 * @param addLogout whether to add the gallery logout link
 * @throws IOException declared for subclasses; not thrown here directly
 */
public void includeTabs(PortalRenderContext rcontext, HttpServletRequest req,
        Session session, String siteId, String prefix, boolean addLogout)
        throws IOException
{
    if (rcontext.uses(INCLUDE_TABS))
    {
        // for skinning
        String siteType = portal.calcSiteType(siteId);

        // If we have turned on auto-state reset on navigation, we generate
        // the "site-reset" "worksite-reset" and "gallery-reset" urls
        if ("true".equalsIgnoreCase(ServerConfigurationService
                .getString(Portal.CONFIG_AUTO_RESET)))
        {
            prefix = prefix + "-reset";
        }

        boolean loggedIn = session.getUserId() != null;

        // Check to see if we display a link in the UI for swapping the view
        boolean roleswapcheck = false; // tells the UI whether to display any role swapping component
        String roleswitchvalue = SecurityService.getUserEffectiveRole(SiteService.siteReference(siteId)); // checks the session for a role swap value
        boolean roleswitchstate = false; // whether the site is currently in the switched state
        boolean allowroleswap = SiteService.allowRoleSwap(siteId) && !SecurityService.isSuperUser();

        // check for the site.roleswap permission
        if (allowroleswap || roleswitchvalue != null)
        {
            Site activeSite = null;
            try
            {
                activeSite = portal.getSiteHelper().getSiteVisit(siteId); // active site
            }
            catch(IdUnusedException ie)
            {
                log.error(ie.getMessage(), ie);
                throw new IllegalStateException("Site doesn't exist!");
            }
            catch(PermissionException pe)
            {
                log.error(pe.getMessage(), pe);
                throw new IllegalStateException("No permission to view site!");
            }

            // Check whether any swappable ("student view") role exists in the
            // site; used to decide whether to display the component at all.
            boolean roleInSite = false;
            Set<Role> roles = activeSite.getRoles();
            // roles that can be swapped to, from sakai.properties
            String externalRoles = ServerConfigurationService.getString("studentview.roles");
            String[] svRoles = externalRoles.split(",");
            List<String> svRolesFinal = new ArrayList<String>();
            for (Role role : roles)
            {
                for (int i = 0; i < svRoles.length; i++)
                {
                    if (svRoles[i].trim().equals(role.getId()))
                    {
                        roleInSite = true;
                        svRolesFinal.add(role.getId());
                    }
                }
            }

            // The type check filters out non-standard sites where swapping
            // roles would not apply; roleInSite makes sure a swappable role
            // actually exists in the site.
            if (activeSite.getType() != null && roleInSite)
            {
                String switchRoleUrl = "";
                Role userRole = activeSite.getUserRole(session.getUserId()); // the user's role in the site
                // If userRole is null the user is more than likely a Delegated
                // Access user; the security check above already allowed the
                // swap, so they have access to this site.
                if (roleswitchvalue != null && (userRole == null || !userRole.getId().equals(roleswitchvalue)))
                {
                    switchRoleUrl = ServerConfigurationService.getPortalUrl()
                            + "/role-switch-out/"
                            + siteId
                            + "/?panel=Main";
                    rcontext.put("roleUrlValue", roleswitchvalue);
                    roleswitchstate = true; // We're in a switched state, so set to true
                }
                else
                {
                    if (svRolesFinal.size()>1)
                    {
                        rcontext.put("roleswapdropdown", true);
                        switchRoleUrl = ServerConfigurationService.getPortalUrl()
                                + "/role-switch/"
                                + siteId
                                + "/";
                        rcontext.put("panelString", "/?panel=Main");
                    }
                    else
                    {
                        rcontext.put("roleswapdropdown", false);
                        switchRoleUrl = ServerConfigurationService.getPortalUrl()
                                + "/role-switch/"
                                + siteId
                                + "/"
                                + svRolesFinal.get(0)
                                + "/?panel=Main";
                        rcontext.put("roleUrlValue", svRolesFinal.get(0));
                    }
                }
                roleswapcheck = true; // We made it this far, so set to true to display a component
                rcontext.put("siteRoles", svRolesFinal);
                rcontext.put("switchRoleUrl", switchRoleUrl);
            }
        }

        rcontext.put("viewAsStudentLink", Boolean.valueOf(roleswapcheck)); // whether the UI shows the role-swap link
        rcontext.put("roleSwitchState", roleswitchstate); // whether we are in a role swapped state

        // Number of tabs and labelling: gateway config for anonymous users,
        // personal preferences for logged-in users.
        int tabsToDisplay = configuredTabsToDisplay;
        int tabDisplayLabel = 1;

        if (!loggedIn)
        {
            tabsToDisplay = ServerConfigurationService.getInt(
                    "gatewaySiteListDisplayCount", tabsToDisplay);
        }
        else
        {
            Preferences prefs = PreferencesService
                    .getPreferences(session.getUserId());
            ResourceProperties props = prefs.getProperties("sakai:portal:sitenav");
            try
            {
                tabsToDisplay = (int) props.getLongProperty("tabs");
            }
            catch (Exception any)
            {
                // Preference not set — keep the configured default.
            }
            try
            {
                tabDisplayLabel = (int) props.getLongProperty("tab:label");
            }
            catch (Exception any)
            {
                // Preference not set — keep the default label style.
            }
        }
        rcontext.put("tabDisplayLabel", tabDisplayLabel);
        rcontext.put("useDHTMLMore", useDHTMLMore);

        if (useDHTMLMore)
        {
            SiteView siteView = portal.getSiteHelper().getSitesView(
                    SiteView.View.DHTML_MORE_VIEW, req, session, siteId);
            siteView.setPrefix(prefix);
            siteView.setToolContextPath(null);
            rcontext.put("tabsSites", siteView.getRenderContextObject());
        }
        else
        {
            SiteView siteView = portal.getSiteHelper().getSitesView(
                    SiteView.View.DEFAULT_SITE_VIEW, req, session, siteId);
            siteView.setPrefix(prefix);
            siteView.setToolContextPath(null);
            rcontext.put("tabsSites", siteView.getRenderContextObject());
        }

        String cssClass = (siteType != null) ? "siteNavWrap " + siteType
                : "siteNavWrap";
        rcontext.put("tabsCssClass", cssClass);
        rcontext.put("tabsAddLogout", Boolean.valueOf(addLogout));
        if (addLogout)
        {
            String logoutUrl = Web.serverUrl(req)
                    + ServerConfigurationService.getString("portalPath")
                    + "/logout_gallery";
            rcontext.put("tabsLogoutUrl", logoutUrl);
        }

        boolean allowAddSite = false;
        if(SiteService.allowAddCourseSite()) {
            allowAddSite = true;
        } else if (SiteService.allowAddPortfolioSite()) {
            allowAddSite = true;
        } else if (SiteService.allowAddProjectSite()) {
            allowAddSite = true;
        }
        rcontext.put("allowAddSite",allowAddSite);
    }
}
}
| |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.plugins.groovy.intentions.aliasImport;
import com.intellij.codeInsight.template.Template;
import com.intellij.codeInsight.template.TemplateBuilderImpl;
import com.intellij.codeInsight.template.TemplateEditingAdapter;
import com.intellij.codeInsight.template.TemplateManager;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.RangeMarker;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.SuggestedNameInfo;
import com.intellij.psi.impl.source.PostprocessReformattingAspect;
import com.intellij.psi.search.LocalSearchScope;
import com.intellij.psi.search.searches.MethodReferencesSearch;
import com.intellij.psi.search.searches.ReferencesSearch;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.refactoring.rename.NameSuggestionProvider;
import com.intellij.refactoring.rename.PreferrableNameSuggestionProvider;
import com.intellij.refactoring.rename.inplace.MyLookupExpression;
import com.intellij.usageView.UsageInfo;
import com.intellij.usageView.UsageViewUtil;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.Processor;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.plugins.groovy.intentions.base.Intention;
import org.jetbrains.plugins.groovy.intentions.base.IntentionUtils;
import org.jetbrains.plugins.groovy.intentions.base.PsiElementPredicate;
import org.jetbrains.plugins.groovy.lang.psi.GrReferenceElement;
import org.jetbrains.plugins.groovy.lang.psi.GroovyFileBase;
import org.jetbrains.plugins.groovy.lang.psi.GroovyPsiElementFactory;
import org.jetbrains.plugins.groovy.lang.psi.api.GroovyResolveResult;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrReferenceExpression;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.members.GrAccessorMethod;
import org.jetbrains.plugins.groovy.lang.psi.api.toplevel.imports.GrImportStatement;
import org.jetbrains.plugins.groovy.lang.psi.api.types.GrCodeReferenceElement;
import org.jetbrains.plugins.groovy.lang.psi.util.GroovyPropertyUtils;
import java.util.*;
/**
 * Intention that rewrites a static import as an aliased static import
 * ({@code import static Foo.bar as alias}) and renames all usages of the
 * imported member in the current file to the chosen alias. Outside unit-test
 * mode the alias is chosen interactively through an inplace-rename template.
 *
 * @author Max Medvedev
 */
public class GrAliasImportIntention extends Intention {
  /**
   * Entry point. The invocation element is either a reference expression that
   * resolves through a static import, or the static import statement itself;
   * anything else is silently ignored.
   */
  @Override
  protected void processIntention(@NotNull PsiElement element, @NotNull Project project, Editor editor) throws IncorrectOperationException {
    final GrImportStatement context;
    final PsiMember resolved;
    if (element instanceof GrReferenceExpression) {
      // Invoked on a usage: recover the import statement it resolves through.
      GrReferenceExpression ref = (GrReferenceExpression)element;
      GroovyResolveResult result = ref.advancedResolve();
      context = (GrImportStatement)result.getCurrentFileResolveContext();
      assert context != null;
      resolved = (PsiMember)result.getElement();
    }
    else if (element instanceof GrImportStatement) {
      // Invoked directly on the import statement.
      context = (GrImportStatement)element;
      GrCodeReferenceElement reference = context.getImportReference();
      assert reference != null;
      resolved = (PsiMember)reference.resolve();
    }
    else {
      return;
    }
    assert resolved != null;
    doRefactoring(project, context, resolved);
  }

  /**
   * Performs the refactoring: collects usages, inserts a template import with
   * a placeholder alias, then either applies it immediately (unit-test mode)
   * or starts the interactive rename template.
   */
  private static void doRefactoring(@NotNull Project project, @NotNull GrImportStatement importStatement, @NotNull PsiMember member) {
    // For a getter/setter imported under a non-matching name, alias the
    // underlying property instead of the accessor method.
    if (member instanceof GrAccessorMethod &&
        !importStatement.isOnDemand() &&
        !Objects.equals(importStatement.getImportedName(), member.getName())) {
      member = ((GrAccessorMethod)member).getProperty();
    }
    final GroovyFileBase file = (GroovyFileBase)importStatement.getContainingFile();
    final List<UsageInfo> usages = findUsages(member, file);
    GrImportStatement templateImport = createTemplateImport(project, member, file);
    if (ApplicationManager.getApplication().isUnitTestMode()) {
      // Non-interactive path: keep on-demand imports, drop single imports.
      if (!importStatement.isOnDemand()) {
        importStatement.delete();
      }
      updateRefs(usages, member.getName(), templateImport);
    }
    else {
      runTemplate(project, importStatement, member, file, usages, templateImport);
    }
  }

  /**
   * Adds a new aliased static import for {@code resolved} to {@code file},
   * with the placeholder alias "aliased", and returns the inserted statement.
   */
  private static GrImportStatement createTemplateImport(Project project,
                                                        PsiMember resolved,
                                                        GroovyFileBase file) {
    final PsiClass aClass = resolved.getContainingClass();
    assert aClass != null;
    String qname = aClass.getQualifiedName();
    final String name = resolved.getName();
    GrImportStatement template = GroovyPsiElementFactory.getInstance(project)
      .createImportStatementFromText("import static " + qname + "." + name + " as aliased");
    return file.addImport(template);
  }

  /**
   * Starts the interactive inplace-rename template over the alias name
   * element of the inserted import. When the template finishes, usages are
   * renamed and the original import is deleted; when it is cancelled
   * (brokenOff), the inserted import is rolled back.
   */
  private static void runTemplate(Project project,
                                  final GrImportStatement context,
                                  PsiMember resolved,
                                  final GroovyFileBase file,
                                  final List<UsageInfo> usages,
                                  GrImportStatement templateImport) {
    // Commit pending formatting so text offsets are stable before templating.
    PostprocessReformattingAspect.getInstance(project).doPostponedFormatting();
    TemplateBuilderImpl templateBuilder = new TemplateBuilderImpl(templateImport);
    LinkedHashSet<String> names = getSuggestedNames(resolved, context);
    final PsiElement aliasNameElement = templateImport.getAliasNameElement();
    assert aliasNameElement != null;
    templateBuilder.replaceElement(aliasNameElement, new MyLookupExpression(resolved.getName(), names, (PsiNamedElement)resolved, resolved, true, null));
    Template built = templateBuilder.buildTemplate();
    final Editor newEditor = IntentionUtils.positionCursor(project, file, templateImport);
    final Document document = newEditor.getDocument();
    // Remember where the original import lives so it can be deleted after the
    // template completes (offsets may shift while the user types).
    final RangeMarker contextImportPointer = document.createRangeMarker(context.getTextRange());
    final TextRange range = templateImport.getTextRange();
    // The template re-inserts the import text itself; remove our copy first.
    document.deleteString(range.getStartOffset(), range.getEndOffset());
    final String name = resolved.getName();
    TemplateManager manager = TemplateManager.getInstance(project);
    manager.startTemplate(newEditor, built, new TemplateEditingAdapter() {
      @Override
      public void templateFinished(Template template, boolean brokenOff) {
        final GrImportStatement importStatement = ReadAction
          .compute(() -> PsiTreeUtil.findElementOfClassAtOffset(file, range.getStartOffset(), GrImportStatement.class, true));
        if (brokenOff) {
          // User cancelled: remove the half-finished aliased import.
          if (importStatement != null) {
            ApplicationManager.getApplication().runWriteAction(() -> importStatement.delete());
          }
          return;
        }
        updateRefs(usages, name, importStatement);
        // Finally delete the original (non-aliased) import statement.
        ApplicationManager.getApplication().runWriteAction(() -> {
          final GrImportStatement context1 = PsiTreeUtil.findElementOfClassAtRange(file, contextImportPointer.getStartOffset(),
                                                                                  contextImportPointer.getEndOffset(),
                                                                                  GrImportStatement.class);
          if (context1 != null) {
            context1.delete();
          }
        });
      }
    });
  }

  /**
   * Renames all collected usages from {@code memberName} to the alias of
   * {@code updatedImport}, mapping between property and accessor spellings
   * (foo / getFoo / isFoo / setFoo) as needed.
   */
  private static void updateRefs(List<UsageInfo> usages, final String memberName, final GrImportStatement updatedImport) {
    if (updatedImport == null) return;
    final String name = ReadAction.compute(() -> updatedImport.getImportedName());
    for (final UsageInfo usage : usages) {
      ApplicationManager.getApplication().runWriteAction(() -> {
        final PsiElement usageElement = usage.getElement();
        if (usageElement == null) return;
        // Don't touch the import statements themselves.
        if (usageElement.getParent() instanceof GrImportStatement) return;
        if (usageElement instanceof GrReferenceElement) {
          final GrReferenceElement ref = (GrReferenceElement)usageElement;
          final PsiElement qualifier = ref.getQualifier();
          // Only unqualified references resolve through the import.
          if (qualifier == null) {
            final String refName = ref.getReferenceName();
            if (refName == null) return;
            if (memberName.equals(refName)) {
              // Exact spelling: rename straight to the alias.
              ref.handleElementRename(name);
            }
            else if (refName.equals(GroovyPropertyUtils.getPropertyNameByAccessorName(memberName))) {
              // Usage was property-style for an imported accessor.
              final String newPropName = GroovyPropertyUtils.getPropertyNameByAccessorName(name);
              if (newPropName != null) {
                ref.handleElementRename(newPropName);
              }
              else {
                ref.handleElementRename(name);
              }
            }
            else if (refName.equals(GroovyPropertyUtils.getGetterNameBoolean(memberName))) {
              final String getterName = GroovyPropertyUtils.getGetterNameBoolean(name);
              ref.handleElementRename(getterName);
            }
            else if (refName.equals(GroovyPropertyUtils.getGetterNameNonBoolean(memberName))) {
              final String getterName = GroovyPropertyUtils.getGetterNameNonBoolean(name);
              ref.handleElementRename(getterName);
            }
            else if (refName.equals(GroovyPropertyUtils.getSetterName(memberName))) {
              final String getterName = GroovyPropertyUtils.getSetterName(name);
              ref.handleElementRename(getterName);
            }
          }
        }
      });
    }
  }

  /**
   * Collects all references to {@code member} within {@code file}, including
   * accessor references when the member is a field (Groovy property access).
   * Duplicates are filtered via an identity set.
   */
  private static List<UsageInfo> findUsages(PsiMember member, GroovyFileBase file) {
    LocalSearchScope scope = new LocalSearchScope(file);
    final ArrayList<UsageInfo> infos = new ArrayList<>();
    final HashSet<Object> usedRefs = ContainerUtil.newHashSet();
    final Processor<PsiReference> consumer = reference -> {
      if (usedRefs.add(reference)) {
        infos.add(new UsageInfo(reference));
      }
      return true;
    };
    if (member instanceof PsiMethod) {
      MethodReferencesSearch.search((PsiMethod)member, scope, false).forEach(consumer);
    }
    else {
      ReferencesSearch.search(member, scope).forEach(consumer);
      if (member instanceof PsiField) {
        // Property-style usages go through the generated getter/setter.
        final PsiMethod getter = GroovyPropertyUtils.findGetterForField((PsiField)member);
        if (getter != null) {
          MethodReferencesSearch.search(getter, scope, false).forEach(consumer);
        }
        final PsiMethod setter = GroovyPropertyUtils.findSetterForField((PsiField)member);
        if (setter != null) {
          MethodReferencesSearch.search(setter, scope, false).forEach(consumer);
        }
      }
    }
    return infos;
  }

  /**
   * Builds the ordered set of alias-name suggestions for the lookup: the
   * member's short name first, then suggestions from all registered
   * {@link NameSuggestionProvider}s.
   */
  public static LinkedHashSet<String> getSuggestedNames(PsiElement psiElement, final PsiElement nameSuggestionContext) {
    final LinkedHashSet<String> result = new LinkedHashSet<>();
    result.add(UsageViewUtil.getShortName(psiElement));
    final NameSuggestionProvider[] providers = Extensions.getExtensions(NameSuggestionProvider.EP_NAME);
    for (NameSuggestionProvider provider : providers) {
      SuggestedNameInfo info = provider.getSuggestedNames(psiElement, nameSuggestionContext, result);
      if (info != null) {
        if (provider instanceof PreferrableNameSuggestionProvider && !((PreferrableNameSuggestionProvider)provider).shouldCheckOthers()) {
          break;
        }
      }
    }
    return result;
  }

  /** Limits the intention to elements accepted by the alias-import predicate. */
  @NotNull
  @Override
  protected PsiElementPredicate getElementPredicate() {
    return AliasImportIntentionPredicate.INSTANCE;
  }
}
| |
package com.github.lindenb.jvarkit.tools.misc;
import java.io.BufferedReader;
import java.io.File;
import java.io.PrintStream;
import java.util.HashMap;
import java.util.Map;
import htsjdk.samtools.fastq.BasicFastqWriter;
import htsjdk.samtools.fastq.FastqConstants;
import htsjdk.samtools.fastq.FastqRecord;
import htsjdk.samtools.fastq.FastqWriter;
import htsjdk.samtools.ValidationStringency;
import htsjdk.samtools.util.CloserUtil;
import com.github.lindenb.jvarkit.io.IOUtils;
import com.github.lindenb.jvarkit.util.AbstractCommandLineProgram;
import com.github.lindenb.jvarkit.util.picard.FastqReader;
import com.github.lindenb.jvarkit.util.picard.FourLinesFastqReader;
/**
 * Grep reads names in fastq.
 *
 * Keeps (or, with -V, drops) FASTQ records whose read name appears in a
 * user-supplied list of names. Names may be given with -R or loaded from a
 * file with -f; -n N drops a name from the lookup table after it has been
 * matched N times, to speed up scanning.
 */
public class FastqGrep
    extends AbstractCommandLineProgram
    {
    // when true (-V), emit only reads NOT present in the name list
    private boolean inverse=false;
    // read name -> number of times it has been matched so far
    private Map<String,Integer> readNames=new HashMap<String,Integer>();
    // once a name has matched this many times it is removed from the map
    // (-n); -1 disables the optimization
    private int n_before_remove=-1;

    private FastqGrep()
        {
        }

    @Override
    protected String getOnlineDocUrl() {
        return "https://github.com/lindenb/jvarkit/wiki/FastqGrep";
        }

    @Override
    public String getProgramDescription() {
        return "Grep reads names in fastq";
        }

    @Override
    public void printOptions(PrintStream out)
        {
        out.println(" -f (file) file containing a list of read names..");
        out.println(" -R (name) add the read.");
        out.println(" -n (int) when found, remove the read from the list of names when found more that 'n' time (increase speed)");
        out.println(" -V invert");
        out.println(" -o (filename) output file. default: stdout.");
        super.printOptions(out);
        }

    /** Normalized name of a FASTQ record (header trimmed). */
    private String getReadName(FastqRecord r)
        {
        return getReadName(r.getReadHeader());
        }

    /** Strips the leading '@' marker and anything after the first space. */
    private String getReadName(String s)
        {
        int beg=(s.startsWith(FastqConstants.SEQUENCE_HEADER)?1:0);
        int end=s.indexOf(' ');
        if(end==-1) end=s.length();
        s= s.substring(beg, end);
        return s;
        }

    /**
     * Scans one FASTQ stream, writing matching (or, with -V, non-matching)
     * records to {@code out}. May remove exhausted names from the lookup
     * table and stops early once the table is empty (-n mode).
     */
    private void run(FastqReader r,FastqWriter out)
        {
        long nRec=0L;
        r.setValidationStringency(ValidationStringency.LENIENT);
        while(r.hasNext())
            {
            FastqRecord fastq=r.next();
            boolean keep=false;
            String readName=getReadName(fastq);
            Integer count=readNames.get(readName);
            if(count!=null)
                {
                keep=true;
                }
            if(inverse) keep=!keep;
            if(keep)
                {
                ++nRec;
                out.write(fastq);
                }

            // -n optimization: after a name has been seen n times, drop it;
            // when the table empties there is nothing left to match.
            if(n_before_remove!=-1 && !inverse && keep)
                {
                count++;
                if(count>=n_before_remove)
                    {
                    readNames.remove(readName);
                    if(readNames.isEmpty()) break;
                    }
                else
                    {
                    readNames.put(readName,count);
                    }
                }
            }
        info("Done. N-Reads:"+nRec);
        }

    @Override
    public int doWork(String[] args)
        {
        File fileout=null;
        com.github.lindenb.jvarkit.util.cli.GetOpt opt=new com.github.lindenb.jvarkit.util.cli.GetOpt();
        int c;
        // FIX: option string was "o:f:R:n" — '-n' takes an int argument so it
        // needs a trailing ':', and '-V' (documented and handled below) was
        // missing entirely.
        while((c=opt.getopt(args,getGetOptDefault()+ "o:f:R:n:V"))!=-1)
            {
            switch(c)
                {
                case 'n': n_before_remove=Integer.parseInt(opt.getOptArg()); break;
                case 'V': inverse=true;break;
                case 'R': readNames.put(getReadName(opt.getOptArg()),0);break;
                case 'f':
                    {
                    // load one read name per non-empty line
                    BufferedReader in=null;
                    try
                        {
                        in=IOUtils.openURIForBufferedReading(opt.getOptArg());
                        String line;
                        while((line=in.readLine())!=null)
                            {
                            line=line.trim();
                            if(line.isEmpty()) continue;
                            readNames.put(getReadName(line),0);
                            }
                        }
                    catch(Exception err)
                        {
                        error(err);
                        return -1;
                        }
                    finally
                        {
                        CloserUtil.close(in);
                        }
                    break;
                    }
                case 'o': fileout=new File(opt.getOptArg());break;
                default:
                    {
                    switch(handleOtherOptions(c, opt, null))
                        {
                        case EXIT_FAILURE: return -1;
                        case EXIT_SUCCESS: return 0;
                        default:break;
                        }
                    }
                }
            }
        if(readNames.isEmpty())
            {
            warning("no read name found.");
            }
        FastqWriter out=null;
        try
            {
            if(fileout!=null)
                {
                info("Writing to "+fileout);
                out=new BasicFastqWriter(fileout);
                }
            else
                {
                info("Writing to stdout");
                out=new BasicFastqWriter(System.out);
                }
            if(opt.getOptInd()==args.length)
                {
                info("Reading from stdin");
                FastqReader fqR=new FourLinesFastqReader(System.in);
                run(fqR,out);
                fqR.close();
                }
            else for(int optind=opt.getOptInd(); optind < args.length; ++optind)
                {
                File f=new File(args[optind]);
                info("Reading from "+f);
                FastqReader fqR=new FourLinesFastqReader(f);
                run(fqR,out);
                fqR.close();
                }
            // out is closed (and flushed) by the finally block below.
            return 0;
            }
        catch(Exception err)
            {
            error(err);
            return -1;
            }
        finally
            {
            CloserUtil.close(out);
            }
        }

    public static void main(String[] args) {
        new FastqGrep().instanceMainWithExit(args);
        }
    }
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.core.nodetype;
import org.apache.jackrabbit.spi.Name;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import java.util.TreeSet;
import java.util.ArrayList;
/**
* <code>EffectiveNodeTypeCache</code> implementation that uses an array of
* node type names as key for caching the effective node types.
*/
public class EffectiveNodeTypeCacheImpl implements EffectiveNodeTypeCache {
/**
* ordered set of keys
*/
private final TreeSet<Key> sortedKeys;
/**
* cache of pre-built aggregations of node types
*/
private final HashMap<Key, EffectiveNodeType> aggregates;
/**
 * Creates a new, empty effective node type cache with an empty sorted key
 * index and an empty aggregates map.
 */
EffectiveNodeTypeCacheImpl() {
    sortedKeys = new TreeSet<Key>();
    aggregates = new HashMap<Key, EffectiveNodeType>();
}
/**
 * {@inheritDoc}
 * <p>
 * The returned key's weight defaults to the number of names supplied.
 */
public Key getKey(Name[] ntNames) {
    return new WeightedKey(ntNames);
}
/**
 * {@inheritDoc}
 * <p>
 * Caches the effective node type under a key built from its merged node
 * types, weighted by how many node types the aggregate includes.
 */
public void put(EffectiveNodeType ent) {
    // we define the weight as the total number of included node types
    // (through aggregation and inheritance)
    int weight = ent.getMergedNodeTypes().length;
    // the effective node type is identified by the list of merged
    // (i.e. aggregated) node types
    WeightedKey k = new WeightedKey(ent.getMergedNodeTypes(), weight);
    put(k, ent);
}
/**
 * {@inheritDoc}
 * <p>
 * Stores the entry in the aggregates map and registers the key in the
 * weight-ordered index.
 */
public void put(Key key, EffectiveNodeType ent) {
    aggregates.put(key, ent);
    sortedKeys.add(key);
}
/**
 * {@inheritDoc}
 * <p>
 * Consults only the aggregates map, not the sorted key index.
 */
public boolean contains(Key key) {
    return aggregates.containsKey(key);
}
/**
 * {@inheritDoc}
 * <p>
 * Returns <code>null</code> when no effective node type is cached for the key.
 */
public EffectiveNodeType get(Key key) {
    return aggregates.get(key);
}
/**
 * Removes the effective node type for the given key from the cache, keeping
 * the sorted key index in sync.
 *
 * @param key the key of the effective node type to remove
 * @return the removed effective node type or <code>null</code> if it was
 *         never cached.
 */
private EffectiveNodeType remove(Key key) {
    EffectiveNodeType removed = aggregates.remove(key);
    if (removed == null) {
        return null;
    }
    // Drop the matching index entry. We can't simply call
    // TreeSet.remove(key): the cached key may carry a different weight and
    // would not be found by the tree's ordered lookup, so scan instead.
    // WeightedKey.equals(Object) ignores the weight.
    for (Iterator<Key> it = sortedKeys.iterator(); it.hasNext();) {
        if (key.equals(it.next())) {
            it.remove();
            break;
        }
    }
    return removed;
}
/**
 * {@inheritDoc}
 * <p>
 * Evicts every cached aggregate that includes the given node type.
 */
public void invalidate(Name name) {
    // Iterate over a snapshot of the keys: remove(k) mutates sortedKeys and
    // would otherwise cause a ConcurrentModificationException.
    for (Key k : new ArrayList<Key>(sortedKeys)) {
        if (get(k).includesNodeType(name)) {
            remove(k);
        }
    }
}
/**
 * @inheritDoc
 * <p>
 * Returns the given key itself when it is cached; otherwise returns the
 * heaviest cached key that is a subset of it (keys iterate in descending
 * weight order), or <code>null</code> when none qualifies.
 */
public Key findBest(Key key) {
    // quick check for already cached key
    if (contains(key)) {
        return key;
    }
    // find the first (i.e. heaviest) cached aggregate that is a 'subset'
    // of the one we're looking for
    for (Key cached : sortedKeys) {
        if (key.contains(cached)) {
            return cached;
        }
    }
    return null;
}
//-------------------------------------------< java.lang.Object overrides >
/**
 * {@inheritDoc}
 * <p>
 * Creates a shallow copy: the key index and aggregates map are copied, but
 * the cached {@link EffectiveNodeType} instances themselves are shared.
 * Note: builds a fresh instance rather than calling {@code super.clone()}.
 */
@Override
public Object clone() {
    EffectiveNodeTypeCacheImpl clone = new EffectiveNodeTypeCacheImpl();
    clone.sortedKeys.addAll(sortedKeys);
    clone.aggregates.putAll(aggregates);
    return clone;
}
//--------------------------------------------------------------< Object >
/**
 * {@inheritDoc}
 * <p>
 * Lists the identity of this cache followed by one line per cached key, in
 * weight order.
 */
@Override
public String toString() {
    // Use append chaining throughout instead of mixing String concatenation
    // into StringBuilder calls; also adds the missing @Override.
    StringBuilder builder = new StringBuilder();
    builder.append("EffectiveNodeTypeCache (").append(super.toString()).append(")\n");
    builder.append("EffectiveNodeTypes in cache:\n");
    for (Key key : sortedKeys) {
        builder.append(key);
        builder.append("\n");
    }
    return builder.toString();
}
//--------------------------------------------------------< inner classes >
/**
* A <code>WeightedKey</code> uniquely identifies
* a combination (i.e. an aggregation) of one or more node types.
* The weight is an indicator for the cost involved in building such an
* aggregate (e.g. an aggregation of multiple complex node types with deep
* inheritance trees is more costly to build/validate than an aggregation
* of two very simple node types with just one property definition each).
* <p>
* A very simple (and not very accurate) approximation of the weight would
* be the number of explicitly aggregated node types (ignoring inheritance
* and complexity of each involved node type). A better approximation would
* be the number of <b>all</b>, explicitly and implicitly (note that
* inheritance is also an aggregation) aggregated node types.
* <p>
* The more accurate the weight definition, the more efficient is the
* the building of new aggregates.
* <p>
* It is important to note that the weight is not part of the key value,
* i.e. it is not considered by the <code>hashCode()</code> and
* <code>equals(Object)</code> methods. It does however affect the order
* of <code>WeightedKey</code> instances. See
* <code>{@link #compareTo(Object)}</code> for more information.
* <p>
* Let's assume we have an aggregation of node types named "b", "a" and "c".
* Its key would be "[a, b, c]" and the weight 3 (using the simple
* approximation).
*/
private static class WeightedKey implements Key {
/**
* array of node type names, sorted in ascending order
*/
private final Name[] names;
/**
* the weight of this key
*/
private final int weight;
/**
* @param ntNames
*/
WeightedKey(Name[] ntNames) {
this(ntNames, ntNames.length);
}
/**
* @param ntNames
* @param weight
*/
WeightedKey(Name[] ntNames, int weight) {
this.weight = weight;
names = new Name[ntNames.length];
System.arraycopy(ntNames, 0, names, 0, names.length);
Arrays.sort(names);
}
/**
* @param ntNames
*/
WeightedKey(Collection<Name> ntNames) {
this(ntNames, ntNames.size());
}
/**
* @param ntNames
* @param weight
*/
WeightedKey(Collection<Name> ntNames, int weight) {
this((Name[]) ntNames.toArray(new Name[ntNames.size()]), weight);
}
/**
* @return the node type names of this key
*/
public Name[] getNames() {
return names;
}
/**
* {@inheritDoc}
*/
public boolean contains(Key otherKey) {
WeightedKey key = (WeightedKey) otherKey;
Set<Name> tmp = new HashSet<Name>(Arrays.asList(names));
for (int i = 0; i < key.names.length; i++) {
if (!tmp.contains(key.names[i])) {
return false;
}
}
return true;
}
/**
* {@inheritDoc}
*/
public Key subtract(Key otherKey) {
WeightedKey key = (WeightedKey) otherKey;
Set<Name> tmp = new HashSet<Name>(Arrays.asList(names));
tmp.removeAll(Arrays.asList(key.names));
return new WeightedKey(tmp);
}
//-------------------------------------------------------< Comparable >
/**
* The resulting sort-order is: 1. descending weight, 2. ascending key
* (i.e. string representation of this sorted set).
*
* @param o the other key to compare
* @return the result of the comparison
*/
public int compareTo(Key o) {
WeightedKey other = (WeightedKey) o;
// compare weights
if (weight > other.weight) {
return -1;
} else if (weight < other.weight) {
return 1;
}
// compare arrays of names
int len1 = names.length;
int len2 = other.names.length;
int len = Math.min(len1, len2);
for (int i = 0; i < len; i++) {
Name name1 = names[i];
Name name2 = other.names[i];
int result = name1.compareTo(name2);
if (result != 0) {
return result;
}
}
return len1 - len2;
}
//---------------------------------------< java.lang.Object overrides >
/**
* {@inheritDoc}
*/
public int hashCode() {
int h = 17;
// ignore weight
for (Name name : names) {
h *= 37;
h += name.hashCode();
}
return h;
}
/**
* {@inheritDoc}
*/
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj instanceof WeightedKey) {
WeightedKey other = (WeightedKey) obj;
// ignore weight
return Arrays.equals(names, other.names);
}
return false;
}
/**
* {@inheritDoc}
*/
public String toString() {
return Arrays.asList(names).toString() + " (" + weight + ")";
}
}
}
| |
/*
* Copyright 2015, Yahoo! Inc.
* Licensed under the terms of the Apache License 2.0. See LICENSE file at the project root for terms.
*/
package com.yahoo.sketches.theta;
import static com.yahoo.sketches.QuickSelect.selectExcludingZeros;
import static com.yahoo.sketches.theta.PreambleUtil.EMPTY_FLAG_MASK;
import static com.yahoo.sketches.theta.PreambleUtil.FAMILY_BYTE;
import static com.yahoo.sketches.theta.PreambleUtil.FLAGS_BYTE;
import static com.yahoo.sketches.theta.PreambleUtil.LG_ARR_LONGS_BYTE;
import static com.yahoo.sketches.theta.PreambleUtil.LG_NOM_LONGS_BYTE;
import static com.yahoo.sketches.theta.PreambleUtil.LG_RESIZE_FACTOR_BYTE;
import static com.yahoo.sketches.theta.PreambleUtil.MAX_THETA_LONG;
import static com.yahoo.sketches.theta.PreambleUtil.P_FLOAT;
import static com.yahoo.sketches.theta.PreambleUtil.RETAINED_ENTRIES_INT;
import static com.yahoo.sketches.theta.PreambleUtil.SEED_HASH_SHORT;
import static com.yahoo.sketches.theta.PreambleUtil.THETA_LONG;
import static com.yahoo.sketches.theta.PreambleUtil.checkSeedHashes;
import static com.yahoo.sketches.theta.PreambleUtil.computeSeedHash;
import static com.yahoo.sketches.theta.UpdateReturnState.InsertedCountIncremented;
import static com.yahoo.sketches.theta.UpdateReturnState.RejectedDuplicate;
import static com.yahoo.sketches.theta.UpdateReturnState.RejectedOverTheta;
import static java.lang.Math.max;
import static java.lang.Math.min;
import com.yahoo.sketches.Family;
import com.yahoo.sketches.memory.Memory;
/**
* @author Lee Rhodes
* @author Kevin Lang
*/
class HeapQuickSelectSketch extends HeapUpdateSketch { //UpdateSketch implements UpdateInternal, SetArgument {
  static final int HQS_MIN_LG_ARR_LONGS = 5; //The smallest Log2 cache size allowed; => 32.
  static final int HQS_MIN_LG_NOM_LONGS = 4; //The smallest Log2 nom entries allowed; => 16.
  static final double HQS_REBUILD_THRESHOLD = 15.0 / 16.0; //rebuild when the final-size table is this full
  static final double HQS_RESIZE_THRESHOLD = .5; //tuned for speed
  private final Family MY_FAMILY; //QUICKSELECT, or UNION when acting as the union gadget
  private final int preambleLongs_; //size of the serialized preamble, in longs
  private long[] cache_; //the open-addressing hash table of retained hash values
  private int lgArrLongs_; //log2 of the current length of cache_
  private int hashTableThreshold_; //never serialized
  private int curCount_; //number of retained entries in cache_
  private long thetaLong_; //hashes >= thetaLong_ are rejected by hashUpdate()
  private boolean empty_; //true until the first call to hashUpdate()
  private boolean dirty_; //never assigned true in this class -- presumably for subclass/union use; TODO confirm
  /**
   * Construct a new sketch on the java heap.
   *
   * @param lgNomLongs <a href="{@docRoot}/resources/dictionary.html#lgNomLogs">See lgNomLongs</a>.
   * @param seed <a href="{@docRoot}/resources/dictionary.html#seed">See seed</a>
   * @param p <a href="{@docRoot}/resources/dictionary.html#p">See Sampling Probability, <i>p</i></a>
   * @param rf <a href="{@docRoot}/resources/dictionary.html#resizeFactor">See Resize Factor</a>
   * @param unionGadget true if this sketch is implementing the Union gadget function.
   * Otherwise, it is behaving as a normal QuickSelectSketch.
   */
  HeapQuickSelectSketch(int lgNomLongs, long seed, float p, ResizeFactor rf, boolean unionGadget) {
    super(lgNomLongs,
        seed,
        p,
        rf);
    if (lgNomLongs_ < HQS_MIN_LG_NOM_LONGS) throw new IllegalArgumentException(
        "This sketch requires a minimum nominal entries of "+(1 << HQS_MIN_LG_NOM_LONGS));
    //the chosen family also determines the preamble size written by toByteArray()
    if (unionGadget) {
      preambleLongs_ = Family.UNION.getMinPreLongs();
      MY_FAMILY = Family.UNION;
    }
    else {
      preambleLongs_ = Family.QUICKSELECT.getMinPreLongs();
      MY_FAMILY = Family.QUICKSELECT;
    }
    //start the hash table small; it grows by the resize factor up to 2^(lgNomLongs+1)
    lgArrLongs_ = startingSubMultiple(lgNomLongs_+1, rf, HQS_MIN_LG_ARR_LONGS);
    cache_ = new long[1 << lgArrLongs_];
    hashTableThreshold_ = setHashTableThreshold(lgNomLongs_, lgArrLongs_);
    empty_ = true; //other flags: bigEndian = readOnly = compact = ordered = false;
    curCount_ = 0;
    thetaLong_ = (long)(p * MAX_THETA_LONG); //sampling probability p scales the initial theta
    dirty_ = false;
  }
  /**
   * Heapify a sketch from a Memory UpdateSketch or Union object
   * containing sketch data.
   * @param srcMem The source Memory object.
   * <a href="{@docRoot}/resources/dictionary.html#mem">See Memory</a>
   * @param seed <a href="{@docRoot}/resources/dictionary.html#seed">See seed</a>
   */
  HeapQuickSelectSketch(Memory srcMem, long seed) {
    super(
        srcMem.getByte(LG_NOM_LONGS_BYTE),
        seed,
        srcMem.getFloat(P_FLOAT),
        ResizeFactor.getRF(srcMem.getByte(LG_RESIZE_FACTOR_BYTE) >>> 6)
    );
    short seedHashMem = srcMem.getShort(SEED_HASH_SHORT); //check for seed conflict
    short seedHashArg = computeSeedHash(seed);
    checkSeedHashes(seedHashMem, seedHashArg); //throws if the image was built with a different seed
    int familyID = srcMem.getByte(FAMILY_BYTE);
    if (familyID == Family.UNION.getID()) {
      preambleLongs_ = Family.UNION.getMinPreLongs() & 0X3F;
      MY_FAMILY = Family.UNION;
    }
    else {
      preambleLongs_ = Family.QUICKSELECT.getMinPreLongs() & 0X3F;
      MY_FAMILY = Family.QUICKSELECT;
    }
    //restore the remaining state fields from the serialized preamble
    lgArrLongs_ = srcMem.getByte(LG_ARR_LONGS_BYTE);
    hashTableThreshold_ = setHashTableThreshold(lgNomLongs_, lgArrLongs_);
    curCount_ = srcMem.getInt(RETAINED_ENTRIES_INT);
    thetaLong_ = srcMem.getLong(THETA_LONG);
    empty_ = srcMem.isAnyBitsSet(FLAGS_BYTE, (byte) EMPTY_FLAG_MASK);
    dirty_ = false;
    cache_ = new long[1 << lgArrLongs_];
    srcMem.getLongArray(preambleLongs_ << 3, cache_, 0, 1 << lgArrLongs_); //read in as hash table
  }
  //Sketch
  @Override
  public int getRetainedEntries(boolean valid) {
    return curCount_;
  }
  @Override
  public boolean isEmpty() {
    return empty_;
  }
  @Override
  public byte[] toByteArray() {
    return toByteArray(preambleLongs_, (byte) MY_FAMILY.getID());
  }
  //UpdateSketch
  @Override
  public UpdateSketch rebuild() {
    //only worth rebuilding when more than the nominal number of entries are retained
    if (getRetainedEntries(true) > (1 << getLgNomLongs())) {
      quickSelectAndRebuild();
    }
    return this;
  }
  @Override
  public final void reset() {
    //shrink the hash table back to its starting size, reusing the array when it already matches
    int lgArrLongsSM = startingSubMultiple(lgNomLongs_+1, rf_, HQS_MIN_LG_ARR_LONGS);
    if (lgArrLongsSM == lgArrLongs_) {
      int arrLongs = cache_.length;
      assert (1 << lgArrLongs_) == arrLongs;
      java.util.Arrays.fill(cache_, 0L);
    }
    else {
      cache_ = new long[1 << lgArrLongsSM];
      lgArrLongs_ = lgArrLongsSM;
    }
    hashTableThreshold_ = setHashTableThreshold(lgNomLongs_, lgArrLongs_);
    empty_ = true;
    curCount_ = 0;
    thetaLong_ = (long)(p_ * MAX_THETA_LONG);
  }
  //restricted methods
  @Override
  int getPreambleLongs() {
    return preambleLongs_;
  }
  //Set Arguments
  @Override
  long[] getCache() {
    return cache_;
  }
  @Override
  long getThetaLong() {
    return thetaLong_;
  }
  @Override
  boolean isDirty() {
    return dirty_;
  }
  //Update Internals
  @Override
  int getLgArrLongs() {
    return lgArrLongs_;
  }
  /**
   * All potential updates converge here.
   * <p>Don't ever call this unless you really know what you are doing!</p>
   *
   * @param hash the given input hash value. It should never be zero.
   * @return <a href="{@docRoot}/resources/dictionary.html#updateReturnState">See Update Return State</a>
   */
  @Override
  UpdateReturnState hashUpdate(long hash) {
    assert (hash > 0L): "Corruption: negative hashes should not happen. ";
    empty_ = false;
    //The over-theta test
    if (hash >= thetaLong_) {
      // very very unlikely that hash == Long.MAX_VALUE. It is ignored just as zero is ignored.
      return RejectedOverTheta; //signal that hash was rejected due to theta.
    }
    //The duplicate/inserted tests
    boolean inserted = HashOperations.hashInsert(cache_, lgArrLongs_, hash);
    if (inserted) {
      curCount_++;
      if (curCount_ > hashTableThreshold_) {
        //must rebuild or resize
        if (lgArrLongs_ <= lgNomLongs_) { //resize
          resizeCache();
        }
        else { //rebuild
          //Already at tgt size, must rebuild
          assert (lgArrLongs_ == lgNomLongs_ + 1) : "lgArr: " + lgArrLongs_ + ", lgNom: " + lgNomLongs_;
          quickSelectAndRebuild(); //Changes thetaLong_, curCount_
        }
      }
      return InsertedCountIncremented;
    }
    return RejectedDuplicate;
  }
  //Private
  //Must resize. Changes lgArrLongs_ only. theta doesn't change, count doesn't change.
  // Used by hashUpdate()
  private final void resizeCache() {
    int lgTgtLongs = lgNomLongs_ + 1;
    int lgDeltaLongs = lgTgtLongs - lgArrLongs_;
    //grow by the resize factor, but never past the target size and always by at least 1 bit
    int lgResizeFactor = max(min(rf_.lg(), lgDeltaLongs), 1); //rf_.lg() could be 0
    lgArrLongs_ += lgResizeFactor; // new tgt size
    long[] tgtArr = new long[1 << lgArrLongs_];
    //rehash every retained entry into the larger table
    int newCount = HashOperations.hashArrayInsert(cache_, tgtArr, lgArrLongs_, thetaLong_);
    assert newCount == curCount_; //Assumes no dirty values.
    curCount_ = newCount;
    cache_ = tgtArr;
    hashTableThreshold_ = setHashTableThreshold(lgNomLongs_, lgArrLongs_);
  }
  //array stays the same size. Changes theta and thus count
  private final void quickSelectAndRebuild() {
    int arrLongs = 1 << lgArrLongs_;
    int pivot = (1 << lgNomLongs_) + 1; // pivot for QS
    //the (k+1)th smallest retained hash becomes the new, lower theta
    thetaLong_ = selectExcludingZeros(cache_, curCount_, pivot); //changes cache_
    // now we rebuild to clean up dirty data, update count
    long[] tgtArr = new long[arrLongs];
    curCount_ = HashOperations.hashArrayInsert(cache_, tgtArr, lgArrLongs_, thetaLong_);
    cache_ = tgtArr;
    //hashTableThreshold stays the same
  }
  /**
   * Returns the cardinality limit given the current size of the hash table array.
   *
   * @param lgNomLongs <a href="{@docRoot}/resources/dictionary.html#lgNomLongs">See lgNomLongs</a>.
   * @param lgArrLongs <a href="{@docRoot}/resources/dictionary.html#lgArrLongs">See lgArrLongs</a>.
   * @return the hash table threshold
   */
  static final int setHashTableThreshold(final int lgNomLongs, final int lgArrLongs) {
    //while the table can still grow: resize at 50% full; at final size: rebuild at 15/16 full
    double fraction = (lgArrLongs <= lgNomLongs) ? HQS_RESIZE_THRESHOLD : HQS_REBUILD_THRESHOLD;
    return (int) Math.floor(fraction * (1 << lgArrLongs));
  }
}
| |
/*
* Copyright 2014 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License, version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package io.netty.handler.codec.http2;
import static io.netty.handler.codec.http2.Http2CodecUtil.CONNECTION_STREAM_ID;
import static io.netty.handler.codec.http2.Http2CodecUtil.DEFAULT_WINDOW_SIZE;
import static io.netty.handler.codec.http2.Http2CodecUtil.MAX_INITIAL_WINDOW_SIZE;
import static io.netty.handler.codec.http2.Http2CodecUtil.MIN_INITIAL_WINDOW_SIZE;
import static io.netty.handler.codec.http2.Http2Error.FLOW_CONTROL_ERROR;
import static io.netty.handler.codec.http2.Http2Error.INTERNAL_ERROR;
import static io.netty.handler.codec.http2.Http2Exception.connectionError;
import static io.netty.handler.codec.http2.Http2Exception.streamError;
import static io.netty.util.internal.ObjectUtil.checkNotNull;
import static java.lang.Math.max;
import static java.lang.Math.min;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.http2.Http2Exception.CompositeStreamException;
import io.netty.handler.codec.http2.Http2Exception.StreamException;
import io.netty.util.internal.PlatformDependent;
/**
* Basic implementation of {@link Http2LocalFlowController}.
*/
public class DefaultHttp2LocalFlowController implements Http2LocalFlowController {
    /**
     * The default ratio of window size to initial window size below which a {@code WINDOW_UPDATE}
     * is sent to expand the window.
     */
    public static final float DEFAULT_WINDOW_UPDATE_RATIO = 0.5f;
    private final Http2Connection connection;
    private final Http2FrameWriter frameWriter;
    // Per-stream property key under which each stream's FlowState is stored.
    private final Http2Connection.PropertyKey stateKey;
    // Set via channelHandlerContext(); may be null until the handler is added to a pipeline.
    private ChannelHandlerContext ctx;
    // Global window update ratio applied to newly created streams.
    private volatile float windowUpdateRatio;
    private volatile int initialWindowSize = DEFAULT_WINDOW_SIZE;
    public DefaultHttp2LocalFlowController(Http2Connection connection, Http2FrameWriter frameWriter) {
        this(connection, frameWriter, DEFAULT_WINDOW_UPDATE_RATIO);
    }
    public DefaultHttp2LocalFlowController(Http2Connection connection,
            Http2FrameWriter frameWriter, float windowUpdateRatio) {
        this.connection = checkNotNull(connection, "connection");
        this.frameWriter = checkNotNull(frameWriter, "frameWriter");
        windowUpdateRatio(windowUpdateRatio); // also validates that the ratio lies in (0, 1)
        // Add a flow state for the connection.
        stateKey = connection.newKey();
        connection.connectionStream()
            .setProperty(stateKey, new DefaultState(connection.connectionStream(), initialWindowSize));
        // Register for notification of new streams.
        connection.addListener(new Http2ConnectionAdapter() {
            @Override
            public void onStreamAdded(Http2Stream stream) {
                // Unconditionally use the reduced flow control state because it requires no object allocation
                // and the DefaultFlowState will be allocated in onStreamActive.
                stream.setProperty(stateKey, REDUCED_FLOW_STATE);
            }
            @Override
            public void onStreamActive(Http2Stream stream) {
                // Need to be sure the stream's initial window is adjusted for SETTINGS
                // frames which may have been exchanged while it was in IDLE
                stream.setProperty(stateKey, new DefaultState(stream, initialWindowSize));
            }
            @Override
            public void onStreamClosed(Http2Stream stream) {
                try {
                    // When a stream is closed, consume any remaining bytes so that they
                    // are restored to the connection window.
                    FlowState state = state(stream);
                    int unconsumedBytes = state.unconsumedBytes();
                    if (ctx != null && unconsumedBytes > 0) {
                        connectionState().consumeBytes(unconsumedBytes);
                        state.consumeBytes(unconsumedBytes);
                    }
                } catch (Http2Exception e) {
                    PlatformDependent.throwException(e);
                } finally {
                    // Unconditionally reduce the amount of memory required for flow control because there is no
                    // object allocation costs associated with doing so and the stream will not have any more
                    // local flow control state to keep track of anymore.
                    stream.setProperty(stateKey, REDUCED_FLOW_STATE);
                }
            }
        });
    }
    @Override
    public void channelHandlerContext(ChannelHandlerContext ctx) {
        this.ctx = ctx;
    }
    @Override
    public void initialWindowSize(int newWindowSize) throws Http2Exception {
        // Adjust every active stream's window by the difference from the previous initial size.
        int delta = newWindowSize - initialWindowSize;
        initialWindowSize = newWindowSize;
        WindowUpdateVisitor visitor = new WindowUpdateVisitor(delta);
        connection.forEachActiveStream(visitor);
        visitor.throwIfError();
    }
    @Override
    public int initialWindowSize() {
        return initialWindowSize;
    }
    @Override
    public int windowSize(Http2Stream stream) {
        return state(stream).windowSize();
    }
    @Override
    public int initialWindowSize(Http2Stream stream) {
        return state(stream).initialWindowSize();
    }
    @Override
    public void incrementWindowSize(Http2Stream stream, int delta) throws Http2Exception {
        FlowState state = state(stream);
        // Just add the delta to the stream-specific initial window size so that the next time the window
        // expands it will grow to the new initial size.
        state.incrementInitialStreamWindow(delta);
        state.writeWindowUpdateIfNeeded();
    }
    @Override
    public boolean consumeBytes(Http2Stream stream, int numBytes) throws Http2Exception {
        if (numBytes < 0) {
            throw new IllegalArgumentException("numBytes must not be negative");
        }
        if (numBytes == 0) {
            return false;
        }
        // Streams automatically consume all remaining bytes when they are closed, so just ignore
        // if already closed.
        if (stream != null && !isClosed(stream)) {
            if (stream.id() == CONNECTION_STREAM_ID) {
                throw new UnsupportedOperationException("Returning bytes for the connection window is not supported");
            }
            // Consume from the connection window first, then the stream window; report whether
            // either consumption triggered a WINDOW_UPDATE.
            boolean windowUpdateSent = connectionState().consumeBytes(numBytes);
            windowUpdateSent |= state(stream).consumeBytes(numBytes);
            return windowUpdateSent;
        }
        return false;
    }
    @Override
    public int unconsumedBytes(Http2Stream stream) {
        return state(stream).unconsumedBytes();
    }
    private static void checkValidRatio(float ratio) {
        // The ratio must lie strictly within (0, 1).
        if (Double.compare(ratio, 0.0) <= 0 || Double.compare(ratio, 1.0) >= 0) {
            throw new IllegalArgumentException("Invalid ratio: " + ratio);
        }
    }
    /**
     * The window update ratio is used to determine when a window update must be sent. If the ratio
     * of bytes processed since the last update has met or exceeded this ratio then a window update will
     * be sent. This is the global window update ratio that will be used for new streams.
     * @param ratio the ratio to use when checking if a {@code WINDOW_UPDATE} is determined necessary for new streams.
     * @throws IllegalArgumentException If the ratio is out of bounds (0, 1).
     */
    public void windowUpdateRatio(float ratio) {
        checkValidRatio(ratio);
        windowUpdateRatio = ratio;
    }
    /**
     * The window update ratio is used to determine when a window update must be sent. If the ratio
     * of bytes processed since the last update has met or exceeded this ratio then a window update will
     * be sent. This is the global window update ratio that will be used for new streams.
     */
    public float windowUpdateRatio() {
        return windowUpdateRatio;
    }
    /**
     * The window update ratio is used to determine when a window update must be sent. If the ratio
     * of bytes processed since the last update has met or exceeded this ratio then a window update will
     * be sent. This window update ratio will only be applied to {@code streamId}.
     * <p>
     * Note it is the responsibility of the caller to ensure that the
     * initial {@code SETTINGS} frame is sent before this is called. It would
     * be considered a {@link Http2Error#PROTOCOL_ERROR} if a {@code WINDOW_UPDATE}
     * was generated by this method before the initial {@code SETTINGS} frame is sent.
     * @param stream the stream for which {@code ratio} applies to.
     * @param ratio the ratio to use when checking if a {@code WINDOW_UPDATE} is determined necessary.
     * @throws Http2Exception If a protocol-error occurs while generating {@code WINDOW_UPDATE} frames
     */
    public void windowUpdateRatio(Http2Stream stream, float ratio) throws Http2Exception {
        checkValidRatio(ratio);
        FlowState state = state(stream);
        state.windowUpdateRatio(ratio);
        state.writeWindowUpdateIfNeeded();
    }
    /**
     * The window update ratio is used to determine when a window update must be sent. If the ratio
     * of bytes processed since the last update has met or exceeded this ratio then a window update will
     * be sent. This window update ratio will only be applied to {@code streamId}.
     * @throws Http2Exception If no stream corresponding to {@code stream} could be found.
     */
    public float windowUpdateRatio(Http2Stream stream) throws Http2Exception {
        return state(stream).windowUpdateRatio();
    }
    @Override
    public void receiveFlowControlledFrame(Http2Stream stream, ByteBuf data, int padding,
            boolean endOfStream) throws Http2Exception {
        // Padding counts against the flow control window just like the frame payload does.
        int dataLength = data.readableBytes() + padding;
        // Apply the connection-level flow control
        FlowState connectionState = connectionState();
        connectionState.receiveFlowControlledFrame(dataLength);
        if (stream != null && !isClosed(stream)) {
            // Apply the stream-level flow control
            FlowState state = state(stream);
            state.endOfStream(endOfStream);
            state.receiveFlowControlledFrame(dataLength);
        } else if (dataLength > 0) {
            // Immediately consume the bytes for the connection window.
            connectionState.consumeBytes(dataLength);
        }
    }
    private FlowState connectionState() {
        return connection.connectionStream().getProperty(stateKey);
    }
    private FlowState state(Http2Stream stream) {
        checkNotNull(stream, "stream");
        return stream.getProperty(stateKey);
    }
    private static boolean isClosed(Http2Stream stream) {
        return stream.state() == Http2Stream.State.CLOSED;
    }
    /**
     * Flow control window state for an individual stream.
     */
    private final class DefaultState implements FlowState {
        private final Http2Stream stream;
        /**
         * The actual flow control window that is decremented as soon as {@code DATA} arrives.
         */
        private int window;
        /**
         * A view of {@link #window} that is used to determine when to send {@code WINDOW_UPDATE}
         * frames. Decrementing this window for received {@code DATA} frames is delayed until the
         * application has indicated that the data has been fully processed. This prevents sending
         * a {@code WINDOW_UPDATE} until the number of processed bytes drops below the threshold.
         */
        private int processedWindow;
        /**
         * This is what is used to determine how many bytes need to be returned relative to {@link #processedWindow}.
         * Each stream has their own initial window size.
         */
        private volatile int initialStreamWindowSize;
        /**
         * This is used to determine when {@link #processedWindow} is sufficiently far away from
         * {@link #initialStreamWindowSize} such that a {@code WINDOW_UPDATE} should be sent.
         * Each stream has their own window update ratio.
         */
        private volatile float streamWindowUpdateRatio;
        /**
         * The negative delta applied by an outgoing SETTINGS frame that reduced the window;
         * {@link #window} may legitimately drop this far below zero until a WINDOW_UPDATE is sent.
         */
        private int lowerBound;
        private boolean endOfStream;
        public DefaultState(Http2Stream stream, int initialWindowSize) {
            this.stream = stream;
            window(initialWindowSize);
            streamWindowUpdateRatio = windowUpdateRatio;
        }
        @Override
        public void window(int initialWindowSize) {
            window = processedWindow = initialStreamWindowSize = initialWindowSize;
        }
        @Override
        public int windowSize() {
            return window;
        }
        @Override
        public int initialWindowSize() {
            return initialStreamWindowSize;
        }
        @Override
        public void endOfStream(boolean endOfStream) {
            this.endOfStream = endOfStream;
        }
        @Override
        public float windowUpdateRatio() {
            return streamWindowUpdateRatio;
        }
        @Override
        public void windowUpdateRatio(float ratio) {
            streamWindowUpdateRatio = ratio;
        }
        @Override
        public void incrementInitialStreamWindow(int delta) {
            // Clip the delta so that the resulting initialStreamWindowSize falls within the allowed range.
            int newValue = (int) min(MAX_INITIAL_WINDOW_SIZE,
                    max(MIN_INITIAL_WINDOW_SIZE, initialStreamWindowSize + (long) delta));
            delta = newValue - initialStreamWindowSize;
            initialStreamWindowSize += delta;
        }
        @Override
        public void incrementFlowControlWindows(int delta) throws Http2Exception {
            // Guard against integer overflow of the window.
            if (delta > 0 && window > MAX_INITIAL_WINDOW_SIZE - delta) {
                throw streamError(stream.id(), FLOW_CONTROL_ERROR,
                        "Flow control window overflowed for stream: %d", stream.id());
            }
            window += delta;
            processedWindow += delta;
            lowerBound = delta < 0 ? delta : 0;
        }
        @Override
        public void receiveFlowControlledFrame(int dataLength) throws Http2Exception {
            assert dataLength >= 0;
            // Apply the delta. Even if we throw an exception we want to have taken this delta into account.
            window -= dataLength;
            // Window size can become negative if we sent a SETTINGS frame that reduces the
            // size of the transfer window after the peer has written data frames.
            // The value is bounded by the length that SETTINGS frame decrease the window.
            // This difference is stored for the connection when writing the SETTINGS frame
            // and is cleared once we send a WINDOW_UPDATE frame.
            if (window < lowerBound) {
                throw streamError(stream.id(), FLOW_CONTROL_ERROR,
                        "Flow control window exceeded for stream: %d", stream.id());
            }
        }
        private void returnProcessedBytes(int delta) throws Http2Exception {
            // processedWindow must never drop below window: that would mean more bytes were
            // returned than were actually received.
            if (processedWindow - delta < window) {
                throw streamError(stream.id(), INTERNAL_ERROR,
                        "Attempting to return too many bytes for stream %d", stream.id());
            }
            processedWindow -= delta;
        }
        @Override
        public boolean consumeBytes(int numBytes) throws Http2Exception {
            // Return the bytes processed and update the window.
            returnProcessedBytes(numBytes);
            return writeWindowUpdateIfNeeded();
        }
        @Override
        public int unconsumedBytes() {
            return processedWindow - window;
        }
        @Override
        public boolean writeWindowUpdateIfNeeded() throws Http2Exception {
            if (endOfStream || initialStreamWindowSize <= 0) {
                return false;
            }
            // Send a WINDOW_UPDATE once the processed window has dropped to or below the
            // configured fraction of the initial window size.
            int threshold = (int) (initialStreamWindowSize * streamWindowUpdateRatio);
            if (processedWindow <= threshold) {
                writeWindowUpdate();
                return true;
            }
            return false;
        }
        /**
         * Called to perform a window update for this stream (or connection). Updates the window size back
         * to the size of the initial window and sends a window update frame to the remote endpoint.
         */
        private void writeWindowUpdate() throws Http2Exception {
            // Expand the window for this stream back to the size of the initial window.
            int deltaWindowSize = initialStreamWindowSize - processedWindow;
            try {
                incrementFlowControlWindows(deltaWindowSize);
            } catch (Throwable t) {
                throw connectionError(INTERNAL_ERROR, t,
                        "Attempting to return too many bytes for stream %d", stream.id());
            }
            // Send a window update for the stream/connection.
            // NOTE(review): assumes ctx has been set via channelHandlerContext() by this point — confirm.
            frameWriter.writeWindowUpdate(ctx, stream.id(), deltaWindowSize, ctx.newPromise());
        }
    }
    /**
     * The local flow control state for a single stream that is in a state where flow controlled
     * frames cannot be exchanged (e.g. before it becomes active, or after it is closed).
     */
    private static final FlowState REDUCED_FLOW_STATE = new FlowState() {
        @Override
        public int windowSize() {
            return 0;
        }
        @Override
        public int initialWindowSize() {
            return 0;
        }
        @Override
        public void window(int initialWindowSize) {
            throw new UnsupportedOperationException();
        }
        @Override
        public void incrementInitialStreamWindow(int delta) {
            // This operation needs to be supported during the initial settings exchange when
            // the peer has not yet acknowledged this peer being activated.
        }
        @Override
        public boolean writeWindowUpdateIfNeeded() throws Http2Exception {
            throw new UnsupportedOperationException();
        }
        @Override
        public boolean consumeBytes(int numBytes) throws Http2Exception {
            return false;
        }
        @Override
        public int unconsumedBytes() {
            return 0;
        }
        @Override
        public float windowUpdateRatio() {
            throw new UnsupportedOperationException();
        }
        @Override
        public void windowUpdateRatio(float ratio) {
            throw new UnsupportedOperationException();
        }
        @Override
        public void receiveFlowControlledFrame(int dataLength) throws Http2Exception {
            throw new UnsupportedOperationException();
        }
        @Override
        public void incrementFlowControlWindows(int delta) throws Http2Exception {
            // This operation needs to be supported during the initial settings exchange when
            // the peer has not yet acknowledged this peer being activated.
        }
        @Override
        public void endOfStream(boolean endOfStream) {
            throw new UnsupportedOperationException();
        }
    };
    /**
     * An abstraction which provides specific extensions used by local flow control.
     */
    private interface FlowState {
        int windowSize();
        int initialWindowSize();
        void window(int initialWindowSize);
        /**
         * Increment the initial window size for this stream.
         * @param delta The amount to increase the initial window size by.
         */
        void incrementInitialStreamWindow(int delta);
        /**
         * Updates the flow control window for this stream if it is appropriate.
         *
         * @return true if {@code WINDOW_UPDATE} was written, false otherwise.
         */
        boolean writeWindowUpdateIfNeeded() throws Http2Exception;
        /**
         * Indicates that the application has consumed {@code numBytes} from the connection or stream and is
         * ready to receive more data.
         *
         * @param numBytes the number of bytes to be returned to the flow control window.
         * @return true if {@code WINDOW_UPDATE} was written, false otherwise.
         * @throws Http2Exception If more bytes are returned than were received.
         */
        boolean consumeBytes(int numBytes) throws Http2Exception;
        int unconsumedBytes();
        float windowUpdateRatio();
        void windowUpdateRatio(float ratio);
        /**
         * A flow control event has occurred and we should decrement the amount of available bytes for this stream.
         * @param dataLength The amount of data for which this stream is no longer eligible to use for flow control.
         * @throws Http2Exception If too much data is used relative to how much is available.
         */
        void receiveFlowControlledFrame(int dataLength) throws Http2Exception;
        /**
         * Increment the windows which are used to determine how many bytes have been processed.
         * @param delta The amount to increment the window by.
         * @throws Http2Exception if integer overflow occurs on the window.
         */
        void incrementFlowControlWindows(int delta) throws Http2Exception;
        void endOfStream(boolean endOfStream);
    }
    /**
     * Provides a means to iterate over all active streams and increment the flow control windows.
     */
    private final class WindowUpdateVisitor implements Http2StreamVisitor {
        // Collects per-stream errors so that iteration can continue past a failing stream.
        private CompositeStreamException compositeException;
        private final int delta;
        public WindowUpdateVisitor(int delta) {
            this.delta = delta;
        }
        @Override
        public boolean visit(Http2Stream stream) throws Http2Exception {
            try {
                // Increment flow control window first so state will be consistent if overflow is detected.
                FlowState state = state(stream);
                state.incrementFlowControlWindows(delta);
                state.incrementInitialStreamWindow(delta);
            } catch (StreamException e) {
                if (compositeException == null) {
                    compositeException = new CompositeStreamException(e.error(), 4);
                }
                compositeException.add(e);
            }
            return true;
        }
        public void throwIfError() throws CompositeStreamException {
            if (compositeException != null) {
                throw compositeException;
            }
        }
    }
}
| |
// Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The SFC licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.openqa.selenium;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import static org.junit.Assume.assumeFalse;
import static org.openqa.selenium.Platform.ANDROID;
import static org.openqa.selenium.WaitingConditions.newWindowIsOpened;
import static org.openqa.selenium.WaitingConditions.windowHandleCountToBe;
import static org.openqa.selenium.WaitingConditions.windowHandleCountToBeGreaterThan;
import static org.openqa.selenium.support.ui.ExpectedConditions.alertIsPresent;
import static org.openqa.selenium.testing.Ignore.Driver.HTMLUNIT;
import static org.openqa.selenium.testing.Ignore.Driver.IE;
import static org.openqa.selenium.testing.Ignore.Driver.MARIONETTE;
import static org.openqa.selenium.testing.Ignore.Driver.REMOTE;
import com.google.common.collect.Sets;
import org.junit.Test;
import org.openqa.selenium.support.ui.ExpectedConditions;
import org.openqa.selenium.testing.Ignore;
import org.openqa.selenium.testing.JUnit4TestBase;
import org.openqa.selenium.testing.JavascriptEnabled;
import org.openqa.selenium.testing.NotYetImplemented;
import org.openqa.selenium.testing.TestUtilities;
import org.openqa.selenium.testing.drivers.Browser;
import java.util.Set;
/**
 * Tests for window handling: switching focus between browser windows, closing
 * windows, iterating over window handles, and the {@link NoSuchWindowException}
 * contract when the target window is closed or never existed.
 *
 * <p>Several tests are skipped on Opera/Windows via {@code assumeFalse} because
 * that combination is known to misbehave with popup windows.
 */
public class WindowSwitchingTest extends JUnit4TestBase {

  @NoDriverAfterTest // So that next test never starts with "inside a frame" base state.
  @Test
  public void testShouldSwitchFocusToANewWindowWhenItIsOpenedAndNotStopFutureOperations() {
    assumeFalse(Browser.detect() == Browser.opera &&
                TestUtilities.getEffectivePlatform().is(Platform.WINDOWS));

    driver.get(pages.xhtmlTestPage);
    String current = driver.getWindowHandle();
    Set<String> currentWindowHandles = driver.getWindowHandles();

    driver.findElement(By.linkText("Open new window")).click();
    wait.until(newWindowIsOpened(currentWindowHandles));

    // Focus must stay on the original window until we explicitly switch.
    assertThat(driver.getTitle(), equalTo("XHTML Test Page"));

    driver.switchTo().window("result");
    assertThat(driver.getTitle(), equalTo("We Arrive Here"));

    driver.get(pages.iframePage);
    final String handle = driver.getWindowHandle();
    driver.findElement(By.id("iframe_page_heading"));
    driver.switchTo().frame("iframe1");
    // Switching frames must not change the window handle.
    assertThat(driver.getWindowHandle(), equalTo(handle));

    driver.close();
    driver.switchTo().window(current);
  }

  /** Switching to a window name that does not exist must throw, not hang. */
  @Test
  @Ignore(MARIONETTE)
  public void testShouldThrowNoSuchWindowException() {
    driver.get(pages.xhtmlTestPage);
    String current = driver.getWindowHandle();

    try {
      driver.switchTo().window("invalid name");
      fail("NoSuchWindowException expected");
    } catch (NoSuchWindowException e) {
      // Expected.
    }

    driver.switchTo().window(current);
  }

  /** Asking a closed window for its handle must throw NoSuchWindowException. */
  @Ignore({MARIONETTE})
  @Test
  public void testShouldThrowNoSuchWindowExceptionOnAnAttemptToGetItsHandle() {
    driver.get(pages.xhtmlTestPage);
    String current = driver.getWindowHandle();
    Set<String> currentWindowHandles = driver.getWindowHandles();

    driver.findElement(By.linkText("Open new window")).click();
    wait.until(newWindowIsOpened(currentWindowHandles));

    driver.switchTo().window("result");
    driver.close();

    try {
      driver.getWindowHandle();
      fail("NoSuchWindowException expected");
    } catch (NoSuchWindowException e) {
      // Expected.
    } finally {
      driver.switchTo().window(current);
    }
  }

  /** Any driver-level operation on a closed window must throw NoSuchWindowException. */
  @Ignore({MARIONETTE})
  @Test
  public void testShouldThrowNoSuchWindowExceptionOnAnyOperationIfAWindowIsClosed() {
    driver.get(pages.xhtmlTestPage);
    String current = driver.getWindowHandle();
    Set<String> currentWindowHandles = driver.getWindowHandles();

    driver.findElement(By.linkText("Open new window")).click();
    wait.until(newWindowIsOpened(currentWindowHandles));

    driver.switchTo().window("result");
    driver.close();

    try {
      try {
        driver.getTitle();
        fail("NoSuchWindowException expected");
      } catch (NoSuchWindowException e) {
        // Expected.
      }

      try {
        driver.findElement(By.tagName("body"));
        fail("NoSuchWindowException expected");
      } catch (NoSuchWindowException e) {
        // Expected.
      }
    } finally {
      driver.switchTo().window(current);
    }
  }

  /** Operations on an element located before its window was closed must also throw. */
  @Ignore({MARIONETTE})
  @Test
  public void testShouldThrowNoSuchWindowExceptionOnAnyElementOperationIfAWindowIsClosed() {
    driver.get(pages.xhtmlTestPage);
    String current = driver.getWindowHandle();
    Set<String> currentWindowHandles = driver.getWindowHandles();

    driver.findElement(By.linkText("Open new window")).click();
    wait.until(newWindowIsOpened(currentWindowHandles));

    driver.switchTo().window("result");
    WebElement body = driver.findElement(By.tagName("body"));
    driver.close();

    try {
      body.getText();
      fail("NoSuchWindowException expected");
    } catch (NoSuchWindowException e) {
      // Expected.
    } finally {
      driver.switchTo().window(current);
    }
  }

  @NeedsFreshDriver
  @NoDriverAfterTest
  @Ignore({IE, MARIONETTE, REMOTE})
  @Test
  public void testShouldBeAbleToIterateOverAllOpenWindows() {
    driver.get(pages.xhtmlTestPage);
    driver.findElement(By.name("windowOne")).click();
    driver.findElement(By.name("windowTwo")).click();
    wait.until(windowHandleCountToBeGreaterThan(2));

    Set<String> allWindowHandles = driver.getWindowHandles();

    // There should be three windows. We should also see each of the window titles at least once.
    Set<String> seenHandles = Sets.newHashSet();
    for (String handle : allWindowHandles) {
      // Each handle must be unique, and switching to each must succeed.
      assertFalse(seenHandles.contains(handle));
      driver.switchTo().window(handle);
      seenHandles.add(handle);
    }

    assertEquals(3, allWindowHandles.size());
  }

  @JavascriptEnabled
  @Test
  @Ignore(MARIONETTE)
  @NotYetImplemented(HTMLUNIT)
  public void testClickingOnAButtonThatClosesAnOpenWindowDoesNotCauseTheBrowserToHang()
      throws Exception {
    assumeFalse(Browser.detect() == Browser.opera &&
                TestUtilities.getEffectivePlatform().is(Platform.WINDOWS));

    driver.get(pages.xhtmlTestPage);

    // Plain booleans: no reason to box these flags.
    boolean isIEDriver = TestUtilities.isInternetExplorer(driver);
    boolean isIE6 = TestUtilities.isIe6(driver);

    String currentHandle = driver.getWindowHandle();
    Set<String> currentWindowHandles = driver.getWindowHandles();

    driver.findElement(By.name("windowThree")).click();
    wait.until(newWindowIsOpened(currentWindowHandles));

    driver.switchTo().window("result");

    // TODO Remove sleep when https://code.google.com/p/chromedriver/issues/detail?id=1044 is fixed.
    if (TestUtilities.isChrome(driver) && TestUtilities.getEffectivePlatform(driver).is(ANDROID)) {
      Thread.sleep(1000);
    }

    try {
      wait.until(ExpectedConditions.presenceOfElementLocated(By.id("close")));
      driver.findElement(By.id("close")).click();

      // Pre-IE7 versions of IE do not show the onbeforeunload alert.
      if (isIEDriver && !isIE6) {
        Alert alert = wait.until(alertIsPresent());
        alert.accept();
      }

      // If we make it this far, we're all good.
    } finally {
      driver.switchTo().window(currentHandle);
      driver.findElement(By.id("linkId"));
    }
  }

  @JavascriptEnabled
  @Test
  @Ignore(MARIONETTE)
  @NotYetImplemented(HTMLUNIT)
  public void testCanCallGetWindowHandlesAfterClosingAWindow() throws Exception {
    assumeFalse(Browser.detect() == Browser.opera &&
                TestUtilities.getEffectivePlatform().is(Platform.WINDOWS));

    driver.get(pages.xhtmlTestPage);

    // Plain booleans: no reason to box these flags.
    boolean isIEDriver = TestUtilities.isInternetExplorer(driver);
    boolean isIE6 = TestUtilities.isIe6(driver);

    String currentHandle = driver.getWindowHandle();
    Set<String> currentWindowHandles = driver.getWindowHandles();

    driver.findElement(By.name("windowThree")).click();
    wait.until(newWindowIsOpened(currentWindowHandles));

    driver.switchTo().window("result");
    int allWindowHandles = driver.getWindowHandles().size();

    // TODO Remove sleep when https://code.google.com/p/chromedriver/issues/detail?id=1044 is fixed.
    if (TestUtilities.isChrome(driver) && TestUtilities.getEffectivePlatform(driver).is(ANDROID)) {
      Thread.sleep(1000);
    }

    try {
      wait.until(ExpectedConditions.presenceOfElementLocated(By.id("close"))).click();

      // Pre-IE7 versions of IE do not show the onbeforeunload alert.
      if (isIEDriver && !isIE6) {
        Alert alert = wait.until(alertIsPresent());
        alert.accept();
      }

      Set<String> allHandles = wait.until(windowHandleCountToBe(allWindowHandles - 1));
      assertEquals(currentWindowHandles.size(), allHandles.size());
    } finally {
      driver.switchTo().window(currentHandle);
    }
  }

  @Test
  public void testCanObtainAWindowHandle() {
    driver.get(pages.xhtmlTestPage);
    String currentHandle = driver.getWindowHandle();
    assertNotNull(currentHandle);
  }

  @Test
  @Ignore(MARIONETTE)
  public void testFailingToSwitchToAWindowLeavesTheCurrentWindowAsIs() {
    driver.get(pages.xhtmlTestPage);
    String current = driver.getWindowHandle();

    try {
      driver.switchTo().window("i will never exist");
      fail("Should not be able to switch to a non-existent window");
    } catch (NoSuchWindowException e) {
      // expected
    }

    String newHandle = driver.getWindowHandle();
    assertEquals(current, newHandle);
  }

  @NeedsFreshDriver
  @NoDriverAfterTest
  @Test
  @Ignore(MARIONETTE)
  public void testCanCloseWindowWhenMultipleWindowsAreOpen() {
    driver.get(pages.xhtmlTestPage);
    Set<String> currentWindowHandles = driver.getWindowHandles();
    driver.findElement(By.name("windowOne")).click();
    wait.until(newWindowIsOpened(currentWindowHandles));

    Set<String> allWindowHandles = driver.getWindowHandles();

    // There should be two windows. We should also see each of the window titles at least once.
    assertEquals(2, allWindowHandles.size());
    String handle1 = (String) allWindowHandles.toArray()[1];
    driver.switchTo().window(handle1);
    driver.close();

    allWindowHandles = driver.getWindowHandles();
    assertEquals(1, allWindowHandles.size());
  }

  @NeedsFreshDriver
  @NoDriverAfterTest
  @Test
  @Ignore(MARIONETTE)
  public void testCanCloseWindowAndSwitchBackToMainWindow() {
    driver.get(pages.xhtmlTestPage);
    Set<String> currentWindowHandles = driver.getWindowHandles();
    driver.findElement(By.name("windowOne")).click();
    wait.until(newWindowIsOpened(currentWindowHandles));

    Set<String> allWindowHandles = driver.getWindowHandles();

    // There should be two windows. We should also see each of the window titles at least once.
    assertEquals(2, allWindowHandles.size());
    String mainHandle = (String) allWindowHandles.toArray()[0];
    String handle1 = (String) allWindowHandles.toArray()[1];
    driver.switchTo().window(handle1);
    driver.close();

    driver.switchTo().window(mainHandle);

    String newHandle = driver.getWindowHandle();
    assertEquals(mainHandle, newHandle);
  }

  @NeedsFreshDriver
  @NoDriverAfterTest
  @Test
  @Ignore(MARIONETTE)
  public void testClosingOnlyWindowShouldNotCauseTheBrowserToHang() {
    driver.get(pages.xhtmlTestPage);
    driver.close();
  }

  @NeedsFreshDriver
  @NoDriverAfterTest
  @Test
  @Ignore(MARIONETTE)
  public void testShouldFocusOnTheTopMostFrameAfterSwitchingToAWindow() {
    driver.get(appServer.whereIs("window_switching_tests/page_with_frame.html"));

    Set<String> currentWindowHandles = driver.getWindowHandles();
    String mainWindow = driver.getWindowHandle();

    driver.findElement(By.id("a-link-that-opens-a-new-window")).click();
    wait.until(newWindowIsOpened(currentWindowHandles));

    driver.switchTo().frame("myframe");

    driver.switchTo().window("newWindow");
    driver.close();
    driver.switchTo().window(mainWindow);

    // After switching windows, focus must be on the top-level document,
    // so the frame element itself is findable again.
    driver.findElement(By.name("myframe"));
  }
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/container/v1beta1/cluster_service.proto
package com.google.container.v1beta1;
/**
*
*
* <pre>
* A set of Shielded Instance options.
* </pre>
*
* Protobuf type {@code google.container.v1beta1.ShieldedInstanceConfig}
*/
// NOTE(review): protoc-generated message class — regenerate from
// google/container/v1beta1/cluster_service.proto instead of hand-editing.
public final class ShieldedInstanceConfig extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.container.v1beta1.ShieldedInstanceConfig)
    ShieldedInstanceConfigOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ShieldedInstanceConfig.newBuilder() to construct.
  private ShieldedInstanceConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default instance: both bool fields keep their proto3 default (false).
  private ShieldedInstanceConfig() {}

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ShieldedInstanceConfig();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  // Wire-format parsing constructor. Tag 8 = field 1 (bool enable_secure_boot),
  // tag 16 = field 2 (bool enable_integrity_monitoring); anything else is kept
  // in the unknown-field set for round-tripping.
  private ShieldedInstanceConfig(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            // Tag 0 marks end of input.
            done = true;
            break;
          case 8:
            {
              enableSecureBoot_ = input.readBool();
              break;
            }
          case 16:
            {
              enableIntegrityMonitoring_ = input.readBool();
              break;
            }
          default:
            {
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      // Always freeze whatever was parsed, even on failure, so the
      // partially-built message attached to the exception is consistent.
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.container.v1beta1.ClusterServiceProto
        .internal_static_google_container_v1beta1_ShieldedInstanceConfig_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.container.v1beta1.ClusterServiceProto
        .internal_static_google_container_v1beta1_ShieldedInstanceConfig_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.container.v1beta1.ShieldedInstanceConfig.class,
            com.google.container.v1beta1.ShieldedInstanceConfig.Builder.class);
  }

  public static final int ENABLE_SECURE_BOOT_FIELD_NUMBER = 1;
  private boolean enableSecureBoot_;
  /**
   *
   *
   * <pre>
   * Defines whether the instance has Secure Boot enabled.
   * Secure Boot helps ensure that the system only runs authentic software by
   * verifying the digital signature of all boot components, and halting the
   * boot process if signature verification fails.
   * </pre>
   *
   * <code>bool enable_secure_boot = 1;</code>
   *
   * @return The enableSecureBoot.
   */
  @java.lang.Override
  public boolean getEnableSecureBoot() {
    return enableSecureBoot_;
  }

  public static final int ENABLE_INTEGRITY_MONITORING_FIELD_NUMBER = 2;
  private boolean enableIntegrityMonitoring_;
  /**
   *
   *
   * <pre>
   * Defines whether the instance has integrity monitoring enabled.
   * Enables monitoring and attestation of the boot integrity of the instance.
   * The attestation is performed against the integrity policy baseline. This
   * baseline is initially derived from the implicitly trusted boot image when
   * the instance is created.
   * </pre>
   *
   * <code>bool enable_integrity_monitoring = 2;</code>
   *
   * @return The enableIntegrityMonitoring.
   */
  @java.lang.Override
  public boolean getEnableIntegrityMonitoring() {
    return enableIntegrityMonitoring_;
  }

  // Memoized initialization check: -1 = unknown, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  // proto3 serialization: fields at their default value (false) are skipped.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (enableSecureBoot_ != false) {
      output.writeBool(1, enableSecureBoot_);
    }
    if (enableIntegrityMonitoring_ != false) {
      output.writeBool(2, enableIntegrityMonitoring_);
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (enableSecureBoot_ != false) {
      size += com.google.protobuf.CodedOutputStream.computeBoolSize(1, enableSecureBoot_);
    }
    if (enableIntegrityMonitoring_ != false) {
      size += com.google.protobuf.CodedOutputStream.computeBoolSize(2, enableIntegrityMonitoring_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.container.v1beta1.ShieldedInstanceConfig)) {
      return super.equals(obj);
    }
    com.google.container.v1beta1.ShieldedInstanceConfig other =
        (com.google.container.v1beta1.ShieldedInstanceConfig) obj;

    if (getEnableSecureBoot() != other.getEnableSecureBoot()) return false;
    if (getEnableIntegrityMonitoring() != other.getEnableIntegrityMonitoring()) return false;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + ENABLE_SECURE_BOOT_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getEnableSecureBoot());
    hash = (37 * hash) + ENABLE_INTEGRITY_MONITORING_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getEnableIntegrityMonitoring());
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.container.v1beta1.ShieldedInstanceConfig parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.container.v1beta1.ShieldedInstanceConfig parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.container.v1beta1.ShieldedInstanceConfig parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.container.v1beta1.ShieldedInstanceConfig parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.container.v1beta1.ShieldedInstanceConfig parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.container.v1beta1.ShieldedInstanceConfig parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.container.v1beta1.ShieldedInstanceConfig parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.container.v1beta1.ShieldedInstanceConfig parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.container.v1beta1.ShieldedInstanceConfig parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.container.v1beta1.ShieldedInstanceConfig parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.container.v1beta1.ShieldedInstanceConfig parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.container.v1beta1.ShieldedInstanceConfig parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.container.v1beta1.ShieldedInstanceConfig prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * A set of Shielded Instance options.
   * </pre>
   *
   * Protobuf type {@code google.container.v1beta1.ShieldedInstanceConfig}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.container.v1beta1.ShieldedInstanceConfig)
      com.google.container.v1beta1.ShieldedInstanceConfigOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.container.v1beta1.ClusterServiceProto
          .internal_static_google_container_v1beta1_ShieldedInstanceConfig_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.container.v1beta1.ClusterServiceProto
          .internal_static_google_container_v1beta1_ShieldedInstanceConfig_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.container.v1beta1.ShieldedInstanceConfig.class,
              com.google.container.v1beta1.ShieldedInstanceConfig.Builder.class);
    }

    // Construct using com.google.container.v1beta1.ShieldedInstanceConfig.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      // No message/repeated fields here, so there is nothing to force-initialize.
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      enableSecureBoot_ = false;

      enableIntegrityMonitoring_ = false;

      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.container.v1beta1.ClusterServiceProto
          .internal_static_google_container_v1beta1_ShieldedInstanceConfig_descriptor;
    }

    @java.lang.Override
    public com.google.container.v1beta1.ShieldedInstanceConfig getDefaultInstanceForType() {
      return com.google.container.v1beta1.ShieldedInstanceConfig.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.container.v1beta1.ShieldedInstanceConfig build() {
      com.google.container.v1beta1.ShieldedInstanceConfig result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.container.v1beta1.ShieldedInstanceConfig buildPartial() {
      com.google.container.v1beta1.ShieldedInstanceConfig result =
          new com.google.container.v1beta1.ShieldedInstanceConfig(this);
      result.enableSecureBoot_ = enableSecureBoot_;
      result.enableIntegrityMonitoring_ = enableIntegrityMonitoring_;
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.container.v1beta1.ShieldedInstanceConfig) {
        return mergeFrom((com.google.container.v1beta1.ShieldedInstanceConfig) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // proto3 merge semantics: only non-default (true) values from `other` overwrite.
    public Builder mergeFrom(com.google.container.v1beta1.ShieldedInstanceConfig other) {
      if (other == com.google.container.v1beta1.ShieldedInstanceConfig.getDefaultInstance())
        return this;
      if (other.getEnableSecureBoot() != false) {
        setEnableSecureBoot(other.getEnableSecureBoot());
      }
      if (other.getEnableIntegrityMonitoring() != false) {
        setEnableIntegrityMonitoring(other.getEnableIntegrityMonitoring());
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.container.v1beta1.ShieldedInstanceConfig parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Keep whatever was parsed before the failure, then rethrow as IOException.
        parsedMessage =
            (com.google.container.v1beta1.ShieldedInstanceConfig) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private boolean enableSecureBoot_;
    /**
     *
     *
     * <pre>
     * Defines whether the instance has Secure Boot enabled.
     * Secure Boot helps ensure that the system only runs authentic software by
     * verifying the digital signature of all boot components, and halting the
     * boot process if signature verification fails.
     * </pre>
     *
     * <code>bool enable_secure_boot = 1;</code>
     *
     * @return The enableSecureBoot.
     */
    @java.lang.Override
    public boolean getEnableSecureBoot() {
      return enableSecureBoot_;
    }
    /**
     *
     *
     * <pre>
     * Defines whether the instance has Secure Boot enabled.
     * Secure Boot helps ensure that the system only runs authentic software by
     * verifying the digital signature of all boot components, and halting the
     * boot process if signature verification fails.
     * </pre>
     *
     * <code>bool enable_secure_boot = 1;</code>
     *
     * @param value The enableSecureBoot to set.
     * @return This builder for chaining.
     */
    public Builder setEnableSecureBoot(boolean value) {

      enableSecureBoot_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Defines whether the instance has Secure Boot enabled.
     * Secure Boot helps ensure that the system only runs authentic software by
     * verifying the digital signature of all boot components, and halting the
     * boot process if signature verification fails.
     * </pre>
     *
     * <code>bool enable_secure_boot = 1;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearEnableSecureBoot() {

      enableSecureBoot_ = false;
      onChanged();
      return this;
    }

    private boolean enableIntegrityMonitoring_;
    /**
     *
     *
     * <pre>
     * Defines whether the instance has integrity monitoring enabled.
     * Enables monitoring and attestation of the boot integrity of the instance.
     * The attestation is performed against the integrity policy baseline. This
     * baseline is initially derived from the implicitly trusted boot image when
     * the instance is created.
     * </pre>
     *
     * <code>bool enable_integrity_monitoring = 2;</code>
     *
     * @return The enableIntegrityMonitoring.
     */
    @java.lang.Override
    public boolean getEnableIntegrityMonitoring() {
      return enableIntegrityMonitoring_;
    }
    /**
     *
     *
     * <pre>
     * Defines whether the instance has integrity monitoring enabled.
     * Enables monitoring and attestation of the boot integrity of the instance.
     * The attestation is performed against the integrity policy baseline. This
     * baseline is initially derived from the implicitly trusted boot image when
     * the instance is created.
     * </pre>
     *
     * <code>bool enable_integrity_monitoring = 2;</code>
     *
     * @param value The enableIntegrityMonitoring to set.
     * @return This builder for chaining.
     */
    public Builder setEnableIntegrityMonitoring(boolean value) {

      enableIntegrityMonitoring_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Defines whether the instance has integrity monitoring enabled.
     * Enables monitoring and attestation of the boot integrity of the instance.
     * The attestation is performed against the integrity policy baseline. This
     * baseline is initially derived from the implicitly trusted boot image when
     * the instance is created.
     * </pre>
     *
     * <code>bool enable_integrity_monitoring = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearEnableIntegrityMonitoring() {

      enableIntegrityMonitoring_ = false;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.container.v1beta1.ShieldedInstanceConfig)
  }

  // @@protoc_insertion_point(class_scope:google.container.v1beta1.ShieldedInstanceConfig)
  private static final com.google.container.v1beta1.ShieldedInstanceConfig DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.container.v1beta1.ShieldedInstanceConfig();
  }

  public static com.google.container.v1beta1.ShieldedInstanceConfig getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<ShieldedInstanceConfig> PARSER =
      new com.google.protobuf.AbstractParser<ShieldedInstanceConfig>() {
        @java.lang.Override
        public ShieldedInstanceConfig parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new ShieldedInstanceConfig(input, extensionRegistry);
        }
      };

  public static com.google.protobuf.Parser<ShieldedInstanceConfig> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ShieldedInstanceConfig> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.container.v1beta1.ShieldedInstanceConfig getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.fileEditor;
import com.intellij.ide.ui.UISettings;
import com.intellij.mock.Mock;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.FoldRegion;
import com.intellij.openapi.editor.FoldingModel;
import com.intellij.openapi.fileEditor.impl.EditorWindow;
import com.intellij.openapi.fileEditor.impl.EditorWithProviderComposite;
import com.intellij.openapi.project.DumbAware;
import com.intellij.openapi.project.DumbServiceImpl;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.testFramework.FileEditorManagerTestCase;
import com.intellij.testFramework.PlatformTestUtil;
import com.intellij.util.Function;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import javax.swing.*;
import java.io.File;
import java.util.Arrays;
import java.util.List;
/**
* @author Dmitry Avdeev
* Date: 4/16/13
*/
@SuppressWarnings("ConstantConditions")
public class FileEditorManagerTest extends FileEditorManagerTestCase {
/** Tabs must reopen in exactly the order recorded in the persisted splitter state (STRING). */
public void testTabOrder() throws Exception {
openFiles(STRING);
assertOpenFiles("1.txt", "foo.xml", "2.txt", "3.txt");
}
/** EDITOR_TAB_LIMIT should evict the oldest unpinned tabs while pinned tabs survive. */
public void testTabLimit() throws Exception {
int limit = UISettings.getInstance().EDITOR_TAB_LIMIT;
try {
UISettings.getInstance().EDITOR_TAB_LIMIT = 2;
openFiles(STRING);
// note that foo.xml is pinned
assertOpenFiles("foo.xml", "3.txt");
}
finally {
// Restore the global UI setting so other tests are unaffected.
UISettings.getInstance().EDITOR_TAB_LIMIT = limit;
}
}
/** Restoring state where the "mock" provider is marked selected must select the mock editor. */
public void testOpenRecentEditorTab() throws Exception {
PlatformTestUtil.registerExtension(FileEditorProvider.EP_FILE_EDITOR_PROVIDER, new MyFileEditorProvider(), getTestRootDisposable());
openFiles(" <component name=\"FileEditorManager\">\n" +
" <leaf>\n" +
" <file leaf-file-name=\"foo.xsd\" pinned=\"false\" current=\"true\" current-in-tab=\"true\">\n" +
" <entry selected=\"true\" file=\"file://$PROJECT_DIR$/src/1.txt\">\n" +
" <provider editor-type-id=\"mock\" selected=\"true\">\n" +
" <state />\n" +
" </provider>\n" +
" <provider editor-type-id=\"text-editor\">\n" +
" <state/>\n" +
" </provider>\n" +
" </entry>\n" +
" </file>\n" +
" </leaf>\n" +
" </component>\n");
FileEditor[] selectedEditors = myManager.getSelectedEditors();
assertEquals(1, selectedEditors.length);
assertEquals("mockEditor", selectedEditors[0].getName());
}
/** A per-file editor selection made via setSelectedEditor must persist across opening other files. */
public void testTrackSelectedEditor() throws Exception {
PlatformTestUtil.registerExtension(FileEditorProvider.EP_FILE_EDITOR_PROVIDER, new MyFileEditorProvider(), getTestRootDisposable());
VirtualFile file = getFile("/src/1.txt");
assertNotNull(file);
// Two editors expected: the default text editor plus the registered mock provider.
FileEditor[] editors = myManager.openFile(file, true);
assertEquals(2, editors.length);
assertEquals("Text", myManager.getSelectedEditor(file).getName());
myManager.setSelectedEditor(file, "mock");
assertEquals("mockEditor", myManager.getSelectedEditor(file).getName());
VirtualFile file1 = getFile("/src/2.txt");
myManager.openFile(file1, true);
assertEquals("mockEditor", myManager.getSelectedEditor(file).getName());
}
/** Closing a file in one split window must leave the other split windows intact. */
public void testWindowClosingRetainsOtherWindows() throws Exception {
VirtualFile file = getFile("/src/1.txt");
assertNotNull(file);
myManager.openFile(file, false);
EditorWindow primaryWindow = myManager.getCurrentWindow();
assertNotNull(primaryWindow);
myManager.createSplitter(SwingConstants.VERTICAL, primaryWindow);
EditorWindow secondaryWindow = myManager.getNextWindow(primaryWindow);
assertNotNull(secondaryWindow);
myManager.createSplitter(SwingConstants.VERTICAL, secondaryWindow);
myManager.closeFile(file, primaryWindow);
assertEquals(2, myManager.getWindows().length);
}
/**
 * With tabs disabled (TABS_NONE) the caret offset and selection of a closed file
 * must still be restored when the file is reopened, even with collapsed foldings.
 */
public void testStoringCaretStateForFileWithFoldingsWithNoTabs() throws Exception {
int savedValue = UISettings.getInstance().EDITOR_TAB_PLACEMENT;
UISettings.getInstance().EDITOR_TAB_PLACEMENT = UISettings.TABS_NONE;
try {
VirtualFile file = getFile("/src/Test.java");
assertNotNull(file);
FileEditor[] editors = myManager.openFile(file, false);
assertEquals(1, editors.length);
assertTrue(editors[0] instanceof TextEditor);
Editor editor = ((TextEditor)editors[0]).getEditor();
final FoldingModel foldingModel = editor.getFoldingModel();
assertEquals(2, foldingModel.getAllFoldRegions().length);
// Collapse every fold region before recording the caret position.
foldingModel.runBatchFoldingOperation(new Runnable() {
@Override
public void run() {
for (FoldRegion region : foldingModel.getAllFoldRegions()) {
region.setExpanded(false);
}
}
});
int textLength = editor.getDocument().getTextLength();
editor.getCaretModel().moveToOffset(textLength);
editor.getSelectionModel().setSelection(textLength - 1, textLength);
// Opening another file with TABS_NONE closes the current one.
myManager.openFile(getFile("/src/1.txt"), false);
assertEquals(0, myManager.getEditors(file).length);
editors = myManager.openFile(file, false);
assertEquals(1, editors.length);
assertTrue(editors[0] instanceof TextEditor);
editor = ((TextEditor)editors[0]).getEditor();
assertEquals(textLength, editor.getCaretModel().getOffset());
assertEquals(textLength - 1, editor.getSelectionModel().getSelectionStart());
assertEquals(textLength, editor.getSelectionModel().getSelectionEnd());
}
finally {
// Restore the global UI setting so other tests are unaffected.
UISettings.getInstance().EDITOR_TAB_PLACEMENT = savedValue;
}
}
/**
 * In dumb mode only dumb-aware providers create editors; once dumb mode ends,
 * the remaining (non-dumb-aware) editors must appear after event dispatch.
 */
public void testOpenInDumbMode() throws Exception {
PlatformTestUtil.registerExtension(FileEditorProvider.EP_FILE_EDITOR_PROVIDER, new MyFileEditorProvider(), getTestRootDisposable())
;
PlatformTestUtil.registerExtension(FileEditorProvider.EP_FILE_EDITOR_PROVIDER, new DumbAwareProvider(), getTestRootDisposable());
try {
DumbServiceImpl.getInstance(getProject()).setDumb(true);
VirtualFile file = getFile("/src/foo.bar");
assertEquals(1, myManager.openFile(file, false).length);
DumbServiceImpl.getInstance(getProject()).setDumb(false);
UIUtil.dispatchAllInvocationEvents();
assertEquals(2, myManager.getAllEditors(file).length);
//assertFalse(FileEditorManagerImpl.isDumbAware(editors[0]));
}
finally {
DumbServiceImpl.getInstance(getProject()).setDumb(false);
}
}
// Persisted FileEditorManager state used by the tab-order/limit tests:
// four files, foo.xml pinned, 2.txt current.
private static final String STRING = "<component name=\"FileEditorManager\">\n" +
" <leaf>\n" +
" <file leaf-file-name=\"1.txt\" pinned=\"false\" current=\"false\" current-in-tab=\"false\">\n" +
" <entry file=\"file://$PROJECT_DIR$/src/1.txt\">\n" +
" <provider selected=\"true\" editor-type-id=\"text-editor\">\n" +
" <state line=\"0\" column=\"0\" selection-start=\"0\" selection-end=\"0\" vertical-scroll-proportion=\"0.0\">\n" +
" </state>\n" +
" </provider>\n" +
" </entry>\n" +
" </file>\n" +
" <file leaf-file-name=\"foo.xml\" pinned=\"true\" current=\"false\" current-in-tab=\"false\">\n" +
" <entry file=\"file://$PROJECT_DIR$/src/foo.xml\">\n" +
" <provider selected=\"true\" editor-type-id=\"text-editor\">\n" +
" <state line=\"0\" column=\"0\" selection-start=\"0\" selection-end=\"0\" vertical-scroll-proportion=\"0.0\">\n" +
" </state>\n" +
" </provider>\n" +
" </entry>\n" +
" </file>\n" +
" <file leaf-file-name=\"2.txt\" pinned=\"false\" current=\"true\" current-in-tab=\"true\">\n" +
" <entry file=\"file://$PROJECT_DIR$/src/2.txt\">\n" +
" <provider selected=\"true\" editor-type-id=\"text-editor\">\n" +
" <state line=\"0\" column=\"0\" selection-start=\"0\" selection-end=\"0\" vertical-scroll-proportion=\"0.0\">\n" +
" </state>\n" +
" </provider>\n" +
" </entry>\n" +
" </file>\n" +
" <file leaf-file-name=\"3.txt\" pinned=\"false\" current=\"false\" current-in-tab=\"false\">\n" +
" <entry file=\"file://$PROJECT_DIR$/src/3.txt\">\n" +
" <provider selected=\"true\" editor-type-id=\"text-editor\">\n" +
" <state line=\"0\" column=\"0\" selection-start=\"0\" selection-end=\"0\" vertical-scroll-proportion=\"0.0\">\n" +
" </state>\n" +
" </provider>\n" +
" </entry>\n" +
" </file>\n" +
" </leaf>\n" +
" </component>\n";
/** Asserts that the currently open editor composites match the given file names, in order. */
private void assertOpenFiles(String... fileNames) {
EditorWithProviderComposite[] files = myManager.getSplitters().getEditorsComposites();
List<String> names = ContainerUtil.map(files, new Function<EditorWithProviderComposite, String>() {
@Override
public String fun(EditorWithProviderComposite composite) {
return composite.getFile().getName();
}
});
assertEquals(Arrays.asList(fileNames), names);
}
@Override
protected String getTestDataPath() {
return PlatformTestUtil.getCommunityPath().replace(File.separatorChar, '/') + "/platform/platform-tests/testData/fileEditorManager";
}
/** Provider with editor-type-id "mock" that accepts every file and yields a label-backed editor named "mockEditor". */
static class MyFileEditorProvider implements FileEditorProvider {
@NotNull
@Override
public String getEditorTypeId() {
return "mock";
}
@Override
public boolean accept(@NotNull Project project, @NotNull VirtualFile file) {
return true;
}
@NotNull
@Override
public FileEditor createEditor(@NotNull Project project, @NotNull VirtualFile file) {
return new Mock.MyFileEditor() {
@Override
public boolean isValid() {
return true;
}
@NotNull
@Override
public JComponent getComponent() {
return new JLabel();
}
@NotNull
@Override
public String getName() {
return "mockEditor";
}
};
}
@Override
public void disposeEditor(@NotNull FileEditor editor) {
}
@NotNull
@Override
public FileEditorPolicy getPolicy() {
return FileEditorPolicy.PLACE_AFTER_DEFAULT_EDITOR;
}
}
/** Variant of the mock provider usable in dumb mode (implements DumbAware). */
private static class DumbAwareProvider extends MyFileEditorProvider implements DumbAware {
@NotNull
@Override
public String getEditorTypeId() {
return "dumbAware";
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.core.config.impl;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.security.AccessController;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.activemq.artemis.api.config.ActiveMQDefaultConfiguration;
import org.apache.activemq.artemis.api.core.BroadcastGroupConfiguration;
import org.apache.activemq.artemis.api.core.DiscoveryGroupConfiguration;
import org.apache.activemq.artemis.api.core.SimpleString;
import org.apache.activemq.artemis.api.core.TransportConfiguration;
import org.apache.activemq.artemis.core.config.BridgeConfiguration;
import org.apache.activemq.artemis.core.config.ClusterConnectionConfiguration;
import org.apache.activemq.artemis.core.config.Configuration;
import org.apache.activemq.artemis.core.config.ConnectorServiceConfiguration;
import org.apache.activemq.artemis.core.config.CoreQueueConfiguration;
import org.apache.activemq.artemis.core.config.DivertConfiguration;
import org.apache.activemq.artemis.core.config.HAPolicyConfiguration;
import org.apache.activemq.artemis.core.config.ha.ReplicaPolicyConfiguration;
import org.apache.activemq.artemis.core.config.ha.ReplicatedPolicyConfiguration;
import org.apache.activemq.artemis.core.security.Role;
import org.apache.activemq.artemis.core.server.JournalType;
import org.apache.activemq.artemis.core.server.group.impl.GroupingHandlerConfiguration;
import org.apache.activemq.artemis.core.settings.impl.AddressSettings;
import org.apache.activemq.artemis.core.settings.impl.ResourceLimitSettings;
import org.apache.activemq.artemis.uri.AcceptorTransportConfigurationParser;
import org.apache.activemq.artemis.uri.ConnectorTransportConfigurationParser;
import org.apache.activemq.artemis.utils.ObjectInputStreamWithClassLoader;
/**
 * Serializable bean implementation of {@link Configuration}. Every field starts from
 * the matching {@link ActiveMQDefaultConfiguration} default, and setters return
 * {@code this} for fluent chaining.
 */
public class ConfigurationImpl implements Configuration, Serializable {
// Constants ------------------------------------------------------------------------------
public static final JournalType DEFAULT_JOURNAL_TYPE = JournalType.ASYNCIO;
private static final long serialVersionUID = 4077088945050267843L;
// Attributes -----------------------------------------------------------------------------
// Default name embeds the identity hash code so unnamed instances stay distinguishable.
private String name = "ConfigurationImpl::" + System.identityHashCode(this);
private boolean persistenceEnabled = ActiveMQDefaultConfiguration.isDefaultPersistenceEnabled();
protected long fileDeploymentScanPeriod = ActiveMQDefaultConfiguration.getDefaultFileDeployerScanPeriod();
private boolean persistDeliveryCountBeforeDelivery = ActiveMQDefaultConfiguration.isDefaultPersistDeliveryCountBeforeDelivery();
private int scheduledThreadPoolMaxSize = ActiveMQDefaultConfiguration.getDefaultScheduledThreadPoolMaxSize();
private int threadPoolMaxSize = ActiveMQDefaultConfiguration.getDefaultThreadPoolMaxSize();
private long securityInvalidationInterval = ActiveMQDefaultConfiguration.getDefaultSecurityInvalidationInterval();
private boolean securityEnabled = ActiveMQDefaultConfiguration.isDefaultSecurityEnabled();
private boolean gracefulShutdownEnabled = ActiveMQDefaultConfiguration.isDefaultGracefulShutdownEnabled();
private long gracefulShutdownTimeout = ActiveMQDefaultConfiguration.getDefaultGracefulShutdownTimeout();
protected boolean jmxManagementEnabled = ActiveMQDefaultConfiguration.isDefaultJmxManagementEnabled();
protected String jmxDomain = ActiveMQDefaultConfiguration.getDefaultJmxDomain();
protected long connectionTTLOverride = ActiveMQDefaultConfiguration.getDefaultConnectionTtlOverride();
protected boolean asyncConnectionExecutionEnabled = ActiveMQDefaultConfiguration.isDefaultAsyncConnectionExecutionEnabled();
private long messageExpiryScanPeriod = ActiveMQDefaultConfiguration.getDefaultMessageExpiryScanPeriod();
private int messageExpiryThreadPriority = ActiveMQDefaultConfiguration.getDefaultMessageExpiryThreadPriority();
protected int idCacheSize = ActiveMQDefaultConfiguration.getDefaultIdCacheSize();
private boolean persistIDCache = ActiveMQDefaultConfiguration.isDefaultPersistIdCache();
private List<String> incomingInterceptorClassNames = new ArrayList<String>();
private List<String> outgoingInterceptorClassNames = new ArrayList<String>();
protected Map<String, TransportConfiguration> connectorConfigs = new HashMap<String, TransportConfiguration>();
private Set<TransportConfiguration> acceptorConfigs = new HashSet<TransportConfiguration>();
protected List<BridgeConfiguration> bridgeConfigurations = new ArrayList<BridgeConfiguration>();
protected List<DivertConfiguration> divertConfigurations = new ArrayList<DivertConfiguration>();
protected List<ClusterConnectionConfiguration> clusterConfigurations = new ArrayList<ClusterConnectionConfiguration>();
private List<CoreQueueConfiguration> queueConfigurations = new ArrayList<CoreQueueConfiguration>();
// transient: broadcast/discovery groups are deliberately excluded from Java serialization
protected transient List<BroadcastGroupConfiguration> broadcastGroupConfigurations = new ArrayList<BroadcastGroupConfiguration>();
protected transient Map<String, DiscoveryGroupConfiguration> discoveryGroupConfigurations = new LinkedHashMap<String, DiscoveryGroupConfiguration>();
// Paging related attributes ------------------------------------------------------------
private String pagingDirectory = ActiveMQDefaultConfiguration.getDefaultPagingDir();
// File related attributes -----------------------------------------------------------
private int maxConcurrentPageIO = ActiveMQDefaultConfiguration.getDefaultMaxConcurrentPageIo();
protected String largeMessagesDirectory = ActiveMQDefaultConfiguration.getDefaultLargeMessagesDir();
protected String bindingsDirectory = ActiveMQDefaultConfiguration.getDefaultBindingsDirectory();
protected boolean createBindingsDir = ActiveMQDefaultConfiguration.isDefaultCreateBindingsDir();
protected String journalDirectory = ActiveMQDefaultConfiguration.getDefaultJournalDir();
protected boolean createJournalDir = ActiveMQDefaultConfiguration.isDefaultCreateJournalDir();
// NOTE(review): public mutable field — callers may assign it directly, so it stays public
public JournalType journalType = ConfigurationImpl.DEFAULT_JOURNAL_TYPE;
protected boolean journalSyncTransactional = ActiveMQDefaultConfiguration.isDefaultJournalSyncTransactional();
protected boolean journalSyncNonTransactional = ActiveMQDefaultConfiguration.isDefaultJournalSyncNonTransactional();
protected int journalCompactMinFiles = ActiveMQDefaultConfiguration.getDefaultJournalCompactMinFiles();
protected int journalCompactPercentage = ActiveMQDefaultConfiguration.getDefaultJournalCompactPercentage();
protected int journalFileSize = ActiveMQDefaultConfiguration.getDefaultJournalFileSize();
protected int journalMinFiles = ActiveMQDefaultConfiguration.getDefaultJournalMinFiles();
// AIO and NIO need different values for these attributes
protected int journalMaxIO_AIO = ActiveMQDefaultConfiguration.getDefaultJournalMaxIoAio();
protected int journalBufferTimeout_AIO = ActiveMQDefaultConfiguration.getDefaultJournalBufferTimeoutAio();
protected int journalBufferSize_AIO = ActiveMQDefaultConfiguration.getDefaultJournalBufferSizeAio();
protected int journalMaxIO_NIO = ActiveMQDefaultConfiguration.getDefaultJournalMaxIoNio();
protected int journalBufferTimeout_NIO = ActiveMQDefaultConfiguration.getDefaultJournalBufferTimeoutNio();
protected int journalBufferSize_NIO = ActiveMQDefaultConfiguration.getDefaultJournalBufferSizeNio();
protected boolean logJournalWriteRate = ActiveMQDefaultConfiguration.isDefaultJournalLogWriteRate();
protected int journalPerfBlastPages = ActiveMQDefaultConfiguration.getDefaultJournalPerfBlastPages();
protected boolean runSyncSpeedTest = ActiveMQDefaultConfiguration.isDefaultRunSyncSpeedTest();
private boolean wildcardRoutingEnabled = ActiveMQDefaultConfiguration.isDefaultWildcardRoutingEnabled();
private boolean messageCounterEnabled = ActiveMQDefaultConfiguration.isDefaultMessageCounterEnabled();
private long messageCounterSamplePeriod = ActiveMQDefaultConfiguration.getDefaultMessageCounterSamplePeriod();
private int messageCounterMaxDayHistory = ActiveMQDefaultConfiguration.getDefaultMessageCounterMaxDayHistory();
private long transactionTimeout = ActiveMQDefaultConfiguration.getDefaultTransactionTimeout();
private long transactionTimeoutScanPeriod = ActiveMQDefaultConfiguration.getDefaultTransactionTimeoutScanPeriod();
private SimpleString managementAddress = ActiveMQDefaultConfiguration.getDefaultManagementAddress();
private SimpleString managementNotificationAddress = ActiveMQDefaultConfiguration.getDefaultManagementNotificationAddress();
protected String clusterUser = ActiveMQDefaultConfiguration.getDefaultClusterUser();
protected String clusterPassword = ActiveMQDefaultConfiguration.getDefaultClusterPassword();
private long serverDumpInterval = ActiveMQDefaultConfiguration.getDefaultServerDumpInterval();
protected boolean failoverOnServerShutdown = ActiveMQDefaultConfiguration.isDefaultFailoverOnServerShutdown();
// percentage of free memory which triggers warning from the memory manager
private int memoryWarningThreshold = ActiveMQDefaultConfiguration.getDefaultMemoryWarningThreshold();
private long memoryMeasureInterval = ActiveMQDefaultConfiguration.getDefaultMemoryMeasureInterval();
protected GroupingHandlerConfiguration groupingHandlerConfiguration;
private Map<String, AddressSettings> addressesSettings = new HashMap<String, AddressSettings>();
private Map<String, ResourceLimitSettings> resourceLimitSettings = new HashMap<String, ResourceLimitSettings>();
private Map<String, Set<Role>> securitySettings = new HashMap<String, Set<Role>>();
protected List<ConnectorServiceConfiguration> connectorServiceConfigurations = new ArrayList<ConnectorServiceConfiguration>();
private boolean maskPassword = ActiveMQDefaultConfiguration.isDefaultMaskPassword();
// transient: the codec is resolved at runtime and is not part of the serialized state
private transient String passwordCodec;
private boolean resolveProtocols = ActiveMQDefaultConfiguration.isDefaultResolveProtocols();
private long journalLockAcquisitionTimeout = ActiveMQDefaultConfiguration.getDefaultJournalLockAcquisitionTimeout();
private HAPolicyConfiguration haPolicyConfiguration;
/**
 * Parent folder for all data folders.
 */
private File artemisInstance;
// Public -------------------------------------------------------------------------
/**
 * @return {@code true} when at least one cluster connection is configured,
 *         i.e. this broker takes part in a cluster
 */
public boolean isClustered() {
final List<ClusterConnectionConfiguration> clusterConnections = getClusterConfigurations();
return !clusterConnections.isEmpty();
}
/** @return whether message data is persisted to the journal at all */
public boolean isPersistenceEnabled() {
return persistenceEnabled;
}
public ConfigurationImpl setPersistenceEnabled(final boolean enable) {
persistenceEnabled = enable;
return this;
}
/** @return how often (ms) the file deployer rescans configuration files */
public long getFileDeployerScanPeriod() {
return fileDeploymentScanPeriod;
}
public ConfigurationImpl setFileDeployerScanPeriod(final long period) {
fileDeploymentScanPeriod = period;
return this;
}
/**
 * @return the persistDeliveryCountBeforeDelivery
 */
public boolean isPersistDeliveryCountBeforeDelivery() {
return persistDeliveryCountBeforeDelivery;
}
public ConfigurationImpl setPersistDeliveryCountBeforeDelivery(final boolean persistDeliveryCountBeforeDelivery) {
this.persistDeliveryCountBeforeDelivery = persistDeliveryCountBeforeDelivery;
return this;
}
// -- thread pool sizing -----------------------------------------------------------------
public int getScheduledThreadPoolMaxSize() {
return scheduledThreadPoolMaxSize;
}
public ConfigurationImpl setScheduledThreadPoolMaxSize(final int maxSize) {
scheduledThreadPoolMaxSize = maxSize;
return this;
}
public int getThreadPoolMaxSize() {
return threadPoolMaxSize;
}
public ConfigurationImpl setThreadPoolMaxSize(final int maxSize) {
threadPoolMaxSize = maxSize;
return this;
}
/** @return interval (ms) after which cached security authorizations are invalidated */
public long getSecurityInvalidationInterval() {
return securityInvalidationInterval;
}
public ConfigurationImpl setSecurityInvalidationInterval(final long interval) {
securityInvalidationInterval = interval;
return this;
}
/** @return server-wide override for client connection TTL */
public long getConnectionTTLOverride() {
return connectionTTLOverride;
}
public ConfigurationImpl setConnectionTTLOverride(final long ttl) {
connectionTTLOverride = ttl;
return this;
}
public boolean isAsyncConnectionExecutionEnabled() {
return asyncConnectionExecutionEnabled;
}
// NOTE: setter name deviates from the bean convention but is part of the public API.
public ConfigurationImpl setEnabledAsyncConnectionExecution(final boolean enabled) {
asyncConnectionExecutionEnabled = enabled;
return this;
}
// -- interceptor class names (instantiated reflectively at broker start) ---------------
public List<String> getIncomingInterceptorClassNames() {
return incomingInterceptorClassNames;
}
public ConfigurationImpl setIncomingInterceptorClassNames(final List<String> interceptors) {
incomingInterceptorClassNames = interceptors;
return this;
}
public List<String> getOutgoingInterceptorClassNames() {
return outgoingInterceptorClassNames;
}
public ConfigurationImpl setOutgoingInterceptorClassNames(final List<String> interceptors) {
outgoingInterceptorClassNames = interceptors;
return this;
}
// -- acceptors --------------------------------------------------------------------------
// NOTE(review): the live internal set is returned/stored without copying; callers share it.
public Set<TransportConfiguration> getAcceptorConfigurations() {
return acceptorConfigs;
}
public ConfigurationImpl setAcceptorConfigurations(final Set<TransportConfiguration> infos) {
acceptorConfigs = infos;
return this;
}
public ConfigurationImpl addAcceptorConfiguration(final TransportConfiguration infos) {
acceptorConfigs.add(infos);
return this;
}
/**
 * Parses {@code uri} (after property expansion) into one or more acceptor
 * transport configurations registered under {@code name}.
 *
 * @throws Exception if the URI cannot be expanded or parsed
 */
public ConfigurationImpl addAcceptorConfiguration(final String name, final String uri) throws Exception {
final AcceptorTransportConfigurationParser uriParser = new AcceptorTransportConfigurationParser();
final List<TransportConfiguration> parsed = uriParser.newObject(uriParser.expandURI(uri), name);
for (final TransportConfiguration transportConfiguration : parsed) {
addAcceptorConfiguration(transportConfiguration);
}
return this;
}
public ConfigurationImpl clearAcceptorConfigurations() {
acceptorConfigs.clear();
return this;
}
// -- connectors (keyed by connector name) ----------------------------------------------
// NOTE(review): the live internal map is returned/stored without copying; callers share it.
public Map<String, TransportConfiguration> getConnectorConfigurations() {
return connectorConfigs;
}
public ConfigurationImpl setConnectorConfigurations(final Map<String, TransportConfiguration> infos) {
connectorConfigs = infos;
return this;
}
public ConfigurationImpl addConnectorConfiguration(final String key, final TransportConfiguration info) {
connectorConfigs.put(key, info);
return this;
}
/**
 * Parses {@code uri} (after property expansion) into one or more connector
 * transport configurations, all registered under the key {@code name}.
 *
 * @throws Exception if the URI cannot be expanded or parsed
 */
public ConfigurationImpl addConnectorConfiguration(final String name, final String uri) throws Exception {
final ConnectorTransportConfigurationParser uriParser = new ConnectorTransportConfigurationParser();
final List<TransportConfiguration> parsed = uriParser.newObject(uriParser.expandURI(uri), name);
for (final TransportConfiguration transportConfiguration : parsed) {
addConnectorConfiguration(name, transportConfiguration);
}
return this;
}
public ConfigurationImpl clearConnectorConfigurations() {
connectorConfigs.clear();
return this;
}
// -- message grouping -------------------------------------------------------------------
public GroupingHandlerConfiguration getGroupingHandlerConfiguration() {
return groupingHandlerConfiguration;
}
public ConfigurationImpl setGroupingHandlerConfiguration(final GroupingHandlerConfiguration groupingHandlerConfiguration) {
this.groupingHandlerConfiguration = groupingHandlerConfiguration;
return this;
}
// -- bridges ----------------------------------------------------------------------------
public List<BridgeConfiguration> getBridgeConfigurations() {
return bridgeConfigurations;
}
public ConfigurationImpl setBridgeConfigurations(final List<BridgeConfiguration> configs) {
bridgeConfigurations = configs;
return this;
}
public ConfigurationImpl addBridgeConfiguration(final BridgeConfiguration config) {
bridgeConfigurations.add(config);
return this;
}
// -- broadcast groups (transient — not serialized with this object) --------------------
public List<BroadcastGroupConfiguration> getBroadcastGroupConfigurations() {
return broadcastGroupConfigurations;
}
public ConfigurationImpl setBroadcastGroupConfigurations(final List<BroadcastGroupConfiguration> configs) {
broadcastGroupConfigurations = configs;
return this;
}
public ConfigurationImpl addBroadcastGroupConfiguration(final BroadcastGroupConfiguration config) {
broadcastGroupConfigurations.add(config);
return this;
}
// -- cluster connections (see isClustered()) -------------------------------------------
public List<ClusterConnectionConfiguration> getClusterConfigurations() {
return clusterConfigurations;
}
public ConfigurationImpl setClusterConfigurations(final List<ClusterConnectionConfiguration> configs) {
clusterConfigurations = configs;
return this;
}
public ConfigurationImpl addClusterConfiguration(final ClusterConnectionConfiguration config) {
clusterConfigurations.add(config);
return this;
}
public ConfigurationImpl clearClusterConfigurations() {
clusterConfigurations.clear();
return this;
}
// -- diverts ----------------------------------------------------------------------------
public List<DivertConfiguration> getDivertConfigurations() {
return divertConfigurations;
}
public ConfigurationImpl setDivertConfigurations(final List<DivertConfiguration> configs) {
divertConfigurations = configs;
return this;
}
public ConfigurationImpl addDivertConfiguration(final DivertConfiguration config) {
divertConfigurations.add(config);
return this;
}
// -- pre-configured core queues --------------------------------------------------------
public List<CoreQueueConfiguration> getQueueConfigurations() {
return queueConfigurations;
}
public ConfigurationImpl setQueueConfigurations(final List<CoreQueueConfiguration> configs) {
queueConfigurations = configs;
return this;
}
public ConfigurationImpl addQueueConfiguration(final CoreQueueConfiguration config) {
queueConfigurations.add(config);
return this;
}
// -- discovery groups (transient — not serialized with this object) --------------------
public Map<String, DiscoveryGroupConfiguration> getDiscoveryGroupConfigurations() {
return discoveryGroupConfigurations;
}
public ConfigurationImpl setDiscoveryGroupConfigurations(final Map<String, DiscoveryGroupConfiguration> discoveryGroupConfigurations) {
this.discoveryGroupConfigurations = discoveryGroupConfigurations;
return this;
}
public ConfigurationImpl addDiscoveryGroupConfiguration(final String key,
DiscoveryGroupConfiguration discoveryGroupConfiguration) {
this.discoveryGroupConfigurations.put(key, discoveryGroupConfiguration);
return this;
}
// -- duplicate-ID cache -----------------------------------------------------------------
public int getIDCacheSize() {
return idCacheSize;
}
public ConfigurationImpl setIDCacheSize(final int idCacheSize) {
this.idCacheSize = idCacheSize;
return this;
}
public boolean isPersistIDCache() {
return persistIDCache;
}
public ConfigurationImpl setPersistIDCache(final boolean persist) {
persistIDCache = persist;
return this;
}
// -- bindings directory -----------------------------------------------------------------
/** @return the bindings directory resolved against the artemis instance folder */
public File getBindingsLocation() {
return subFolder(getBindingsDirectory());
}
public String getBindingsDirectory() {
return bindingsDirectory;
}
public ConfigurationImpl setBindingsDirectory(final String dir) {
bindingsDirectory = dir;
return this;
}
@Override
public int getPageMaxConcurrentIO() {
return maxConcurrentPageIO;
}
@Override
public ConfigurationImpl setPageMaxConcurrentIO(int maxIO) {
this.maxConcurrentPageIO = maxIO;
return this;
}
// -- journal directory ------------------------------------------------------------------
/** @return the journal directory resolved against the artemis instance folder */
public File getJournalLocation() {
return subFolder(getJournalDirectory());
}
public String getJournalDirectory() {
return journalDirectory;
}
public ConfigurationImpl setJournalDirectory(final String dir) {
journalDirectory = dir;
return this;
}
public JournalType getJournalType() {
return journalType;
}
// -- paging directory -------------------------------------------------------------------
public ConfigurationImpl setPagingDirectory(final String dir) {
pagingDirectory = dir;
return this;
}
/** @return the paging directory resolved against the artemis instance folder */
public File getPagingLocation() {
return subFolder(getPagingDirectory());
}
public String getPagingDirectory() {
return pagingDirectory;
}
// -- journal behaviour ------------------------------------------------------------------
public ConfigurationImpl setJournalType(final JournalType type) {
journalType = type;
return this;
}
public boolean isJournalSyncTransactional() {
return journalSyncTransactional;
}
public ConfigurationImpl setJournalSyncTransactional(final boolean sync) {
journalSyncTransactional = sync;
return this;
}
public boolean isJournalSyncNonTransactional() {
return journalSyncNonTransactional;
}
public ConfigurationImpl setJournalSyncNonTransactional(final boolean sync) {
journalSyncNonTransactional = sync;
return this;
}
public int getJournalFileSize() {
return journalFileSize;
}
public ConfigurationImpl setJournalFileSize(final int size) {
journalFileSize = size;
return this;
}
public int getJournalMinFiles() {
return journalMinFiles;
}
public ConfigurationImpl setJournalMinFiles(final int files) {
journalMinFiles = files;
return this;
}
public boolean isLogJournalWriteRate() {
return logJournalWriteRate;
}
public ConfigurationImpl setLogJournalWriteRate(final boolean logJournalWriteRate) {
this.logJournalWriteRate = logJournalWriteRate;
return this;
}
public int getJournalPerfBlastPages() {
return journalPerfBlastPages;
}
public ConfigurationImpl setJournalPerfBlastPages(final int journalPerfBlastPages) {
this.journalPerfBlastPages = journalPerfBlastPages;
return this;
}
public boolean isRunSyncSpeedTest() {
return runSyncSpeedTest;
}
public ConfigurationImpl setRunSyncSpeedTest(final boolean run) {
runSyncSpeedTest = run;
return this;
}
// -- directory auto-creation flags ------------------------------------------------------
public boolean isCreateBindingsDir() {
return createBindingsDir;
}
public ConfigurationImpl setCreateBindingsDir(final boolean create) {
createBindingsDir = create;
return this;
}
public boolean isCreateJournalDir() {
return createJournalDir;
}
public ConfigurationImpl setCreateJournalDir(final boolean create) {
createJournalDir = create;
return this;
}
public boolean isWildcardRoutingEnabled() {
return wildcardRoutingEnabled;
}
public ConfigurationImpl setWildcardRoutingEnabled(final boolean enabled) {
wildcardRoutingEnabled = enabled;
return this;
}
public long getTransactionTimeout() {
return transactionTimeout;
}
public ConfigurationImpl setTransactionTimeout(final long timeout) {
transactionTimeout = timeout;
return this;
}
public long getTransactionTimeoutScanPeriod() {
return transactionTimeoutScanPeriod;
}
public ConfigurationImpl setTransactionTimeoutScanPeriod(final long period) {
transactionTimeoutScanPeriod = period;
return this;
}
public long getMessageExpiryScanPeriod() {
return messageExpiryScanPeriod;
}
public ConfigurationImpl setMessageExpiryScanPeriod(final long messageExpiryScanPeriod) {
this.messageExpiryScanPeriod = messageExpiryScanPeriod;
return this;
}
public int getMessageExpiryThreadPriority() {
return messageExpiryThreadPriority;
}
public ConfigurationImpl setMessageExpiryThreadPriority(final int messageExpiryThreadPriority) {
this.messageExpiryThreadPriority = messageExpiryThreadPriority;
return this;
}
public boolean isSecurityEnabled() {
return securityEnabled;
}
public ConfigurationImpl setSecurityEnabled(final boolean enabled) {
securityEnabled = enabled;
return this;
}
public boolean isGracefulShutdownEnabled() {
return gracefulShutdownEnabled;
}
public ConfigurationImpl setGracefulShutdownEnabled(final boolean enabled) {
gracefulShutdownEnabled = enabled;
return this;
}
public long getGracefulShutdownTimeout() {
return gracefulShutdownTimeout;
}
public ConfigurationImpl setGracefulShutdownTimeout(final long timeout) {
gracefulShutdownTimeout = timeout;
return this;
}
public boolean isJMXManagementEnabled() {
return jmxManagementEnabled;
}
public ConfigurationImpl setJMXManagementEnabled(final boolean enabled) {
jmxManagementEnabled = enabled;
return this;
}
public String getJMXDomain() {
return jmxDomain;
}
public ConfigurationImpl setJMXDomain(final String domain) {
jmxDomain = domain;
return this;
}
public String getLargeMessagesDirectory() {
return largeMessagesDirectory;
}
public File getLargeMessagesLocation() {
return subFolder(getLargeMessagesDirectory());
}
public ConfigurationImpl setLargeMessagesDirectory(final String directory) {
largeMessagesDirectory = directory;
return this;
}
public boolean isMessageCounterEnabled() {
return messageCounterEnabled;
}
public ConfigurationImpl setMessageCounterEnabled(final boolean enabled) {
messageCounterEnabled = enabled;
return this;
}
/** Returns the sample period (in ms) for message counters. */
public long getMessageCounterSamplePeriod() {
   return messageCounterSamplePeriod;
}
/** Sets the message-counter sample period (in ms). Returns {@code this} for chaining. */
public ConfigurationImpl setMessageCounterSamplePeriod(final long period) {
   messageCounterSamplePeriod = period;
   return this;
}
/** Returns how many days of message-counter history are retained. */
public int getMessageCounterMaxDayHistory() {
   return messageCounterMaxDayHistory;
}
/** Sets the number of days of message-counter history to keep. Returns {@code this} for chaining. */
public ConfigurationImpl setMessageCounterMaxDayHistory(final int maxDayHistory) {
   messageCounterMaxDayHistory = maxDayHistory;
   return this;
}
/** Returns the address clients send management messages to. */
public SimpleString getManagementAddress() {
   return managementAddress;
}
/** Sets the management address. Returns {@code this} for chaining. */
public ConfigurationImpl setManagementAddress(final SimpleString address) {
   managementAddress = address;
   return this;
}
/** Returns the address on which management notifications are emitted. */
public SimpleString getManagementNotificationAddress() {
   return managementNotificationAddress;
}
/** Sets the management-notification address. Returns {@code this} for chaining. */
public ConfigurationImpl setManagementNotificationAddress(final SimpleString address) {
   managementNotificationAddress = address;
   return this;
}
/** Returns the user name used for cluster connections. */
public String getClusterUser() {
   return clusterUser;
}
/** Sets the cluster connection user name. Returns {@code this} for chaining. */
public ConfigurationImpl setClusterUser(final String user) {
   clusterUser = user;
   return this;
}
/** Returns the password used for cluster connections. */
public String getClusterPassword() {
   return clusterPassword;
}
/** Returns whether clients should fail over when this server is shut down cleanly. */
public boolean isFailoverOnServerShutdown() {
   return failoverOnServerShutdown;
}
/** Sets whether clients fail over on clean server shutdown. Returns {@code this} for chaining. */
public ConfigurationImpl setFailoverOnServerShutdown(boolean failoverOnServerShutdown) {
   this.failoverOnServerShutdown = failoverOnServerShutdown;
   return this;
}
/** Sets the cluster connection password. Returns {@code this} for chaining. */
public ConfigurationImpl setClusterPassword(final String theclusterPassword) {
   clusterPassword = theclusterPassword;
   return this;
}
/** Returns the minimum number of journal files before compaction is considered. */
public int getJournalCompactMinFiles() {
   return journalCompactMinFiles;
}
/** Returns the live-data percentage threshold below which the journal is compacted. */
public int getJournalCompactPercentage() {
   return journalCompactPercentage;
}
/** Sets the minimum journal file count for compaction. Returns {@code this} for chaining. */
public ConfigurationImpl setJournalCompactMinFiles(final int minFiles) {
   journalCompactMinFiles = minFiles;
   return this;
}
/** Sets the journal-compaction percentage threshold. Returns {@code this} for chaining. */
public ConfigurationImpl setJournalCompactPercentage(final int percentage) {
   journalCompactPercentage = percentage;
   return this;
}
/** Returns the interval (in ms) between periodic server-state dumps. */
public long getServerDumpInterval() {
   return serverDumpInterval;
}
/** Sets the server-state dump interval (in ms). Returns {@code this} for chaining. */
public ConfigurationImpl setServerDumpInterval(final long intervalInMilliseconds) {
   serverDumpInterval = intervalInMilliseconds;
   return this;
}
/** Returns the memory-usage percentage at which a warning is logged. */
public int getMemoryWarningThreshold() {
   return memoryWarningThreshold;
}
/** Sets the memory-warning threshold percentage. Returns {@code this} for chaining. */
public ConfigurationImpl setMemoryWarningThreshold(final int memoryWarningThreshold) {
   this.memoryWarningThreshold = memoryWarningThreshold;
   return this;
}
/** Returns how often (in ms) memory usage is sampled. */
public long getMemoryMeasureInterval() {
   return memoryMeasureInterval;
}
/** Sets the memory sampling interval (in ms). Returns {@code this} for chaining. */
public ConfigurationImpl setMemoryMeasureInterval(final long memoryMeasureInterval) {
   this.memoryMeasureInterval = memoryMeasureInterval;
   return this;
}
/** Returns the maximum in-flight writes for the journal when using the AIO backend. */
public int getJournalMaxIO_AIO() {
   return journalMaxIO_AIO;
}
/** Sets the AIO journal max in-flight writes. Returns {@code this} for chaining. */
public ConfigurationImpl setJournalMaxIO_AIO(final int journalMaxIO) {
   journalMaxIO_AIO = journalMaxIO;
   return this;
}
/** Returns the journal buffer flush timeout for the AIO backend. */
public int getJournalBufferTimeout_AIO() {
   return journalBufferTimeout_AIO;
}
/** Sets the AIO journal buffer flush timeout. Returns {@code this} for chaining. */
public ConfigurationImpl setJournalBufferTimeout_AIO(final int journalBufferTimeout) {
   journalBufferTimeout_AIO = journalBufferTimeout;
   return this;
}
/** Returns the journal buffer size (bytes) for the AIO backend. */
public int getJournalBufferSize_AIO() {
   return journalBufferSize_AIO;
}
/** Sets the AIO journal buffer size (bytes). Returns {@code this} for chaining. */
public ConfigurationImpl setJournalBufferSize_AIO(final int journalBufferSize) {
   journalBufferSize_AIO = journalBufferSize;
   return this;
}
/** Returns the maximum in-flight writes for the journal when using the NIO backend. */
public int getJournalMaxIO_NIO() {
   return journalMaxIO_NIO;
}
/** Sets the NIO journal max in-flight writes. Returns {@code this} for chaining. */
public ConfigurationImpl setJournalMaxIO_NIO(final int journalMaxIO) {
   journalMaxIO_NIO = journalMaxIO;
   return this;
}
/** Returns the journal buffer flush timeout for the NIO backend. */
public int getJournalBufferTimeout_NIO() {
   return journalBufferTimeout_NIO;
}
/** Sets the NIO journal buffer flush timeout. Returns {@code this} for chaining. */
public ConfigurationImpl setJournalBufferTimeout_NIO(final int journalBufferTimeout) {
   journalBufferTimeout_NIO = journalBufferTimeout;
   return this;
}
/** Returns the journal buffer size (bytes) for the NIO backend. */
public int getJournalBufferSize_NIO() {
   return journalBufferSize_NIO;
}
/** Sets the NIO journal buffer size (bytes). Returns {@code this} for chaining. */
public ConfigurationImpl setJournalBufferSize_NIO(final int journalBufferSize) {
   journalBufferSize_NIO = journalBufferSize;
   return this;
}
/** Returns the live map of address-match pattern to {@link AddressSettings} (not a copy). */
@Override
public Map<String, AddressSettings> getAddressesSettings() {
   return addressesSettings;
}
/** Replaces the address-settings map wholesale. Returns {@code this} for chaining. */
@Override
public ConfigurationImpl setAddressesSettings(final Map<String, AddressSettings> addressesSettings) {
   this.addressesSettings = addressesSettings;
   return this;
}
/** Adds (or overwrites) one address-settings entry keyed by its match pattern. Returns {@code this}. */
@Override
public ConfigurationImpl addAddressesSetting(String key, AddressSettings addressesSetting) {
   this.addressesSettings.put(key, addressesSetting);
   return this;
}
/** Removes all address-settings entries. Returns {@code this} for chaining. */
@Override
public ConfigurationImpl clearAddressesSettings() {
   this.addressesSettings.clear();
   return this;
}
/** Returns the live map of match pattern to {@link ResourceLimitSettings} (not a copy). */
@Override
public Map<String, ResourceLimitSettings> getResourceLimitSettings() {
   return resourceLimitSettings;
}
/** Replaces the resource-limit settings map wholesale. Returns {@code this} for chaining. */
@Override
public ConfigurationImpl setResourceLimitSettings(final Map<String, ResourceLimitSettings> resourceLimitSettings) {
   this.resourceLimitSettings = resourceLimitSettings;
   return this;
}
/** Adds one resource-limit entry, keyed by its own match string. Returns {@code this} for chaining. */
@Override
public ConfigurationImpl addResourceLimitSettings(ResourceLimitSettings resourceLimitSettings) {
   this.resourceLimitSettings.put(resourceLimitSettings.getMatch().toString(), resourceLimitSettings);
   return this;
}
/** Returns the live map of address-match pattern to its set of security {@link Role}s. */
@Override
public Map<String, Set<Role>> getSecurityRoles() {
   return securitySettings;
}
/** Replaces the security-roles map wholesale. Returns {@code this} for chaining. */
@Override
public ConfigurationImpl setSecurityRoles(final Map<String, Set<Role>> securitySettings) {
   this.securitySettings = securitySettings;
   return this;
}
/** Returns the live list of connector-service configurations (not a copy). */
public List<ConnectorServiceConfiguration> getConnectorServiceConfigurations() {
   return this.connectorServiceConfigurations;
}
/**
 * Returns the broker instance directory, lazily resolved on first call:
 * the "artemis.instance" system property if set, otherwise the current
 * working directory ("user.dir"). The resolved value is cached.
 * NOTE: lazy init is not synchronized; concurrent first calls may race.
 */
public File getBrokerInstance() {
   if (artemisInstance == null) {
      String location = System.getProperty("artemis.instance");
      if (location == null) {
         location = System.getProperty("user.dir");
      }
      artemisInstance = new File(location);
   }
   return artemisInstance;
}
/** Explicitly sets the broker instance directory, overriding lazy system-property resolution. */
public void setBrokerInstance(File directory) {
   this.artemisInstance = directory;
}
/**
 * Returns whether the HA policy is configured to check for an already-live
 * server before activating; {@code false} when the HA policy does not
 * support that option.
 *
 * <p>Bug fix: the original tested {@code instanceof ReplicaPolicyConfiguration}
 * but then cast to {@code ReplicatedPolicyConfiguration} — two distinct HA
 * policy classes — so taking the branch always threw ClassCastException.
 * The instanceof now matches the cast: {@code checkForLiveServer} is a
 * property of the replicated (live) policy configuration.
 */
public boolean isCheckForLiveServer() {
   if (haPolicyConfiguration instanceof ReplicatedPolicyConfiguration) {
      return ((ReplicatedPolicyConfiguration) haPolicyConfiguration).isCheckForLiveServer();
   }
   else {
      return false;
   }
}
/**
 * Sets the check-for-live-server flag on the HA policy when it supports it;
 * otherwise a no-op. Returns {@code this} for chaining.
 *
 * <p>Bug fix: the original tested {@code instanceof ReplicaPolicyConfiguration}
 * but cast to {@code ReplicatedPolicyConfiguration}, guaranteeing a
 * ClassCastException whenever the branch was taken. The instanceof now
 * matches the cast target, which is the class declaring the setter.
 */
public ConfigurationImpl setCheckForLiveServer(boolean checkForLiveServer) {
   if (haPolicyConfiguration instanceof ReplicatedPolicyConfiguration) {
      ((ReplicatedPolicyConfiguration) haPolicyConfiguration).setCheckForLiveServer(checkForLiveServer);
   }
   return this;
}
/**
 * One-line human-readable summary of the main broker settings
 * (cluster flag and the four storage directories).
 */
@Override
public String toString() {
   // Single concatenation expression; produces exactly the same text as the
   // previous StringBuilder-based version (null fields render as "null").
   return "Broker Configuration (" +
      "clustered=" + isClustered() + "," +
      "journalDirectory=" + journalDirectory + "," +
      "bindingsDirectory=" + bindingsDirectory + "," +
      "largeMessagesDirectory=" + largeMessagesDirectory + "," +
      "pagingDirectory=" + pagingDirectory + ")";
}
/** Replaces the connector-service configuration list wholesale. Returns {@code this} for chaining. */
public ConfigurationImpl setConnectorServiceConfigurations(final List<ConnectorServiceConfiguration> configs) {
   this.connectorServiceConfigurations = configs;
   return this;
}
/** Appends one connector-service configuration. Returns {@code this} for chaining. */
public ConfigurationImpl addConnectorServiceConfiguration(final ConnectorServiceConfiguration config) {
   this.connectorServiceConfigurations.add(config);
   return this;
}
/** Returns whether passwords in the configuration are stored masked (encoded). */
public boolean isMaskPassword() {
   return maskPassword;
}
/** Sets whether configuration passwords are masked. Returns {@code this} for chaining. */
public ConfigurationImpl setMaskPassword(boolean maskPassword) {
   this.maskPassword = maskPassword;
   return this;
}
/** Sets the class name of the codec used to (de)mask passwords. Returns {@code this} for chaining. */
public ConfigurationImpl setPasswordCodec(String codec) {
   passwordCodec = codec;
   return this;
}
/** Returns the class name of the password codec, or {@code null} if none configured. */
public String getPasswordCodec() {
   return passwordCodec;
}
/** Returns the configured broker name. */
@Override
public String getName() {
   return name;
}
/** Sets the broker name. Returns {@code this} for chaining. */
@Override
public ConfigurationImpl setName(String name) {
   this.name = name;
   return this;
}
/** Sets whether protocol implementations are discovered/resolved automatically. Returns {@code this}. */
@Override
public ConfigurationImpl setResolveProtocols(boolean resolveProtocols) {
   this.resolveProtocols = resolveProtocols;
   return this;
}
/** Returns whether automatic protocol resolution is enabled. */
@Override
public boolean isResolveProtocols() {
   return resolveProtocols;
}
/**
 * Hash over the same configuration fields {@link #equals(Object)} compares,
 * using the classic Eclipse-generated pattern (prime 31; booleans hash to
 * 1231/1237; longs are folded with {@code ^ (v >>> 32)}).
 * Field order and constants must not change, or persisted/compared hashes
 * would differ from previous releases.
 */
@Override
public int hashCode() {
   final int prime = 31;
   int result = 1;
   result = prime * result + ((acceptorConfigs == null) ? 0 : acceptorConfigs.hashCode());
   result = prime * result + ((addressesSettings == null) ? 0 : addressesSettings.hashCode());
   result = prime * result + (asyncConnectionExecutionEnabled ? 1231 : 1237);
   result = prime * result + ((bindingsDirectory == null) ? 0 : bindingsDirectory.hashCode());
   result = prime * result + ((bridgeConfigurations == null) ? 0 : bridgeConfigurations.hashCode());
   result = prime * result + ((broadcastGroupConfigurations == null) ? 0 : broadcastGroupConfigurations.hashCode());
   result = prime * result + ((clusterConfigurations == null) ? 0 : clusterConfigurations.hashCode());
   result = prime * result + ((clusterPassword == null) ? 0 : clusterPassword.hashCode());
   result = prime * result + ((clusterUser == null) ? 0 : clusterUser.hashCode());
   result = prime * result + (int) (connectionTTLOverride ^ (connectionTTLOverride >>> 32));
   result = prime * result + ((connectorConfigs == null) ? 0 : connectorConfigs.hashCode());
   result = prime * result + ((connectorServiceConfigurations == null) ? 0 : connectorServiceConfigurations.hashCode());
   result = prime * result + (createBindingsDir ? 1231 : 1237);
   result = prime * result + (createJournalDir ? 1231 : 1237);
   result = prime * result + ((discoveryGroupConfigurations == null) ? 0 : discoveryGroupConfigurations.hashCode());
   result = prime * result + ((divertConfigurations == null) ? 0 : divertConfigurations.hashCode());
   result = prime * result + (failoverOnServerShutdown ? 1231 : 1237);
   result = prime * result + (int) (fileDeploymentScanPeriod ^ (fileDeploymentScanPeriod >>> 32));
   result = prime * result + ((groupingHandlerConfiguration == null) ? 0 : groupingHandlerConfiguration.hashCode());
   result = prime * result + idCacheSize;
   result = prime * result + ((incomingInterceptorClassNames == null) ? 0 : incomingInterceptorClassNames.hashCode());
   result = prime * result + ((jmxDomain == null) ? 0 : jmxDomain.hashCode());
   result = prime * result + (jmxManagementEnabled ? 1231 : 1237);
   result = prime * result + journalBufferSize_AIO;
   result = prime * result + journalBufferSize_NIO;
   result = prime * result + journalBufferTimeout_AIO;
   result = prime * result + journalBufferTimeout_NIO;
   result = prime * result + journalCompactMinFiles;
   result = prime * result + journalCompactPercentage;
   result = prime * result + ((journalDirectory == null) ? 0 : journalDirectory.hashCode());
   result = prime * result + journalFileSize;
   result = prime * result + journalMaxIO_AIO;
   result = prime * result + journalMaxIO_NIO;
   result = prime * result + journalMinFiles;
   result = prime * result + journalPerfBlastPages;
   result = prime * result + (journalSyncNonTransactional ? 1231 : 1237);
   result = prime * result + (journalSyncTransactional ? 1231 : 1237);
   result = prime * result + ((journalType == null) ? 0 : journalType.hashCode());
   result = prime * result + ((largeMessagesDirectory == null) ? 0 : largeMessagesDirectory.hashCode());
   result = prime * result + (logJournalWriteRate ? 1231 : 1237);
   result = prime * result + ((managementAddress == null) ? 0 : managementAddress.hashCode());
   result = prime * result + ((managementNotificationAddress == null) ? 0 : managementNotificationAddress.hashCode());
   result = prime * result + (maskPassword ? 1231 : 1237);
   result = prime * result + maxConcurrentPageIO;
   result = prime * result + (int) (memoryMeasureInterval ^ (memoryMeasureInterval >>> 32));
   result = prime * result + memoryWarningThreshold;
   result = prime * result + (messageCounterEnabled ? 1231 : 1237);
   result = prime * result + messageCounterMaxDayHistory;
   result = prime * result + (int) (messageCounterSamplePeriod ^ (messageCounterSamplePeriod >>> 32));
   result = prime * result + (int) (messageExpiryScanPeriod ^ (messageExpiryScanPeriod >>> 32));
   result = prime * result + messageExpiryThreadPriority;
   result = prime * result + ((name == null) ? 0 : name.hashCode());
   result = prime * result + ((outgoingInterceptorClassNames == null) ? 0 : outgoingInterceptorClassNames.hashCode());
   result = prime * result + ((pagingDirectory == null) ? 0 : pagingDirectory.hashCode());
   result = prime * result + (persistDeliveryCountBeforeDelivery ? 1231 : 1237);
   result = prime * result + (persistIDCache ? 1231 : 1237);
   result = prime * result + (persistenceEnabled ? 1231 : 1237);
   result = prime * result + ((queueConfigurations == null) ? 0 : queueConfigurations.hashCode());
   result = prime * result + (runSyncSpeedTest ? 1231 : 1237);
   result = prime * result + scheduledThreadPoolMaxSize;
   result = prime * result + (securityEnabled ? 1231 : 1237);
   result = prime * result + (int) (securityInvalidationInterval ^ (securityInvalidationInterval >>> 32));
   result = prime * result + ((securitySettings == null) ? 0 : securitySettings.hashCode());
   result = prime * result + (int) (serverDumpInterval ^ (serverDumpInterval >>> 32));
   result = prime * result + threadPoolMaxSize;
   result = prime * result + (int) (transactionTimeout ^ (transactionTimeout >>> 32));
   result = prime * result + (int) (transactionTimeoutScanPeriod ^ (transactionTimeoutScanPeriod >>> 32));
   result = prime * result + (wildcardRoutingEnabled ? 1231 : 1237);
   result = prime * result + (resolveProtocols ? 1231 : 1237);
   result = prime * result + (int) (journalLockAcquisitionTimeout ^ (journalLockAcquisitionTimeout >>> 32));
   return result;
}
/**
 * Field-by-field equality over every compared configuration property,
 * in the same field order as {@link #hashCode()}. Generated-style pattern:
 * object fields use null-safe equals, primitives use {@code !=}
 * ({@code journalType} is compared by reference, which is valid for enums).
 */
@Override
public boolean equals(Object obj) {
   if (this == obj)
      return true;
   if (obj == null)
      return false;
   if (!(obj instanceof ConfigurationImpl))
      return false;
   ConfigurationImpl other = (ConfigurationImpl) obj;
   if (acceptorConfigs == null) {
      if (other.acceptorConfigs != null)
         return false;
   }
   else if (!acceptorConfigs.equals(other.acceptorConfigs))
      return false;
   if (addressesSettings == null) {
      if (other.addressesSettings != null)
         return false;
   }
   else if (!addressesSettings.equals(other.addressesSettings))
      return false;
   if (asyncConnectionExecutionEnabled != other.asyncConnectionExecutionEnabled)
      return false;
   if (bindingsDirectory == null) {
      if (other.bindingsDirectory != null)
         return false;
   }
   else if (!bindingsDirectory.equals(other.bindingsDirectory))
      return false;
   if (bridgeConfigurations == null) {
      if (other.bridgeConfigurations != null)
         return false;
   }
   else if (!bridgeConfigurations.equals(other.bridgeConfigurations))
      return false;
   if (broadcastGroupConfigurations == null) {
      if (other.broadcastGroupConfigurations != null)
         return false;
   }
   else if (!broadcastGroupConfigurations.equals(other.broadcastGroupConfigurations))
      return false;
   if (clusterConfigurations == null) {
      if (other.clusterConfigurations != null)
         return false;
   }
   else if (!clusterConfigurations.equals(other.clusterConfigurations))
      return false;
   if (clusterPassword == null) {
      if (other.clusterPassword != null)
         return false;
   }
   else if (!clusterPassword.equals(other.clusterPassword))
      return false;
   if (clusterUser == null) {
      if (other.clusterUser != null)
         return false;
   }
   else if (!clusterUser.equals(other.clusterUser))
      return false;
   if (connectionTTLOverride != other.connectionTTLOverride)
      return false;
   if (connectorConfigs == null) {
      if (other.connectorConfigs != null)
         return false;
   }
   else if (!connectorConfigs.equals(other.connectorConfigs))
      return false;
   if (connectorServiceConfigurations == null) {
      if (other.connectorServiceConfigurations != null)
         return false;
   }
   else if (!connectorServiceConfigurations.equals(other.connectorServiceConfigurations))
      return false;
   if (createBindingsDir != other.createBindingsDir)
      return false;
   if (createJournalDir != other.createJournalDir)
      return false;
   if (discoveryGroupConfigurations == null) {
      if (other.discoveryGroupConfigurations != null)
         return false;
   }
   else if (!discoveryGroupConfigurations.equals(other.discoveryGroupConfigurations))
      return false;
   if (divertConfigurations == null) {
      if (other.divertConfigurations != null)
         return false;
   }
   else if (!divertConfigurations.equals(other.divertConfigurations))
      return false;
   if (failoverOnServerShutdown != other.failoverOnServerShutdown)
      return false;
   if (fileDeploymentScanPeriod != other.fileDeploymentScanPeriod)
      return false;
   if (groupingHandlerConfiguration == null) {
      if (other.groupingHandlerConfiguration != null)
         return false;
   }
   else if (!groupingHandlerConfiguration.equals(other.groupingHandlerConfiguration))
      return false;
   if (idCacheSize != other.idCacheSize)
      return false;
   if (incomingInterceptorClassNames == null) {
      if (other.incomingInterceptorClassNames != null)
         return false;
   }
   else if (!incomingInterceptorClassNames.equals(other.incomingInterceptorClassNames))
      return false;
   if (jmxDomain == null) {
      if (other.jmxDomain != null)
         return false;
   }
   else if (!jmxDomain.equals(other.jmxDomain))
      return false;
   if (jmxManagementEnabled != other.jmxManagementEnabled)
      return false;
   if (journalBufferSize_AIO != other.journalBufferSize_AIO)
      return false;
   if (journalBufferSize_NIO != other.journalBufferSize_NIO)
      return false;
   if (journalBufferTimeout_AIO != other.journalBufferTimeout_AIO)
      return false;
   if (journalBufferTimeout_NIO != other.journalBufferTimeout_NIO)
      return false;
   if (journalCompactMinFiles != other.journalCompactMinFiles)
      return false;
   if (journalCompactPercentage != other.journalCompactPercentage)
      return false;
   if (journalDirectory == null) {
      if (other.journalDirectory != null)
         return false;
   }
   else if (!journalDirectory.equals(other.journalDirectory))
      return false;
   if (journalFileSize != other.journalFileSize)
      return false;
   if (journalMaxIO_AIO != other.journalMaxIO_AIO)
      return false;
   if (journalMaxIO_NIO != other.journalMaxIO_NIO)
      return false;
   if (journalMinFiles != other.journalMinFiles)
      return false;
   if (journalPerfBlastPages != other.journalPerfBlastPages)
      return false;
   if (journalSyncNonTransactional != other.journalSyncNonTransactional)
      return false;
   if (journalSyncTransactional != other.journalSyncTransactional)
      return false;
   if (journalType != other.journalType)
      return false;
   if (largeMessagesDirectory == null) {
      if (other.largeMessagesDirectory != null)
         return false;
   }
   else if (!largeMessagesDirectory.equals(other.largeMessagesDirectory))
      return false;
   if (logJournalWriteRate != other.logJournalWriteRate)
      return false;
   if (managementAddress == null) {
      if (other.managementAddress != null)
         return false;
   }
   else if (!managementAddress.equals(other.managementAddress))
      return false;
   if (managementNotificationAddress == null) {
      if (other.managementNotificationAddress != null)
         return false;
   }
   else if (!managementNotificationAddress.equals(other.managementNotificationAddress))
      return false;
   if (maskPassword != other.maskPassword)
      return false;
   if (maxConcurrentPageIO != other.maxConcurrentPageIO)
      return false;
   if (memoryMeasureInterval != other.memoryMeasureInterval)
      return false;
   if (memoryWarningThreshold != other.memoryWarningThreshold)
      return false;
   if (messageCounterEnabled != other.messageCounterEnabled)
      return false;
   if (messageCounterMaxDayHistory != other.messageCounterMaxDayHistory)
      return false;
   if (messageCounterSamplePeriod != other.messageCounterSamplePeriod)
      return false;
   if (messageExpiryScanPeriod != other.messageExpiryScanPeriod)
      return false;
   if (messageExpiryThreadPriority != other.messageExpiryThreadPriority)
      return false;
   if (name == null) {
      if (other.name != null)
         return false;
   }
   else if (!name.equals(other.name))
      return false;
   if (outgoingInterceptorClassNames == null) {
      if (other.outgoingInterceptorClassNames != null)
         return false;
   }
   else if (!outgoingInterceptorClassNames.equals(other.outgoingInterceptorClassNames))
      return false;
   if (pagingDirectory == null) {
      if (other.pagingDirectory != null)
         return false;
   }
   else if (!pagingDirectory.equals(other.pagingDirectory))
      return false;
   if (persistDeliveryCountBeforeDelivery != other.persistDeliveryCountBeforeDelivery)
      return false;
   if (persistIDCache != other.persistIDCache)
      return false;
   if (persistenceEnabled != other.persistenceEnabled)
      return false;
   if (queueConfigurations == null) {
      if (other.queueConfigurations != null)
         return false;
   }
   else if (!queueConfigurations.equals(other.queueConfigurations))
      return false;
   if (runSyncSpeedTest != other.runSyncSpeedTest)
      return false;
   if (scheduledThreadPoolMaxSize != other.scheduledThreadPoolMaxSize)
      return false;
   if (securityEnabled != other.securityEnabled)
      return false;
   if (securityInvalidationInterval != other.securityInvalidationInterval)
      return false;
   if (securitySettings == null) {
      if (other.securitySettings != null)
         return false;
   }
   else if (!securitySettings.equals(other.securitySettings))
      return false;
   if (serverDumpInterval != other.serverDumpInterval)
      return false;
   if (threadPoolMaxSize != other.threadPoolMaxSize)
      return false;
   if (transactionTimeout != other.transactionTimeout)
      return false;
   if (transactionTimeoutScanPeriod != other.transactionTimeoutScanPeriod)
      return false;
   if (wildcardRoutingEnabled != other.wildcardRoutingEnabled)
      return false;
   if (resolveProtocols != other.resolveProtocols)
      return false;
   if (journalLockAcquisitionTimeout != other.journalLockAcquisitionTimeout)
      return false;
   return true;
}
/**
 * Deep-copies this configuration by Java-serializing it to a byte array and
 * deserializing it back (inside a privileged block so it works under a
 * SecurityManager). Fields marked {@code transient} do not survive the
 * round-trip and are re-copied by hand afterwards.
 */
@Override
public Configuration copy() throws Exception {
   return AccessController.doPrivileged(new PrivilegedExceptionAction<Configuration>() {
      @Override
      public Configuration run() throws Exception {
         ByteArrayOutputStream bos = new ByteArrayOutputStream();
         ObjectOutputStream os = new ObjectOutputStream(bos);
         os.writeObject(ConfigurationImpl.this);
         ObjectInputStream ois = new ObjectInputStreamWithClassLoader(new ByteArrayInputStream(bos.toByteArray()));
         Configuration config = (Configuration) ois.readObject();
         // this is transient because of possible jgroups integration, we need to copy it manually
         config.setBroadcastGroupConfigurations(ConfigurationImpl.this.getBroadcastGroupConfigurations());
         // this is transient because of possible jgroups integration, we need to copy it manually
         config.setDiscoveryGroupConfigurations(ConfigurationImpl.this.getDiscoveryGroupConfigurations());
         return config;
      }
   });
}
/** Sets the timeout (in ms) for acquiring the journal file lock. Returns {@code this} for chaining. */
@Override
public ConfigurationImpl setJournalLockAcquisitionTimeout(long journalLockAcquisitionTimeout) {
   this.journalLockAcquisitionTimeout = journalLockAcquisitionTimeout;
   return this;
}
/** Returns the journal lock acquisition timeout (in ms). */
@Override
public long getJournalLockAcquisitionTimeout() {
   return journalLockAcquisitionTimeout;
}
/** Returns the configured high-availability policy, or {@code null} if none. */
@Override
public HAPolicyConfiguration getHAPolicyConfiguration() {
   return haPolicyConfiguration;
}
/** Sets the high-availability policy. Returns {@code this} for chaining. */
@Override
public ConfigurationImpl setHAPolicyConfiguration(HAPolicyConfiguration haPolicyConfiguration) {
   this.haPolicyConfiguration = haPolicyConfiguration;
   return this;
}
/**
 * It will find the right location of a subFolder, related to artemisInstance.
 * Note: if {@code subFolder} is an absolute path, {@link java.nio.file.Path#resolve}
 * returns it unchanged rather than nesting it under the instance directory.
 * Any resolution failure is rethrown as an unchecked RuntimeException.
 */
private File subFolder(String subFolder) {
   try {
      return getBrokerInstance().toPath().resolve(subFolder).toFile();
   }
   catch (Exception e) {
      throw new RuntimeException(e);
   }
}
}
| |
package com.fasterxml.jackson.databind.ser.impl;
import java.io.IOException;
import java.lang.reflect.Type;
import java.util.Objects;
import com.fasterxml.jackson.annotation.JsonFormat;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.*;
import com.fasterxml.jackson.databind.annotation.JacksonStdImpl;
import com.fasterxml.jackson.databind.introspect.AnnotatedMember;
import com.fasterxml.jackson.databind.jsonFormatVisitors.JsonFormatTypes;
import com.fasterxml.jackson.databind.jsonFormatVisitors.JsonFormatVisitorWrapper;
import com.fasterxml.jackson.databind.jsontype.TypeSerializer;
import com.fasterxml.jackson.databind.ser.ContainerSerializer;
import com.fasterxml.jackson.databind.ser.ContextualSerializer;
import com.fasterxml.jackson.databind.ser.std.ArraySerializerBase;
import com.fasterxml.jackson.databind.type.TypeFactory;
/**
* Standard serializer used for <code>String[]</code> values.
*/
@JacksonStdImpl
@SuppressWarnings("serial")
public class StringArraySerializer
extends ArraySerializerBase<String[]>
implements ContextualSerializer
{
/* Note: not clean in general, but we are betting against
* anyone re-defining properties of String.class here...
*/
@SuppressWarnings("deprecation")
private final static JavaType VALUE_TYPE = TypeFactory.defaultInstance().uncheckedSimpleType(String.class);
public final static StringArraySerializer instance = new StringArraySerializer();
/**
* Value serializer to use, if it's not the standard one
* (if it is we can optimize serialization a lot)
*/
protected final JsonSerializer<Object> _elementSerializer;
/*
/**********************************************************
/* Life-cycle
/**********************************************************
*/
protected StringArraySerializer() {
super(String[].class);
_elementSerializer = null;
}
@SuppressWarnings("unchecked")
public StringArraySerializer(StringArraySerializer src,
BeanProperty prop, JsonSerializer<?> ser, Boolean unwrapSingle) {
super(src, prop, unwrapSingle);
_elementSerializer = (JsonSerializer<Object>) ser;
}
@Override
public JsonSerializer<?> _withResolved(BeanProperty prop, Boolean unwrapSingle) {
return new StringArraySerializer(this, prop, _elementSerializer, unwrapSingle);
}
/**
* Strings never add type info; hence, even if type serializer is suggested,
* we'll ignore it...
*/
@Override
public ContainerSerializer<?> _withValueTypeSerializer(TypeSerializer vts) {
return this;
}
/*
/**********************************************************
/* Post-processing
/**********************************************************
*/
@Override
public JsonSerializer<?> createContextual(SerializerProvider provider,
BeanProperty property)
throws JsonMappingException
{
/* 29-Sep-2012, tatu: Actually, we need to do much more contextual
* checking here since we finally know for sure the property,
* and it may have overrides
*/
JsonSerializer<?> ser = null;
// First: if we have a property, may have property-annotation overrides
if (property != null) {
final AnnotationIntrospector ai = provider.getAnnotationIntrospector();
AnnotatedMember m = property.getMember();
if (m != null) {
Object serDef = ai.findContentSerializer(m);
if (serDef != null) {
ser = provider.serializerInstance(m, serDef);
}
}
}
// but since formats have both property overrides and global per-type defaults,
// need to do that separately
Boolean unwrapSingle = findFormatFeature(provider, property, String[].class,
JsonFormat.Feature.WRITE_SINGLE_ELEM_ARRAYS_UNWRAPPED);
if (ser == null) {
ser = _elementSerializer;
}
// May have a content converter
ser = findContextualConvertingSerializer(provider, property, ser);
if (ser == null) {
ser = provider.findContentValueSerializer(String.class, property);
}
// Optimization: default serializer just writes String, so we can avoid a call:
if (isDefaultSerializer(ser)) {
ser = null;
}
// note: will never have TypeSerializer, because Strings are "natural" type
if ((ser == _elementSerializer) && (Objects.equals(unwrapSingle, _unwrapSingle))) {
return this;
}
return new StringArraySerializer(this, property, ser, unwrapSingle);
}
/*
/**********************************************************
/* Simple accessors
/**********************************************************
*/
@Override
public JavaType getContentType() {
return VALUE_TYPE;
}
@Override
public JsonSerializer<?> getContentSerializer() {
return _elementSerializer;
}
@Override
public boolean isEmpty(SerializerProvider prov, String[] value) {
return (value.length == 0);
}
@Override
public boolean hasSingleElement(String[] value) {
return (value.length == 1);
}
/*
/**********************************************************
/* Actual serialization
/**********************************************************
*/
@Override
public final void serialize(String[] value, JsonGenerator gen, SerializerProvider provider)
throws IOException
{
final int len = value.length;
if (len == 1) {
if (((_unwrapSingle == null) &&
provider.isEnabled(SerializationFeature.WRITE_SINGLE_ELEM_ARRAYS_UNWRAPPED))
|| (_unwrapSingle == Boolean.TRUE)) {
serializeContents(value, gen, provider);
return;
}
}
gen.writeStartArray(value, len);
serializeContents(value, gen, provider);
gen.writeEndArray();
}
@Override
public void serializeContents(String[] value, JsonGenerator gen, SerializerProvider provider)
throws IOException
{
final int len = value.length;
if (len == 0) {
return;
}
if (_elementSerializer != null) {
serializeContentsSlow(value, gen, provider, _elementSerializer);
return;
}
for (int i = 0; i < len; ++i) {
String str = value[i];
if (str == null) {
gen.writeNull();
} else {
gen.writeString(value[i]);
}
}
}
private void serializeContentsSlow(String[] value, JsonGenerator gen, SerializerProvider provider, JsonSerializer<Object> ser)
throws IOException
{
for (int i = 0, len = value.length; i < len; ++i) {
String str = value[i];
if (str == null) {
provider.defaultSerializeNull(gen);
} else {
ser.serialize(value[i], gen, provider);
}
}
}
@Override
public JsonNode getSchema(SerializerProvider provider, Type typeHint) {
return createSchemaNode("array", true).set("items", createSchemaNode("string"));
}
@Override
public void acceptJsonFormatVisitor(JsonFormatVisitorWrapper visitor, JavaType typeHint) throws JsonMappingException
{
visitArrayFormat(visitor, typeHint, JsonFormatTypes.STRING);
}
}
| |
// -*- mode: java; c-basic-offset: 2; -*-
// Copyright 2016-2017 MIT, All rights reserved
// Released under the Apache License, Version 2.0
// http://www.apache.org/licenses/LICENSE-2.0
package com.google.appinventor.client.editor.simple.components;
import static com.google.appinventor.client.Ode.MESSAGES;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import com.google.appinventor.client.ErrorReporter;
import com.google.appinventor.client.Ode;
import com.google.appinventor.client.editor.simple.SimpleEditor;
import com.google.appinventor.client.editor.simple.palette.SimplePaletteItem;
import com.google.appinventor.client.widgets.dnd.DragSource;
import com.google.appinventor.components.common.ComponentConstants;
import com.google.gwt.core.client.JavaScriptObject;
import com.google.gwt.event.logical.shared.AttachEvent;
import com.google.gwt.user.client.Event;
import com.google.gwt.user.client.ui.AbsolutePanel;
import com.google.gwt.user.client.ui.Image;
public final class MockMap extends MockContainer {
public static final String TYPE = "Map";
protected static final String PROPERTY_NAME_LATITUDE = "Latitude";
protected static final String PROPERTY_NAME_LONGITUDE = "Longitude";
protected static final String PROPERTY_NAME_MAP_TYPE = "MapType";
protected static final String PROPERTY_NAME_CENTER_FROM_STRING = "CenterFromString";
protected static final String PROPERTY_NAME_ZOOM_LEVEL = "ZoomLevel";
protected static final String PROPERTY_NAME_SHOW_COMPASS = "ShowCompass";
protected static final String PROPERTY_NAME_SHOW_ZOOM = "ShowZoom";
protected static final String PROPERTY_NAME_SHOW_USER = "ShowUser";
protected static final String PROPERTY_NAME_ENABLE_ROTATION = "EnableRotation";
protected static final String PROPERTY_NAME_SHOW_SCALE = "ShowScale";
protected static final String PROPERTY_NAME_SCALE_UNITS = "ScaleUnits";
/**
* The Widget wrapping the element where the map tiles will be rendered.
*/
protected final AbsolutePanel mapWidget;
/**
* The JavaScript object representing the non-GWT maps renderer.
*/
private JavaScriptObject mapInstance;
/**
* A JavaScript array containing the (1-indexed) tile layers used for maps.
*/
private JavaScriptObject tileLayers;
/**
* Active base tile layer.
*/
private JavaScriptObject baseLayer;
/**
* Set of event listeners that will be triggered on native map events.
*/
private final Set<MockMapEventListener> listeners = new HashSet<MockMapEventListener>();
// Settings for the internal maps component
private double latitude = 42.359144;
private double longitude = -71.093612;
private int zoomLevel = 13;
private int selectedTileLayer = 1;
private boolean zoomControl = false;
private boolean compassEnabled = false;
private boolean userLocationEnabled = false;
private boolean showScale = false;
private int scaleUnits = 1;
/**
 * Creates a new mock Map component for the designer. The GWT panel that
 * hosts the native (non-GWT) map renderer is created here, but the native
 * map itself is only initialized once the widget is attached to the DOM,
 * via the attach handler below.
 */
public MockMap(SimpleEditor editor) {
   super(editor, TYPE, images.map(), new MockMapLayout());
   initToolbarItems();
   rootPanel.setHeight("100%");
   mapWidget = new AbsolutePanel();
   mapWidget.setStylePrimaryName("ode-SimpleMockContainer");
   mapWidget.add(rootPanel);
   initComponent(mapWidget);
   mapWidget.addAttachHandler(new AttachEvent.Handler() {
      @Override
      public void onAttachOrDetach(AttachEvent arg0) {
         // Native map setup must wait until the element exists in the DOM;
         // re-add any child features whenever the widget is (re)attached.
         if (arg0.isAttached()) {
            initPanel();
            invalidateMap();
            for (MockComponent child : children) {
               ((MockMapFeature) child).addToMap(MockMap.this);
            }
         }
      }
   });
}
/**
 * Registers the map feature types and their icons with the designer.
 * Marker, LineString, and Polygon are always listed because they can be
 * loaded dynamically via LoadFromURL even if the user never explicitly
 * created one in the designer.
 */
@Override
public void collectTypesAndIcons(Map<String, String> typesAndIcons) {
  super.collectTypesAndIcons(typesAndIcons);
  String markerIcon = new Image(images.marker()).getElement().getString();
  String lineStringIcon = new Image(images.linestring()).getElement().getString();
  String polygonIcon = new Image(images.polygon()).getElement().getString();
  typesAndIcons.put("Marker", markerIcon);
  typesAndIcons.put("LineString", lineStringIcon);
  typesAndIcons.put("Polygon", polygonIcon);
}
/** Registers a listener to be notified of native map events. */
public void addEventListener(MockMapEventListener listener) {
  listeners.add(listener);
}

/** Removes a previously registered map event listener. No-op if absent. */
public void removeEventListener(MockMapEventListener listener) {
  listeners.remove(listener);
}
/**
 * {@inheritDoc}
 * NOTE(review): reuses the VideoPlayer preferred-size constants; confirm a
 * Map-specific constant isn't intended here.
 */
@Override
public int getPreferredWidth() {
  return ComponentConstants.VIDEOPLAYER_PREFERRED_WIDTH;
}

/** {@inheritDoc} See the note on {@link #getPreferredWidth()}. */
@Override
public int getPreferredHeight() {
  return ComponentConstants.VIDEOPLAYER_PREFERRED_HEIGHT;
}
/**
 * Routes browser events: while the map toolbar is in the unlocked state the
 * user is panning/zooming the Leaflet map itself, so the event must reach
 * the native map instead of being treated as a designer gesture.
 */
@Override
public void onBrowserEvent(Event event) {
  if (!isUnlocked()) {
    super.onBrowserEvent(event);
    return;
  }
  setShouldCancel(event, false);
}
/**
 * Determines whether a drag source may be dropped on this map. Only map
 * features are accepted, whether dragged from the form or from the palette.
 */
@Override
protected boolean acceptableSource(DragSource source) {
  if (source instanceof MockComponent) {
    return source instanceof MockMapFeature;
  }
  if (source instanceof SimplePaletteItem) {
    MockComponent dragged = (MockComponent) source.getDragWidget();
    return dragged instanceof MockMapFeature;
  }
  return false;
}
/**
 * Handles a change to the BackgroundColor property, mapping the designer's
 * "default" sentinel to opaque white before applying it to the widget.
 */
private void setBackgroundColorProperty(String text) {
  if (MockComponentsUtil.isDefaultColor(text)) {
    text = "&HFFFFFFFF";
  }
  MockComponentsUtil.setWidgetBackgroundColor(mapWidget, text);
}

/** Handles a change to the Enabled property. */
private void setEnabledProperty(String text) {
  MockComponentsUtil.setEnabled(this, text);
}
/**
 * Dispatches a designer property change to the corresponding native-map
 * update. Unrecognized properties are handled only by the superclass call.
 */
@Override
public void onPropertyChange(String propertyName, String newValue) {
  super.onPropertyChange(propertyName, newValue);
  if (propertyName.equals(PROPERTY_NAME_ENABLED)) {
    setEnabledProperty(newValue);
  } else if (propertyName.equals(PROPERTY_NAME_BACKGROUNDCOLOR)) {
    setBackgroundColorProperty(newValue);
  } else if (propertyName.equals(PROPERTY_NAME_LATITUDE)) {
    setLatitude(newValue);
  } else if (propertyName.equals(PROPERTY_NAME_LONGITUDE)) {
    setLongitude(newValue);
  } else if (propertyName.equals(PROPERTY_NAME_WIDTH)) {
    // Size changes require Leaflet to recompute its viewport.
    invalidateMap();
  } else if (propertyName.equals(PROPERTY_NAME_HEIGHT)) {
    invalidateMap();
  } else if (propertyName.equals(PROPERTY_NAME_MAP_TYPE)) {
    setMapType(newValue);
  } else if (propertyName.equals(PROPERTY_NAME_CENTER_FROM_STRING)) {
    setCenter(newValue);
  } else if (propertyName.equals(PROPERTY_NAME_ZOOM_LEVEL)) {
    setZoomLevel(newValue);
  } else if (propertyName.equals(PROPERTY_NAME_SHOW_COMPASS)) {
    setShowCompass(newValue);
  } else if (propertyName.equals(PROPERTY_NAME_SHOW_USER)) {
    setShowUser(newValue);
  } else if (propertyName.equals(PROPERTY_NAME_SHOW_ZOOM)) {
    setShowZoom(newValue);
  } else if (propertyName.equals(PROPERTY_NAME_SHOW_SCALE)) {
    setShowScale(newValue);
  } else if (propertyName.equals(PROPERTY_NAME_SCALE_UNITS)) {
    setScaleUnits(newValue);
  }
}
/** Returns the native Leaflet map object, or null if not yet initialized. */
public final JavaScriptObject getMapInstance() {
  return mapInstance;
}

/**
 * Handles a change to the Latitude property.
 * NOTE(review): no NumberFormatException handler — assumes the property
 * editor guarantees numeric input; confirm.
 */
private void setLatitude(String text) {
  latitude = Double.parseDouble(text);
  updateMapLatitude(latitude);
}

/** Handles a change to the Longitude property (same caveat as setLatitude). */
private void setLongitude(String text) {
  longitude = Double.parseDouble(text);
  updateMapLongitude(longitude);
}
/**
 * Handles a change to the MapType property. The value is the 1-based index
 * of the tile layer to display. A malformed value is reported to the user
 * and the property is reverted to the previously selected layer.
 */
private void setMapType(String tileLayerId) {
  final int requestedLayer;
  try {
    requestedLayer = Integer.parseInt(tileLayerId);
  } catch (NumberFormatException e) {
    ErrorReporter.reportError(MESSAGES.unknownMapTypeException(tileLayerId));
    changeProperty(PROPERTY_NAME_MAP_TYPE, Integer.toString(selectedTileLayer));
    return;
  }
  selectedTileLayer = requestedLayer;
  updateMapType(selectedTileLayer);
}
/**
 * Handles a change to the CenterFromString property, which encodes the map
 * center as "latitude, longitude". A value without exactly two
 * comma-separated parts is reported and the property reverts to the
 * previous center.
 * NOTE(review): the parts are parsed with Double.parseDouble without a
 * NumberFormatException handler, unlike setMapType — confirm the property
 * editor guarantees numeric components.
 */
private void setCenter(String center) {
  String[] parts = center.split(",");
  if (parts.length != 2) {
    ErrorReporter.reportError(MESSAGES.mapCenterWrongNumberArgumentsException(parts.length));
    changeProperty(PROPERTY_NAME_CENTER_FROM_STRING, latitude + ", " + longitude);
  } else {
    latitude = Double.parseDouble(parts[0].trim());
    longitude = Double.parseDouble(parts[1].trim());
    updateMapCenter(latitude, longitude);
  }
}
/**
 * Handles a change to the ZoomLevel property. Valid zoom levels for the
 * bundled tile layers are 1..18; an out-of-range value is reported and the
 * property reverts to the previous zoom level.
 *
 * @param zoom the new zoom level as its decimal string representation
 */
private void setZoomLevel(String zoom) {
  int zoomLevel = Integer.parseInt(zoom);
  if (zoomLevel < 1 || zoomLevel > 18) {
    ErrorReporter.reportError(MESSAGES.mapZoomLevelOutOfBoundsException());
    changeProperty(PROPERTY_NAME_ZOOM_LEVEL, Integer.toString(this.zoomLevel));
  } else {
    this.zoomLevel = zoomLevel;
    // Reuse the already-parsed value instead of re-parsing the string.
    updateMapZoomLevel(zoomLevel);
  }
}
/** Handles a change to the ShowCompass property. */
private void setShowCompass(String state) {
  this.compassEnabled = Boolean.parseBoolean(state);
  updateMapCompassControl(this.compassEnabled);
}

/** Handles a change to the ShowUser property (mock user-location marker). */
private void setShowUser(String state) {
  this.userLocationEnabled = Boolean.parseBoolean(state);
  updateMapShowUser(this.userLocationEnabled);
}

/** Handles a change to the ShowZoom property (the zoom +/- control). */
private void setShowZoom(String state) {
  this.zoomControl = Boolean.parseBoolean(state);
  updateMapZoomControl(this.zoomControl);
}

/** Handles a change to the ShowScale property. */
private void setShowScale(String state) {
  this.showScale = Boolean.parseBoolean(state);
  updateMapShowScale(this.showScale);
}

/**
 * Handles a change to the ScaleUnits property ("1" = metric, "2" = imperial).
 * NOTE(review): unlike setMapType, a bad value throws rather than reporting
 * through ErrorReporter — presumably the choice editor only ever supplies
 * 1 or 2; confirm.
 */
private void setScaleUnits(String state) {
  if (state.equals("1")) {
    this.scaleUnits = 1;
  } else if (state.equals("2")) {
    this.scaleUnits = 2;
  } else {
    throw new IllegalArgumentException("Unexpected value for scale: " + state);
  }
  updateScaleUnits(this.scaleUnits);
}
// event handlers — invoked from the JSNI toolbar actions (see initToolbarItems)

/** Notifies listeners that the native map's viewport bounds changed. */
protected void onBoundsChanged() {
  // TODO(ewpatton): Send incremental update to companion
  for (MockMapEventListener listener : listeners) {
    listener.onBoundsChanged();
  }
}

/**
 * Invoked when the toolbar's reset button is clicked: restores the map
 * viewport to the zoom/center stored in the component properties, then
 * notifies listeners.
 */
protected void onResetButtonClicked() {
  try {
    updateMapZoomLevel(zoomLevel);
    updateMapCenter(latitude, longitude);
  } catch(NumberFormatException e) {
    // this shouldn't happen in the normal use of the component
  }
  for (MockMapEventListener listener : listeners) {
    listener.onResetButtonClicked();
  }
}

/** Invoked from the native lock toggle when the map becomes unlocked. */
protected void onLockButtonClicked() {
  // we are moving to an unlocked state
  for (MockMapEventListener listener : listeners) {
    listener.onLockButtonClicked();
  }
}

/** Invoked from the native lock toggle when the map becomes locked. */
protected void onUnlockButtonClicked() {
  // we are moving to a locked state
  for (MockMapEventListener listener : listeners) {
    listener.onUnlockButtonClicked();
  }
}
/**
 * Invoked when the "set initial bounds" toolbar button is clicked. Captures
 * the map's current center and zoom level into the component's designer
 * properties, then notifies listeners.
 */
protected void onSetInitialBoundsClicked() {
  final LatLng centerPoint = getCenter();
  final int zoom = getZoom();
  this.latitude = centerPoint.latitude;
  this.longitude = centerPoint.longitude;
  this.zoomLevel = zoom;
  // Use the declared property-name constants rather than repeating the raw
  // literals, so this stays in sync with onPropertyChange.
  properties.changePropertyValue(PROPERTY_NAME_CENTER_FROM_STRING, centerPoint.toString());
  properties.changePropertyValue(PROPERTY_NAME_ZOOM_LEVEL, Integer.toString(zoom));
  for (MockMapEventListener listener : listeners) {
    listener.onSetInitialBoundsClicked();
  }
}
// Native Javascript Methods (JSNI)

/**
 * Initialize the controls for the AppInventor map toolbar.
 * These controls allow the user to:
 * <ul>
 * <li>change the drag behavior from the default of component reordering to panning the map.
 * <li>update the starting center and zoom level from the map viewport.
 * <li>reset the map viewport to the center and zoom level specified in the properties.
 * </ul>
 * This method will be called with every MockMap created, but will only instantiate a singleton
 * set of items.
 */
private static native void initToolbarItems()/*-{
  var MESSAGES = @com.google.appinventor.client.Ode::MESSAGES;
  var L = $wnd.top.L;
  // Define the shared toolbar actions only once for all MockMap instances.
  if (L.AI2Lock === undefined) {
    // Lock/unlock toggle: disables or enables all map interactions.
    L.AI2Lock = L.ToolbarAction.extend({
      options: {
        toolbarIcon: {
          tooltip: MESSAGES.@com.google.appinventor.client.OdeMessages::mapLockMovementTooltip()()
        }
      },
      _createIcon: function(toolbar, container, args) {
        L.ToolbarAction.prototype._createIcon.call(this, toolbar, container, args);
        var lockIcon = L.DomUtil.create('i'),
            unlockIcon = L.DomUtil.create('i');
        lockIcon.setAttribute('class', 'fa fa-lock');
        lockIcon.setAttribute('aria-hidden', 'true');
        unlockIcon.setAttribute('class', 'fa fa-unlock');
        unlockIcon.setAttribute('aria-hidden', 'true');
        this.locked = false;
        L.DomUtil.addClass(this._link, 'unlocked');
        this._link.appendChild(lockIcon);
        this._link.appendChild(unlockIcon);
        var self = this;
        L.DomEvent.on(this._link, 'mousedown', function(e) {
          e.stopPropagation();
        });
        L.DomEvent.on(this._link, 'click', function(e) {
          self.locked = !self.locked;
          var map = self.toolbar._control._map;
          map.unlocked = !self.locked;
          var interactions = [map.dragging, map.touchZoom, map.doubleClickZoom, map.scrollWheelZoom, map.boxZoom, map.keyboard, map.tap];
          if (self.locked) {
            for (var i in interactions) interactions[i] && interactions[i].disable();
            L.DomUtil.addClass(self._link, 'locked');
            L.DomUtil.removeClass(self._link, 'unlocked');
            self._link.setAttribute('title', MESSAGES.@com.google.appinventor.client.OdeMessages::mapUnlockMovementTooltip()());
            map.owner.@com.google.appinventor.client.editor.simple.components.MockMap::onUnlockButtonClicked()();
          } else {
            for (var i in interactions) interactions[i] && interactions[i].enable();
            L.DomUtil.addClass(self._link, 'unlocked');
            L.DomUtil.removeClass(self._link, 'locked');
            self._link.setAttribute('title', MESSAGES.@com.google.appinventor.client.OdeMessages::mapLockMovementTooltip()());
            map.owner.@com.google.appinventor.client.editor.simple.components.MockMap::onLockButtonClicked()();
          }
        });
      }
    });
    // "Set initial bounds": stores the viewport into the designer properties.
    L.AI2Center = L.ToolbarAction.extend({
      options: {
        toolbarIcon: {
          tooltip: MESSAGES.@com.google.appinventor.client.OdeMessages::mapSetInitialMapTooltip()()
        }
      },
      _createIcon: function(toolbar, container, args) {
        var icon = L.DomUtil.create('i');
        L.ToolbarAction.prototype._createIcon.call(this, toolbar, container, args);
        icon.setAttribute('class', 'fa fa-crosshairs');
        this._link.appendChild(icon);
        var self = this;
        L.DomEvent.on(this._link, 'click', function() {
          var javaMockMap = self.toolbar._control._map.owner;
          javaMockMap.@com.google.appinventor.client.editor.simple.components.MockMap::onSetInitialBoundsClicked()();
        });
      }
    });
    // "Reset": restores the viewport from the designer properties.
    L.AI2Reset = L.ToolbarAction.extend({
      options: {
        toolbarIcon: {
          tooltip: MESSAGES.@com.google.appinventor.client.OdeMessages::mapResetBoundingBoxTooltip()()
        }
      },
      _createIcon: function(toolbar, container, args) {
        var icon = L.DomUtil.create('i');
        L.ToolbarAction.prototype._createIcon.call(this, toolbar, container, args);
        icon.setAttribute('class', 'fa fa-history');
        this._link.appendChild(icon);
        var self = this;
        L.DomEvent.on(this._link, 'click', $entry(function() {
          var javaMockMap = self.toolbar._control._map.owner;
          javaMockMap.@com.google.appinventor.client.editor.simple.components.MockMap::onResetButtonClicked()();
        }));
      }
    });
    // Static compass control shown when the ShowCompass property is enabled.
    L.Control.Compass = L.Control.extend({
      options: { position: 'topright' },
      onAdd: function () {
        var container = L.DomUtil.create('div', 'compass-control'),
            img = L.DomUtil.create('img');
        img.setAttribute('src', '/static/leaflet/assets/compass.svg');
        container.appendChild(img);
        return container;
      }
    });
    L.control.compass = function(options) {
      return new L.Control.Compass(options);
    };
    // Mock user-location overlay pinned to the map center.
    L.UserOverlay = L.Layer.extend({
      onAdd: function(map) {
        this._map = map;
        this._el = L.DomUtil.create('div', 'ai2-user-mock-location leaflet-zoom-hide');
        var img = L.DomUtil.create('img');
        this._el.appendChild(img);
        img.setAttribute('src', '/static/leaflet/assets/location.png');
        map.getPanes()['overlayPane'].appendChild(this._el);
        map.on('viewreset', this._reposition, this);
        this._reposition();
        return this._el;
      },
      onRemove: function(map) {
        map.getPanes().overlayPane.removeChild(this._el);
        // Fix: unregister the same event/handler/context that onAdd registered
        // ('viewreset' with context `this`). The previous code called
        // map.off('resize', this._reposition), which matched nothing and
        // leaked the listener on every remove.
        map.off('viewreset', this._reposition, this);
      },
      _reposition: function(e) {
        var pos = this._map.latLngToLayerPoint(this._map.getCenter());
        L.DomUtil.setPosition(this._el, pos);
      }
    });
  }
}-*/;
/**
 * Returns the current center of the native map as a LatLng, or null if the
 * map has not been initialized yet.
 */
public native LatLng getCenter()/*-{
  var map = this.@com.google.appinventor.client.editor.simple.components.MockMap::mapInstance;
  if (map) {
    var center = map.getCenter();
    return @com.google.appinventor.client.editor.simple.components.MockMap.LatLng::new(DD)(center.lat, center.lng);
  }
  return null;
}-*/;

/**
 * Returns the current zoom level of the native map, or 0 if the map has not
 * been initialized yet.
 */
public native int getZoom()/*-{
  var map = this.@com.google.appinventor.client.editor.simple.components.MockMap::mapInstance;
  return map ? map.getZoom() : 0;
}-*/;
/**
 * Creates the native Leaflet map inside this mock component's element, or
 * just invalidates the existing map's size if one was already created.
 * Called from the widget attach handler, so the DOM element exists.
 */
private native void initPanel()/*-{
  var L = $wnd.top.L;
  // Tile layers indexed by the 1-based MapType property value.
  var tileLayers = [
    null, // because AppInventor is 1-indexed, we leave element 0 as null
    L.tileLayer('http://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png',
      {minZoom: 0, maxZoom: 18,
      attribution: 'Map data © <a href="http://openstreetmap.org">OpenStreetMap</a> contributors'}),
    L.tileLayer('http://basemap.nationalmap.gov/arcgis/rest/services/USGSImageryTopo/MapServer/tile/{z}/{y}/{x}',
      {minZoom: 0, maxZoom: 15,
      attribution: 'Satellite imagery © <a href="http://mapquest.com">USGS</a>'}),
    L.tileLayer('http://basemap.nationalmap.gov/ArcGIS/rest/services/USGSTopo/MapServer/tile/{z}/{y}/{x}',
      {minZoom: 0, maxZoom: 15,
      attribution: 'Map data © <a href="http://www.usgs.gov">USGS</a>'})
  ];
  this.@com.google.appinventor.client.editor.simple.components.MockMap::tileLayers = tileLayers;
  this.@com.google.appinventor.client.editor.simple.components.MockMap::baseLayer =
    tileLayers[this.@com.google.appinventor.client.editor.simple.components.MockMap::selectedTileLayer];
  var map = this.@com.google.appinventor.client.editor.simple.components.MockMap::mapInstance;
  if (map) {
    // map exists but may be invalid due to change in the dom, so invalidate and redraw
    map.invalidateSize(false);
  } else {
    var panel = this.@com.google.appinventor.client.editor.simple.components.MockMap::mapWidget;
    var elem = panel.@com.google.gwt.user.client.ui.UIObject::getElement()();
    // Render into the inner element when the panel wrapper has one.
    if (elem.firstElementChild != null) elem = elem.firstElementChild;
    // Pull the current property values from the Java fields.
    var latitude = this.@com.google.appinventor.client.editor.simple.components.MockMap::latitude,
      longitude = this.@com.google.appinventor.client.editor.simple.components.MockMap::longitude,
      zoomControl = this.@com.google.appinventor.client.editor.simple.components.MockMap::zoomControl,
      zoom = this.@com.google.appinventor.client.editor.simple.components.MockMap::zoomLevel,
      showScale = this.@com.google.appinventor.client.editor.simple.components.MockMap::showScale,
      scaleUnits = this.@com.google.appinventor.client.editor.simple.components.MockMap::scaleUnits;
    map = L.map(elem, {zoomControl: false, editable: true}).setView([latitude, longitude], zoom);
    var messages = @com.google.appinventor.client.Ode::getMessages()();
    // Controls are created up front but only added when the property is on.
    map.zoomControl = L.control.zoom({
      position: 'topleft',
      zoomInTitle: messages.@com.google.appinventor.client.OdeMessages::mapZoomIn()(),
      zoomOutTitle: messages.@com.google.appinventor.client.OdeMessages::mapZoomOut()()
    });
    if (zoomControl) {
      map.zoomControl.addTo(map);
    }
    var scaleOptions = {metric: true, imperial: false, position: 'bottomright'};
    if (scaleUnits == 2) {
      scaleOptions.metric = false;
      scaleOptions.imperial = true;
    }
    map.scaleControl = L.control.scale(scaleOptions);
    if (showScale) {
      map.scaleControl.addTo(map);
    }
    // Back-reference so JSNI event handlers can reach the Java MockMap.
    map.owner = this;
    map.unlocked = true;
    map.aiControls = new L.Toolbar.Control({position: 'bottomleft',
      actions: [ L.AI2Lock, L.AI2Center, L.AI2Reset ]});
    map.aiControls.addTo(map);
    map.compassLayer = L.control.compass();
    map.userLayer = new L.UserOverlay();
    // Route background clicks back to GWT so clicking the base map selects
    // the component, while overlay/marker clicks keep their own handling.
    map.on('mouseup click', function(e) {
      e = e.originalEvent;
      if (e.eventPhase !== 3) return;  // only the bubbling phase
      var el = e.target,
        overlay = this.getPanes()['overlayPane'],
        markers = this.getPanes()['markerPane'],
        background = this.getPanes()['tilePane'],
        container = this.getContainer();
      while (el && el.parentNode !== container) {
        if (el === overlay || el === markers) {
          // Overlays handle their own click events, but sometimes it propagates to the map and eventually GWT.
          // This is not desirable because it causes issues with the selected component.
          return;
        } else if (el === background) {
          this.owner.@com.google.appinventor.client.editor.simple.components.MockComponent::select()();
          return;
        }
        el = el.parentNode;
      }
    });
    this.@com.google.appinventor.client.editor.simple.components.MockMap::mapInstance = map;
    // Defer layer/control attachment until Leaflet finishes map construction.
    setTimeout(function() {
      map.addLayer(map.owner.@com.google.appinventor.client.editor.simple.components.MockMap::baseLayer);
      map.owner.@com.google.appinventor.client.editor.simple.components.MockMap::updateMapZoomControl(*)(
        map.owner.@com.google.appinventor.client.editor.simple.components.MockMap::zoomControl);
      map.owner.@com.google.appinventor.client.editor.simple.components.MockMap::updateMapCompassControl(*)(
        map.owner.@com.google.appinventor.client.editor.simple.components.MockMap::compassEnabled);
      map.owner.@com.google.appinventor.client.editor.simple.components.MockMap::updateMapShowUser(*)(
        map.owner.@com.google.appinventor.client.editor.simple.components.MockMap::userLocationEnabled);
    });
  }
}-*/;
/**
 * Asks Leaflet to recompute the map's size after a layout change. The work
 * is deferred with setTimeout so it runs after the DOM has settled.
 */
native void invalidateMap()/*-{
  var map = this.@com.google.appinventor.client.editor.simple.components.MockMap::mapInstance;
  if (map) { // Map may not be initialized yet, e.g., during project load.
    setTimeout(function() {
      map.invalidateSize(false);
    }, 0);
  }
}-*/;
/**
 * Pans the native map so its center latitude matches {@code latitude},
 * keeping the last-known longitude.
 */
private native void updateMapLatitude(double latitude)/*-{
  var map = this.@com.google.appinventor.client.editor.simple.components.MockMap::mapInstance;
  var longitude = this.@com.google.appinventor.client.editor.simple.components.MockMap::longitude;
  // Guard against an uninitialized map (e.g., property set during project
  // load, before the widget is attached) — consistent with updateMapCenter.
  if (map) {
    map.panTo($wnd.top.L.latLng(latitude, longitude));
  }
}-*/;

/**
 * Pans the native map so its center longitude matches {@code longitude},
 * keeping the last-known latitude.
 */
private native void updateMapLongitude(double longitude)/*-{
  var map = this.@com.google.appinventor.client.editor.simple.components.MockMap::mapInstance;
  var latitude = this.@com.google.appinventor.client.editor.simple.components.MockMap::latitude;
  if (map) { // Map may not be initialized yet, e.g., during project load.
    map.panTo($wnd.top.L.latLng(latitude, longitude));
  }
}-*/;
/** Pans the native map to the given coordinates, if the map exists. */
private native void updateMapCenter(double latitude, double longitude)/*-{
  var map = this.@com.google.appinventor.client.editor.simple.components.MockMap::mapInstance;
  if (map) { // Map may not be initialized yet, e.g., during project load.
    map.panTo([latitude, longitude], {animate: true});
  }
}-*/;

/**
 * Swaps the base tile layer for the one at the given 1-based index.
 * Indices outside (0, tileLayers.length) are silently ignored.
 */
private native void updateMapType(int type)/*-{
  var map = this.@com.google.appinventor.client.editor.simple.components.MockMap::mapInstance;
  var tileLayers = this.@com.google.appinventor.client.editor.simple.components.MockMap::tileLayers;
  var baseLayer = this.@com.google.appinventor.client.editor.simple.components.MockMap::baseLayer;
  if (map && baseLayer && tileLayers) {
    if (0 < type && type < tileLayers.length) {
      map.removeLayer(baseLayer);
      baseLayer = tileLayers[type];
      map.addLayer(baseLayer);
      // Keep the base tiles underneath any feature layers.
      baseLayer.bringToBack();
      this.@com.google.appinventor.client.editor.simple.components.MockMap::baseLayer = baseLayer;
    }
  }
}-*/;
/**
 * Converts map-container pixel coordinates to geographic coordinates.
 *
 * @return the projected LatLng, or null if the map is not initialized
 */
native LatLng projectFromXY(int x, int y)/*-{
  var map = this.@com.google.appinventor.client.editor.simple.components.MockMap::mapInstance;
  if (map) {
    var result = map.containerPointToLatLng([x, y]);
    return @com.google.appinventor.client.editor.simple.components.MockMap.LatLng::new(DD)(result.lat, result.lng);
  }
  // Previously fell off the end (returned undefined); return null explicitly,
  // consistent with getCenter().
  return null;
}-*/;
/** Sets the native map's zoom level, if the map has been initialized. */
private native void updateMapZoomLevel(int zoomLevel)/*-{
  var map = this.@com.google.appinventor.client.editor.simple.components.MockMap::mapInstance;
  if (map) {
    map.setZoom(zoomLevel);
  }
}-*/;

/** Adds or removes the compass control created in initPanel. */
private native void updateMapCompassControl(boolean enable)/*-{
  var map = this.@com.google.appinventor.client.editor.simple.components.MockMap::mapInstance;
  if (map) {
    if (enable) {
      map.addControl(map.compassLayer);
    } else {
      map.removeControl(map.compassLayer);
    }
  }
}-*/;

/** Adds or removes the mock user-location overlay created in initPanel. */
private native void updateMapShowUser(boolean enable)/*-{
  var map = this.@com.google.appinventor.client.editor.simple.components.MockMap::mapInstance;
  if (map) {
    if (enable) {
      map.addLayer(map.userLayer);
    } else {
      map.removeLayer(map.userLayer);
    }
  }
}-*/;

/** Shows or hides the zoom +/- control, creating it on first use. */
private native void updateMapZoomControl(boolean enable)/*-{
  var map = this.@com.google.appinventor.client.editor.simple.components.MockMap::mapInstance;
  if (map) {
    if (!map.zoomControl) {
      map.zoomControl = $wnd.top.L.control.zoom();
    }
    if (enable) {
      map.zoomControl.addTo(map);
    } else {
      map.removeControl(map.zoomControl);
    }
  }
}-*/;

/** Shows or hides the scale control, creating it on first use. */
private native void updateMapShowScale(boolean enable)/*-{
  var map = this.@com.google.appinventor.client.editor.simple.components.MockMap::mapInstance;
  if (map) {
    if (!map.scaleControl) {
      map.scaleControl = $wnd.top.L.control.scale({position: 'topleft'});
    }
    if (enable) {
      map.scaleControl.addTo(map);
    } else {
      map.removeControl(map.scaleControl);
    }
  }
}-*/;
/**
 * Rebuilds the scale control with the given units (1 = metric, 2 = imperial).
 * Leaflet scale controls cannot change units in place, so the control is
 * removed, recreated, and re-added when it is currently visible.
 */
private native void updateScaleUnits(int units)/*-{
  var map = this.@com.google.appinventor.client.editor.simple.components.MockMap::mapInstance,
    scaleVisible = this.@com.google.appinventor.client.editor.simple.components.MockMap::showScale;
  if (map) {
    if (scaleVisible) {
      map.removeControl(map.scaleControl);
    }
    map.scaleControl = $wnd.top.L.control.scale({
      metric: units == 1,
      imperial: units == 2,
      position: 'bottomright'
    });
    if (scaleVisible) {
      map.scaleControl.addTo(map);
    }
  }
}-*/;

/**
 * Returns the native map's unlocked flag (true when the toolbar lock toggle
 * allows panning/zooming), or false if the map is not initialized.
 */
private native boolean isUnlocked()/*-{
  var map = this.@com.google.appinventor.client.editor.simple.components.MockMap::mapInstance;
  if (map) {
    return map.unlocked;
  } else {
    return false;
  }
}-*/;
/**
 * Simple latitude/longitude pair used to pass coordinates between the JSNI
 * map code and Java. Fields are public so JSNI can read them directly.
 */
public static class LatLng {
  // Latitude in degrees.
  public double latitude;
  // Longitude in degrees.
  public double longitude;

  public LatLng(double latitude, double longitude) {
    this.latitude = latitude;
    this.longitude = longitude;
  }

  /** Formats as "lat, lng", the format used by the CenterFromString property. */
  @Override
  public String toString() {
    return Double.toString(latitude) + ", " + Double.toString(longitude);
  }

  /** Converts this pair to a native JavaScript {lat, lng} object. */
  public native NativeLatLng toNative()/*-{
    return {
      lat: this.@com.google.appinventor.client.editor.simple.components.MockMap.LatLng::latitude,
      lng: this.@com.google.appinventor.client.editor.simple.components.MockMap.LatLng::longitude
    };
  }-*/;
}
/**
 * JavaScript overlay type for a native {lat, lng} coordinate object, as
 * produced by {@link LatLng#toNative()} or by the Leaflet API.
 */
public static class NativeLatLng extends JavaScriptObject {
  // Overlay types require a protected no-arg constructor.
  protected NativeLatLng() {}

  /** Returns the latitude ({@code lat}) field of the native object. */
  public final native double getLatitude()/*-{
    return this.lat;
  }-*/;

  /** Returns the longitude ({@code lng}) field of the native object. */
  public final native double getLongitude()/*-{
    return this.lng;
  }-*/;
}
/**
 * Callback interface for components interested in native map events raised
 * by the mock map's toolbar and viewport (see the on* handlers above).
 */
public interface MockMapEventListener {
  void onBoundsChanged();
  void onResetButtonClicked();
  void onLockButtonClicked();
  void onUnlockButtonClicked();
  void onSetInitialBoundsClicked();
}
}
| |
/*
* Part of the CCNx Java Library.
*
* Copyright (C) 2008-2012 Palo Alto Research Center, Inc.
*
* This library is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License version 2.1
* as published by the Free Software Foundation.
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details. You should have received
* a copy of the GNU Lesser General Public License along with this library;
* if not, write to the Free Software Foundation, Inc., 51 Franklin Street,
* Fifth Floor, Boston, MA 02110-1301 USA.
*/
package org.ccnx.ccn.protocol;
import static org.ccnx.ccn.protocol.Component.NONCE;
import java.io.IOException;
import java.util.Arrays;
import java.util.logging.Level;
import org.ccnx.ccn.TrustManager;
import org.ccnx.ccn.impl.encoding.CCNProtocolDTags;
import org.ccnx.ccn.impl.encoding.GenericXMLEncodable;
import org.ccnx.ccn.impl.encoding.XMLDecoder;
import org.ccnx.ccn.impl.encoding.XMLEncodable;
import org.ccnx.ccn.impl.encoding.XMLEncoder;
import org.ccnx.ccn.impl.support.DataUtils;
import org.ccnx.ccn.impl.support.Log;
import org.ccnx.ccn.io.content.ContentDecodingException;
import org.ccnx.ccn.io.content.ContentEncodingException;
/**
 * Represents a CCN Interest packet, and performs all of the allowed
 * specializations of queries recognized and supported by the CCN protocol.
 * cf. ContentObject
 *
 * Implements Comparable to make it easy to store in a Set and avoid duplicates.
 */
public class Interest extends GenericXMLEncodable implements XMLEncodable, Comparable<Interest>, Cloneable, ContentNameProvider {
// Used to remove spurious *'s
public static final String RECURSIVE_POSTFIX = "*";

// ChildSelector values: prefer the leftmost (0) or rightmost (1) matching child.
public static final int CHILD_SELECTOR_LEFT = 0;
public static final int CHILD_SELECTOR_RIGHT = 1;

/**
 * AnswerOriginKind values
 * These are bitmapped. Default is 3. 2 is not allowed
 */
public static final int ANSWER_CONTENT_STORE = 1;
public static final int ANSWER_GENERATED = 2;
public static final int ANSWER_STALE = 4; // Stale answer OK
public static final int MARK_STALE = 16; // Must have Scope 0. Michael calls this a "hack"
public static final int DEFAULT_ANSWER_ORIGIN_KIND = ANSWER_CONTENT_STORE | ANSWER_GENERATED;

// Interest fields; a null value means "unspecified".
protected ContentName _name;
protected Integer _maxSuffixComponents;
protected Integer _minSuffixComponents;
// DKS TODO can we really support a PublisherID here, or just a PublisherPublicKeyDigest?
protected PublisherID _publisher;
protected Exclude _exclude;
protected Integer _childSelector;
// Default (3) is stored as null so it is omitted from the encoding.
protected Integer _answerOriginKind = null;
protected Integer _scope;
protected byte[] _interestLifetime = null; // For now we don't have the ability to set an interest lifetime
// Not user-settable; see nonce() below.
protected byte[] _nonce;
// Public mutable timestamp — presumably set by callers for timing/bookkeeping;
// its semantics are not visible here (TODO confirm).
public long userTime;
/**
 * TODO: DKS figure out how to handle encoding faster,
 * and how to handle shorter version of names without
 * copying, particularly without 1.6 array ops.
 * @param name ContentName of Interest
 * @param publisher PublisherID of Interest or null
 */
public Interest(ContentName name,
    PublisherID publisher) {
  _name = name;
  _publisher = publisher;
}

/**
 * @param name ContentName of Interest
 * @param publisher PublisherPublicKeyDigest or null; wrapped in a PublisherID when non-null
 */
public Interest(ContentName name, PublisherPublicKeyDigest publisher) {
  this(name, (null != publisher) ? new PublisherID(publisher) : (PublisherID)null);
}

/**
 * Creates Interest with null publisher ID
 * @param name ContentName of Interest
 */
public Interest(ContentName name) {
  this(name, (PublisherID)null);
}

/**
 * Creates an Interest from a URI-encoded name string, with no publisher.
 * @param name URI representation of the content name
 * @throws MalformedContentNameStringException if the URI cannot be parsed
 */
public Interest(String name) throws MalformedContentNameStringException {
  this(ContentName.fromURI(name), (PublisherID)null);
}

public Interest() {} // for use by decoders
// Accessors follow the CCNx convention: the no-arg overload reads a field,
// the one-arg overload writes it. A null value means the field is unset.
public ContentName name() { return _name; }
public void name(ContentName name) { _name = name; }

public Integer maxSuffixComponents() { return _maxSuffixComponents; }
public void maxSuffixComponents(Integer maxSuffixComponents) { _maxSuffixComponents = maxSuffixComponents; }

public Integer minSuffixComponents() { return _minSuffixComponents; }
public void minSuffixComponents(Integer minSuffixComponents) { _minSuffixComponents = minSuffixComponents; }

public PublisherID publisherID() { return _publisher; }
public void publisherID(PublisherID publisherID) { _publisher = publisherID; }

public Exclude exclude() { return _exclude; }
public void exclude(Exclude exclude) { _exclude = exclude; }

public Integer childSelector() { return _childSelector;}
public void childSelector(int childSelector) { _childSelector = childSelector; }

public byte[] interestLifetime() { return _interestLifetime;}
public void interestLifetime(byte[] interestLifetime) { _interestLifetime = interestLifetime; }
/**
 * Returns the AnswerOriginKind bitmap for this interest. When none has been
 * set explicitly, the protocol default is returned.
 */
public Integer answerOriginKind() {
  if (null != _answerOriginKind) {
    return _answerOriginKind;
  }
  return DEFAULT_ANSWER_ORIGIN_KIND;
}

/**
 * Sets the AnswerOriginKind bitmap. The default value is stored as null so
 * that it is left out of the wire encoding.
 */
public void answerOriginKind(int answerOriginKind) {
  _answerOriginKind = (DEFAULT_ANSWER_ORIGIN_KIND == answerOriginKind)
      ? null : Integer.valueOf(answerOriginKind);
}
public Integer scope() { return _scope; }
public void scope(int scope) { _scope = scope; }

/**
 * XXX - This isn't user settable and is only useful for ccnd internal functionality.
 * Do we ever need to return it?
 * @return the nonce bytes, or null if none was decoded
 */
public byte[] nonce() { return _nonce; }
/**
 * Determine whether a piece of content matches the Interest, using the
 * publisher key digest from the object's own SignedInfo (if any).
 * @param test the candidate ContentObject
 * @return true if the test data packet matches the Interest
 */
public boolean matches(ContentObject test) {
  return matches(test, (null != test.signedInfo()) ? test.signedInfo().getPublisherKeyID() : null);
}
/**
 * Determine whether a piece of content's name *without* digest component
 * matches this Interest.
 *
 * This doesn't match if the digest is specified in the Interest.
 * @see Interest#matches(ContentObject, PublisherPublicKeyDigest)
 *
 * @param name name of a content object missing its implied digest component
 * @param resultPublisherKeyID publisher key digest of the candidate, may be null
 * @return true if the content/publisherPublicKeyDigest matches the Interest
 */
public boolean matches(ContentName name, PublisherPublicKeyDigest resultPublisherKeyID) {
  // A null argument can legitimately occur; a null name() should not. Either
  // way there is nothing to match. To match everything, use the root name (/).
  if (null == name || null == name()) {
    return false;
  }
  return isPrefixOf(name) && internalMatch(name, false, resultPublisherKeyID);
}
/**
 * Determine whether a piece of content matches this Interest.
 * Note: this computes the digest for the ContentObject, to know the full name. This is
 * computationally expensive.
 * @see Interest#matches(ContentName, PublisherPublicKeyDigest)
 * TODO: compute digests once when ContentObjects are received into the machine, and pass them
 * around with the ContentObjects.
 *
 * @param co - ContentObject
 * @param resultPublisherKeyID
 * @return true if the content & publisherID match the Interest
 */
public boolean matches(ContentObject co, PublisherPublicKeyDigest resultPublisherKeyID) {
  if (null == name() || null == co)
    return false; // null name() should not happen, null arg can
  // to get interest that matches everything, should
  // use / (ROOT)
  // Heuristic: if the interest name is exactly one component longer than the
  // object name, the interest's last component is the implicit digest.
  boolean digest = co.name().count()+1 == name().count();
  if (co.name().count() == name().count() && (exclude() != null && !exclude().empty())) {
    //the interest does not have a digest in the name, but it does have at least one excluded
    digest = true;
  }
  // Compare against the full name (with digest) only when needed — fullName()
  // computes the digest and is expensive.
  ContentName name = digest ? co.fullName() : co.name();
  if (isPrefixOf(name)) {
    return internalMatch(name, digest, resultPublisherKeyID);
  }
  return false;
}
// TODO We need to beef this up to deal with the more complex interest specs.
/**
 * Applies the Interest's selectors to a name already known to have this
 * Interest's name as a prefix: checks min/max suffix component counts, the
 * Exclude filter, and (via the TrustManager) the publisher constraint.
 *
 * @param name the candidate name (full name when digestIncluded)
 * @param digestIncluded whether name ends with the implicit digest component
 * @param resultPublisherKeyID publisher key digest of the candidate, may be null
 * @return true if all applicable selectors are satisfied
 */
private boolean internalMatch(ContentName name, boolean digestIncluded,
    PublisherPublicKeyDigest resultPublisherKeyID) {
  if (null != maxSuffixComponents() || null != minSuffixComponents()) {
    // we know our specified name is a prefix of the result.
    // the number of additional components must be this value
    // (the implicit digest counts as a component, hence the +1 when absent)
    int nameCount = name.count();
    int lengthDiff = nameCount + (digestIncluded?0:1) - name().count();
    if (null != maxSuffixComponents() && lengthDiff > maxSuffixComponents()) {
      if(Log.isLoggable(Log.FAC_ENCODING, Level.FINE))
        Log.fine(Log.FAC_ENCODING, "Interest match failed: {0} more than the {1} components between expected {2} and tested {3}",lengthDiff, maxSuffixComponents(), name(), name);
      return false;
    }
    if (null != minSuffixComponents() && lengthDiff < minSuffixComponents()) {
      if(Log.isLoggable(Log.FAC_ENCODING, Level.FINE))
        Log.fine(Log.FAC_ENCODING, "Interest match failed: {0} less than the {1} components between expected {2} and tested {3}",lengthDiff, minSuffixComponents(), name(), name);
      return false;
    }
  }
  if (null != exclude()) {
    // The Exclude filter applies to the first component after our prefix.
    if (exclude().match(name.component(name().count()))) {
      if (Log.isLoggable(Log.FAC_ENCODING, Level.FINEST))
        Log.finest(Log.FAC_ENCODING, "Interest match failed. {0} has been excluded", name);
      return false;
    }
  }
  if (null != publisherID()) {
    if (null == resultPublisherKeyID) {
      if (Log.isLoggable(Log.FAC_ENCODING, Level.FINEST))
        Log.finest(Log.FAC_ENCODING, "Interest match failed, target {0} doesn't specify a publisherID and we require a particular one.", name);
      return false;
    }
    // Should this be more general?
    // TODO DKS handle issuer
    if (Log.isLoggable(Log.FAC_ENCODING, Level.FINEST))
      Log.finest(Log.FAC_ENCODING, "Interest match handed off to trust manager for name: {0}", name);
    return TrustManager.getTrustManager().matchesRole(publisherID(), resultPublisherKeyID);
  }
  if (Log.isLoggable(Log.FAC_ENCODING, Level.FINEST))
    Log.finest(Log.FAC_ENCODING, "Interest match succeeded to name: {0}", name);
  return true;
}
/**
 * Return data a specified number of levels below us in the hierarchy, with
 * order preference of leftmost.
 * @param name name prefix for interest
 * @param level number of levels below us we want content. Includes the ephemeral
 *   digest component in the count.
 * @param publisher who should have signed content (can be null)
 * @return an Interest whose min and max suffix counts are both {@code level}
 */
public static Interest lower(ContentName name, int level, PublisherPublicKeyDigest publisher) {
  Interest result = new Interest(name, publisher);
  result.minSuffixComponents(level);
  result.maxSuffixComponents(level);
  return result;
}
/**
 * Construct an Interest that will give you the next content after the argument
 * name's first prefixCount components. Convenience overload that supplies no
 * exclude filter and no suffix-component constraints.
 *
 * @param name name whose leading components anchor the enumeration
 * @param prefixCount number of leading components of {@code name} to use as
 *                    the prefix; may be null (defaults to all but the last component)
 * @param publisher who should have signed the content; may be null
 * @return new Interest
 */
public static Interest next(ContentName name, Integer prefixCount, PublisherPublicKeyDigest publisher) {
    return next(name, null, prefixCount, null, null, publisher);
}
/**
 * Construct an Interest that will give you the next content after the argument
 * name's first prefixCount components, excluding the components specified in
 * the given Exclude filter.
 *
 * @param name name whose leading components anchor the enumeration
 * @param exclude components to exclude - may be null
 * @param prefixCount may be null (defaults to all but the last component)
 * @param maxSuffixComponents may be null
 * @param minSuffixComponents may be null
 * @param publisher may be null
 * @return new Interest selecting the leftmost (next) matching child
 */
public static Interest next(ContentName name, Exclude exclude, Integer prefixCount, Integer maxSuffixComponents, Integer minSuffixComponents,
        PublisherPublicKeyDigest publisher) {
    // Integer.valueOf uses the boxed-value cache and avoids the deprecated Integer(int) constructor.
    return nextOrLast(name, exclude, Integer.valueOf(CHILD_SELECTOR_LEFT), prefixCount, maxSuffixComponents, minSuffixComponents, publisher);
}
/**
 * Shared worker for {@link #next} and {@link #last}. Regardless of whether we
 * are looking for the next or the last content, we always want to exclude
 * everything before (up to and including) the first component at the prefix
 * level, so enumeration resumes strictly after that component.
 *
 * @param name name whose leading components anchor the enumeration; truncated
 *             to {@code prefixCount} components when shorter
 * @param exclude existing exclusions to extend; may be null
 * @param order ChildSelector value (left = next, right = last)
 * @param prefixCount may be null (defaults to all but the last component)
 * @param maxSuffixComponents may be null
 * @param minSuffixComponents may be null
 * @param publisher may be null
 * @return the Interest
 * @throws IllegalArgumentException if prefixCount exceeds the component count of name
 */
private static Interest nextOrLast(ContentName name, Exclude exclude, Integer order, Integer prefixCount,
        Integer maxSuffixComponents, Integer minSuffixComponents, PublisherPublicKeyDigest publisher) {
    if (prefixCount == null) {
        prefixCount = name.count() - 1;
    } else if (prefixCount > name.count()) {
        throw new IllegalArgumentException("Invalid prefixCount > components: " + prefixCount);
    }
    if (prefixCount < name.count()) {
        // Remember the component at the split point, then truncate the name to the prefix.
        byte[] splitComponent = name.component(prefixCount);
        name = name.cut(prefixCount);
        // Exclude everything up to (and including) the split component.
        if (null == exclude) {
            exclude = Exclude.uptoFactory(splitComponent);
        } else {
            exclude.excludeUpto(splitComponent);
        }
    }
    return constructInterest(name, exclude, order, maxSuffixComponents, minSuffixComponents, publisher);
}
/**
 * Construct an Interest that will give you the last content after the argument
 * name's first prefixCount components. Convenience overload that supplies no
 * exclude filter and no suffix-component constraints.
 *
 * @param name name whose leading components anchor the enumeration
 * @param prefixCount may be null (defaults to all but the last component)
 * @param publisher may be null
 * @return new Interest
 */
public static Interest last(ContentName name, Integer prefixCount, PublisherPublicKeyDigest publisher) {
    return last(name, null, prefixCount, null, null, publisher);
}
/**
 * Construct an Interest that will give you the last content after the argument
 * name, excluding the components specified in the Exclude.
 *
 * @param name name whose leading components anchor the enumeration
 * @param exclude contains components to exclude - may be null
 * @param prefixCount may be null (defaults to all but the last component)
 * @param maxSuffixComponents may be null
 * @param minSuffixComponents may be null
 * @param publisher may be null
 * @return the Interest selecting the rightmost (last) matching child
 */
public static Interest last(ContentName name, Exclude exclude, Integer prefixCount, Integer maxSuffixComponents, Integer minSuffixComponents,
        PublisherPublicKeyDigest publisher) {
    // Integer.valueOf uses the boxed-value cache and avoids the deprecated Integer(int) constructor.
    return nextOrLast(name, exclude, Integer.valueOf(CHILD_SELECTOR_RIGHT), prefixCount, maxSuffixComponents, minSuffixComponents, publisher);
}
/**
 * Construct an Interest that will exclude the values in the given Exclude and
 * apply the given maxSuffixComponents and minSuffixComponents constraints.
 *
 * @param name name prefix for the interest
 * @param exclude components to exclude; may be null
 * @param maxSuffixComponents may be null
 * @param minSuffixComponents may be null
 * @param publisherID who should have signed the content; may be null
 * @return the Interest
 */
public static Interest exclude(ContentName name, Exclude exclude, Integer maxSuffixComponents, Integer minSuffixComponents, PublisherPublicKeyDigest publisherID) {
    return constructInterest(name, exclude, null, maxSuffixComponents, minSuffixComponents, publisherID);
}
/**
 * Construct an Interest with the specified selector values set; every selector
 * argument is optional and only applied when non-null.
 *
 * @param name name prefix for the interest (required)
 * @param filter exclude filter; may be null
 * @param childSelector ChildSelector value; may be null
 * @param maxSuffixComponents may be null
 * @param minSuffixComponents may be null
 * @param publisher who should have signed the content; may be null
 * @return the Interest
 */
public static Interest constructInterest(ContentName name, Exclude filter,
        Integer childSelector, Integer maxSuffixComponents, Integer minSuffixComponents, PublisherPublicKeyDigest publisher) {
    Interest result = new Interest(name);
    if (childSelector != null) {
        result.childSelector(childSelector);
    }
    if (filter != null) {
        result.exclude(filter);
    }
    if (publisher != null) {
        // Wrap the key digest in a PublisherID only when a publisher was requested.
        result.publisherID(new PublisherID(publisher));
    }
    if (maxSuffixComponents != null) {
        result.maxSuffixComponents(maxSuffixComponents);
    }
    if (minSuffixComponents != null) {
        result.minSuffixComponents(minSuffixComponents);
    }
    return result;
}
/**
 * Currently used as an Interest name component to disambiguate multiple requests for the
 * same content. Simply returns the component form of the shared NONCE constant.
 *
 * @return the nonce in component form
 * @deprecated use {@link Component#NONCE} instead.
 */
@Deprecated
public static byte[] generateNonce() {
    return NONCE.getComponent();
}
/**
 * Determine if this Interest's name is a prefix of the specified name.
 *
 * @param name the name to test against
 * @return true if our name is a prefix of the specified name
 */
public boolean isPrefixOf(ContentName name) {
    int count = name().count();
    if (null != maxSuffixComponents() && 0 == maxSuffixComponents()) {
        // This Interest is trying to match a complete content name with digest explicitly included
        // so we must drop the last component for the prefix test against a name that is
        // designed to be direct from ContentObject and so does not include digest explicitly.
        // NOTE(review): the count-- below is commented out, making this branch a no-op --
        // the full component count is used regardless. Confirm whether the adjustment
        // was deliberately disabled before re-enabling it.
        //count--;
    }
    return name().isPrefixOf(name, count);
}
/**
 * Determine if this Interest's name is a prefix of the first "count" components of the input name.
 *
 * @param name the name to test against
 * @param count number of leading components of our name to use in the prefix test
 * @return true if our name is a prefix of the specified name's first "count" components
 */
public boolean isPrefixOf(ContentName name, int count) {
    return name().isPrefixOf(name, count);
}
/**
 * Determine if this Interest's name is a prefix of the specified ContentObject's name,
 * using all of our own components in the test.
 *
 * @param other the ContentObject whose name is tested
 * @return true if our name is a prefix of the specified ContentObject's name
 */
public boolean isPrefixOf(ContentObject other) {
    return name().isPrefixOf(other, name().count());
}
/**
 * Decode this Interest from the wire format. Elements must appear in the fixed
 * protocol order below; each optional element is probed with peekStartElement
 * before being read. (Thought about encoding and decoding as flat -- no wrapping
 * declaration. But then couldn't use these solo.)
 *
 * @param decoder the decoder positioned at this Interest's start element
 * @throws ContentDecodingException on malformed input
 */
public void decode(XMLDecoder decoder) throws ContentDecodingException {
    decoder.readStartElement(getElementLabel());
    // Name is the only mandatory element and always comes first.
    _name = new ContentName();
    _name.decode(decoder);
    if (decoder.peekStartElement(CCNProtocolDTags.MinSuffixComponents)) {
        _minSuffixComponents = decoder.readIntegerElement(CCNProtocolDTags.MinSuffixComponents);
    }
    if (decoder.peekStartElement(CCNProtocolDTags.MaxSuffixComponents)) {
        _maxSuffixComponents = decoder.readIntegerElement(CCNProtocolDTags.MaxSuffixComponents);
    }
    if (PublisherID.peek(decoder)) {
        _publisher = new PublisherID();
        _publisher.decode(decoder);
    }
    if (decoder.peekStartElement(CCNProtocolDTags.Exclude)) {
        _exclude = new Exclude();
        _exclude.decode(decoder);
    }
    if (decoder.peekStartElement(CCNProtocolDTags.ChildSelector)) {
        _childSelector = decoder.readIntegerElement(CCNProtocolDTags.ChildSelector);
    }
    if (decoder.peekStartElement(CCNProtocolDTags.AnswerOriginKind)) {
        // NOTE(review): the original comment said "call setter to handle defaulting",
        // but the field is assigned directly here -- confirm whether the setter
        // (with its defaulting logic) should be used instead.
        _answerOriginKind = decoder.readIntegerElement(CCNProtocolDTags.AnswerOriginKind);
    }
    if (decoder.peekStartElement(CCNProtocolDTags.Scope)) {
        _scope = decoder.readIntegerElement(CCNProtocolDTags.Scope);
    }
    if (decoder.peekStartElement(CCNProtocolDTags.InterestLifetime)) {
        _interestLifetime = decoder.readBinaryElement(CCNProtocolDTags.InterestLifetime);
    }
    if (decoder.peekStartElement(CCNProtocolDTags.Nonce)) {
        _nonce = decoder.readBinaryElement(CCNProtocolDTags.Nonce);
    }
    decoder.readEndElement();
}
/**
 * Encode this Interest to the wire format. Optional elements are written only
 * when set, in the same fixed order that {@link #decode(XMLDecoder)} reads them.
 *
 * @param encoder the encoder to write to
 * @throws ContentEncodingException if required fields (the name) are missing
 */
public void encode(XMLEncoder encoder) throws ContentEncodingException {
    if (!validate()) {
        throw new ContentEncodingException("Cannot encode " + this.getClass().getName() + ": field values missing.");
    }
    encoder.writeStartElement(getElementLabel());
    name().encode(encoder);
    if (null != minSuffixComponents())
        encoder.writeElement(CCNProtocolDTags.MinSuffixComponents, minSuffixComponents());
    if (null != maxSuffixComponents())
        encoder.writeElement(CCNProtocolDTags.MaxSuffixComponents, maxSuffixComponents());
    if (null != publisherID())
        publisherID().encode(encoder);
    if (null != exclude())
        exclude().encode(encoder);
    if (null != childSelector())
        encoder.writeElement(CCNProtocolDTags.ChildSelector, childSelector());
    // AnswerOriginKind is only written when it differs from the protocol default.
    if (DEFAULT_ANSWER_ORIGIN_KIND != answerOriginKind())
        encoder.writeElement(CCNProtocolDTags.AnswerOriginKind, answerOriginKind());
    if (null != scope())
        encoder.writeElement(CCNProtocolDTags.Scope, scope());
    // BUGFIX: InterestLifetime is read by decode() but was never written here,
    // so a decode/encode round trip silently dropped it. Written between Scope
    // and Nonce to match the element order decode() expects.
    if (null != interestLifetime())
        encoder.writeElement(CCNProtocolDTags.InterestLifetime, interestLifetime());
    if (null != nonce())
        encoder.writeElement(CCNProtocolDTags.Nonce, nonce());
    encoder.writeEndElement();
}
/**
 * @return the dictionary tag used as this object's wrapping element when encoding/decoding.
 */
@Override
public long getElementLabel() { return CCNProtocolDTags.Interest; }
/**
 * Validate that this Interest is well-formed enough to encode.
 * Only the name is mandatory; every other field may be null.
 *
 * @return true if a name is present
 */
@Override
public boolean validate() {
    // DKS -- do we do recursive validation?
    // null authenticator ok
    return (null != name());
}
/**
 * Order Interests field by field: name, then max/min suffix components,
 * publisher, exclude, child selector, answer origin kind, and scope.
 * Note that nonce and interestLifetime are deliberately not compared,
 * which keeps this ordering consistent with equals().
 *
 * @param o the Interest to compare against
 * @return negative, zero, or positive per the Comparable contract
 */
public int compareTo(Interest o) {
    int result = DataUtils.compare(name(), o.name());
    if (result != 0) return result;
    result = DataUtils.compare(maxSuffixComponents(), o.maxSuffixComponents());
    if (result != 0) return result;
    result = DataUtils.compare(minSuffixComponents(), o.minSuffixComponents());
    if (result != 0) return result;
    result = DataUtils.compare(publisherID(), o.publisherID());
    if (result != 0) return result;
    result = DataUtils.compare(exclude(), o.exclude());
    if (result != 0) return result;
    result = DataUtils.compare(childSelector(), o.childSelector());
    if (result != 0) return result;
    result = DataUtils.compare(answerOriginKind(), o.answerOriginKind());
    if (result != 0) return result;
    result = DataUtils.compare(scope(), o.scope());
    if (result != 0) return result;
    return result;
}
/**
 * Hash over the same fields that equals() compares.
 *
 * BUGFIX: the previous implementation also hashed _interestLifetime and
 * _nonce, but equals() does not compare those fields, so two equal Interests
 * (differing only in nonce/lifetime) could produce different hash codes --
 * a violation of the Object.hashCode contract that breaks hash-based
 * collections. Those two fields are now excluded.
 */
@Override
public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = prime
            * result
            + ((_maxSuffixComponents == null) ? 0 : _maxSuffixComponents
                    .hashCode());
    result = prime
            * result
            + ((_minSuffixComponents == null) ? 0 : _minSuffixComponents
                    .hashCode());
    result = prime
            * result
            + ((_answerOriginKind == null) ? 0 : _answerOriginKind
                    .hashCode());
    result = prime * result
            + ((_exclude == null) ? 0 : _exclude.hashCode());
    result = prime * result + ((_name == null) ? 0 : _name.hashCode());
    result = prime
            * result
            + ((_childSelector == null) ? 0 : _childSelector.hashCode());
    result = prime * result
            + ((_publisher == null) ? 0 : _publisher.hashCode());
    result = prime * result + ((_scope == null) ? 0 : _scope.hashCode());
    return result;
}
/** Null-safe equality test for optional field values. */
private static boolean sameValue(Object a, Object b) {
    return (a == null) ? (b == null) : a.equals(b);
}

/**
 * Two Interests are equal when their name and all selector fields match.
 * Nonce and interestLifetime are not part of the comparison.
 *
 * @param obj the object to compare against
 * @return true if obj is an Interest equal to this one
 */
@Override
public boolean equals(Object obj) {
    if (this == obj)
        return true;
    if (obj == null || getClass() != obj.getClass())
        return false;
    Interest other = (Interest) obj;
    return sameValue(_maxSuffixComponents, other._maxSuffixComponents)
            && sameValue(_minSuffixComponents, other._minSuffixComponents)
            && sameValue(_answerOriginKind, other._answerOriginKind)
            && sameValue(_exclude, other._exclude)
            && sameValue(_name, other._name)
            && sameValue(_childSelector, other._childSelector)
            && sameValue(_publisher, other._publisher)
            && sameValue(_scope, other._scope);
}
/**
 * Human-readable summary: the name followed by any set selector fields
 * (max/min suffix components, publisher, exclude). Uses StringBuilder
 * instead of the legacy synchronized StringBuffer, and chained appends
 * instead of concatenation inside append().
 */
@Override
public String toString() {
    StringBuilder sb = new StringBuilder(_name.toString());
    sb.append(": ");
    if (null != _maxSuffixComponents)
        sb.append(" maxsc:").append(_maxSuffixComponents);
    if (null != _minSuffixComponents)
        sb.append(" minsc:").append(_minSuffixComponents);
    if (null != _publisher)
        sb.append(" p:").append(_publisher);
    if (null != _exclude)
        sb.append(" ex(").append(_exclude).append(")");
    return sb.toString();
}
/**
 * Field-by-field copy of this Interest built via the public setters
 * (does not call super.clone()).
 * NOTE(review): _nonce is not copied -- presumably intentional, since the
 * nonce disambiguates individual requests, but confirm against callers.
 *
 * @return a new Interest with the same name and selector fields
 */
public Interest clone() {
    Interest clone = new Interest(name());
    if (null != _maxSuffixComponents)
        clone.maxSuffixComponents(maxSuffixComponents());
    if (null != _minSuffixComponents)
        clone.minSuffixComponents(minSuffixComponents());
    if (null != _publisher)
        clone.publisherID(publisherID());
    if (null != _exclude)
        clone.exclude(exclude());
    if (null != _childSelector)
        clone.childSelector(childSelector());
    if (null != _answerOriginKind)
        clone.answerOriginKind(answerOriginKind());
    if (null != _interestLifetime)
        clone.interestLifetime(interestLifetime());
    if (null != _scope)
        clone.scope(scope());
    return clone;
}
/**
 * Exception carrying the Interest for which no response was received.
 * The exception message is the Interest's toString().
 */
@SuppressWarnings("serial")
public static class NoResponseException extends IOException {
    // The unanswered Interest.
    protected Interest interest;
    public NoResponseException(Interest i) {
        super(i.toString());
        interest = i;
    }
    /** @return the Interest that went unanswered */
    public Interest getInterest() {
        return interest;
    }
}
/**
 * Direct accessor for the underlying name field.
 *
 * @return this Interest's name
 */
public ContentName getContentName() {
    return _name;
}
}
| |
/*
* ====================================================================
*
* The Apache Software License, Version 1.1
*
* Copyright (c) 1999 The Apache Software Foundation. All rights
* reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. The end-user documentation included with the redistribution, if
* any, must include the following acknowlegement:
* "This product includes software developed by the
* Apache Software Foundation (http://www.apache.org/)."
* Alternately, this acknowlegement may appear in the software itself,
* if and wherever such third-party acknowlegements normally appear.
*
* 4. The names "The Jakarta Project", "Tomcat", and "Apache Software
* Foundation" must not be used to endorse or promote products derived
* from this software without prior written permission. For written
* permission, please contact apache@apache.org.
*
* 5. Products derived from this software may not be called "Apache"
* nor may "Apache" appear in their names without prior written
* permission of the Apache Group.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
* [Additional notices, if required by prior licensing conditions]
*
*/
package org.apache.naming.modules.java;
import java.util.Hashtable;
import javax.naming.Context;
import javax.naming.NamingException;
import org.apache.naming.core.ContextAccessController;
import org.apache.tomcat.util.res.StringManager;
// this can be a nice generic util that binds per thread or CL any object.
/**
 * Handles the associations :
 * <ul>
 * <li>Catalina context name with the NamingContext</li>
 * <li>Calling thread with the NamingContext</li>
 * <li>Class loader with the NamingContext</li>
 * </ul>
 *
 * All lookup tables are synchronized Hashtables, so individual operations are
 * thread-safe; compound operations are not atomic.
 *
 * @author Remy Maucherat
 */
public class ContextBindings {

    private static final org.apache.commons.logging.Log log =
        org.apache.commons.logging.LogFactory.getLog(ContextBindings.class);

    // -------------------------------------------------------------- Variables

    /**
     * Bindings name - naming context. Keyed by name.
     */
    private static final Hashtable<Object, Context> contextNameBindings =
        new Hashtable<Object, Context>();

    /**
     * Bindings thread - naming context. Keyed by thread.
     */
    private static final Hashtable<Thread, Context> threadBindings =
        new Hashtable<Thread, Context>();

    /**
     * Bindings thread - name. Keyed by thread.
     */
    private static final Hashtable<Thread, Object> threadNameBindings =
        new Hashtable<Thread, Object>();

    /**
     * Bindings class loader - naming context. Keyed by class loader.
     */
    private static final Hashtable<ClassLoader, Context> clBindings =
        new Hashtable<ClassLoader, Context>();

    /**
     * Bindings class loader - name. Keyed by class loader.
     */
    private static final Hashtable<ClassLoader, Object> clNameBindings =
        new Hashtable<ClassLoader, Object>();

    /**
     * The string manager for this package.
     */
    protected static StringManager sm =
        StringManager.getManager("org.apache.naming");

    // --------------------------------------------------------- Public Methods

    /**
     * Binds a context name.
     *
     * @param name Name of the context
     * @param context Associated naming context instance
     */
    public static void bindContext(Object name, Context context) {
        bindContext(name, context, null);
    }

    /**
     * Binds a context name. The binding is silently ignored when the security
     * token check fails.
     *
     * @param name Name of the context
     * @param context Associated naming context instance
     * @param token Security token
     */
    public static void bindContext(Object name, Context context,
                                   Object token) {
        if (ContextAccessController.checkSecurityToken(name, token))
            contextNameBindings.put(name, context);
    }

    /**
     * Unbind context name.
     *
     * @param name Name of the context
     */
    public static void unbindContext(Object name) {
        unbindContext(name, null);
    }

    /**
     * Unbind context name. Silently ignored when the security token check fails.
     *
     * @param name Name of the context
     * @param token Security token
     */
    public static void unbindContext(Object name, Object token) {
        if (ContextAccessController.checkSecurityToken(name, token))
            contextNameBindings.remove(name);
    }

    /**
     * Retrieve a naming context.
     *
     * @param name Name of the context
     * @return the bound context, or null if none
     */
    static Context getContext(Object name) {
        return contextNameBindings.get(name);
    }

    /**
     * Binds a naming context to the calling thread.
     *
     * @param name Name of the context
     * @throws NamingException if no context is bound under the given name
     */
    public static void bindThread(Object name)
        throws NamingException {
        bindThread(name, null);
    }

    /**
     * Binds a naming context to the calling thread.
     *
     * @param name Name of the context
     * @param token Security token
     * @throws NamingException if no context is bound under the given name
     */
    public static void bindThread(Object name, Object token)
        throws NamingException {
        if (ContextAccessController.checkSecurityToken(name, token)) {
            Context context = contextNameBindings.get(name);
            if (context == null)
                throw new NamingException
                    (sm.getString("contextBindings.unknownContext", name));
            threadBindings.put(Thread.currentThread(), context);
            threadNameBindings.put(Thread.currentThread(), name);
        }
    }

    /**
     * Unbinds a naming context from the calling thread.
     *
     * @param name Name of the context
     */
    public static void unbindThread(Object name) {
        unbindThread(name, null);
    }

    /**
     * Unbinds a naming context from the calling thread.
     *
     * @param name Name of the context
     * @param token Security token
     */
    public static void unbindThread(Object name, Object token) {
        if (ContextAccessController.checkSecurityToken(name, token)) {
            threadBindings.remove(Thread.currentThread());
            threadNameBindings.remove(Thread.currentThread());
        }
    }

    /**
     * Retrieves the naming context bound to the calling thread.
     *
     * @return the bound context
     * @throws NamingException if no context is bound to the thread
     */
    public static Context getThread()
        throws NamingException {
        Context context = threadBindings.get(Thread.currentThread());
        // Demoted from info to guarded debug: this is per-lookup tracing.
        if (log.isDebugEnabled())
            log.debug("Context=getThread: " + context);
        if (context == null)
            throw new NamingException
                (sm.getString("contextBindings.noContextBoundToThread"));
        return context;
    }

    /**
     * Retrieves the naming context name bound to the calling thread.
     *
     * @return the bound name
     * @throws NamingException if no context is bound to the thread
     */
    static Object getThreadName()
        throws NamingException {
        Object name = threadNameBindings.get(Thread.currentThread());
        if (name == null)
            throw new NamingException
                (sm.getString("contextBindings.noContextBoundToThread"));
        return name;
    }

    /**
     * Tests if the current thread is bound to a context.
     */
    public static boolean isThreadBound() {
        return threadBindings.containsKey(Thread.currentThread());
    }

    /**
     * Binds a naming context to the calling thread's context class loader.
     *
     * @param name Name of the context
     * @throws NamingException if no context is bound under the given name
     */
    public static void bindClassLoader(Object name)
        throws NamingException {
        bindClassLoader(name, null);
    }

    /**
     * Binds a naming context to the calling thread's context class loader.
     *
     * @param name Name of the context
     * @param token Security token
     * @throws NamingException if no context is bound under the given name
     */
    public static void bindClassLoader(Object name, Object token)
        throws NamingException {
        bindClassLoader
            (name, token, Thread.currentThread().getContextClassLoader());
    }

    /**
     * Binds a naming context to a class loader.
     *
     * @param name Name of the context
     * @param token Security token
     * @param classLoader the class loader to bind to
     * @throws NamingException if no context is bound under the given name
     */
    public static void bindClassLoader(Object name, Object token,
                                       ClassLoader classLoader)
        throws NamingException {
        if (ContextAccessController.checkSecurityToken(name, token)) {
            Context context = contextNameBindings.get(name);
            if (context == null)
                throw new NamingException
                    (sm.getString("contextBindings.unknownContext", name));
            clBindings.put(classLoader, context);
            clNameBindings.put(classLoader, name);
        }
    }

    /**
     * Unbinds a naming context from the calling thread's context class loader.
     *
     * @param name Name of the context
     */
    public static void unbindClassLoader(Object name) {
        unbindClassLoader(name, null);
    }

    /**
     * Unbinds a naming context from the calling thread's context class loader.
     *
     * @param name Name of the context
     * @param token Security token
     */
    public static void unbindClassLoader(Object name, Object token) {
        unbindClassLoader(name, token,
                          Thread.currentThread().getContextClassLoader());
    }

    /**
     * Unbinds a naming context from a class loader. A no-op when the class
     * loader is not bound, or is bound under a different name.
     *
     * @param name Name of the context
     * @param token Security token
     * @param classLoader the class loader to unbind
     */
    public static void unbindClassLoader(Object name, Object token,
                                         ClassLoader classLoader) {
        if (ContextAccessController.checkSecurityToken(name, token)) {
            Object n = clNameBindings.get(classLoader);
            // BUGFIX: n is null when this class loader has no binding; the
            // previous code called n.equals(name) unconditionally and threw
            // a NullPointerException in that case.
            if (n == null || !n.equals(name)) {
                return;
            }
            clBindings.remove(classLoader);
            clNameBindings.remove(classLoader);
        }
    }

    /**
     * Retrieves the naming context bound to the calling thread's context class
     * loader, walking up the class loader hierarchy until a binding is found.
     *
     * @return the bound context
     * @throws NamingException if no class loader in the chain is bound
     */
    public static Context getClassLoader()
        throws NamingException {
        // The context class loader may be null (bootstrap); guard the walk.
        ClassLoader cl = Thread.currentThread().getContextClassLoader();
        while (cl != null) {
            Context context = clBindings.get(cl);
            if (context != null) {
                if (log.isDebugEnabled())
                    log.debug("Context=getClassLoader: " + context + " " + cl);
                return context;
            }
            cl = cl.getParent();
        }
        throw new NamingException
            (sm.getString("contextBindings.noContextBoundToCL"));
    }

    /**
     * Retrieves the naming context name bound to the calling thread's context
     * class loader, walking up the class loader hierarchy.
     *
     * @return the bound name
     * @throws NamingException if no class loader in the chain is bound
     */
    static Object getClassLoaderName()
        throws NamingException {
        ClassLoader cl = Thread.currentThread().getContextClassLoader();
        while (cl != null) {
            Object name = clNameBindings.get(cl);
            if (name != null) {
                return name;
            }
            cl = cl.getParent();
        }
        throw new NamingException
            (sm.getString("contextBindings.noContextBoundToCL"));
    }

    /**
     * Tests if the current class loader (or any of its parents) is bound to a
     * context.
     */
    public static boolean isClassLoaderBound() {
        ClassLoader cl = Thread.currentThread().getContextClassLoader();
        while (cl != null) {
            if (clBindings.containsKey(cl)) {
                return true;
            }
            cl = cl.getParent();
        }
        return false;
    }
}
| |
/*
* Copyright (c) 2019, Livio, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following
* disclaimer in the documentation and/or other materials provided with the
* distribution.
*
* Neither the name of the Livio Inc. nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package com.smartdevicelink.managers.lifecycle;
import com.smartdevicelink.managers.ISdl;
import com.smartdevicelink.managers.ManagerUtility;
import com.smartdevicelink.protocol.enums.FunctionID;
import com.smartdevicelink.proxy.RPCMessage;
import com.smartdevicelink.proxy.RPCResponse;
import com.smartdevicelink.proxy.rpc.AppServiceCapability;
import com.smartdevicelink.proxy.rpc.AppServicesCapabilities;
import com.smartdevicelink.proxy.rpc.ButtonCapabilities;
import com.smartdevicelink.proxy.rpc.DisplayCapabilities;
import com.smartdevicelink.proxy.rpc.DisplayCapability;
import com.smartdevicelink.proxy.rpc.GetSystemCapability;
import com.smartdevicelink.proxy.rpc.GetSystemCapabilityResponse;
import com.smartdevicelink.proxy.rpc.HMICapabilities;
import com.smartdevicelink.proxy.rpc.OnHMIStatus;
import com.smartdevicelink.proxy.rpc.OnSystemCapabilityUpdated;
import com.smartdevicelink.proxy.rpc.RegisterAppInterfaceResponse;
import com.smartdevicelink.proxy.rpc.SdlMsgVersion;
import com.smartdevicelink.proxy.rpc.SetDisplayLayoutResponse;
import com.smartdevicelink.proxy.rpc.SoftButtonCapabilities;
import com.smartdevicelink.proxy.rpc.SystemCapability;
import com.smartdevicelink.proxy.rpc.WindowCapability;
import com.smartdevicelink.proxy.rpc.WindowTypeCapabilities;
import com.smartdevicelink.proxy.rpc.enums.DisplayType;
import com.smartdevicelink.proxy.rpc.enums.HMILevel;
import com.smartdevicelink.proxy.rpc.enums.ImageType;
import com.smartdevicelink.proxy.rpc.enums.MediaClockFormat;
import com.smartdevicelink.proxy.rpc.enums.PredefinedWindows;
import com.smartdevicelink.proxy.rpc.enums.SystemCapabilityType;
import com.smartdevicelink.proxy.rpc.enums.WindowType;
import com.smartdevicelink.proxy.rpc.listeners.OnRPCListener;
import com.smartdevicelink.proxy.rpc.listeners.OnRPCResponseListener;
import com.smartdevicelink.util.CorrelationIdGenerator;
import com.smartdevicelink.util.DebugTool;
import com.smartdevicelink.util.Version;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.ListIterator;
import java.util.concurrent.CopyOnWriteArrayList;
abstract class BaseSystemCapabilityManager {
private static final String TAG = "SystemCapabilityManager";
// Last known value for each capability type, as delivered by the head unit.
private final HashMap<SystemCapabilityType, Object> cachedSystemCapabilities;
// Whether we currently hold a subscription for each capability type.
private final HashMap<SystemCapabilityType, Boolean> systemCapabilitiesSubscriptionStatus;
// App-registered listeners per capability type; CopyOnWriteArrayList so
// listeners can be added/removed while notifications are in flight.
private final HashMap<SystemCapabilityType, CopyOnWriteArrayList<OnSystemCapabilityListener>> onSystemCapabilityListeners;
// Guards compound read/modify operations on the listener map.
private final Object LISTENER_LOCK;
// Internal interface back into the SDL session (send RPCs, query versions).
private final ISdl callback;
// True until the head unit proves it supports the modern DISPLAYS capability.
private boolean shouldConvertDeprecatedDisplayCapabilities;
// Tracked from OnHMIStatus; HMI_NONE until the first status arrives.
private HMILevel currentHMILevel;
/**
 * Create the capability manager, pre-subscribe to DISPLAYS updates, and
 * register the RPC listeners that keep the capability cache current.
 *
 * @param callback internal SDL interface used to send and receive RPCs
 */
BaseSystemCapabilityManager(ISdl callback) {
    this.callback = callback;
    this.LISTENER_LOCK = new Object();
    this.onSystemCapabilityListeners = new HashMap<>();
    this.cachedSystemCapabilities = new HashMap<>();
    this.systemCapabilitiesSubscriptionStatus = new HashMap<>();
    // DISPLAYS updates are always wanted; mark the subscription up front.
    this.systemCapabilitiesSubscriptionStatus.put(SystemCapabilityType.DISPLAYS, true);
    this.shouldConvertDeprecatedDisplayCapabilities = true;
    this.currentHMILevel = HMILevel.HMI_NONE;
    setupRpcListeners();
}
// Convert the deprecated display/button/soft-button capabilities carried in a
// RegisterAppInterface response into the modern DisplayCapability list form.
private List<DisplayCapability> createDisplayCapabilityList(RegisterAppInterfaceResponse rpc) {
    return createDisplayCapabilityList(rpc.getDisplayCapabilities(), rpc.getButtonCapabilities(), rpc.getSoftButtonCapabilities());
}
// Same conversion, for a SetDisplayLayout response.
private List<DisplayCapability> createDisplayCapabilityList(SetDisplayLayoutResponse rpc) {
    return createDisplayCapabilityList(rpc.getDisplayCapabilities(), rpc.getButtonCapabilities(), rpc.getSoftButtonCapabilities());
}
/**
 * Build a modern DisplayCapability list from the deprecated capability
 * structures. The result describes a single display containing only the
 * default MAIN window.
 *
 * @param display deprecated display capabilities; may be null, in which case
 *                all text/image fields are assumed supported
 * @param button button capabilities for the default window; may be null
 * @param softButton soft-button capabilities for the default window; may be null
 * @return a singleton list holding the converted DisplayCapability
 */
private List<DisplayCapability> createDisplayCapabilityList(DisplayCapabilities display, List<ButtonCapabilities> button, List<SoftButtonCapabilities> softButton) {
    // Based on deprecated Display capabilities we don't know if widgets are supported;
    // the default MAIN window is the only window we know is supported.
    WindowTypeCapabilities windowTypeCapabilities = new WindowTypeCapabilities(WindowType.MAIN, 1);
    DisplayCapability displayCapability = new DisplayCapability();
    if (display != null) {
        if (display.getDisplayName() != null) {
            displayCapability.setDisplayName(display.getDisplayName());
        } else if (display.getDisplayType() != null) {
            displayCapability.setDisplayName(display.getDisplayType().toString());
        }
    }
    displayCapability.setWindowTypeSupported(Collections.singletonList(windowTypeCapabilities));
    // Create a window capability object for the default MAIN window
    WindowCapability defaultWindowCapability = new WindowCapability();
    defaultWindowCapability.setWindowID(PredefinedWindows.DEFAULT_WINDOW.getValue());
    defaultWindowCapability.setButtonCapabilities(button);
    defaultWindowCapability.setSoftButtonCapabilities(softButton);
    // Return early if display capabilities don't exist, assuming all fields supported.
    if (display == null) {
        defaultWindowCapability.setTextFields(ManagerUtility.WindowCapabilityUtility.getAllTextFields());
        defaultWindowCapability.setImageFields(ManagerUtility.WindowCapabilityUtility.getAllImageFields());
        displayCapability.setWindowCapabilities(Collections.singletonList(defaultWindowCapability));
        return Collections.singletonList(displayCapability);
    }
    // copy all available display capabilities
    defaultWindowCapability.setTemplatesAvailable(display.getTemplatesAvailable());
    defaultWindowCapability.setNumCustomPresetsAvailable(display.getNumCustomPresetsAvailable());
    defaultWindowCapability.setTextFields(display.getTextFields());
    defaultWindowCapability.setImageFields(display.getImageFields());
    ArrayList<ImageType> imageTypeSupported = new ArrayList<>();
    imageTypeSupported.add(ImageType.STATIC); // static images expected to always work on any head unit
    // BUGFIX: getGraphicSupported() returns a boxed Boolean that may be null;
    // unboxing it directly in the condition would throw a NullPointerException.
    if (Boolean.TRUE.equals(display.getGraphicSupported())) {
        imageTypeSupported.add(ImageType.DYNAMIC);
    }
    defaultWindowCapability.setImageTypeSupported(imageTypeSupported);
    displayCapability.setWindowCapabilities(Collections.singletonList(defaultWindowCapability));
    return Collections.singletonList(displayCapability);
}
/**
 * Project the default MAIN window's capabilities back onto the deprecated
 * DisplayCapabilities structure, for apps still reading the legacy form.
 *
 * @param displayName display name to carry over
 * @param defaultMainWindow capabilities of the default MAIN window
 * @return the equivalent deprecated DisplayCapabilities
 */
private DisplayCapabilities createDeprecatedDisplayCapabilities(String displayName, WindowCapability defaultMainWindow) {
    DisplayCapabilities legacy = new DisplayCapabilities();
    legacy.setDisplayType(DisplayType.SDL_GENERIC); // deprecated, but the field is mandatory
    legacy.setDisplayName(displayName);
    legacy.setTextFields(defaultMainWindow.getTextFields());
    legacy.setImageFields(defaultMainWindow.getImageFields());
    legacy.setTemplatesAvailable(defaultMainWindow.getTemplatesAvailable());
    legacy.setNumCustomPresetsAvailable(defaultMainWindow.getNumCustomPresetsAvailable());
    legacy.setMediaClockFormats(new ArrayList<MediaClockFormat>()); // mandatory field, but an empty array is allowed
    // If any image types are reported, we must assume graphics are supported.
    List<ImageType> imageTypes = defaultMainWindow.getImageTypeSupported();
    legacy.setGraphicSupported(imageTypes != null && !imageTypes.isEmpty());
    return legacy;
}
private void updateDeprecatedDisplayCapabilities() {
WindowCapability defaultMainWindowCapabilities = getDefaultMainWindowCapability();
List<DisplayCapability> displayCapabilityList = convertToList(getCapability(SystemCapabilityType.DISPLAYS, null, false), DisplayCapability.class);
if (defaultMainWindowCapabilities == null || displayCapabilityList == null || displayCapabilityList.size() == 0) {
return;
}
// cover the deprecated capabilities for backward compatibility
setCapability(SystemCapabilityType.DISPLAY, createDeprecatedDisplayCapabilities(displayCapabilityList.get(0).getDisplayName(), defaultMainWindowCapabilities));
setCapability(SystemCapabilityType.BUTTON, defaultMainWindowCapabilities.getButtonCapabilities());
setCapability(SystemCapabilityType.SOFTBUTTON, defaultMainWindowCapabilities.getSoftButtonCapabilities());
}
    /**
     * Merges a newly received DISPLAYS capability list into the cached one. Notifications may
     * contain only the windows that changed, so window capabilities from the new list are merged
     * (by window ID) into a copy of the cached default display's window list rather than
     * replacing it wholesale. After merging, the deprecated capability entries are refreshed.
     *
     * @param newCapabilities the newly received display capability list; ignored when null or empty
     */
    private void updateCachedDisplayCapabilityList(List<DisplayCapability> newCapabilities) {
        if (newCapabilities == null || newCapabilities.size() == 0) {
            DebugTool.logWarning(TAG, "Received invalid display capability list");
            return;
        }
        List<DisplayCapability> oldCapabilities = convertToList(getCapability(SystemCapabilityType.DISPLAYS, null, false), DisplayCapability.class);
        // Nothing cached yet: just store the new list as-is
        if (oldCapabilities == null || oldCapabilities.size() == 0) {
            setCapability(SystemCapabilityType.DISPLAYS, newCapabilities);
            updateDeprecatedDisplayCapabilities();
            return;
        }
        // Only the first (default) display is merged; multi-display is not handled here
        DisplayCapability oldDefaultDisplayCapabilities = oldCapabilities.get(0);
        ArrayList<WindowCapability> copyWindowCapabilities = new ArrayList<>(oldDefaultDisplayCapabilities.getWindowCapabilities());
        DisplayCapability newDefaultDisplayCapabilities = newCapabilities.get(0);
        List<WindowCapability> newWindowCapabilities = newDefaultDisplayCapabilities.getWindowCapabilities();
        if (newWindowCapabilities != null && !newWindowCapabilities.isEmpty()) {
            for (WindowCapability newWindow : newWindowCapabilities) {
                ListIterator<WindowCapability> iterator = copyWindowCapabilities.listIterator();
                boolean oldFound = false;
                while (iterator.hasNext()) {
                    WindowCapability oldWindow = iterator.next();
                    // A missing window ID is treated as the default window on both sides
                    int newWindowID = newWindow.getWindowID() != null ? newWindow.getWindowID() : PredefinedWindows.DEFAULT_WINDOW.getValue();
                    int oldWindowID = oldWindow.getWindowID() != null ? oldWindow.getWindowID() : PredefinedWindows.DEFAULT_WINDOW.getValue();
                    if (newWindowID == oldWindowID) {
                        iterator.set(newWindow); // replace the old window caps with new ones
                        oldFound = true;
                        break;
                    }
                }
                if (!oldFound) {
                    copyWindowCapabilities.add(newWindow); // this is a new unknown window
                }
            }
        }
        // replace the window capabilities array with the merged one.
        newDefaultDisplayCapabilities.setWindowCapabilities(copyWindowCapabilities);
        setCapability(SystemCapabilityType.DISPLAYS, Collections.singletonList(newDefaultDisplayCapabilities));
        updateDeprecatedDisplayCapabilities();
    }
public WindowCapability getWindowCapability(int windowID) {
List<DisplayCapability> capabilities = convertToList(getCapability(SystemCapabilityType.DISPLAYS, null, false), DisplayCapability.class);
if (capabilities == null || capabilities.size() == 0) {
return null;
}
DisplayCapability display = capabilities.get(0);
for (WindowCapability windowCapability : display.getWindowCapabilities()) {
int currentWindowID = windowCapability.getWindowID() != null ? windowCapability.getWindowID() : PredefinedWindows.DEFAULT_WINDOW.getValue();
if (currentWindowID == windowID) {
return windowCapability;
}
}
return null;
}
public WindowCapability getDefaultMainWindowCapability() {
return getWindowCapability(PredefinedWindows.DEFAULT_WINDOW.getValue());
}
void parseRAIResponse(RegisterAppInterfaceResponse response) {
if (response != null && response.getSuccess()) {
this.shouldConvertDeprecatedDisplayCapabilities = true; // reset the flag
setCapability(SystemCapabilityType.DISPLAYS, createDisplayCapabilityList(response));
setCapability(SystemCapabilityType.HMI, response.getHmiCapabilities());
setCapability(SystemCapabilityType.DISPLAY, response.getDisplayCapabilities());
setCapability(SystemCapabilityType.AUDIO_PASSTHROUGH, response.getAudioPassThruCapabilities());
setCapability(SystemCapabilityType.PCM_STREAMING, response.getPcmStreamingCapabilities());
setCapability(SystemCapabilityType.BUTTON, response.getButtonCapabilities());
setCapability(SystemCapabilityType.HMI_ZONE, response.getHmiZoneCapabilities());
setCapability(SystemCapabilityType.PRESET_BANK, response.getPresetBankCapabilities());
setCapability(SystemCapabilityType.SOFTBUTTON, response.getSoftButtonCapabilities());
setCapability(SystemCapabilityType.SPEECH, response.getSpeechCapabilities());
setCapability(SystemCapabilityType.VOICE_RECOGNITION, response.getVrCapabilities());
setCapability(SystemCapabilityType.PRERECORDED_SPEECH, response.getPrerecordedSpeech());
}
}
    /**
     * Registers one OnRPCListener that keeps the capability cache in sync with incoming RPCs:
     * SetDisplayLayout and GetSystemCapability responses, plus OnHMIStatus and
     * OnSystemCapabilityUpdated notifications. No-op when the internal callback is null.
     */
    private void setupRpcListeners() {
        OnRPCListener rpcListener = new OnRPCListener() {
            @Override
            public void onReceived(RPCMessage message) {
                if (message != null) {
                    if (RPCMessage.KEY_RESPONSE.equals(message.getMessageType())) {
                        switch (message.getFunctionID()) {
                            case SET_DISPLAY_LAYOUT:
                                SetDisplayLayoutResponse response = (SetDisplayLayoutResponse) message;
                                // If a setDisplayLayout fails, Capabilities did not change
                                if (!response.getSuccess()) {
                                    return;
                                }
                                setCapability(SystemCapabilityType.DISPLAY, response.getDisplayCapabilities());
                                setCapability(SystemCapabilityType.BUTTON, response.getButtonCapabilities());
                                setCapability(SystemCapabilityType.PRESET_BANK, response.getPresetBankCapabilities());
                                setCapability(SystemCapabilityType.SOFTBUTTON, response.getSoftButtonCapabilities());
                                // While no DISPLAYS data has been received yet, derive it from the deprecated fields
                                if (shouldConvertDeprecatedDisplayCapabilities) {
                                    setCapability(SystemCapabilityType.DISPLAYS, createDisplayCapabilityList(response));
                                }
                                break;
                            case GET_SYSTEM_CAPABILITY:
                                GetSystemCapabilityResponse systemCapabilityResponse = (GetSystemCapabilityResponse) message;
                                SystemCapability systemCapability = systemCapabilityResponse.getSystemCapability();
                                if (systemCapabilityResponse.getSuccess() && SystemCapabilityType.DISPLAYS.equals(systemCapability.getSystemCapabilityType())) {
                                    shouldConvertDeprecatedDisplayCapabilities = false; // Successfully got DISPLAYS data. No conversion needed anymore
                                    List<DisplayCapability> newCapabilities = (List<DisplayCapability>) systemCapability.getCapabilityForType(SystemCapabilityType.DISPLAYS);
                                    updateCachedDisplayCapabilityList(newCapabilities);
                                }
                                break;
                        }
                    } else if (RPCMessage.KEY_NOTIFICATION.equals(message.getMessageType())) {
                        switch (message.getFunctionID()) {
                            case ON_HMI_STATUS:
                                OnHMIStatus onHMIStatus = (OnHMIStatus) message;
                                // Only the default window's HMI level is tracked; other windows are ignored
                                if (onHMIStatus.getWindowID() != null && onHMIStatus.getWindowID() != PredefinedWindows.DEFAULT_WINDOW.getValue()) {
                                    return;
                                }
                                currentHMILevel = onHMIStatus.getHmiLevel();
                                break;
                            case ON_SYSTEM_CAPABILITY_UPDATED:
                                OnSystemCapabilityUpdated onSystemCapabilityUpdated = (OnSystemCapabilityUpdated) message;
                                if (onSystemCapabilityUpdated.getSystemCapability() != null) {
                                    SystemCapability systemCapability = onSystemCapabilityUpdated.getSystemCapability();
                                    SystemCapabilityType systemCapabilityType = systemCapability.getSystemCapabilityType();
                                    Object capability = systemCapability.getCapabilityForType(systemCapabilityType);
                                    if (cachedSystemCapabilities.containsKey(systemCapabilityType)) { //The capability already exists
                                        switch (systemCapabilityType) {
                                            case APP_SERVICES:
                                                // App services only updates what was changed so we need
                                                // to update the capability rather than override it
                                                AppServicesCapabilities appServicesCapabilities = (AppServicesCapabilities) capability;
                                                if (capability != null) {
                                                    List<AppServiceCapability> appServicesCapabilitiesList = appServicesCapabilities.getAppServices();
                                                    AppServicesCapabilities cachedAppServicesCapabilities = (AppServicesCapabilities) cachedSystemCapabilities.get(systemCapabilityType);
                                                    //Update the cached app services
                                                    if (cachedAppServicesCapabilities != null) {
                                                        cachedAppServicesCapabilities.updateAppServices(appServicesCapabilitiesList);
                                                    }
                                                    //Set the new capability object to the updated cached capabilities
                                                    capability = cachedAppServicesCapabilities;
                                                }
                                                break;
                                            case DISPLAYS:
                                                shouldConvertDeprecatedDisplayCapabilities = false; // Successfully got DISPLAYS data. No conversion needed anymore
                                                // this notification can return only affected windows (hence not all windows)
                                                List<DisplayCapability> newCapabilities = (List<DisplayCapability>) capability;
                                                updateCachedDisplayCapabilityList(newCapabilities);
                                                // NOTE(review): the setCapability call below then stores this raw
                                                // (possibly partial) DISPLAYS list over the merged one — confirm intended
                                        }
                                    }
                                    if (capability != null) {
                                        setCapability(systemCapabilityType, capability);
                                    }
                                }
                        }
                    }
                }
            }
        };
        if (callback != null) {
            callback.addOnRPCListener(FunctionID.GET_SYSTEM_CAPABILITY, rpcListener);
            callback.addOnRPCListener(FunctionID.SET_DISPLAY_LAYOUT, rpcListener);
            callback.addOnRPCListener(FunctionID.ON_SYSTEM_CAPABILITY_UPDATED, rpcListener);
            callback.addOnRPCListener(FunctionID.ON_HMI_STATUS, rpcListener);
        }
    }
    /**
     * Sets a capability in the cached map. This should only be done when an RPC is received and contains updates to the capability
     * that is being cached in the SystemCapabilityManager.
     *
     * @param systemCapabilityType the system capability type that will be set
     * @param capability the value of the capability that will be set
     */
    synchronized void setCapability(SystemCapabilityType systemCapabilityType, Object capability) {
        cachedSystemCapabilities.put(systemCapabilityType, capability);
        // Listeners are invoked synchronously, while this object's monitor is still held
        notifyListeners(systemCapabilityType, capability);
    }
/**
* Notifies listeners in the list about the new retrieved capability
*
* @param systemCapabilityType the system capability type that was retrieved
* @param capability the system capability value that was retrieved
*/
private void notifyListeners(SystemCapabilityType systemCapabilityType, Object capability) {
synchronized (LISTENER_LOCK) {
CopyOnWriteArrayList<OnSystemCapabilityListener> listeners = onSystemCapabilityListeners.get(systemCapabilityType);
if (listeners != null && listeners.size() > 0) {
for (OnSystemCapabilityListener listener : listeners) {
listener.onCapabilityRetrieved(capability);
}
}
}
}
    /**
     * Ability to see if the connected module supports the given capability. Useful to check before
     * attempting to query for capabilities that require asynchronous calls to initialize.
     * Falls back to the cached HMI capabilities (with RPC-version-specific special cases) when the
     * capability itself is not cached.
     *
     * @param type the SystemCapabilityType that is to be checked
     * @return if that capability is supported with the current, connected module
     */
    public boolean isCapabilitySupported(SystemCapabilityType type) {
        if (cachedSystemCapabilities.get(type) != null) {
            //The capability exists in the map and is not null
            return true;
        } else if (cachedSystemCapabilities.containsKey(SystemCapabilityType.HMI)) {
            HMICapabilities hmiCapabilities = ((HMICapabilities) cachedSystemCapabilities.get(SystemCapabilityType.HMI));
            // RPC version of the connected head unit, used for version-specific fallbacks below
            Version rpcVersion = null;
            if (callback != null) {
                SdlMsgVersion version = callback.getSdlMsgVersion();
                if (version != null) {
                    rpcVersion = new Version(version.getMajorVersion(), version.getMinorVersion(), version.getPatchVersion());
                }
            }
            if (hmiCapabilities != null) {
                switch (type) {
                    case NAVIGATION:
                        return hmiCapabilities.isNavigationAvailable();
                    case PHONE_CALL:
                        return hmiCapabilities.isPhoneCallAvailable();
                    case VIDEO_STREAMING:
                        if (rpcVersion != null) {
                            if (rpcVersion.isBetween(new Version(3, 0, 0), new Version(4, 4, 0)) >= 0) {
                                //This was before the system capability feature was added so check if
                                // graphics are supported instead
                                DisplayCapabilities displayCapabilities = (DisplayCapabilities) getCapability(SystemCapabilityType.DISPLAY, null, false);
                                if (displayCapabilities != null) {
                                    return displayCapabilities.getGraphicSupported() != null && displayCapabilities.getGraphicSupported();
                                }
                            }
                        }
                        return hmiCapabilities.isVideoStreamingAvailable();
                    case REMOTE_CONTROL:
                        return hmiCapabilities.isRemoteControlAvailable();
                    case APP_SERVICES:
                        if (rpcVersion != null) {
                            if (rpcVersion.getMajor() == 5 && rpcVersion.getMinor() == 1) {
                                //This is a corner case that the param was not available in 5.1.0, but
                                //the app services feature was available.
                                return true;
                            }
                        }
                        return hmiCapabilities.isAppServicesAvailable();
                    case DISPLAYS:
                        return hmiCapabilities.isDisplaysCapabilityAvailable();
                    case SEAT_LOCATION:
                        return hmiCapabilities.isSeatLocationAvailable();
                    case DRIVER_DISTRACTION:
                        return hmiCapabilities.isDriverDistractionAvailable();
                    default:
                        return false;
                }
            }
        }
        return false;
    }
/**
* Checks is subscriptions are available on the connected head unit.
*
* @return True if subscriptions are supported. False if not.
*/
public boolean supportsSubscriptions() {
if (callback != null && callback.getSdlMsgVersion() != null) {
Version onSystemCapabilityNotificationRPCVersion = new Version(5, 1, 0);
Version headUnitRPCVersion = new Version(callback.getSdlMsgVersion());
return headUnitRPCVersion.isNewerThan(onSystemCapabilityNotificationRPCVersion) >= 0;
}
return false;
}
/**
* Checks if the supplied capability type is currently subscribed for or not
*
* @param systemCapabilityType type of capability desired
* @return true if subscribed and false if not
*/
private boolean isSubscribedToSystemCapability(SystemCapabilityType systemCapabilityType) {
return Boolean.TRUE.equals(systemCapabilitiesSubscriptionStatus.get(systemCapabilityType));
}
/**
* Gets the capability object that corresponds to the supplied capability type by returning the currently cached value immediately (or null) as well as calling the listener immediately with the cached value, if available. If not available, the listener will retrieve a new value and return that when the head unit responds.
* <strong>If capability is not cached, the method will return null and trigger the supplied listener when the capability becomes available</strong>
*
* @param systemCapabilityType type of capability desired
* @param scListener callback to execute upon retrieving capability
* @param subscribe flag to subscribe to updates of the supplied capability type. True means subscribe; false means cancel subscription; null means don't change current subscription status.
* @param forceUpdate flag to force getting a new fresh copy of the capability from the head unit even if it is cached
* @return desired capability if it is cached in the manager, otherwise returns a null object and works in the background to retrieve the capability for the next call
*/
private Object getCapabilityPrivate(final SystemCapabilityType systemCapabilityType, final OnSystemCapabilityListener scListener, final Boolean subscribe, final boolean forceUpdate) {
Object cachedCapability = cachedSystemCapabilities.get(systemCapabilityType);
// No need to force update if the app is subscribed to that type because updated values will be received via notifications anyway
boolean shouldForceUpdate = forceUpdate && !isSubscribedToSystemCapability(systemCapabilityType);
boolean shouldUpdateSystemCapabilitySubscription = (subscribe != null) && (subscribe != isSubscribedToSystemCapability(systemCapabilityType)) && supportsSubscriptions();
boolean shouldSendGetCapabilityRequest = shouldForceUpdate || (cachedCapability == null) || shouldUpdateSystemCapabilitySubscription;
boolean shouldCallListenerWithCachedValue = (cachedCapability != null) && (scListener != null) && !shouldSendGetCapabilityRequest;
if (shouldCallListenerWithCachedValue) {
scListener.onCapabilityRetrieved(cachedCapability);
}
if (shouldSendGetCapabilityRequest) {
retrieveCapability(systemCapabilityType, scListener, subscribe);
}
return cachedCapability;
}
    /**
     * Gets the capability object that corresponds to the supplied capability type by returning the currently cached value immediately (or null) as well as calling the listener immediately with the cached value, if available. If not available, the listener will retrieve a new value and return that when the head unit responds.
     * <strong>If capability is not cached, the method will return null and trigger the supplied listener when the capability becomes available</strong>
     *
     * @param systemCapabilityType type of capability desired
     * @param scListener callback to execute upon retrieving capability
     * @param forceUpdate flag to force getting a new fresh copy of the capability from the head unit even if it is cached
     * @return desired capability if it is cached in the manager, otherwise returns a null object
     */
    public Object getCapability(final SystemCapabilityType systemCapabilityType, final OnSystemCapabilityListener scListener, final boolean forceUpdate) {
        // Subscription status is left unchanged (subscribe = null)
        return getCapabilityPrivate(systemCapabilityType, scListener, null, forceUpdate);
    }
/**
* Adds a listener to be called whenever a new capability is retrieved. This method automatically subscribes to the supplied capability type and may call the listener multiple times if there are future updates, unlike getCapability() methods, which only call the listener one time.
*
* @param systemCapabilityType Type of capability desired
* @param listener callback to execute upon retrieving capability
*/
public void addOnSystemCapabilityListener(final SystemCapabilityType systemCapabilityType, final OnSystemCapabilityListener listener) {
synchronized (LISTENER_LOCK) {
if (systemCapabilityType != null && listener != null) {
if (onSystemCapabilityListeners.get(systemCapabilityType) == null) {
onSystemCapabilityListeners.put(systemCapabilityType, new CopyOnWriteArrayList<OnSystemCapabilityListener>());
}
onSystemCapabilityListeners.get(systemCapabilityType).add(listener);
}
}
getCapabilityPrivate(systemCapabilityType, listener, true, false);
}
/**
* Removes an OnSystemCapabilityListener that was previously added
*
* @param systemCapabilityType Type of capability
* @param listener the listener that should be removed
* @return boolean that represents whether the removal was successful or not
*/
public boolean removeOnSystemCapabilityListener(final SystemCapabilityType systemCapabilityType, final OnSystemCapabilityListener listener) {
boolean success = false;
synchronized (LISTENER_LOCK) {
if (onSystemCapabilityListeners != null
&& systemCapabilityType != null
&& listener != null
&& onSystemCapabilityListeners.get(systemCapabilityType) != null) {
success = onSystemCapabilityListeners.get(systemCapabilityType).remove(listener);
// If the last listener for the supplied capability type is removed, unsubscribe from the capability type
if (success && onSystemCapabilityListeners.get(systemCapabilityType).isEmpty() && isSubscribedToSystemCapability(systemCapabilityType) && systemCapabilityType != SystemCapabilityType.DISPLAYS) {
retrieveCapability(systemCapabilityType, null, false);
}
}
}
return success;
}
    /**
     * Sends a GetSystemCapability request for the supplied SystemCapabilityType and call the listener's callback if the systemCapabilityType is queryable
     *
     * @param systemCapabilityType Type of capability desired
     * @param scListener callback to execute with the retrieved value, or with an error message on failure
     * @param subscribe flag to subscribe to updates of the supplied capability type. True means subscribe; false means cancel subscription; null means don't change current subscription status.
     */
    private void retrieveCapability(final SystemCapabilityType systemCapabilityType, final OnSystemCapabilityListener scListener, final Boolean subscribe) {
        // Capability queries are rejected while the app is in HMI_NONE
        if (currentHMILevel != null && currentHMILevel.equals(HMILevel.HMI_NONE)) {
            String message = String.format("Attempted to update type: %s in HMI level NONE, which is not allowed. " +
                    "Please wait until you are in HMI BACKGROUND, LIMITED, or FULL before attempting to update any SystemCapabilityType", systemCapabilityType);
            DebugTool.logError(TAG, message);
            if (scListener != null) {
                scListener.onError(message);
            }
            return;
        }
        // DISPLAYS is delivered via notifications only and cannot be queried directly
        if (!systemCapabilityType.isQueryable() || systemCapabilityType == SystemCapabilityType.DISPLAYS) {
            String message = "This systemCapabilityType cannot be queried for";
            DebugTool.logError(TAG, message);
            if (scListener != null) {
                scListener.onError(message);
            }
            return;
        }
        final GetSystemCapability request = new GetSystemCapability();
        request.setSystemCapabilityType(systemCapabilityType);
        /*
        The subscription flag in the request should be set based on multiple variables:
        - if subscribe is null (no change), willSubscribe = current subscription status, or false if the HU does not support subscriptions
        - if subscribe is false, then willSubscribe = false
        - if subscribe is true and the HU supports subscriptions, then willSubscribe = true
         */
        boolean shouldSubscribe = (subscribe != null) ? subscribe : isSubscribedToSystemCapability(systemCapabilityType);
        final boolean willSubscribe = shouldSubscribe && supportsSubscriptions();
        request.setSubscribe(willSubscribe);
        request.setOnRPCResponseListener(new OnRPCResponseListener() {
            @Override
            public void onResponse(int correlationId, RPCResponse response) {
                if (response.getSuccess()) {
                    Object retrievedCapability = ((GetSystemCapabilityResponse) response).getSystemCapability().getCapabilityForType(systemCapabilityType);
                    setCapability(systemCapabilityType, retrievedCapability);
                    // If the listener is not included in the onSystemCapabilityListeners map, then notify it
                    // This will be triggered if we are just getting capability without adding a listener to the map
                    if (scListener != null) {
                        synchronized (LISTENER_LOCK) {
                            CopyOnWriteArrayList<OnSystemCapabilityListener> notifiedListeners = onSystemCapabilityListeners.get(systemCapabilityType);
                            boolean listenerAlreadyNotified = (notifiedListeners != null) && notifiedListeners.contains(scListener);
                            if (!listenerAlreadyNotified) {
                                scListener.onCapabilityRetrieved(retrievedCapability);
                            }
                        }
                    }
                    // Only track subscription status when the head unit actually supports subscriptions
                    if (supportsSubscriptions()) {
                        systemCapabilitiesSubscriptionStatus.put(systemCapabilityType, willSubscribe);
                    }
                } else {
                    if (scListener != null) {
                        scListener.onError(response.getInfo());
                    }
                }
            }
        });
        request.setCorrelationID(CorrelationIdGenerator.generateId());
        if (callback != null) {
            callback.sendRPC(request);
        }
    }
/**
* Converts a capability object into a list.
*
* @param object the capability that needs to be converted
* @param classType The class type of that should be contained in the list
* @return a List of capabilities if object is instance of List, otherwise it will return null.
*/
@SuppressWarnings({"unchecked"})
public static <T> List<T> convertToList(Object object, Class<T> classType) {
if (classType != null && object != null && object instanceof List) {
List list = (List) object;
if (!list.isEmpty()) {
if (classType.isInstance(list.get(0))) {
return (List<T>) object;
} else {
//The list is not of the correct list type
return null;
}
} else {
//We return a new list of type T instead of null because while we don't know if
//the original list was of type T we want to ensure that we don't throw a cast class exception
//but still
return new ArrayList<>();
}
} else {
return null;
}
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.broker.auth;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.spy;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import java.io.IOException;
import java.lang.reflect.Field;
import java.net.URI;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.function.Predicate;
import java.util.function.Supplier;
import org.apache.bookkeeper.client.BKException;
import org.apache.bookkeeper.client.BookKeeper;
import org.apache.bookkeeper.client.PulsarMockBookKeeper;
import org.apache.bookkeeper.test.PortManager;
import org.apache.bookkeeper.util.ZkUtils;
import org.apache.pulsar.broker.BookKeeperClientFactory;
import org.apache.pulsar.broker.PulsarService;
import org.apache.pulsar.broker.ServiceConfiguration;
import org.apache.pulsar.broker.namespace.NamespaceService;
import org.apache.pulsar.client.admin.PulsarAdmin;
import org.apache.pulsar.client.api.PulsarClient;
import org.apache.pulsar.client.api.PulsarClientException;
import org.apache.pulsar.compaction.Compactor;
import org.apache.pulsar.zookeeper.ZooKeeperClientFactory;
import org.apache.pulsar.zookeeper.ZookeeperClientFactoryImpl;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.MockZooKeeper;
import org.apache.zookeeper.ZooKeeper;
import org.apache.zookeeper.data.ACL;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Base class for all tests that need a Pulsar instance without a ZK and BK cluster
*/
public abstract class MockedPulsarServiceBaseTest {
protected ServiceConfiguration conf;
protected PulsarService pulsar;
protected PulsarAdmin admin;
protected PulsarClient pulsarClient;
protected URL brokerUrl;
protected URL brokerUrlTls;
protected URI lookupUrl;
protected final int BROKER_WEBSERVICE_PORT = PortManager.nextFreePort();
protected final int BROKER_WEBSERVICE_PORT_TLS = PortManager.nextFreePort();
protected final int BROKER_PORT = PortManager.nextFreePort();
protected final int BROKER_PORT_TLS = PortManager.nextFreePort();
protected MockZooKeeper mockZookKeeper;
protected NonClosableMockBookKeeper mockBookKeeper;
protected boolean isTcpLookup = false;
protected final String configClusterName = "test";
private SameThreadOrderedSafeExecutor sameThreadOrderedSafeExecutor;
private ExecutorService bkExecutor;
public MockedPulsarServiceBaseTest() {
resetConfig();
}
protected void resetConfig() {
this.conf = new ServiceConfiguration();
this.conf.setBrokerServicePort(BROKER_PORT);
this.conf.setAdvertisedAddress("localhost");
this.conf.setWebServicePort(BROKER_WEBSERVICE_PORT);
this.conf.setClusterName(configClusterName);
this.conf.setAdvertisedAddress("localhost"); // there are TLS tests in here, they need to use localhost because of the certificate
this.conf.setManagedLedgerCacheSizeMB(8);
this.conf.setActiveConsumerFailoverDelayTimeMillis(0);
this.conf.setDefaultNumberOfNamespaceBundles(1);
this.conf.setZookeeperServers("localhost:2181");
this.conf.setConfigurationStoreServers("localhost:3181");
}
protected final void internalSetup() throws Exception {
init();
lookupUrl = new URI(brokerUrl.toString());
if (isTcpLookup) {
lookupUrl = new URI("pulsar://localhost:" + BROKER_PORT);
}
pulsarClient = newPulsarClient(lookupUrl.toString(), 0);
}
protected PulsarClient newPulsarClient(String url, int intervalInSecs) throws PulsarClientException {
return PulsarClient.builder().serviceUrl(url).statsInterval(intervalInSecs, TimeUnit.SECONDS).build();
}
protected final void internalSetupForStatsTest() throws Exception {
init();
String lookupUrl = brokerUrl.toString();
if (isTcpLookup) {
lookupUrl = new URI("pulsar://localhost:" + BROKER_PORT).toString();
}
pulsarClient = newPulsarClient(lookupUrl, 1);
}
protected final void init() throws Exception {
sameThreadOrderedSafeExecutor = new SameThreadOrderedSafeExecutor();
bkExecutor = Executors.newSingleThreadExecutor(
new ThreadFactoryBuilder().setNameFormat("mock-pulsar-bk")
.setUncaughtExceptionHandler((thread, ex) -> log.info("Uncaught exception", ex))
.build());
mockZookKeeper = createMockZooKeeper();
mockBookKeeper = createMockBookKeeper(mockZookKeeper, bkExecutor);
startBroker();
brokerUrl = new URL("http://" + pulsar.getAdvertisedAddress() + ":" + BROKER_WEBSERVICE_PORT);
brokerUrlTls = new URL("https://" + pulsar.getAdvertisedAddress() + ":" + BROKER_WEBSERVICE_PORT_TLS);
admin = spy(PulsarAdmin.builder().serviceHttpUrl(brokerUrl.toString()).build());
}
protected final void internalCleanup() throws Exception {
try {
// if init fails, some of these could be null, and if so would throw
// an NPE in shutdown, obscuring the real error
if (admin != null) {
admin.close();
}
if (pulsarClient != null) {
pulsarClient.close();
}
if (pulsar != null) {
pulsar.close();
}
if (mockBookKeeper != null) {
mockBookKeeper.reallyShutdow();
}
if (mockZookKeeper != null) {
mockZookKeeper.shutdown();
}
if (sameThreadOrderedSafeExecutor != null) {
sameThreadOrderedSafeExecutor.shutdown();
}
if (bkExecutor != null) {
bkExecutor.shutdown();
}
} catch (Exception e) {
log.warn("Failed to clean up mocked pulsar service:", e);
throw e;
}
}
protected abstract void setup() throws Exception;
protected abstract void cleanup() throws Exception;
protected void restartBroker() throws Exception {
stopBroker();
startBroker();
}
protected void stopBroker() throws Exception {
pulsar.close();
// Simulate cleanup of ephemeral nodes
//mockZookKeeper.delete("/loadbalance/brokers/localhost:" + pulsar.getConfiguration().getWebServicePort(), -1);
}
protected void startBroker() throws Exception {
this.pulsar = startBroker(conf);
}
protected PulsarService startBroker(ServiceConfiguration conf) throws Exception {
PulsarService pulsar = spy(new PulsarService(conf));
setupBrokerMocks(pulsar);
boolean isAuthorizationEnabled = conf.isAuthorizationEnabled();
// enable authrorization to initialize authorization service which is used by grant-permission
conf.setAuthorizationEnabled(true);
pulsar.start();
conf.setAuthorizationEnabled(isAuthorizationEnabled);
Compactor spiedCompactor = spy(pulsar.getCompactor());
doReturn(spiedCompactor).when(pulsar).getCompactor();
return pulsar;
}
protected void setupBrokerMocks(PulsarService pulsar) throws Exception {
// Override default providers with mocked ones
doReturn(mockZooKeeperClientFactory).when(pulsar).getZooKeeperClientFactory();
doReturn(mockBookKeeperClientFactory).when(pulsar).newBookKeeperClientFactory();
Supplier<NamespaceService> namespaceServiceSupplier = () -> spy(new NamespaceService(pulsar));
doReturn(namespaceServiceSupplier).when(pulsar).getNamespaceServiceProvider();
doReturn(sameThreadOrderedSafeExecutor).when(pulsar).getOrderedExecutor();
}
public static MockZooKeeper createMockZooKeeper() throws Exception {
MockZooKeeper zk = MockZooKeeper.newInstance(MoreExecutors.newDirectExecutorService());
List<ACL> dummyAclList = new ArrayList<ACL>(0);
ZkUtils.createFullPathOptimistic(zk, "/ledgers/available/192.168.1.1:" + 5000,
"".getBytes(ZookeeperClientFactoryImpl.ENCODING_SCHEME), dummyAclList, CreateMode.PERSISTENT);
zk.create("/ledgers/LAYOUT", "1\nflat:1".getBytes(ZookeeperClientFactoryImpl.ENCODING_SCHEME), dummyAclList,
CreateMode.PERSISTENT);
return zk;
}
public static NonClosableMockBookKeeper createMockBookKeeper(ZooKeeper zookeeper,
ExecutorService executor) throws Exception {
return spy(new NonClosableMockBookKeeper(zookeeper, executor));
}
// Prevent the MockBookKeeper instance from being closed when the broker is restarted within a test
public static class NonClosableMockBookKeeper extends PulsarMockBookKeeper {
public NonClosableMockBookKeeper(ZooKeeper zk, ExecutorService executor) throws Exception {
super(zk, executor);
}
@Override
public void close() throws InterruptedException, BKException {
// no-op
}
@Override
public void shutdown() {
// no-op
}
public void reallyShutdow() {
super.shutdown();
}
}
    /** ZooKeeper client factory that always hands back the single shared mock instance. */
    protected ZooKeeperClientFactory mockZooKeeperClientFactory = new ZooKeeperClientFactory() {

        @Override
        public CompletableFuture<ZooKeeper> create(String serverList, SessionType sessionType,
                int zkSessionTimeoutMillis) {
            // Always return the same instance (so that we don't lose the mock ZK content on broker restart)
            return CompletableFuture.completedFuture(mockZookKeeper);
        }
    };
    /** BookKeeper client factory that always hands back the single shared mock instance. */
    private BookKeeperClientFactory mockBookKeeperClientFactory = new BookKeeperClientFactory() {

        @Override
        public BookKeeper create(ServiceConfiguration conf, ZooKeeper zkClient) throws IOException {
            // Always return the same instance (so that we don't lose the mock BK content on broker restart)
            return mockBookKeeper;
        }

        @Override
        public void close() {
            // no-op: the shared mock outlives individual broker instances
        }
    };
public static void retryStrategically(Predicate<Void> predicate, int retryCount, long intSleepTimeInMillis)
throws Exception {
for (int i = 0; i < retryCount; i++) {
if (predicate.test(null) || i == (retryCount - 1)) {
break;
}
Thread.sleep(intSleepTimeInMillis + (intSleepTimeInMillis * i));
}
}
public static void setFieldValue(Class clazz, Object classObj, String fieldName, Object fieldValue) throws Exception {
Field field = clazz.getDeclaredField(fieldName);
field.setAccessible(true);
field.set(classObj, fieldValue);
}
private static final Logger log = LoggerFactory.getLogger(MockedPulsarServiceBaseTest.class);
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.waveprotocol.wave.client.editor.integration;
import org.waveprotocol.wave.client.editor.content.paragraph.Paragraph;
import org.waveprotocol.wave.model.document.util.LineContainers;
import org.waveprotocol.wave.model.operation.OperationException;
/**
 * GWT unit tests for the {@link Paragraph} element: verifies rendering/wrapping of
 * line-container content and the editor's enter/backspace/delete key handling.
 * Test inputs use abbreviated markup expanded via {@code abbreviations}; a '|' in
 * the markup marks the caret position.
 */
public class ParagraphGwtTest extends ElementTestBase {

  /**
   * Tests rendering for the default line tag.
   *
   * @throws OperationException
   */
  public void testRendering() throws OperationException {
    testRendering(LineContainers.LINE_TAGNAME);
  }

  /**
   * Tests key-down handling for the default line tag.
   *
   * @throws OperationException
   */
  public void testKeyDown() throws OperationException {
    testKeyDowns(LineContainers.LINE_TAGNAME);
  }

  /**
   * Goes thru all tests for a given paragraph tag name.
   *
   * @param tagName paragraph tag to exercise
   * @throws OperationException
   */
  public void testTag(String tagName) throws OperationException {
    testKeyDowns(tagName);
    testRendering(tagName);
  }

  /**
   * Tests rendering of a paragraph element.
   *
   * @param tagName tag substituted for the {@code <p>} abbreviation
   * @throws OperationException
   */
  public void testRendering(String tagName) throws OperationException {
    // Setup abbreviations from <p> to <line></line>
    abbreviations.clear();
    abbreviations.add("<p>", "<" + tagName + "></" + tagName + ">");
    abbreviations.add("</p>", "");
    abbreviations.add("<p/>", "<" + tagName + "></" + tagName + ">");
    abbreviations.add("<lc>", "<body>");
    abbreviations.add("</lc>", "</body>");
    // NOTE(review): unlike testKeyDowns, this method does not call
    // LineContainers.setTopLevelContainerTagname("body") — confirm whether rendering
    // depends on a container tag configured by an earlier test.
    // TODO(user): These tests aren't measuring the right things. (Its
    // measuring the size of the editor, but the editor has a minimum size, so
    // the newline only expands the editor by a small amount. Also these tests
    // are not very useful as it is obvious when a trivial new line doesn't
    // create a new line.) Either fix or remove.
    // int minHeight = 15;
    // testMinHeight("<p></p>", minHeight);
    // testTaller("<p></p><p></p>", "<p></p>", minHeight);
    // testTaller("<p></p><p></p><p></p>", "<p></p><p></p>", minHeight);
    testEqualHeight(format("<p>a</p>"), format("<p>aXj</p>"));
    // testEqualHeight("<p></p><p></p>", "<p>aXj</p><p>aXjADFSG</p>");
    // Wrapping checks with the caret ('|') in every position relative to
    // empty and non-empty sibling paragraphs.
    testContentWrap("<p>|</p>");
    testContentWrap("<p>|</p><p></p>");
    testContentWrap("<p></p><p>|</p>");
    testContentWrap("<p>|</p><p>XX</p>");
    testContentWrap("<p>XX</p><p>|</p>");
    testContentWrap("<p></p><p>|</p><p></p>");
    testContentWrap("<p>XX</p><p>|</p><p></p>");
    testContentWrap("<p></p><p>|</p><p>XX</p>");
    testContentWrap("<p>XX</p><p>|</p><p>XX</p>");
    testContentWrap("<p>XX|</p><p></p>");
    testContentWrap("<p>XX|</p><p>XX</p>");
  }

  /**
   * Tests various keydowns that we can simulate: enter splits a paragraph at the
   * caret; backspace/delete re-join, restoring the original content.
   *
   * @param tagName paragraph tag substituted for the {@code <pp>} abbreviation
   * @throws OperationException
   */
  private void testKeyDowns(String tagName) throws OperationException {
    LineContainers.setTopLevelContainerTagname("body");
    // Setup abbreviations from <pp> to <tagName>
    abbreviations.clear();
    abbreviations.add("<pp>", "<" + tagName + "></" + tagName + ">");
    abbreviations.add("</pp>", "");
    abbreviations.add("<pp/>", "<" + tagName + "></" + tagName + ">");
    abbreviations.add("<lc>", "<body>");
    abbreviations.add("</lc>", "</body>");
    // TODO(user): also test heights are ok when two <p>s are constructed with <enter>
    // Each triple is: (initial content, content after <enter>, content after undoing it).
    testEnterBackspaceDeleteWrap(
        "<pp>|</pp>",
        "<pp></pp><pp>|</pp>",
        "<pp>|</pp><pp></pp>"
    );
    testEnterBackspaceDeleteWrap(
        "<pp></pp><pp>|</pp><pp></pp>",
        "<pp></pp><pp></pp><pp>|</pp><pp></pp>",
        "<pp></pp><pp>|</pp><pp></pp><pp></pp>"
    );
    testEnterBackspaceDeleteWrap(
        "<pp>|abcd</pp>",
        "<pp></pp><pp>|abcd</pp>",
        "<pp>|</pp><pp>abcd</pp>"
    );
    testEnterBackspaceDeleteWrap(
        "<pp>ab|cd</pp>",
        "<pp>ab</pp><pp>|cd</pp>",
        "<pp>ab|</pp><pp>cd</pp>"
    );
    testEnterBackspaceDeleteWrap(
        "<pp>abcd|</pp>",
        "<pp>abcd</pp><pp>|</pp>",
        "<pp>abcd|</pp><pp></pp>"
    );
    // Backspace at the start of the first paragraph, and delete at the end of the
    // last paragraph, are no-ops.
    testBackspaceWrap("<pp>|</pp><pp></pp>", "<pp>|</pp><pp></pp>");
    testBackspaceWrap("<pp>|xx</pp><pp></pp>", "<pp>|xx</pp><pp></pp>");
    testDeleteWrap("<pp></pp><pp>|</pp>", "<pp></pp><pp>|</pp>");
    testDeleteWrap("<pp></pp><pp>xx|</pp>", "<pp></pp><pp>xx|</pp>");
    /*
     * NOTE(patcoleman): the below use <i> and <u> tags for styles.
     * That behaviour for annotations is tested elsewhere, but any new actual element tags
     * could be tested here instead. For now, disabling.
    testEnterBackspaceDeleteWrap(
        "<pp>a|<i>bc</i>d</pp>",
        "<pp>a</pp><pp>|<i>bc</i>d</pp>",
        "<pp>a|</pp><pp><i>bc</i>d</pp>"
    );
    testEnterBackspaceDeleteWrap(
        "<pp>a<i>|bc</i>d</pp>",
        "<pp>a</pp><pp><i>|bc</i>d</pp>",
        "<pp>a|</pp><pp><i>bc</i>d</pp>"
    );
    testEnterBackspaceDeleteWrap(
        "<pp>a<i>b|c</i>d</pp>",
        "<pp>a<i>b</i></pp><pp><i>|c</i>d</pp>",
        "<pp>a<i>b|</i></pp><pp><i>c</i>d</pp>"
    );
    testEnterBackspaceDeleteWrap(
        "<pp>a<i>bc|</i>d</pp>",
        "<pp>a<i>bc</i></pp><pp>|d</pp>",
        "<pp>a<i>bc|</i></pp><pp>d</pp>"
    );
    testEnterBackspaceDeleteWrap(
        "<pp>a<i>bc</i>|d</pp>",
        "<pp>a<i>bc</i></pp><pp>|d</pp>",
        "<pp>a<i>bc|</i></pp><pp>d</pp>"
    );
    testEnterBackspaceDeleteWrap(
        "<pp><i>|abc</i>d</pp>",
        "<pp></pp><pp><i>|abc</i>d</pp>",
        "<pp>|</pp><pp><i>abc</i>d</pp>"
    );
    testEnterBackspaceDeleteWrap(
        "<pp>a<i><u>b|c</u></i>d</pp>",
        "<pp>a<i><u>b</u></i></pp><pp><i><u>|c</u></i>d</pp>",
        "<pp>a<i><u>b|</u></i></pp><pp><i><u>c</u></i>d</pp>"
    );
    testEnterBackspaceDeleteWrap(
        "<pp>a<i><u>|bc</u></i>d</pp>",
        "<pp>a</pp><pp><i><u>|bc</u></i>d</pp>",
        "<pp>a|</pp><pp><i><u>bc</u></i>d</pp>"
    );
    testEnterBackspaceDeleteWrap(
        "<pp>a<i>|<u>bc</u></i>d</pp>",
        "<pp>a</pp><pp><i><u>|bc</u></i>d</pp>",
        "<pp>a|</pp><pp><i><u>bc</u></i>d</pp>"
    );
    testEnterBackspaceDeleteWrap(
        "<pp>a|<i><u>bc</u></i>d</pp>",
        "<pp>a</pp><pp><i><u>|bc</u></i>d</pp>",
        "<pp>a|</pp><pp><i><u>bc</u></i>d</pp>"
    );
    testEnterBackspaceDeleteWrap(
        "<pp>a<i><u>bc|</u></i>d</pp>",
        "<pp>a<i><u>bc</u></i></pp><pp>|d</pp>",
        "<pp>a<i><u>bc|</u></i></pp><pp>d</pp>"
    );
    testEnterBackspaceDeleteWrap(
        "<pp>a<i><u>bc</u>|</i>d</pp>",
        "<pp>a<i><u>bc</u></i></pp><pp>|d</pp>",
        "<pp>a<i><u>bc|</u></i></pp><pp>d</pp>"
    );
    testEnterBackspaceDeleteWrap(
        "<pp>a<i><u>bc</u></i>|d</pp>",
        "<pp>a<i><u>bc</u></i></pp><pp>|d</pp>",
        "<pp>a<i><u>bc|</u></i></pp><pp>d</pp>"
    );
    testEnterBackspaceDeleteWrap(
        "<pp>a<i><u>bc</u></i>d|</pp>",
        "<pp>a<i><u>bc</u></i>d</pp><pp>|</pp>",
        "<pp>a<i><u>bc</u></i>d|</pp><pp></pp>"
    );
    testEnterBackspaceDeleteWrap(
        "<pp>|a<i>b<u>c</u>d</i>e</pp>",
        "<pp></pp><pp>|a<i>b<u>c</u>d</i>e</pp>",
        "<pp>|</pp><pp>a<i>b<u>c</u>d</i>e</pp>"
    );
    testEnterBackspaceDeleteWrap(
        "<pp>a|<i>b<u>c</u>d</i>e</pp>",
        "<pp>a</pp><pp>|<i>b<u>c</u>d</i>e</pp>",
        "<pp>a|</pp><pp><i>b<u>c</u>d</i>e</pp>"
    );
    testEnterBackspaceDeleteWrap(
        "<pp>a<i>|b<u>c</u>d</i>e</pp>",
        "<pp>a</pp><pp>|<i>b<u>c</u>d</i>e</pp>",
        "<pp>a|</pp><pp><i>b<u>c</u>d</i>e</pp>"
    );
    testEnterBackspaceDeleteWrap(
        "<pp>a<i>b|<u>c</u>d</i>e</pp>",
        "<pp>a<i>b</i></pp><pp>|<i><u>c</u>d</i>e</pp>",
        "<pp>a<i>b|</i></pp><pp><i><u>c</u>d</i>e</pp>"
    );
    testEnterBackspaceDeleteWrap(
        "<pp>a<i>b<u>|c</u>d</i>e</pp>",
        "<pp>a<i>b</i></pp><pp>|<i><u>c</u>d</i>e</pp>",
        "<pp>a<i>b|</i></pp><pp><i><u>c</u>d</i>e</pp>"
    );
    testEnterBackspaceDeleteWrap(
        "<pp>a<i>b<u>c|</u>d</i>e</pp>",
        "<pp>a<i>b<u>c</u></i></pp><pp>|<i>d</i>e</pp>",
        "<pp>a<i>b<u>c|</u></i></pp><pp><i>d</i>e</pp>"
    );
    testEnterBackspaceDeleteWrap(
        "<pp>a<i>b<u>c</u>|d</i>e</pp>",
        "<pp>a<i>b<u>c</u></i></pp><pp>|<i>d</i>e</pp>",
        "<pp>a<i>b<u>c|</u></i></pp><pp><i>d</i>e</pp>"
    );
    testEnterBackspaceDeleteWrap(
        "<pp>a<i>b<u>c</u>d|</i>e</pp>",
        "<pp>a<i>b<u>c</u>d</i></pp><pp>|e</pp>",
        "<pp>a<i>b<u>c</u>d|</i></pp><pp>e</pp>"
    );
    testEnterBackspaceDeleteWrap(
        "<pp>a<i>b<u>c</u>d</i>|e</pp>",
        "<pp>a<i>b<u>c</u>d</i></pp><pp>|e</pp>",
        "<pp>a<i>b<u>c</u>d|</i></pp><pp>e</pp>"
    );
    testEnterBackspaceDeleteWrap(
        "<pp>a<i>b<u>c</u>d</i>e|</pp>",
        "<pp>a<i>b<u>c</u>d</i>e</pp><pp>|</pp>",
        "<pp>a<i>b<u>c</u>d</i>e|</pp><pp></pp>"
    );
    */
  }

  /** Utility that tests backspace, wrapping everything in a line container */
  private void testBackspaceWrap(String first, String second) throws OperationException {
    testBackspace(format(first), format(second));
  }

  /** Utility that tests delete, wrapping everything in a line container */
  private void testDeleteWrap(String first, String second) throws OperationException {
    testDelete(format(first), format(second));
  }

  /** Utility that tests enter-backspace-delete, wrapping everything in a line container */
  private void testEnterBackspaceDeleteWrap(String first, String second, String third)
      throws OperationException {
    testEnterBackspaceDelete(format(first), format(second), format(third));
  }

  /** Utility that tests content, wrapping everything in a line container */
  private void testContentWrap(String content) throws OperationException {
    testContent(format(content));
  }

  /** Utility that wraps input content within a line container. */
  private String format(String input) {
    return "<lc>" + input + "</lc>";
  }
}
| |
package org.zstack.header.vm;
import org.zstack.header.errorcode.ErrorCode;
import org.zstack.header.message.NeedJsonSchema;
/**
 * Canonical event definitions published by the VM tracer: each {@code *_PATH}
 * constant names an event channel and the nested {@code *Data} classes describe
 * the corresponding JSON payloads (hence {@code @NeedJsonSchema}).
 */
public class VmTracerCanonicalEvents {
    public static final String VM_STATE_CHANGED_PATH = "/vmTracer/vmStateChanged";
    public static final String STRANGER_VM_FOUND_PATH = "/vmTracer/strangerVmFound";
    public static final String VM_OPERATE_FAIL_ON_HYPERVISOR_PATH = "/vmTracer/vmOperateFailOnHypervisor";
    public static final String VM_STATE_IN_SHUTDOWN_PATH = "/vmTracer/vmStateInShutdown";
    public static final String VM_SKIP_TRACE_PATH = "/vmTracer/skipTrace";
    // NOTE(review): the two constants below use the prefix "/vmTrace/" while every other
    // path uses "/vmTracer/". This looks like a typo, but the literals are part of the
    // wire contract — confirm with all subscribers before unifying.
    public static final String VM_CONTINUE_TRACE_PATH = "/vmTrace/continueTrace";
    public static final String MIGRATE_VM_FAILED_WITH_HOST_MAINTAIN_PATH = "/vmTrace/MigrateVMFailedWithHostMaintain";

    /** Payload for {@link #VM_SKIP_TRACE_PATH}: identifies a VM whose tracing is suspended. */
    @NeedJsonSchema
    public static class VmSkipTraceData {
        private String vmUuid;
        private String apiId;
        private String msgName;
        private String managementNodeId;

        public String getMsgName() {
            return msgName;
        }

        public void setMsgName(String msgName) {
            this.msgName = msgName;
        }

        public String getVmUuid() {
            return vmUuid;
        }

        public void setVmUuid(String vmUuid) {
            this.vmUuid = vmUuid;
        }

        public String getApiId() {
            return apiId;
        }

        public void setApiId(String apiId) {
            this.apiId = apiId;
        }

        public String getManagementNodeId() {
            return managementNodeId;
        }

        public void setManagementNodeId(String managementNodeId) {
            this.managementNodeId = managementNodeId;
        }
    }

    /** Payload for {@link #VM_CONTINUE_TRACE_PATH}: identifies a VM whose tracing resumes. */
    @NeedJsonSchema
    public static class VmContinueTraceData {
        private String vmUuid;
        private String apiId;
        private String managementNodeId;

        public String getVmUuid() {
            return vmUuid;
        }

        public void setVmUuid(String vmUuid) {
            this.vmUuid = vmUuid;
        }

        public String getApiId() {
            return apiId;
        }

        public void setApiId(String apiId) {
            this.apiId = apiId;
        }

        public String getManagementNodeId() {
            return managementNodeId;
        }

        public void setManagementNodeId(String managementNodeId) {
            this.managementNodeId = managementNodeId;
        }
    }

    /** Payload for {@link #VM_STATE_IN_SHUTDOWN_PATH}: VM found shutting down, with the reason. */
    @NeedJsonSchema
    public static class VmStateInShutdownData {
        private String vmUuid;
        private ErrorCode reason;

        public String getVmUuid() {
            return vmUuid;
        }

        public void setVmUuid(String vmUuid) {
            this.vmUuid = vmUuid;
        }

        public ErrorCode getReason() {
            return reason;
        }

        public void setReason(ErrorCode reason) {
            this.reason = reason;
        }
    }

    /** Payload for {@link #VM_OPERATE_FAIL_ON_HYPERVISOR_PATH}: a VM operation failed on a host. */
    @NeedJsonSchema
    public static class OperateFailOnHypervisorData {
        private String vmUuid;
        private String hostUuid;
        private String operate;
        private String result;

        public String getVmUuid() {
            return vmUuid;
        }

        public void setVmUuid(String vmUuid) {
            this.vmUuid = vmUuid;
        }

        public String getHostUuid() {
            return hostUuid;
        }

        public void setHostUuid(String hostUuid) {
            this.hostUuid = hostUuid;
        }

        public String getOperate() {
            return operate;
        }

        public void setOperate(String operate) {
            this.operate = operate;
        }

        public String getResult() {
            return result;
        }

        public void setResult(String result) {
            this.result = result;
        }
    }

    /** Payload for {@link #STRANGER_VM_FOUND_PATH}: an unrecognized VM discovered on a host. */
    @NeedJsonSchema
    public static class StrangerVmFoundData {
        private String vmIdentity;
        private String hostUuid;
        // State kept as a String so the JSON schema stays simple; parsed on access.
        private String vmState;

        public String getHostUuid() {
            return hostUuid;
        }

        public void setHostUuid(String hostUuid) {
            this.hostUuid = hostUuid;
        }

        public String getVmIdentity() {
            return vmIdentity;
        }

        public void setVmIdentity(String vmIdentity) {
            this.vmIdentity = vmIdentity;
        }

        // Parses the stored string back into the enum; null-safe on both ends.
        public VmInstanceState getVmState() {
            if (vmState != null) {
                return VmInstanceState.valueOf(vmState);
            }
            return null;
        }

        public void setVmState(VmInstanceState vmState) {
            if (vmState != null) {
                this.vmState = vmState.toString();
            }
        }
    }

    /** Payload for {@link #VM_STATE_CHANGED_PATH}: a VM state transition observed on a host. */
    @NeedJsonSchema
    public static class VmStateChangedOnHostData {
        private String vmUuid;
        // from/to kept as Strings for the JSON schema; parsed on access.
        private String from;
        private String to;
        private String originalHostUuid;
        private String currentHostUuid;

        public String getOriginalHostUuid() {
            return originalHostUuid;
        }

        public void setOriginalHostUuid(String originalHostUuid) {
            this.originalHostUuid = originalHostUuid;
        }

        public String getCurrentHostUuid() {
            return currentHostUuid;
        }

        public void setCurrentHostUuid(String currentHostUuid) {
            this.currentHostUuid = currentHostUuid;
        }

        public String getVmUuid() {
            return vmUuid;
        }

        public void setVmUuid(String vmUuid) {
            this.vmUuid = vmUuid;
        }

        public VmInstanceState getFrom() {
            if (from != null) {
                return VmInstanceState.valueOf(from);
            }
            return null;
        }

        public void setFrom(VmInstanceState from) {
            if (from != null) {
                this.from = from.toString();
            }
        }

        public VmInstanceState getTo() {
            if (to != null) {
                return VmInstanceState.valueOf(to);
            }
            return null;
        }

        public void setTo(VmInstanceState to) {
            if (to != null) {
                this.to = to.toString();
            }
        }
    }

    /** Payload for {@link #MIGRATE_VM_FAILED_WITH_HOST_MAINTAIN_PATH}. */
    @NeedJsonSchema
    public static class MigrateVMFailedWithHostMaintainData {
        private String vmUuid;
        private String hostUuid;
        private String reason;

        public String getVmUuid() {
            return vmUuid;
        }

        public void setVmUuid(String vmUuid) {
            this.vmUuid = vmUuid;
        }

        public String getHostUuid() {
            return hostUuid;
        }

        public void setHostUuid(String hostUuid) {
            this.hostUuid = hostUuid;
        }

        public String getReason() {
            return reason;
        }

        public void setReason(String reason) {
            this.reason = reason;
        }
    }
}
| |
package com.yq.manager.bo;
import java.util.Date;
/**
 * Data-transfer bean apparently describing a payment/withdrawal record.
 * NOTE(review): field names are transliterated abbreviations; the meanings hinted
 * below are inferred from the names only — confirm against the database schema
 * before relying on them.
 */
public class W10Bean {
    private int payid;          // record id (presumed primary key)
    private int jyid;
    private int pdid;
    private String payusername; // paying user's account name — TODO confirm
    private int cxt;
    private String vipname;
    private int paynum;         // payment amount — TODO confirm units
    private int paynum9;
    private String payname;     // payee name — presumed
    private String paybank;     // bank name — presumed
    private String paycard;     // bank card number — presumed
    private Integer dqu;        // nullable, unlike the other int ids
    private String bankbz;      // bank remark — presumed
    private Date paytime;       // creation time — presumed
    private Date zftime;        // settlement time — presumed
    private String openonoff;   // on/off flag stored as String
    private int ep;
    private Date rgdate;
    private String dfuser;
    private int kjygid;
    private String bz;          // remark — presumed
    private int qlid;
    private String payonoff;    // on/off flag stored as String
    private String txip;        // request IP — presumed
    private String clip;        // processing IP — presumed
    private int txlb;
    private int txvip;
    //txinfook
    private String name;
    private Integer sumPayNum;  // aggregate, nullable
    private int needVerify;     // verification flag

    public int getPayid() {
        return payid;
    }

    public void setPayid(int payid) {
        this.payid = payid;
    }

    public int getJyid() {
        return jyid;
    }

    public void setJyid(int jyid) {
        this.jyid = jyid;
    }

    public int getPdid() {
        return pdid;
    }

    public void setPdid(int pdid) {
        this.pdid = pdid;
    }

    public String getPayusername() {
        return payusername;
    }

    public void setPayusername(String payusername) {
        this.payusername = payusername;
    }

    public int getCxt() {
        return cxt;
    }

    public void setCxt(int cxt) {
        this.cxt = cxt;
    }

    public String getVipname() {
        return vipname;
    }

    public void setVipname(String vipname) {
        this.vipname = vipname;
    }

    public int getPaynum() {
        return paynum;
    }

    public void setPaynum(int paynum) {
        this.paynum = paynum;
    }

    public int getPaynum9() {
        return paynum9;
    }

    public void setPaynum9(int paynum9) {
        this.paynum9 = paynum9;
    }

    public String getPayname() {
        return payname;
    }

    public void setPayname(String payname) {
        this.payname = payname;
    }

    public String getPaybank() {
        return paybank;
    }

    public void setPaybank(String paybank) {
        this.paybank = paybank;
    }

    public String getPaycard() {
        return paycard;
    }

    public void setPaycard(String paycard) {
        this.paycard = paycard;
    }

    public Integer getDqu() {
        return dqu;
    }

    public void setDqu(Integer dqu) {
        this.dqu = dqu;
    }

    public String getBankbz() {
        return bankbz;
    }

    public void setBankbz(String bankbz) {
        this.bankbz = bankbz;
    }

    public Date getPaytime() {
        return paytime;
    }

    public void setPaytime(Date paytime) {
        this.paytime = paytime;
    }

    public Date getZftime() {
        return zftime;
    }

    public void setZftime(Date zftime) {
        this.zftime = zftime;
    }

    public String getOpenonoff() {
        return openonoff;
    }

    public void setOpenonoff(String openonoff) {
        this.openonoff = openonoff;
    }

    public int getEp() {
        return ep;
    }

    public void setEp(int ep) {
        this.ep = ep;
    }

    public Date getRgdate() {
        return rgdate;
    }

    public void setRgdate(Date rgdate) {
        this.rgdate = rgdate;
    }

    public String getDfuser() {
        return dfuser;
    }

    public void setDfuser(String dfuser) {
        this.dfuser = dfuser;
    }

    public int getKjygid() {
        return kjygid;
    }

    public void setKjygid(int kjygid) {
        this.kjygid = kjygid;
    }

    public String getBz() {
        return bz;
    }

    public void setBz(String bz) {
        this.bz = bz;
    }

    public int getQlid() {
        return qlid;
    }

    public void setQlid(int qlid) {
        this.qlid = qlid;
    }

    public String getPayonoff() {
        return payonoff;
    }

    public void setPayonoff(String payonoff) {
        this.payonoff = payonoff;
    }

    public String getTxip() {
        return txip;
    }

    public void setTxip(String txip) {
        this.txip = txip;
    }

    public String getClip() {
        return clip;
    }

    public void setClip(String clip) {
        this.clip = clip;
    }

    public int getTxlb() {
        return txlb;
    }

    public void setTxlb(int txlb) {
        this.txlb = txlb;
    }

    public int getTxvip() {
        return txvip;
    }

    public void setTxvip(int txvip) {
        this.txvip = txvip;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public Integer getSumPayNum() {
        return sumPayNum;
    }

    public void setSumPayNum(Integer sumPayNum) {
        this.sumPayNum = sumPayNum;
    }

    public int getNeedVerify() {
        return needVerify;
    }

    public void setNeedVerify(int needVerify) {
        this.needVerify = needVerify;
    }
}
| |
package org.adbcj.mysql;
import io.netty.bootstrap.Bootstrap;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufInputStream;
import io.netty.buffer.ByteBufOutputStream;
import io.netty.channel.*;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.nio.NioSocketChannel;
import io.netty.handler.codec.ByteToMessageDecoder;
import io.netty.handler.codec.MessageToByteEncoder;
import org.adbcj.CloseMode;
import org.adbcj.Connection;
import org.adbcj.DbCallback;
import org.adbcj.DbException;
import org.adbcj.mysql.codec.ClientRequest;
import org.adbcj.mysql.codec.MySqlClientDecoder;
import org.adbcj.mysql.codec.MySqlClientEncoder;
import org.adbcj.mysql.codec.decoding.AcceptNextResponse;
import org.adbcj.mysql.codec.decoding.Connecting;
import org.adbcj.mysql.codec.decoding.DecoderState;
import org.adbcj.support.AbstractConnectionManager;
import org.adbcj.support.ConnectionPool;
import org.adbcj.support.LoginCredentials;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.InputStream;
import java.net.InetSocketAddress;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * ADBCJ connection manager for MySQL built on Netty. Keeps a {@link Bootstrap}
 * template for new physical connections and, when pooling is enabled, a
 * {@link ConnectionPool} of authenticated channels keyed by login credentials.
 */
public class MysqlConnectionManager extends AbstractConnectionManager {

    private static final Logger logger = LoggerFactory.getLogger(MysqlConnectionManager.class);

    // Pipeline handler names, namespaced by this class to avoid collisions.
    private static final String ENCODER = MysqlConnectionManager.class.getName() + ".encoder";
    static final String DECODER = MysqlConnectionManager.class.getName() + ".decoder";

    private final LoginCredentials defaultCredentials;
    private final Bootstrap bootstrap;
    private final AtomicInteger idCounter = new AtomicInteger();
    private final NioEventLoopGroup eventLoop;
    final ConnectionPool<LoginCredentials, Channel> connectionPool;

    public MysqlConnectionManager(String host,
                                  int port,
                                  String username,
                                  String password,
                                  String schema,
                                  Map<String, String> properties) {
        super(properties);
        defaultCredentials = new LoginCredentials(username, password, schema);
        eventLoop = new NioEventLoopGroup();
        bootstrap = new Bootstrap()
                .group(eventLoop)
                .channel(NioSocketChannel.class)
                .option(ChannelOption.TCP_NODELAY, true)
                .option(ChannelOption.SO_KEEPALIVE, true)
                // Reads stay off until the connection-specific decoder is installed,
                // so no server handshake bytes are consumed prematurely.
                .option(ChannelOption.AUTO_READ, false)
                .remoteAddress(new InetSocketAddress(host, port))
                .handler(new ChannelInitializer() {
                    @Override
                    public void initChannel(Channel ch) throws Exception {
                        ch.config().setAutoRead(false);
                        ch.pipeline().addLast(ENCODER, new Encoder());
                    }
                });
        if (useConnectionPool) {
            this.connectionPool = new ConnectionPool<>();
        } else {
            this.connectionPool = null;
        }
    }

    /** Connects using the credentials supplied at construction time. */
    @Override
    public void connect(DbCallback<Connection> connected) {
        connect(defaultCredentials.getUserName(), defaultCredentials.getPassword(), connected);
    }

    /**
     * Connects as {@code user}/{@code password} against the default database. Reuses a
     * pooled, already-authenticated channel when one is available; otherwise opens a
     * new physical connection.
     *
     * @throws DbException if this manager has already been closed
     */
    @Override
    public void connect(String user, String password, DbCallback<Connection> connected) {
        StackTraceElement[] entry = entryPointStack();
        LoginCredentials credentials = new LoginCredentials(user, password, defaultCredentials.getDatabase());
        if (isClosed()) {
            throw new DbException("Connection manager closed");
        }
        logger.debug("Starting connection");
        if (connectionPool != null) {
            Channel channel = connectionPool.tryAquire(credentials);
            if (channel != null) {
                MySqlConnection dbConn = new MySqlConnection(
                        credentials,
                        maxQueueLength(),
                        this,
                        channel,
                        getStackTracingOption());
                // Pooled channel is already authenticated; expect regular responses immediately.
                channel.pipeline().addLast(DECODER, new Decoder(
                        new AcceptNextResponse(dbConn), dbConn));
                connected.onComplete(dbConn, null);
                return;
            }
        }
        final ChannelFuture channelFuture = bootstrap.connect();
        channelFuture.addListener((ChannelFutureListener) future -> {
            logger.debug("Physical connect completed");
            Channel channel = future.channel();
            if (!future.isSuccess()) {
                channel.close();
                Throwable cause = future.cause();
                // BUGFIX: a cancelled future carries no cause; previously the callback was
                // never invoked in that case, leaving the caller waiting forever. Always
                // complete the callback on failure.
                DbException error = cause != null
                        ? DbException.wrap(cause, entry)
                        : new DbException("Connect attempt failed or was cancelled");
                connected.onComplete(null, error);
                return;
            }
            MySqlConnection connection = new MySqlConnection(
                    credentials,
                    maxQueueLength(),
                    MysqlConnectionManager.this,
                    channel,
                    getStackTracingOption());
            addConnection(connection);
            // Fresh channel: install the handshake decoder first, then start reading.
            channel.pipeline().addLast(DECODER, new Decoder(
                    new Connecting(connected, entry, connection, credentials), connection));
            channel.config().setAutoRead(true);
            channel.read();
        });
    }

    @Override
    protected void doCloseConnection(Connection connection, CloseMode mode, DbCallback<Void> callback) {
        connection.close(mode, callback);
    }

    /** Shuts the event loop down on a dedicated thread and reports the outcome to the callback. */
    @Override
    protected void doClose(DbCallback<Void> callback, StackTraceElement[] entry) {
        new Thread("Closing MySQL ConnectionManager") {
            @Override
            public void run() {
                eventLoop.shutdownGracefully().addListener(future -> {
                    DbException error = null;
                    if (!future.isSuccess()) {
                        error = DbException.wrap(future.cause(), entry);
                    }
                    callback.onComplete(null, error);
                });
            }
        }.start();
    }

    /** Next unique id issued by this manager. */
    int nextId() {
        return idCounter.incrementAndGet();
    }

    /** Called by a connection that finished closing; removes it from tracking. */
    void closedConnect(Connection connection) {
        removeConnection(connection);
    }
}
/**
 * Inbound handler that feeds raw bytes to the stateful MySQL wire-protocol decoder
 * and emits any fully decoded message downstream.
 */
class Decoder extends ByteToMessageDecoder {

    private final static Logger log = LoggerFactory.getLogger(Decoder.class);

    /** Stateful protocol decoder; carries the current {@link DecoderState}. */
    private final MySqlClientDecoder decoder;
    /** Logical connection notified when the channel goes inactive. */
    private final MySqlConnection connection;

    public Decoder(DecoderState state, MySqlConnection connection) {
        this.decoder = new MySqlClientDecoder(state);
        this.connection = connection;
    }

    @Override
    public void decode(ChannelHandlerContext ctx, ByteBuf buffer, List<Object> out) throws Exception {
        if (log.isDebugEnabled()) {
            log.debug("Decoded buffer#{}: {}", buffer.hashCode(), buffer);
        }
        // try-with-resources replaces the original explicit finally { in.close(); }
        try (InputStream in = new ByteBufInputStream(buffer)) {
            Object message = decoder.decode(in, ctx.channel(), false);
            if (message != null) {
                if (log.isDebugEnabled()) {
                    log.debug("Decoded message: {}", message);
                }
                out.add(message);
            }
        }
    }

    @Override
    public void channelInactive(ChannelHandlerContext ctx) throws Exception {
        super.channelInactive(ctx);
        // Transport dropped: let the logical connection finish any pending close handling.
        connection.tryCompleteClose(null);
    }
}
/**
 * Outbound handler that serializes {@link ClientRequest}s onto the channel's buffer
 * using the MySQL wire protocol.
 *
 * NOTE(review): marked {@code @Sharable} while sharing a single MySqlClientEncoder
 * instance — confirm that encoder is stateless/thread-safe before reusing this
 * handler across channels.
 */
@ChannelHandler.Sharable
class Encoder extends MessageToByteEncoder<ClientRequest> {

    private final static Logger log = LoggerFactory.getLogger(Encoder.class);

    private final MySqlClientEncoder encoder = new MySqlClientEncoder();

    @Override
    public void encode(ChannelHandlerContext ctx, ClientRequest msg, ByteBuf buffer) throws Exception {
        if (log.isDebugEnabled()) {
            log.debug("Sending request: {}", msg);
        }
        // BUGFIX: close the stream on all paths; the original skipped close() when
        // encoder.encode(...) threw.
        try (ByteBufOutputStream out = new ByteBufOutputStream(buffer)) {
            encoder.encode(msg, out);
        }
    }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.macie2.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
* Provides information about an Amazon Macie membership invitation.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/macie2-2020-01-01/Invitation" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class Invitation implements Serializable, Cloneable, StructuredPojo {
    /**
     * <p>
     * The Amazon Web Services account ID for the account that sent the invitation.
     * </p>
     */
    private String accountId;
    /**
     * <p>
     * The unique identifier for the invitation.
     * </p>
     */
    private String invitationId;
    /**
     * <p>
     * The date and time, in UTC and extended ISO 8601 format, when the invitation was sent.
     * </p>
     */
    private java.util.Date invitedAt;
    /**
     * <p>
     * The status of the relationship between the account that sent the invitation and the account that received the
     * invitation.
     * </p>
     * <p>
     * Stored as the string form of {@code RelationshipStatus} (see the enum overload of
     * {@code withRelationshipStatus}).
     * </p>
     */
    private String relationshipStatus;
    // --- accountId accessors (generated; fluent "with" variant returns this for chaining) ---

    /**
     * <p>
     * The Amazon Web Services account ID for the account that sent the invitation.
     * </p>
     *
     * @param accountId
     *        The Amazon Web Services account ID for the account that sent the invitation.
     */
    public void setAccountId(String accountId) {
        this.accountId = accountId;
    }

    /**
     * <p>
     * The Amazon Web Services account ID for the account that sent the invitation.
     * </p>
     *
     * @return The Amazon Web Services account ID for the account that sent the invitation.
     */
    public String getAccountId() {
        return this.accountId;
    }

    /**
     * <p>
     * The Amazon Web Services account ID for the account that sent the invitation.
     * </p>
     *
     * @param accountId
     *        The Amazon Web Services account ID for the account that sent the invitation.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Invitation withAccountId(String accountId) {
        setAccountId(accountId);
        return this;
    }
    // --- invitationId accessors (generated) ---

    /**
     * <p>
     * The unique identifier for the invitation.
     * </p>
     *
     * @param invitationId
     *        The unique identifier for the invitation.
     */
    public void setInvitationId(String invitationId) {
        this.invitationId = invitationId;
    }

    /**
     * <p>
     * The unique identifier for the invitation.
     * </p>
     *
     * @return The unique identifier for the invitation.
     */
    public String getInvitationId() {
        return this.invitationId;
    }

    /**
     * <p>
     * The unique identifier for the invitation.
     * </p>
     *
     * @param invitationId
     *        The unique identifier for the invitation.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Invitation withInvitationId(String invitationId) {
        setInvitationId(invitationId);
        return this;
    }
    // --- invitedAt accessors (generated) ---
    // NOTE(review): java.util.Date is mutable and is stored/returned by reference here
    // (no defensive copy) — a caller mutating the Date mutates this object's state.

    /**
     * <p>
     * The date and time, in UTC and extended ISO 8601 format, when the invitation was sent.
     * </p>
     *
     * @param invitedAt
     *        The date and time, in UTC and extended ISO 8601 format, when the invitation was sent.
     */
    public void setInvitedAt(java.util.Date invitedAt) {
        this.invitedAt = invitedAt;
    }

    /**
     * <p>
     * The date and time, in UTC and extended ISO 8601 format, when the invitation was sent.
     * </p>
     *
     * @return The date and time, in UTC and extended ISO 8601 format, when the invitation was sent.
     */
    public java.util.Date getInvitedAt() {
        return this.invitedAt;
    }

    /**
     * <p>
     * The date and time, in UTC and extended ISO 8601 format, when the invitation was sent.
     * </p>
     *
     * @param invitedAt
     *        The date and time, in UTC and extended ISO 8601 format, when the invitation was sent.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Invitation withInvitedAt(java.util.Date invitedAt) {
        setInvitedAt(invitedAt);
        return this;
    }
    // --- relationshipStatus accessors (generated) ---

    /**
     * <p>
     * The status of the relationship between the account that sent the invitation and the account that received the
     * invitation.
     * </p>
     *
     * @param relationshipStatus
     *        The status of the relationship between the account that sent the invitation and the account that received
     *        the invitation.
     * @see RelationshipStatus
     */
    public void setRelationshipStatus(String relationshipStatus) {
        this.relationshipStatus = relationshipStatus;
    }

    /**
     * <p>
     * The status of the relationship between the account that sent the invitation and the account that received the
     * invitation.
     * </p>
     *
     * @return The status of the relationship between the account that sent the invitation and the account that received
     *         the invitation.
     * @see RelationshipStatus
     */
    public String getRelationshipStatus() {
        return this.relationshipStatus;
    }

    /**
     * <p>
     * The status of the relationship between the account that sent the invitation and the account that received the
     * invitation.
     * </p>
     *
     * @param relationshipStatus
     *        The status of the relationship between the account that sent the invitation and the account that received
     *        the invitation.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see RelationshipStatus
     */
    public Invitation withRelationshipStatus(String relationshipStatus) {
        setRelationshipStatus(relationshipStatus);
        return this;
    }

    /**
     * <p>
     * The status of the relationship between the account that sent the invitation and the account that received the
     * invitation.
     * </p>
     *
     * @param relationshipStatus
     *        The status of the relationship between the account that sent the invitation and the account that received
     *        the invitation. Must not be {@code null}: this overload calls {@code toString()} on the argument
     *        and throws {@code NullPointerException} otherwise.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see RelationshipStatus
     */
    public Invitation withRelationshipStatus(RelationshipStatus relationshipStatus) {
        this.relationshipStatus = relationshipStatus.toString();
        return this;
    }
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getAccountId() != null)
sb.append("AccountId: ").append(getAccountId()).append(",");
if (getInvitationId() != null)
sb.append("InvitationId: ").append(getInvitationId()).append(",");
if (getInvitedAt() != null)
sb.append("InvitedAt: ").append(getInvitedAt()).append(",");
if (getRelationshipStatus() != null)
sb.append("RelationshipStatus: ").append(getRelationshipStatus());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof Invitation == false)
return false;
Invitation other = (Invitation) obj;
if (other.getAccountId() == null ^ this.getAccountId() == null)
return false;
if (other.getAccountId() != null && other.getAccountId().equals(this.getAccountId()) == false)
return false;
if (other.getInvitationId() == null ^ this.getInvitationId() == null)
return false;
if (other.getInvitationId() != null && other.getInvitationId().equals(this.getInvitationId()) == false)
return false;
if (other.getInvitedAt() == null ^ this.getInvitedAt() == null)
return false;
if (other.getInvitedAt() != null && other.getInvitedAt().equals(this.getInvitedAt()) == false)
return false;
if (other.getRelationshipStatus() == null ^ this.getRelationshipStatus() == null)
return false;
if (other.getRelationshipStatus() != null && other.getRelationshipStatus().equals(this.getRelationshipStatus()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getAccountId() == null) ? 0 : getAccountId().hashCode());
hashCode = prime * hashCode + ((getInvitationId() == null) ? 0 : getInvitationId().hashCode());
hashCode = prime * hashCode + ((getInvitedAt() == null) ? 0 : getInvitedAt().hashCode());
hashCode = prime * hashCode + ((getRelationshipStatus() == null) ? 0 : getRelationshipStatus().hashCode());
return hashCode;
}
@Override
public Invitation clone() {
try {
return (Invitation) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        // Delegates wire-format serialization of this object to the generated marshaller singleton.
        com.amazonaws.services.macie2.model.transform.InvitationMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.solr;
import java.io.UnsupportedEncodingException;
import java.net.URL;
import java.net.URLDecoder;
import org.apache.camel.Consumer;
import org.apache.camel.Processor;
import org.apache.camel.Producer;
import org.apache.camel.impl.DefaultEndpoint;
import org.apache.camel.spi.UriEndpoint;
import org.apache.camel.spi.UriParam;
import org.apache.camel.spi.UriPath;
import org.apache.solr.client.solrj.impl.CloudSolrServer;
import org.apache.solr.client.solrj.impl.ConcurrentUpdateSolrServer;
import org.apache.solr.client.solrj.impl.HttpSolrServer;
/**
 * Represents a Solr endpoint. Producers created from this endpoint share per-endpoint
 * server references that are reference-counted and shut down when the last producer goes away.
 * Consumers are not supported.
 */
@UriEndpoint(scheme = "solr", label = "monitoring,search")
public class SolrEndpoint extends DefaultEndpoint {

    @UriPath
    private String url;
    private String scheme = "http://";
    // NOTE: the constant name carries an upstream typo ("DEFUALT") in SolrConstants.
    @UriParam(defaultValue = "" + SolrConstants.DEFUALT_STREAMING_QUEUE_SIZE)
    private int streamingQueueSize = SolrConstants.DEFUALT_STREAMING_QUEUE_SIZE;
    @UriParam(defaultValue = "" + SolrConstants.DEFAULT_STREAMING_THREAD_COUNT)
    private int streamingThreadCount = SolrConstants.DEFAULT_STREAMING_THREAD_COUNT;
    @UriParam
    private Integer maxRetries;
    @UriParam
    private Integer soTimeout;
    @UriParam
    private Integer connectionTimeout;
    @UriParam
    private Integer defaultMaxConnectionsPerHost;
    @UriParam
    private Integer maxTotalConnections;
    @UriParam(defaultValue = "false")
    private Boolean followRedirects;
    @UriParam(defaultValue = "false")
    private Boolean allowCompression;
    @UriParam
    private String zkHost;
    @UriParam
    private String collection;
    @UriParam
    private String requestHandler;

    /**
     * Creates the endpoint. A {@code solrs:} endpoint URI switches the scheme to HTTPS.
     *
     * @param endpointUri the full Camel endpoint URI
     * @param component   the owning Solr component
     * @param address     host/path portion that, combined with the scheme, forms the Solr URL
     * @throws Exception if the resulting address is not a valid URL
     */
    public SolrEndpoint(String endpointUri, SolrComponent component, String address) throws Exception {
        super(endpointUri, component);
        if (endpointUri.startsWith("solrs")) {
            scheme = "https://";
        }
        // Round-trip through java.net.URL so a malformed address fails fast here.
        URL solrUrl = new URL(scheme + address);
        this.url = solrUrl.toString();
    }

    /**
     * Sets the ZooKeeper host address (URL-encoded values are decoded as UTF-8).
     *
     * @throws UnsupportedEncodingException never in practice; UTF-8 is always supported
     */
    public void setZkHost(String zkHost) throws UnsupportedEncodingException {
        this.zkHost = URLDecoder.decode(zkHost, "UTF-8");
    }

    public String getZkHost() {
        return zkHost;
    }

    public void setCollection(String collection) {
        this.collection = collection;
    }

    public String getCollection() {
        return collection;
    }

    @Override
    public SolrComponent getComponent() {
        return (SolrComponent) super.getComponent();
    }

    /**
     * Builds a {@link CloudSolrServer} when both zkHost and collection are configured;
     * otherwise returns {@code null} and the plain HTTP servers are used instead.
     */
    private CloudSolrServer getCloudServer() {
        if (getZkHost() == null || getCollection() == null) {
            return null;
        }
        CloudSolrServer cloud = new CloudSolrServer(zkHost);
        cloud.setDefaultCollection(getCollection());
        return cloud;
    }

    @Override
    public Producer createProducer() throws Exception {
        // Reuse already-registered servers for this endpoint if present.
        SolrComponent.SolrServerReference ref = getComponent().getSolrServers(this);
        if (ref == null) {
            // First producer for this endpoint: create and register the server references.
            ref = new SolrComponent.SolrServerReference();
            CloudSolrServer cloudServer = getCloudServer();
            if (cloudServer == null) {
                HttpSolrServer httpServer = new HttpSolrServer(url);
                applyHttpClientSettings(httpServer);
                ref.setSolrServer(httpServer);
                ref.setUpdateSolrServer(
                        new ConcurrentUpdateSolrServer(url, streamingQueueSize, streamingThreadCount));
            }
            ref.setCloudSolrServer(cloudServer);
            getComponent().addSolrServers(this, ref);
        }
        ref.addReference();
        return new SolrProducer(this, ref.getSolrServer(), ref.getUpdateSolrServer(), ref.getCloudSolrServer());
    }

    /** Copies the optional HTTP client tuning options onto the given server (null means "leave default"). */
    private void applyHttpClientSettings(HttpSolrServer solrServer) {
        if (maxRetries != null) {
            solrServer.setMaxRetries(maxRetries);
        }
        if (soTimeout != null) {
            solrServer.setSoTimeout(soTimeout);
        }
        if (connectionTimeout != null) {
            solrServer.setConnectionTimeout(connectionTimeout);
        }
        if (defaultMaxConnectionsPerHost != null) {
            solrServer.setDefaultMaxConnectionsPerHost(defaultMaxConnectionsPerHost);
        }
        if (maxTotalConnections != null) {
            solrServer.setMaxTotalConnections(maxTotalConnections);
        }
        if (followRedirects != null) {
            solrServer.setFollowRedirects(followRedirects);
        }
        if (allowCompression != null) {
            solrServer.setAllowCompression(allowCompression);
        }
    }

    /** Decrements the shared reference count and shuts the servers down once no producer uses them. */
    protected void onProducerShutdown(SolrProducer producer) {
        SolrComponent.SolrServerReference ref = getComponent().getSolrServers(this);
        if (ref == null) {
            return;
        }
        if (ref.decReference() <= 0) {
            getComponent().shutdownServers(ref, true);
        }
    }

    @Override
    public Consumer createConsumer(Processor processor) throws Exception {
        throw new UnsupportedOperationException("Consumer not supported for Solr endpoint.");
    }

    @Override
    public boolean isSingleton() {
        return true;
    }

    public void setRequestHandler(String requestHandler) {
        this.requestHandler = requestHandler;
    }

    public String getRequestHandler() {
        return this.requestHandler;
    }

    public int getStreamingThreadCount() {
        return this.streamingThreadCount;
    }

    public void setStreamingThreadCount(int streamingThreadCount) {
        this.streamingThreadCount = streamingThreadCount;
    }

    public int getStreamingQueueSize() {
        return this.streamingQueueSize;
    }

    public void setStreamingQueueSize(int streamingQueueSize) {
        this.streamingQueueSize = streamingQueueSize;
    }

    public Integer getMaxRetries() {
        return this.maxRetries;
    }

    public void setMaxRetries(Integer maxRetries) {
        this.maxRetries = maxRetries;
    }

    public Integer getSoTimeout() {
        return this.soTimeout;
    }

    public void setSoTimeout(Integer soTimeout) {
        this.soTimeout = soTimeout;
    }

    public Integer getConnectionTimeout() {
        return this.connectionTimeout;
    }

    public void setConnectionTimeout(Integer connectionTimeout) {
        this.connectionTimeout = connectionTimeout;
    }

    public Integer getDefaultMaxConnectionsPerHost() {
        return this.defaultMaxConnectionsPerHost;
    }

    public void setDefaultMaxConnectionsPerHost(Integer defaultMaxConnectionsPerHost) {
        this.defaultMaxConnectionsPerHost = defaultMaxConnectionsPerHost;
    }

    public Integer getMaxTotalConnections() {
        return this.maxTotalConnections;
    }

    public void setMaxTotalConnections(Integer maxTotalConnections) {
        this.maxTotalConnections = maxTotalConnections;
    }

    public Boolean getFollowRedirects() {
        return this.followRedirects;
    }

    public void setFollowRedirects(Boolean followRedirects) {
        this.followRedirects = followRedirects;
    }

    public Boolean getAllowCompression() {
        return this.allowCompression;
    }

    public void setAllowCompression(Boolean allowCompression) {
        this.allowCompression = allowCompression;
    }
}
| |
/*
* The MIT License
*
* Copyright (c) 2010 tap4j team (see AUTHORS)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.tap4j.ext.testng.listener;
import java.io.File;
import java.util.Arrays;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.tap4j.ext.testng.util.TapTestNGUtil;
import org.tap4j.model.Plan;
import org.tap4j.model.TestResult;
import org.tap4j.model.TestSet;
import org.tap4j.producer.Producer;
import org.tap4j.producer.TapProducer;
import org.testng.IReporter;
import org.testng.ISuite;
import org.testng.ISuiteResult;
import org.testng.ITestNGMethod;
import org.testng.ITestResult;
import org.testng.xml.XmlSuite;
/**
* A TestNG suite TAP reporter.
*
* @since 1.0
*/
public class TapListenerSuite implements IReporter {
    // Accumulates, per test class, every ITestResult seen — populated lazily by generateClasses().
    private final Map<Class<?>, List<ITestResult>> testResultsPerSuite
        = new LinkedHashMap<Class<?>, List<ITestResult>>();
    // Accumulates, per TestNG group name, every ITestResult — populated by generateResultsMapForGroups().
    private final Map<String, List<ITestResult>> testResultsPerGroup
        = new LinkedHashMap<String, List<ITestResult>>();
    // private final Map<ITestNGMethod, List<ITestResult>> testResultsPerMethod
    // = new LinkedHashMap<ITestNGMethod, List<ITestResult>>();
    /**
     * TAP Producer used to write each TestSet out as a .tap file.
     */
    private Producer tapProducer = new TapProducer();
    /**
     * TAP Test Set currently being assembled (reused across suites/groups).
     */
    private TestSet testSet;
    /*
     * (non-Javadoc)
     *
     * @see org.testng.IReporter#generateReport(java.util.List, java.util.List,
     * java.lang.String)
     */
    public void generateReport(List<XmlSuite> xmlSuites, List<ISuite> suites,
            String outputDirectory) {
        // Emit one TAP file per suite, then one per group.
        this.generateTAPPerSuite(xmlSuites, suites, outputDirectory);
        this.generateTAPPerGroup(xmlSuites, suites, outputDirectory);
    }
    /**
     * Generate a TAP file for every suite tested. The plan size is the total
     * number of results across all test classes in the suite.
     *
     * @param xmlSuites XML suite definitions (unused here; kept for interface symmetry)
     * @param suites executed TestNG suites
     * @param outputDirectory directory the .tap files are written into (created if missing)
     */
    protected void generateTAPPerSuite(List<XmlSuite> xmlSuites,
            List<ISuite> suites, String outputDirectory) {
        for (ISuite suite : suites) {
            testSet = new TestSet();
            // Side effect: also fills testResultsPerSuite for this suite.
            Set<Class<?>> testResultsSet = this
                    .getTestResultsSetPerSuite(suite);
            Integer totalTestResults = this
                    .getTotalTestResultsByTestSuite(testResultsSet);
            testSet.setPlan(new Plan(totalTestResults));
            for (Class<?> testResultClass : testResultsSet) {
                List<ITestResult> testResults = testResultsPerSuite
                        .get(testResultClass);
                for (ITestResult testResult : testResults) {
                    // Test numbers are 1-based and sequential within the set.
                    TestResult tapTestResult = TapTestNGUtil
                            .generateTAPTestResult(testResult,
                                    testSet.getNumberOfTestResults() + 1,
                                    isYaml());
                    testSet.addTestResult(tapTestResult);
                }
            }
            File output = new File(outputDirectory, suite.getName() + ".tap");
            if (!output.getParentFile().exists()) {
                output.getParentFile().mkdirs();
            }
            tapProducer.dump(testSet, output);
        }
    }
    /**
     * Generate a TAP file for every group tested. Groups with blank names are skipped.
     *
     * @param xmlSuites XML suite definitions (unused here; kept for interface symmetry)
     * @param suites executed TestNG suites
     * @param outputDirectory directory the .tap files are written into (created if missing)
     */
    protected void generateTAPPerGroup(List<XmlSuite> xmlSuites,
            List<ISuite> suites, String outputDirectory) {
        for (ISuite suite : suites) {
            Map<String, Collection<ITestNGMethod>> groups = suite
                    .getMethodsByGroups();
            this.populateTestResultsPerGroupMap(suite, groups);
            if (groups.size() > 0) {
                // Sort group names so output file creation order is deterministic.
                String[] groupNames = groups.keySet().toArray(
                        new String[groups.size()]);
                Arrays.sort(groupNames);
                for (String group : groupNames) {
                    if (group != null && group.trim().length() > 0) {
                        List<ITestResult> groupTestResults = testResultsPerGroup
                                .get(group);
                        if (groupTestResults != null) {
                            final Integer totalTestResultsByGroup = groupTestResults
                                    .size();
                            testSet = new TestSet();
                            testSet.setPlan(new Plan(totalTestResultsByGroup));
                            for (ITestResult testResult : groupTestResults) {
                                TestResult tapTestResult = TapTestNGUtil
                                        .generateTAPTestResult(
                                                testResult,
                                                testSet.getNumberOfTestResults() + 1,
                                                isYaml());
                                testSet.addTestResult(tapTestResult);
                            }
                            File output = new File(outputDirectory, group
                                    + ".tap");
                            if (!output.getParentFile().exists()) {
                                output.getParentFile().mkdirs();
                            }
                            tapProducer.dump(testSet, output);
                        }
                    }
                }
            }
        }
    }
    /**
     * Get a Set of test Results for Suites by a given ISuite.
     *
     * @param suite the executed suite to inspect
     * @return Set of test Classes that produced results in this Suite
     */
    protected Set<Class<?>> getTestResultsSetPerSuite(ISuite suite) {
        XmlSuite xmlSuite = suite.getXmlSuite();
        // Populate the testResultsPerSuite map with one entry per test class,
        // each holding that class's results.
        this.generateClasses(xmlSuite, suite);
        return testResultsPerSuite.keySet();
    }
    /**
     * Populate a Map of test Results for Groups by a given ISuite.
     *
     * @param suite the executed suite to inspect
     * @param groups group name to test methods mapping reported by TestNG
     */
    protected void populateTestResultsPerGroupMap(ISuite suite,
            Map<String, Collection<ITestNGMethod>> groups) {
        XmlSuite xmlSuite = suite.getXmlSuite();
        // Populate the testResultsPerGroup map with one entry per group name,
        // each holding that group's results.
        this.generateResultsMapForGroups(xmlSuite, suite, groups);
    }
    /**
     * Get total results from a test suite by summing the per-class result lists.
     *
     * @param keySet the test classes to sum over (must all be present in testResultsPerSuite)
     * @return Total number of test Results
     */
    public Integer getTotalTestResultsByTestSuite(Set<Class<?>> keySet) {
        Integer totalTestResults = 0;
        for (Class<?> clazz : keySet) {
            List<ITestResult> testResults = testResultsPerSuite.get(clazz);
            totalTestResults += testResults.size();
        }
        return totalTestResults;
    }
    /**
     * Populate a List of ITestResults for every test Class in a test Suite,
     * ordered by execution date, into the testResultsPerSuite map.
     *
     * @param xmlSuite XML definition of the suite (unused here)
     * @param suite the executed suite whose results are collected
     */
    public void generateClasses(XmlSuite xmlSuite, ISuite suite) {
        if (suite.getResults().size() > 0) {
            for (ISuiteResult suiteResult : suite.getResults().values()) {
                List<ITestResult> testResults = TapTestNGUtil
                        .getTestNGResultsOrderedByExecutionDate(suiteResult
                                .getTestContext());
                for (ITestResult testResult : testResults) {
                    Class<?> clazz = testResult.getMethod().getRealClass();
                    List<ITestResult> testResultsForThisClass = testResultsPerSuite
                            .get(clazz);
                    // Lazily create the per-class bucket on first sighting.
                    if (testResultsForThisClass == null) {
                        testResultsForThisClass = new LinkedList<ITestResult>();
                        testResultsPerSuite.put(clazz, testResultsForThisClass);
                    }
                    testResultsForThisClass.add(testResult);
                }
            }
        }
    }
    /**
     * Generate the map of results for the groups: each result is appended to
     * the bucket of every non-blank group its method belongs to.
     *
     * @param xmlSuite XML definition of the suite (unused here)
     * @param suite the executed suite whose results are collected
     * @param groups group name to test methods mapping reported by TestNG
     */
    protected void generateResultsMapForGroups(XmlSuite xmlSuite, ISuite suite,
            Map<String, Collection<ITestNGMethod>> groups) {
        if (suite.getResults().size() > 0) {
            for (ISuiteResult suiteResult : suite.getResults().values()) {
                List<ITestResult> testResults = TapTestNGUtil
                        .getTestNGResultsOrderedByExecutionDate(suiteResult
                                .getTestContext());
                for (ITestResult testResult : testResults) {
                    ITestNGMethod method = testResult.getMethod();
                    String[] groupsNm = findInWhatGroupsMethodIs(method, groups);
                    for (String gpNm : groupsNm) {
                        if (gpNm != null && gpNm.trim().length() > 0) {
                            List<ITestResult> testResultsForThisGroup = testResultsPerGroup
                                    .get(gpNm);
                            // Lazily create the per-group bucket on first sighting.
                            if (testResultsForThisGroup == null) {
                                testResultsForThisGroup = new LinkedList<ITestResult>();
                                testResultsPerGroup.put(gpNm,
                                        testResultsForThisGroup);
                            }
                            testResultsForThisGroup.add(testResult);
                        }
                    }
                }
            }
        }
    }
    /**
     * Get all group names that a Test Method is included in. The returned array
     * has one slot per known group; slots past the matches remain null.
     *
     * @param methodToFind the test method to look up
     * @param groups group name to test methods mapping reported by TestNG
     * @return array of matching group names, padded with nulls
     */
    protected String[] findInWhatGroupsMethodIs(ITestNGMethod methodToFind,
            Map<String, Collection<ITestNGMethod>> groups) {
        String[] groupsFound = new String[groups.keySet().size()];
        int cont = 0;
        for (Map.Entry<String, Collection<ITestNGMethod>> grupo : groups
                .entrySet()) {
            for (ITestNGMethod method : grupo.getValue()) {
                if (method.equals(methodToFind)
                        && method.getRealClass().equals(
                                methodToFind.getRealClass())) {
                    groupsFound[cont++] = grupo.getKey();
                }
            }
        }
        return groupsFound;
    }
    /**
     * @return <code>true</code> when output YAML is enabled. This base
     *         implementation always returns false; subclasses may override.
     */
    public boolean isYaml() {
        return false;
    }
}
| |
/*******************************************************************************
* Copyright Duke Comprehensive Cancer Center and SemanticBits
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/c3pr/LICENSE.txt for details.
*******************************************************************************/
package edu.duke.cabig.c3pr.webservice.iso21090;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlID;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.adapters.CollapsedStringAdapter;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
/**
* <p>Java class for anonymous complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence maxOccurs="unbounded">
* <element ref="{uri:iso.org:21090}tr"/>
* </sequence>
* <attribute name="ID" type="{http://www.w3.org/2001/XMLSchema}ID" />
* <attribute name="language" type="{http://www.w3.org/2001/XMLSchema}NMTOKEN" />
* <attribute name="styleCode" type="{http://www.w3.org/2001/XMLSchema}NMTOKENS" />
* <attribute name="align">
* <simpleType>
* <restriction base="{http://www.w3.org/2001/XMLSchema}NMTOKEN">
* <enumeration value="left"/>
* <enumeration value="center"/>
* <enumeration value="right"/>
* <enumeration value="justify"/>
* <enumeration value="char"/>
* </restriction>
* </simpleType>
* </attribute>
* <attribute name="char" type="{http://www.w3.org/2001/XMLSchema}string" />
* <attribute name="charoff" type="{http://www.w3.org/2001/XMLSchema}string" />
* <attribute name="valign">
* <simpleType>
* <restriction base="{http://www.w3.org/2001/XMLSchema}NMTOKEN">
* <enumeration value="top"/>
* <enumeration value="middle"/>
* <enumeration value="bottom"/>
* <enumeration value="baseline"/>
* </restriction>
* </simpleType>
* </attribute>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "tr"
})
@XmlRootElement(name = "tbody")
public class Tbody {

    @XmlElement(required = true)
    protected List<Tr> tr;
    @XmlAttribute(name = "ID")
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    @XmlID
    @XmlSchemaType(name = "ID")
    protected String id;
    @XmlAttribute
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    @XmlSchemaType(name = "NMTOKEN")
    protected String language;
    @XmlAttribute
    @XmlSchemaType(name = "NMTOKENS")
    protected List<String> styleCode;
    @XmlAttribute
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    protected String align;
    @XmlAttribute(name = "char")
    protected String _char;
    @XmlAttribute
    protected String charoff;
    @XmlAttribute
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    protected String valign;

    /**
     * Returns the live, lazily created list of table rows ({@link Tr}); never {@code null}.
     * Mutations to the returned list are reflected in this JAXB object, which is why no
     * {@code setTr} method exists — e.g. {@code getTr().add(newRow)}.
     */
    public List<Tr> getTr() {
        if (this.tr == null) {
            this.tr = new ArrayList<Tr>();
        }
        return this.tr;
    }

    /**
     * Returns the ID attribute, or {@code null} if unset.
     */
    public String getID() {
        return this.id;
    }

    /**
     * Sets the ID attribute; {@code null} clears it.
     */
    public void setID(String value) {
        this.id = value;
    }

    /**
     * Returns the language attribute (an NMTOKEN), or {@code null} if unset.
     */
    public String getLanguage() {
        return this.language;
    }

    /**
     * Sets the language attribute; {@code null} clears it.
     */
    public void setLanguage(String value) {
        this.language = value;
    }

    /**
     * Returns the live, lazily created list of style codes (NMTOKENS); never {@code null}.
     * Mutations to the returned list are reflected in this JAXB object, which is why no
     * {@code setStyleCode} method exists — e.g. {@code getStyleCode().add(code)}.
     */
    public List<String> getStyleCode() {
        if (this.styleCode == null) {
            this.styleCode = new ArrayList<String>();
        }
        return this.styleCode;
    }

    /**
     * Returns the horizontal alignment attribute, or {@code null} if unset.
     */
    public String getAlign() {
        return this.align;
    }

    /**
     * Sets the horizontal alignment attribute; {@code null} clears it.
     */
    public void setAlign(String value) {
        this.align = value;
    }

    /**
     * Returns the char attribute (stored as {@code _char} since "char" is a Java keyword),
     * or {@code null} if unset.
     */
    public String getChar() {
        return this._char;
    }

    /**
     * Sets the char attribute; {@code null} clears it.
     */
    public void setChar(String value) {
        this._char = value;
    }

    /**
     * Returns the charoff attribute, or {@code null} if unset.
     */
    public String getCharoff() {
        return this.charoff;
    }

    /**
     * Sets the charoff attribute; {@code null} clears it.
     */
    public void setCharoff(String value) {
        this.charoff = value;
    }

    /**
     * Returns the vertical alignment attribute, or {@code null} if unset.
     */
    public String getValign() {
        return this.valign;
    }

    /**
     * Sets the vertical alignment attribute; {@code null} clears it.
     */
    public void setValign(String value) {
        this.valign = value;
    }
}
| |
package com.indeed.util.core.threads;
import javax.annotation.Nullable;
import java.io.Serializable;
import java.util.Arrays;
/**
* a bit set that is thread safe readable but not thread safe writable
*
* @author ahudson
*/
public class ThreadSafeBitSet implements Serializable {
    private static final long serialVersionUID = -7685178028568216346L;
    // Bits are packed 32 per int, little-endian within each word (bit i lives in
    // bits[i >>> 5] at position i & 0x1F). Writes are unsynchronized by design.
    private final int[] bits;
    private final int size;
    public ThreadSafeBitSet(int size) {
        this.size = size;
        // (size + 31) >>> 5 == ceil(size / 32): number of ints needed for `size` bits.
        this.bits = new int[(size + 31) >>> 5];
    }
    // Private: wraps an existing word array without copying (used by copy()).
    private ThreadSafeBitSet(int[] bits, int size) {
        this.bits = bits;
        this.size = size;
    }
    /* mask for the last byte that contains 1 on the bits that
     * are actually in use.
     */
    private int finalIntUsedBitMask() {
        final int nbUsedBitsInLastInt = this.size % 32;
        final int nbUselessBits = (32 - nbUsedBitsInLastInt) % 32;
        // When size is a multiple of 32 this yields ~0 (all bits in use).
        return (~0 >>> nbUselessBits);
    }
    // Zeroes any bits in the last word beyond `size`, so whole-array ops
    // (setAll/invertAll) don't leave garbage in the unused tail.
    private void cleanUpLastInt() {
        final int nbUsedBitsInLastInt = this.size % 32;
        if (nbUsedBitsInLastInt > 0) {
            this.bits[this.bits.length - 1] &= finalIntUsedBitMask();
        }
    }
    // Sets or clears bit `index` according to `value`.
    public final void set(int index, boolean value) {
        if (value) set(index);
        else clear(index);
    }
    // Returns the value of bit `index`. No bounds check beyond the array's own.
    public final boolean get(int index) {
        final int t = 1 << (index & 0x1F);
        return (bits[index >> 5] & t) != 0;
    }
    // Returns an independent copy (the word array is cloned).
    public ThreadSafeBitSet copy() {
        return new ThreadSafeBitSet(this.bits.clone(), this.size);
    }
    // Number of bits this set holds (fixed at construction).
    public final int size() {
        return size;
    }
    // Sets bit `index` to 1.
    public final void set(int index) {
        bits[index >>> 5] |= (1 << (index & 0x1F));
    }
    // Clears bit `index` to 0.
    public final void clear(int index) {
        bits[index >>> 5] &= ~(1 << (index & 0x1F));
    }
    // Clears every bit.
    public final void clearAll() {
        Arrays.fill(bits, 0);
    }
    // Sets every bit, then scrubs the unused tail of the last word.
    public final void setAll() {
        Arrays.fill(bits, 0xFFFFFFFF);
        cleanUpLastInt();
    }
    // Flips every bit, then scrubs the unused tail of the last word.
    public final void invertAll() {
        for (int i = 0; i < bits.length; i++) {
            bits[i] = ~bits[i];
        }
        cleanUpLastInt();
    }
    // basically same as java's BitSet.and
    public final void and(ThreadSafeBitSet other) {
        if (other.size != size) throw new IllegalArgumentException("BitSets must be of equal size");
        for (int i = 0; i < bits.length; i++) {
            bits[i] &= other.bits[i];
        }
    }
    /**
     * this = this | other, bitwise
     *
     * @param other The bit set to or with. Must have the same size.
     */
    public final void or(ThreadSafeBitSet other) {
        if (other.size != size) throw new IllegalArgumentException("BitSets must be of equal size");
        for (int i = 0; i < bits.length; i++) {
            bits[i] |= other.bits[i];
        }
    }
    /**
     * this = this ^ other, bitwise
     *
     * @param other The bit set to xor with. Must have the same size.
     */
    public final void xor(ThreadSafeBitSet other) {
        if (other.size != size) throw new IllegalArgumentException("BitSets must be of equal size");
        for (int i = 0; i < bits.length; i++) {
            bits[i] ^= other.bits[i];
        }
    }
    // Copies other's bits into the low end of this set. `other` may be smaller
    // (bits of this set beyond other.size are preserved) but never larger.
    public final void copyFrom(ThreadSafeBitSet other) {
        if (other.size == this.size) {
            System.arraycopy(other.bits, 0, bits, 0, other.bits.length);
        } else if (other.size < this.size) {
            if (other.size > 0) {
                // we copy the array but handle the last int separately
                System.arraycopy(other.bits, 0, bits, 0, other.bits.length - 1);
                final int otherLastInt = other.bits[other.bits.length - 1];
                final int myLastModifiedInt = this.bits[other.bits.length - 1];
                final int mask = other.finalIntUsedBitMask();
                // Merge: take other's in-use bits, keep this set's bits above them.
                final int lastInt = (mask & otherLastInt) | ((~mask) & myLastModifiedInt);
                this.bits[other.bits.length - 1] = lastInt;
            }
        } else {
            throw new IllegalArgumentException(
                    "Copy from array bigger than destination is forbidden");
        }
    }
    // Copies `length` bits from other[otherStartIndex..] into this[startIndex..].
    // Fast path does word-at-a-time copies; interior words are shifted/merged when
    // the two ranges are not aligned to the same in-word offset. No bounds checks
    // beyond the arrays' own — callers must pass in-range indices.
    public final void copyFromRange(
            final ThreadSafeBitSet other,
            final int startIndex,
            final int otherStartIndex,
            final int length) {
        if (length == 0) {
            return;
        }
        // end indices are INCLUSIVE to make the twiddling a tad simpler
        final int endIndex = startIndex + length - 1;
        final int otherEndIndex = otherStartIndex + length - 1;
        final int bitsStartIndex = startIndex >>> 5;
        final int otherBitsStartIndex = otherStartIndex >>> 5;
        final int bitsEndIndex = endIndex >>> 5;
        if ((startIndex & 0x1F) == (otherStartIndex & 0x1F)) {
            // Same in-word offset: bulk-copy whole interior words, then patch
            // the partial head and tail words bit-by-bit.
            if (bitsStartIndex != bitsEndIndex) {
                System.arraycopy(
                        other.bits,
                        otherBitsStartIndex + 1,
                        bits,
                        bitsStartIndex + 1,
                        bitsEndIndex - bitsStartIndex - 1);
                simpleCopyFromRange(
                        other, startIndex, otherStartIndex, Integer.SIZE - (startIndex & 0x1F));
                simpleCopyFromRange(
                        other, endIndex & ~0x1F, otherEndIndex & ~0x1F, (endIndex & 0x1F) + 1);
            } else {
                simpleCopyFromRange(other, startIndex, otherStartIndex, length);
            }
        } else {
            // Misaligned offsets: interior destination words are assembled from
            // two adjacent source words via shift-and-mask.
            if (bitsStartIndex != bitsEndIndex) {
                final int difference = (startIndex & 0x1F) - (otherStartIndex & 0x1F);
                final int absDifference = Math.abs(difference);
                final int reverseDifference = Integer.SIZE - absDifference;
                final int reverseDifferenceMask = -1 << reverseDifference;
                final int absDifferenceMask = -1 << absDifference;
                if (difference > 0) {
                    // Destination offset is ahead of source offset: head word is
                    // merged in place, interior words shift source bits left.
                    bits[bitsStartIndex] =
                            (bits[bitsStartIndex] & ~(-1 << (startIndex & 0x1F)))
                                    | ((other.bits[otherBitsStartIndex]
                                            & (-1 << (otherStartIndex & 0x1F)))
                                            << difference);
                    for (int bitsIndex = bitsStartIndex + 1, otherBitsIndex = otherBitsStartIndex;
                            bitsIndex < bitsEndIndex;
                            ++bitsIndex, ++otherBitsIndex) {
                        bits[bitsIndex] =
                                ((other.bits[otherBitsIndex] & reverseDifferenceMask)
                                                >>> reverseDifference)
                                        | ((other.bits[otherBitsIndex + 1] & ~reverseDifferenceMask)
                                                << difference);
                    }
                    simpleCopyFromRange(
                            other,
                            endIndex & ~0x1F,
                            otherEndIndex - (endIndex & 0x1F),
                            (endIndex & 0x1F) + 1);
                } else {
                    // Source offset is ahead: head is patched bit-by-bit, interior
                    // words shift source bits right.
                    simpleCopyFromRange(
                            other, startIndex, otherStartIndex, Integer.SIZE - (startIndex & 0x1F));
                    for (int bitsIndex = bitsStartIndex + 1,
                                    otherBitsIndex = otherBitsStartIndex + 1;
                            bitsIndex < bitsEndIndex;
                            ++bitsIndex, ++otherBitsIndex) {
                        bits[bitsIndex] =
                                ((other.bits[otherBitsIndex] & absDifferenceMask) >>> absDifference)
                                        | ((other.bits[otherBitsIndex + 1] & ~absDifferenceMask)
                                                << reverseDifference);
                    }
                    simpleCopyFromRange(
                            other,
                            endIndex & ~0x1F,
                            otherEndIndex - (endIndex & 0x1F),
                            (endIndex & 0x1F) + 1);
                }
            } else {
                simpleCopyFromRange(other, startIndex, otherStartIndex, length);
            }
        }
    }
    // Bit-by-bit fallback used for partial head/tail words and short ranges.
    private void simpleCopyFromRange(
            final ThreadSafeBitSet other,
            final int startIndex,
            final int otherStartIndex,
            final int length) {
        for (int i = 0; i < length; ++i) {
            set(startIndex + i, other.get(otherStartIndex + i));
        }
    }
    // Number of set bits, summed word-by-word via Integer.bitCount.
    public final int cardinality() {
        int sum = 0;
        for (final int x : bits) {
            sum += Integer.bitCount(x);
        }
        return sum;
    }
    // Returns oldBitSet itself if it is already at least newSize bits; otherwise a
    // new, larger set with oldBitSet's bits copied into its low end.
    public static ThreadSafeBitSet expand(@Nullable ThreadSafeBitSet oldBitSet, int newSize) {
        if (oldBitSet != null && newSize <= oldBitSet.size) return oldBitSet;
        final ThreadSafeBitSet ret = new ThreadSafeBitSet(newSize);
        if (oldBitSet != null) ret.copyFrom(oldBitSet);
        return ret;
    }
    // Null-tolerant equality: same reference, or same size and identical words.
    public static boolean equals(ThreadSafeBitSet bitset1, ThreadSafeBitSet bitset2) {
        if (bitset1 == bitset2) return true;
        if (bitset1 == null || bitset2 == null) return false;
        if (bitset1.size() != bitset2.size()) return false;
        return Arrays.equals(bitset1.bits, bitset2.bits);
    }
    // Returns a new set sized max(a.size, b.size) containing a | b.
    public static ThreadSafeBitSet or(ThreadSafeBitSet a, ThreadSafeBitSet b) {
        final int size = Math.max(a.size(), b.size());
        final ThreadSafeBitSet ret = new ThreadSafeBitSet(size);
        for (int i = 0; i < a.bits.length; i++) {
            ret.bits[i] |= a.bits[i];
        }
        for (int i = 0; i < b.bits.length; i++) {
            ret.bits[i] |= b.bits[i];
        }
        return ret;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.expression;
import static org.junit.Assert.assertEquals;
import java.math.BigDecimal;
import java.sql.Date;
import java.sql.SQLException;
import java.sql.Time;
import java.util.List;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.phoenix.expression.function.ArrayFillFunction;
import org.apache.phoenix.schema.SortOrder;
import org.apache.phoenix.schema.types.*;
import org.junit.Test;
import org.apache.phoenix.thirdparty.com.google.common.collect.Lists;
/**
 * Unit tests for {@link ArrayFillFunction}: ARRAY_FILL(element, length) must
 * produce an array of {@code length} copies of {@code element} for every
 * supported element type, regardless of the sort order of either argument.
 */
public class ArrayFillFunctionTest {
    /**
     * Evaluates ARRAY_FILL over the two literal arguments, decodes the result
     * bytes back into a {@link PhoenixArray}, and asserts it equals {@code expected}.
     */
    private static void testExpression(LiteralExpression element, LiteralExpression length, PhoenixArray expected)
            throws SQLException {
        List<Expression> expressions = Lists.newArrayList((Expression) element);
        expressions.add(length);
        Expression arrayFillFunction = new ArrayFillFunction(expressions);
        ImmutableBytesWritable ptr = new ImmutableBytesWritable();
        arrayFillFunction.evaluate(null, ptr);
        PhoenixArray result = (PhoenixArray) arrayFillFunction.getDataType().toObject(ptr, arrayFillFunction.getSortOrder(), arrayFillFunction.getMaxLength(), arrayFillFunction.getScale());
        assertEquals(expected, result);
    }
    /**
     * Builds literal expressions for the element and length with the given
     * types/sort orders, then delegates to {@link #testExpression}.
     */
    private static void test(Object element, Object length, PDataType elementDataType, Integer elementMaxLen, Integer elementScale, PDataType lengthDataType, Integer lengthMaxlen, Integer lengthScale, PhoenixArray expected, SortOrder elementSortOrder, SortOrder lengthSortOrder) throws SQLException {
        LiteralExpression elementLiteral, lengthLiteral;
        elementLiteral = LiteralExpression.newConstant(element, elementDataType, elementMaxLen, elementScale, elementSortOrder, Determinism.ALWAYS);
        lengthLiteral = LiteralExpression.newConstant(length, lengthDataType, lengthMaxlen, lengthScale, lengthSortOrder, Determinism.ALWAYS);
        testExpression(elementLiteral, lengthLiteral, expected);
    }
    // INTEGER elements, all four ASC/DESC combinations of the two arguments.
    @Test
    public void testForInt() throws SQLException {
        Object element = 5;
        Object length = 3;
        PDataType baseType = PInteger.INSTANCE;
        PhoenixArray e = new PhoenixArray.PrimitiveIntPhoenixArray(baseType, new Object[]{5, 5, 5});
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.ASC, SortOrder.ASC);
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.ASC, SortOrder.DESC);
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.DESC, SortOrder.DESC);
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.DESC, SortOrder.ASC);
    }
    // BOOLEAN elements.
    @Test
    public void testForBoolean() throws SQLException {
        Object element = false;
        Object length = 3;
        PDataType baseType = PBoolean.INSTANCE;
        PhoenixArray e = new PhoenixArray.PrimitiveBooleanPhoenixArray(baseType, new Object[]{false, false, false});
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.ASC, SortOrder.ASC);
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.ASC, SortOrder.DESC);
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.DESC, SortOrder.DESC);
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.DESC, SortOrder.ASC);
    }
    // VARCHAR (variable-length) elements.
    @Test
    public void testForVarchar() throws SQLException {
        Object element = "foo";
        Object length = 3;
        PDataType baseType = PVarchar.INSTANCE;
        PhoenixArray e = new PhoenixArray(baseType, new Object[]{"foo", "foo", "foo"});
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.ASC, SortOrder.ASC);
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.ASC, SortOrder.DESC);
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.DESC, SortOrder.DESC);
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.DESC, SortOrder.ASC);
    }
    // CHAR (fixed-length) elements.
    @Test
    public void testForChar() throws SQLException {
        Object element = "foo";
        Object length = 3;
        PDataType baseType = PChar.INSTANCE;
        PhoenixArray e = new PhoenixArray(baseType, new Object[]{"foo", "foo", "foo"});
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.ASC, SortOrder.ASC);
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.ASC, SortOrder.DESC);
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.DESC, SortOrder.DESC);
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.DESC, SortOrder.ASC);
    }
    // DOUBLE elements. NOTE(review): unlike the other tests this only covers two
    // of the four sort-order combinations — presumably intentional, but worth confirming.
    @Test
    public void testForDouble() throws SQLException {
        Object element = 34.67;
        Object length = 3;
        PDataType baseType = PDouble.INSTANCE;
        PhoenixArray e = new PhoenixArray.PrimitiveDoublePhoenixArray(baseType, new Object[]{34.67, 34.67, 34.67});
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.ASC, SortOrder.ASC);
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.ASC, SortOrder.DESC);
    }
    // FLOAT elements (double literal coerced to float). Covers two combinations,
    // mirroring testForDouble.
    @Test
    public void testForFloat() throws SQLException {
        Object element = 5.6;
        Object length = 3;
        PDataType baseType = PFloat.INSTANCE;
        PhoenixArray e = new PhoenixArray.PrimitiveFloatPhoenixArray(baseType, new Object[]{(float) 5.6, (float) 5.6, (float) 5.6});
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.ASC, SortOrder.ASC);
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.ASC, SortOrder.DESC);
    }
    // SMALLINT elements.
    @Test
    public void testForSmallint() throws SQLException {
        Object element = 5;
        Object length = 3;
        PDataType baseType = PSmallint.INSTANCE;
        PhoenixArray e = new PhoenixArray.PrimitiveShortPhoenixArray(baseType, new Object[]{(short) 5, (short) 5, (short) 5});
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.ASC, SortOrder.ASC);
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.ASC, SortOrder.DESC);
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.DESC, SortOrder.DESC);
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.DESC, SortOrder.ASC);
    }
    // TINYINT elements.
    @Test
    public void testForTinyint() throws SQLException {
        Object element = 6;
        Object length = 3;
        PDataType baseType = PTinyint.INSTANCE;
        PhoenixArray e = new PhoenixArray.PrimitiveBytePhoenixArray(baseType, new Object[]{(byte) 6, (byte) 6, (byte) 6});
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.ASC, SortOrder.ASC);
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.ASC, SortOrder.DESC);
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.DESC, SortOrder.DESC);
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.DESC, SortOrder.ASC);
    }
    // BIGINT elements. Fixed: use uppercase 'L' suffix for long literals — the
    // lowercase 'l' in the original is easily misread as the digit '1'.
    @Test
    public void testForLong() throws SQLException {
        Object element = 34567L;
        Object length = 3;
        PDataType baseType = PLong.INSTANCE;
        PhoenixArray e = new PhoenixArray.PrimitiveLongPhoenixArray(baseType, new Object[]{34567L, 34567L, 34567L});
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.ASC, SortOrder.ASC);
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.ASC, SortOrder.DESC);
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.DESC, SortOrder.DESC);
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.DESC, SortOrder.ASC);
    }
    // DECIMAL elements.
    @Test
    public void testForDecimal() throws SQLException {
        Object element = BigDecimal.valueOf(345.67);
        Object length = 3;
        PDataType baseType = PDecimal.INSTANCE;
        PhoenixArray e = new PhoenixArray(baseType, new Object[]{BigDecimal.valueOf(345.67), BigDecimal.valueOf(345.67), BigDecimal.valueOf(345.67)});
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.ASC, SortOrder.ASC);
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.ASC, SortOrder.DESC);
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.DESC, SortOrder.DESC);
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.DESC, SortOrder.ASC);
    }
    // DATE elements.
    @Test
    public void testForDate() throws SQLException {
        Object element = new Date(23);
        Object length = 3;
        PDataType baseType = PDate.INSTANCE;
        PhoenixArray e = new PhoenixArray(baseType, new Object[]{new Date(23), new Date(23), new Date(23)});
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.ASC, SortOrder.ASC);
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.ASC, SortOrder.DESC);
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.DESC, SortOrder.DESC);
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.DESC, SortOrder.ASC);
    }
    // TIME elements.
    @Test
    public void testForTime() throws SQLException {
        Object element = new Time(23);
        Object length = 3;
        PDataType baseType = PTime.INSTANCE;
        PhoenixArray e = new PhoenixArray(baseType, new Object[]{new Time(23), new Time(23), new Time(23)});
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.ASC, SortOrder.ASC);
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.ASC, SortOrder.DESC);
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.DESC, SortOrder.DESC);
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.DESC, SortOrder.ASC);
    }
    // Null element over a primitive base type: filled with the type's default (0).
    @Test
    public void testForNulls1() throws SQLException {
        Object element = null;
        Object length = 3;
        PDataType baseType = PInteger.INSTANCE;
        PhoenixArray e = new PhoenixArray.PrimitiveIntPhoenixArray(baseType, new Object[]{0, 0, 0});
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.ASC, SortOrder.ASC);
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.ASC, SortOrder.DESC);
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.DESC, SortOrder.DESC);
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.DESC, SortOrder.ASC);
    }
    // Null element over an object base type: filled with actual nulls.
    @Test
    public void testForNulls2() throws SQLException {
        Object element = null;
        Object length = 3;
        PDataType baseType = PVarchar.INSTANCE;
        PhoenixArray e = new PhoenixArray(baseType, new Object[]{null, null, null});
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.ASC, SortOrder.ASC);
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.ASC, SortOrder.DESC);
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.DESC, SortOrder.DESC);
        test(element, length, baseType, null, null, PInteger.INSTANCE, null, null, e, SortOrder.DESC, SortOrder.ASC);
    }
}
| |
/**
* Copyright (C) 2013-2019 Helical IT Solutions (http://www.helicalinsight.com) - All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.helicalinsight.resourcesecurity.jaxb;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
import javax.xml.bind.annotation.*;
import java.util.List;
/**
* Created by author on 25-05-2015.
*
* @author Rajasekhar
*/
@SuppressWarnings("ALL")
@Component
@Scope("prototype")
@XmlRootElement(name = "security")
@XmlAccessorType(XmlAccessType.FIELD)
/**
 * JAXB-bound {@code <security>} element describing who created a resource and
 * with which roles/users it is shared. Nested types mirror the XML structure:
 * {@code <share>} contains {@code <roles>} and {@code <users>}, which contain
 * repeated {@code <role>}/{@code <user>} entries.
 * <p>
 * Fix in this revision: {@link Share#equals(Object)} and {@link Share#hashCode()}
 * dereferenced {@code mandatory} without a null check, unlike the null-safe
 * handling in {@link Roles} and {@link Users}; Share is now null-safe too.
 */
@SuppressWarnings("ALL")
@Component
@Scope("prototype")
@XmlRootElement(name = "security")
@XmlAccessorType(XmlAccessType.FIELD)
public class Security {
    // User name recorded as the resource creator.
    @XmlElement
    private String createdBy;
    public String getCreatedBy() {
        return createdBy;
    }
    public void setCreatedBy(String createdBy) {
        this.createdBy = createdBy;
    }
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        Security security = (Security) o;
        // Null-safe comparison of the only field.
        if (createdBy != null ? !createdBy.equals(security.createdBy) : security.createdBy != null) return false;
        return true;
    }
    @Override
    public String toString() {
        return "Security{" +
                "createdBy='" + createdBy + '\'' +
                '}';
    }
    @Override
    public int hashCode() {
        return createdBy != null ? createdBy.hashCode() : 0;
    }
    /** JAXB binding for the {@code <share>} element: roles and users a resource is shared with. */
    @Component
    @Scope("prototype")
    @XmlAccessorType(XmlAccessType.FIELD)
    public static class Share {
        // Defaults to "true" when the attribute is absent; JAXB field access may
        // still leave it null in hand-built instances, hence null-safe equals/hashCode.
        @XmlAttribute
        private String mandatory = "true";
        @XmlElement(name = "roles")
        private Roles roles;
        @XmlElement(name = "users")
        private Users users;
        @Override
        public String toString() {
            return "Share{" +
                    "mandatory='" + mandatory + '\'' +
                    ", roles=" + roles +
                    ", users=" + users +
                    '}';
        }
        public Roles getRoles() {
            return roles;
        }
        public void setRoles(Roles roles) {
            this.roles = roles;
        }
        public Users getUsers() {
            return users;
        }
        public void setUsers(Users users) {
            this.users = users;
        }
        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            Share share = (Share) o;
            // Fixed: null-safe comparison, consistent with Roles/Users siblings
            // (previously threw NPE when mandatory was null).
            if (mandatory != null ? !mandatory.equals(share.mandatory) : share.mandatory != null) return false;
            if (roles != null ? !roles.equals(share.roles) : share.roles != null) return false;
            if (users != null ? !users.equals(share.users) : share.users != null) return false;
            return true;
        }
        @Override
        public int hashCode() {
            // Fixed: null-safe, consistent with the null-safe equals above.
            int result = mandatory != null ? mandatory.hashCode() : 0;
            result = 31 * result + (roles != null ? roles.hashCode() : 0);
            result = 31 * result + (users != null ? users.hashCode() : 0);
            return result;
        }
    }
    /** JAXB binding for the {@code <roles>} element: a list of {@code <role>} entries. */
    @Component
    @Scope("prototype")
    @XmlAccessorType(XmlAccessType.FIELD)
    public static class Roles {
        @XmlAttribute
        private String mandatory = "true";
        @XmlElement(name = "role")
        private List<Role> roles;
        public List<Role> getRoles() {
            return roles;
        }
        public void setRoles(List<Role> roles) {
            this.roles = roles;
        }
        @Override
        public String toString() {
            // NOTE(review): 'mandatory' is intentionally not part of toString here,
            // unlike Share — kept as-is to preserve existing output.
            return "Roles{" +
                    "roles=" + roles +
                    '}';
        }
        @Override
        public boolean equals(Object other) {
            if (this == other) return true;
            if (other == null || getClass() != other.getClass()) return false;
            Roles roles1 = (Roles) other;
            if (mandatory != null ? !mandatory.equals(roles1.mandatory) : roles1.mandatory != null) return false;
            if (roles != null ? !roles.equals(roles1.roles) : roles1.roles != null) return false;
            return true;
        }
        @Override
        public int hashCode() {
            int result = mandatory != null ? mandatory.hashCode() : 0;
            result = 31 * result + (roles != null ? roles.hashCode() : 0);
            return result;
        }
    }
    /** JAXB binding for the {@code <users>} element: a list of {@code <user>} entries. */
    @Component
    @Scope("prototype")
    @XmlAccessorType(XmlAccessType.FIELD)
    public static class Users {
        @XmlAttribute
        private String mandatory = "true";
        @XmlElement(name = "user")
        private List<User> users;
        public List<User> getUsers() {
            return users;
        }
        public void setUsers(List<User> users) {
            this.users = users;
        }
        @Override
        public String toString() {
            return "Users{" +
                    "users=" + users +
                    '}';
        }
        @Override
        public boolean equals(Object other) {
            if (this == other) return true;
            if (other == null || getClass() != other.getClass()) return false;
            Users users1 = (Users) other;
            if (mandatory != null ? !mandatory.equals(users1.mandatory) : users1.mandatory != null) return false;
            if (users != null ? !users.equals(users1.users) : users1.users != null) return false;
            return true;
        }
        @Override
        public int hashCode() {
            int result = mandatory != null ? mandatory.hashCode() : 0;
            result = 31 * result + (users != null ? users.hashCode() : 0);
            return result;
        }
    }
    /**
     * JAXB binding for a {@code <role>} entry. The element's text content is the
     * permission value; name/id are attributes.
     */
    @Component
    @Scope("prototype")
    @XmlAccessorType(XmlAccessType.FIELD)
    public static class Role {
        @XmlAttribute
        private String name;
        @XmlAttribute
        private String id;
        @XmlValue
        private String permission;
        public String getName() {
            return name;
        }
        public void setName(String name) {
            this.name = name;
        }
        public String getId() {
            return id;
        }
        public void setId(String id) {
            this.id = id;
        }
        public String getPermission() {
            return permission;
        }
        public void setPermission(String permission) {
            this.permission = permission;
        }
        @Override
        public String toString() {
            return "Role{" +
                    "name='" + name + '\'' +
                    ", id='" + id + '\'' +
                    ", permission='" + permission + '\'' +
                    '}';
        }
        @Override
        public boolean equals(Object other) {
            if (this == other) return true;
            if (other == null || getClass() != other.getClass()) return false;
            Role role = (Role) other;
            if (id != null ? !id.equals(role.id) : role.id != null) return false;
            if (name != null ? !name.equals(role.name) : role.name != null) return false;
            if (permission != null ? !permission.equals(role.permission) : role.permission != null) return false;
            return true;
        }
        @Override
        public int hashCode() {
            int result = name != null ? name.hashCode() : 0;
            result = 31 * result + (id != null ? id.hashCode() : 0);
            result = 31 * result + (permission != null ? permission.hashCode() : 0);
            return result;
        }
    }
    /**
     * JAXB binding for a {@code <user>} entry. Structurally identical to
     * {@link Role}; kept separate to match the XML vocabulary.
     */
    @Component
    @Scope("prototype")
    @XmlAccessorType(XmlAccessType.FIELD)
    public static class User {
        @XmlAttribute
        private String name;
        @XmlAttribute
        private String id;
        @XmlValue
        private String permission;
        public String getName() {
            return name;
        }
        public void setName(String name) {
            this.name = name;
        }
        public String getId() {
            return id;
        }
        public void setId(String id) {
            this.id = id;
        }
        public String getPermission() {
            return permission;
        }
        public void setPermission(String permission) {
            this.permission = permission;
        }
        @Override
        public String toString() {
            return "User{" +
                    "name='" + name + '\'' +
                    ", id='" + id + '\'' +
                    ", permission='" + permission + '\'' +
                    '}';
        }
        @Override
        public boolean equals(Object other) {
            if (this == other) return true;
            if (other == null || getClass() != other.getClass()) return false;
            User user = (User) other;
            if (id != null ? !id.equals(user.id) : user.id != null) return false;
            if (name != null ? !name.equals(user.name) : user.name != null) return false;
            if (permission != null ? !permission.equals(user.permission) : user.permission != null) return false;
            return true;
        }
        @Override
        public int hashCode() {
            int result = name != null ? name.hashCode() : 0;
            result = 31 * result + (id != null ? id.hashCode() : 0);
            result = 31 * result + (permission != null ? permission.hashCode() : 0);
            return result;
        }
    }
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.completion;
import com.intellij.codeInsight.TailType;
import com.intellij.codeInsight.lookup.LookupElement;
import com.intellij.codeInsight.lookup.LookupValueFactory;
import com.intellij.lang.ASTNode;
import com.intellij.openapi.editor.CaretModel;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.fileTypes.StdFileTypes;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiElement;
import com.intellij.psi.filters.*;
import com.intellij.psi.filters.getters.XmlAttributeValueGetter;
import com.intellij.psi.filters.position.LeftNeighbour;
import com.intellij.psi.filters.position.XmlTokenTypeFilter;
import com.intellij.psi.search.PsiElementProcessor;
import com.intellij.psi.tree.TokenSet;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.xml.*;
import com.intellij.util.ArrayUtil;
import com.intellij.xml.Html5SchemaProvider;
import com.intellij.xml.XmlElementDescriptor;
import com.intellij.xml.XmlNSDescriptor;
import com.intellij.xml.util.HtmlUtil;
import com.intellij.xml.util.XmlUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
/**
* Created by IntelliJ IDEA.
* User: ik
* Date: 05.06.2003
* Time: 18:55:15
* To change this template use Options | File Templates.
*/
/**
 * Legacy (pre-contributor-API) completion data for XML: registers completion
 * variants for tags, attributes, attribute values, simple-content enumeration
 * values, and entity references ({@code &name;}).
 * <p>
 * Fix in this revision: {@code EntityRefGetter.getLookupItem} dereferenced
 * {@code decl.getValueElement()} without a null check; a declaration without a
 * value element no longer causes an NPE.
 */
public class XmlCompletionData extends CompletionData {
  public XmlCompletionData() {
    declareFinalScope(XmlTag.class);
    declareFinalScope(XmlAttribute.class);
    declareFinalScope(XmlAttributeValue.class);
    // Tag-name completion inside a tag.
    {
      final CompletionVariant variant = new CompletionVariant(createTagCompletionFilter());
      variant.includeScopeClass(XmlTag.class);
      variant.addCompletionFilter(TrueFilter.INSTANCE);
      registerVariant(variant);
    }
    // Attribute-name completion inside an attribute.
    {
      final CompletionVariant variant = new CompletionVariant(createAttributeCompletionFilter());
      variant.includeScopeClass(XmlAttribute.class);
      variant.addCompletionFilter(TrueFilter.INSTANCE);
      registerVariant(variant);
    }
    // Attribute-value completion, when a subclass supplies a value getter.
    {
      XmlAttributeValueGetter getter = getAttributeValueGetter();
      if (getter != null) {
        final CompletionVariant variant = new CompletionVariant(createAttributeValueCompletionFilter());
        variant.includeScopeClass(XmlAttributeValue.class);
        variant.addCompletion(getter, TailType.NONE);
        variant.addCompletionFilter(TrueFilter.INSTANCE, TailType.NONE);
        registerVariant(variant);
      }
    }
    final ElementFilter entityCompletionFilter = createXmlEntityCompletionFilter();
    // Enumeration values for tags with simple content (schema-defined), but not
    // where entity-reference completion applies.
    {
      final CompletionVariant variant = new CompletionVariant(
        new AndFilter(new XmlTokenTypeFilter(XmlTokenType.XML_DATA_CHARACTERS), new NotFilter(entityCompletionFilter), new ElementFilter() {
          @Override
          public boolean isAcceptable(Object element, PsiElement context) {
            XmlTag tag = PsiTreeUtil.getParentOfType(context, XmlTag.class, false);
            if (tag != null) {
              return XmlUtil.getSchemaSimpleContent(tag) != null;
            }
            return false;
          }
          @Override
          public boolean isClassAcceptable(Class hintClass) {
            return true;
          }
        }));
      variant.includeScopeClass(XmlToken.class, true);
      variant.addCompletion(new SimpleTagContentEnumerationValuesGetter(), TailType.NONE);
      registerVariant(variant);
    }
    // Plain text data characters (no extra completions; keeps scope claimed).
    {
      final CompletionVariant variant = new CompletionVariant(
        new AndFilter(new XmlTokenTypeFilter(XmlTokenType.XML_DATA_CHARACTERS), new NotFilter(entityCompletionFilter)));
      variant.includeScopeClass(XmlToken.class, true);
      registerVariant(variant);
    }
    // Entity references right after '&': complete declared entities and append ';'.
    {
      final CompletionVariant variant = new CompletionVariant(entityCompletionFilter);
      variant.includeScopeClass(XmlToken.class, true);
      variant.addCompletion(new EntityRefGetter());
      variant.setInsertHandler(new EntityRefInsertHandler());
      registerVariant(variant);
    }
  }
  /** Matches positions directly after an {@code &} in text or attribute values. */
  protected ElementFilter createXmlEntityCompletionFilter() {
    return new AndFilter(new LeftNeighbour(new XmlTextFilter("&")), new OrFilter(new XmlTokenTypeFilter(XmlTokenType.XML_DATA_CHARACTERS),
                                                                                new XmlTokenTypeFilter(
                                                                                  XmlTokenType.XML_ATTRIBUTE_VALUE_TOKEN)));
  }
  protected XmlAttributeValueGetter getAttributeValueGetter() {
    return new XmlAttributeValueGetter();
  }
  protected ElementFilter createAttributeCompletionFilter() {
    return TrueFilter.INSTANCE;
  }
  protected ElementFilter createAttributeValueCompletionFilter() {
    return TrueFilter.INSTANCE;
  }
  protected ElementFilter createTagCompletionFilter() {
    return TrueFilter.INSTANCE;
  }
  /** Supplies schema enumeration values for a tag whose type is simple content. */
  private static class SimpleTagContentEnumerationValuesGetter implements ContextGetter {
    @Override
    public Object[] get(final PsiElement context, CompletionContext completionContext) {
      XmlTag tag = PsiTreeUtil.getParentOfType(context, XmlTag.class, false);
      if (tag != null) {
        final XmlTag simpleContent = XmlUtil.getSchemaSimpleContent(tag);
        if (simpleContent != null) {
          final HashSet<String> variants = new HashSet<String>();
          XmlUtil.collectEnumerationValues(simpleContent, variants);
          return ArrayUtil.toObjectArray(variants);
        }
      }
      return ArrayUtil.EMPTY_OBJECT_ARRAY;
    }
  }
  /** Collects entity declarations visible from the current position. */
  protected static class EntityRefGetter implements ContextGetter {
    /**
     * Builds a lookup item for a character-entity declaration, showing the
     * resolved character as a hint (e.g. {@code &#169;} -> the char itself).
     * Returns null when the declaration has no usable single char-entity value.
     */
    @Nullable
    private static Object getLookupItem(@Nullable final XmlEntityDecl decl) {
      if (decl == null) {
        return null;
      }
      final String name = decl.getName();
      if (name == null) {
        return null;
      }
      final XmlAttributeValue value = decl.getValueElement();
      if (value == null) {
        // Fixed: external/parameter entities may have no value element;
        // previously this caused an NPE on value.getNode() below.
        return null;
      }
      final ASTNode node = value.getNode();
      if (node != null) {
        final ASTNode[] nodes = node.getChildren(TokenSet.create(XmlTokenType.XML_CHAR_ENTITY_REF));
        if (nodes.length == 1) {
          final String valueText = nodes[0].getText();
          final int i = valueText.indexOf('#');
          if (i > 0) {
            String s = valueText.substring(i + 1);
            s = StringUtil.trimEnd(s, ";");
            try {
              final int unicodeChar = Integer.valueOf(s).intValue();
              return LookupValueFactory.createLookupValueWithHint(name, null, String.valueOf((char)unicodeChar));
            }
            catch (NumberFormatException e) {
              // Not a numeric character reference — no hint-able item.
              return null;
            }
          }
        }
      }
      return null;
    }
    @Override
    public Object[] get(final PsiElement context, CompletionContext completionContext) {
      XmlFile containingFile = null;
      XmlFile descriptorFile = null;
      final XmlTag tag = PsiTreeUtil.getParentOfType(context, XmlTag.class);
      if (tag != null) {
        containingFile = (XmlFile)tag.getContainingFile();
        descriptorFile = findDescriptorFile(tag, containingFile);
      }
      if (HtmlUtil.isHtml5Context(tag)) {
        // HTML5 has no DTD; use the bundled character-entities DTD instead.
        descriptorFile = XmlUtil.findXmlFile(containingFile, Html5SchemaProvider.getCharsDtdLocation());
      } else if (tag == null) {
        // Not inside a tag (e.g. prolog/top level): derive the descriptor from the document.
        final XmlDocument document = PsiTreeUtil.getParentOfType(context, XmlDocument.class);
        if (document != null) {
          containingFile = (XmlFile)document.getContainingFile();
          final FileType ft = containingFile.getFileType();
          if (HtmlUtil.isHtml5Document(document)) {
            descriptorFile = XmlUtil.findXmlFile(containingFile, Html5SchemaProvider.getCharsDtdLocation());
          } else if(ft != StdFileTypes.XML) {
            final String namespace = ft == StdFileTypes.XHTML || ft == StdFileTypes.JSPX ? XmlUtil.XHTML_URI : XmlUtil.HTML_URI;
            final XmlNSDescriptor nsDescriptor = document.getDefaultNSDescriptor(namespace, true);
            if (nsDescriptor != null) {
              descriptorFile = nsDescriptor.getDescriptorFile();
            }
          }
        }
      }
      if (descriptorFile != null) {
        final List<Object> results = new ArrayList<Object>();
        // System (external) entities only make sense for plain XML files.
        final boolean acceptSystemEntities = containingFile.getFileType() == StdFileTypes.XML;
        final PsiElementProcessor processor = new PsiElementProcessor() {
          @Override
          public boolean execute(@NotNull final PsiElement element) {
            if (element instanceof XmlEntityDecl) {
              final XmlEntityDecl xmlEntityDecl = (XmlEntityDecl)element;
              if (xmlEntityDecl.isInternalReference() || acceptSystemEntities) {
                final String name = xmlEntityDecl.getName();
                final Object _item = getLookupItem(xmlEntityDecl);
                // Fall back to the bare entity name if no hint item was built.
                results.add(_item == null ? name : _item);
              }
            }
            return true;
          }
        };
        XmlUtil.processXmlElements(descriptorFile, processor, true);
        // Also pick up entities declared inline in the document's own prolog.
        if (descriptorFile != containingFile && containingFile.getFileType() == StdFileTypes.XML) {
          final XmlProlog element = containingFile.getDocument().getProlog();
          if (element != null) XmlUtil.processXmlElements(element, processor, true);
        }
        return ArrayUtil.toObjectArray(results);
      }
      return ArrayUtil.EMPTY_OBJECT_ARRAY;
    }
  }
  /**
   * Resolves the DTD/schema file describing {@code tag}: the namespace
   * descriptor's file when available, otherwise the containing file itself when
   * it declares its own doctype.
   */
  public static XmlFile findDescriptorFile(@NotNull XmlTag tag, @NotNull XmlFile containingFile) {
    final XmlElementDescriptor descriptor = tag.getDescriptor();
    final XmlNSDescriptor nsDescriptor = descriptor != null ? descriptor.getNSDescriptor() : null;
    // NOTE(review): assumes containingFile.getDocument()/getProlog() are non-null
    // here — appears to hold for files reaching completion, but worth confirming.
    XmlFile descriptorFile = nsDescriptor != null
                             ? nsDescriptor.getDescriptorFile()
                             : containingFile.getDocument().getProlog().getDoctype() != null ? containingFile : null;
    if (nsDescriptor != null && (descriptorFile == null || descriptorFile.getName().equals(containingFile.getName() + ".dtd"))) {
      descriptorFile = containingFile;
    }
    return descriptorFile;
  }
  /** Inserts the completed entity name and appends the terminating ';'. */
  protected static class EntityRefInsertHandler extends BasicInsertHandler {
    @Override
    public void handleInsert(InsertionContext context, LookupElement item) {
      super.handleInsert(context, item);
      context.setAddCompletionChar(false);
      final CaretModel caretModel = context.getEditor().getCaretModel();
      context.getEditor().getDocument().insertString(caretModel.getOffset(), ";");
      caretModel.moveToOffset(caretModel.getOffset() + 1);
    }
  }
}
| |
/*
* Copyright 2005 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.compiler.runtime.pipeline.impl;
import com.sun.codemodel.CodeWriter;
import com.sun.codemodel.JCodeModel;
import com.sun.codemodel.JPackage;
import com.sun.tools.xjc.BadCommandLineException;
import com.sun.tools.xjc.ErrorReceiver;
import com.sun.tools.xjc.ModelLoader;
import com.sun.tools.xjc.Options;
import com.sun.tools.xjc.model.Model;
import org.drools.compiler.builder.impl.KnowledgeBuilderImpl;
import org.drools.compiler.commons.jci.readers.MemoryResourceReader;
import org.drools.compiler.compiler.PackageRegistry;
import org.drools.compiler.compiler.ProjectJavaCompiler;
import org.drools.compiler.lang.descr.PackageDescr;
import org.drools.compiler.rule.builder.dialect.java.JavaDialect;
import org.drools.core.command.runtime.BatchExecutionCommandImpl;
import org.drools.core.command.runtime.GetGlobalCommand;
import org.drools.core.command.runtime.SetGlobalCommand;
import org.drools.core.command.runtime.process.AbortWorkItemCommand;
import org.drools.core.command.runtime.process.CompleteWorkItemCommand;
import org.drools.core.command.runtime.process.SignalEventCommand;
import org.drools.core.command.runtime.process.StartProcessCommand;
import org.drools.core.command.runtime.rule.DeleteCommand;
import org.drools.core.command.runtime.rule.FireAllRulesCommand;
import org.drools.core.command.runtime.rule.GetObjectsCommand;
import org.drools.core.command.runtime.rule.InsertElementsCommand;
import org.drools.core.command.runtime.rule.InsertObjectCommand;
import org.drools.core.command.runtime.rule.ModifyCommand;
import org.drools.core.command.runtime.rule.ModifyCommand.SetterImpl;
import org.drools.core.command.runtime.rule.QueryCommand;
import org.drools.core.common.DefaultFactHandle;
import org.drools.core.common.ProjectClassLoader;
import org.drools.core.impl.InternalKnowledgeBase;
import org.drools.core.rule.TypeDeclaration;
import org.drools.core.runtime.impl.ExecutionResultImpl;
import org.drools.core.runtime.rule.impl.FlatQueryResults;
import org.drools.core.xml.jaxb.util.JaxbListWrapper;
import org.kie.api.io.Resource;
import org.kie.internal.KnowledgeBase;
import org.kie.internal.builder.KnowledgeBuilder;
import org.kie.internal.builder.KnowledgeBuilderResult;
import org.kie.internal.builder.help.DroolsJaxbHelperProvider;
import org.xml.sax.InputSource;
import org.xml.sax.SAXParseException;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import java.io.ByteArrayOutputStream;
import java.io.FilterOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.Reader;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
public class DroolsJaxbHelperProviderImpl
implements DroolsJaxbHelperProvider {
// Fully-qualified names of the JAXB-annotated command classes registered with
// the generated JAXB context for batch-execution marshalling.
// NOTE(review): BatchExecutionCommandImpl appears twice in this list (first and
// again near the end). The duplicate looks unintentional, but the array length
// is part of the public constant's observable shape, so it is flagged rather
// than removed here — confirm no caller depends on it before deduplicating.
public static final String[] JAXB_ANNOTATED_CMD = {
BatchExecutionCommandImpl.class.getName(),
SetGlobalCommand.class.getName(),
GetGlobalCommand.class.getName(),
FireAllRulesCommand.class.getName(),
InsertElementsCommand.class.getName(),
InsertObjectCommand.class.getName(),
ModifyCommand.class.getName(),
SetterImpl.class.getName(),
QueryCommand.class.getName(),
DeleteCommand.class.getName(),
AbortWorkItemCommand.class.getName(),
SignalEventCommand.class.getName(),
StartProcessCommand.class.getName(),
BatchExecutionCommandImpl.class.getName(),
ExecutionResultImpl.class.getName(),
DefaultFactHandle.class.getName(),
JaxbListWrapper.class.getName(),
FlatQueryResults.class.getName(),
CompleteWorkItemCommand.class.getName(),
GetObjectsCommand.class.getName()};
public static String[] addXsdModel(Resource resource,
KnowledgeBuilderImpl kBuilder,
Options xjcOpts,
String systemId) throws IOException {
InputSource source = new InputSource( new CachingRewindableReader( resource.getReader() ) );
source.setSystemId( systemId.trim().startsWith( "." ) ? systemId : "." + systemId );
xjcOpts.addGrammar( source );
try {
xjcOpts.parseArguments( new String[]{"-npa"} );
} catch ( BadCommandLineException e ) {
throw new IllegalArgumentException( "Unable to parse arguments",
e );
}
ErrorReceiver errorReceiver = new JaxbErrorReceiver4Drools();
Model model = ModelLoader.load( xjcOpts,
new JCodeModel(),
errorReceiver );
model.generateCode( xjcOpts, errorReceiver );
MapVfsCodeWriter codeWriter = new MapVfsCodeWriter();
model.codeModel.build( xjcOpts.createCodeWriter( codeWriter ) );
MemoryResourceReader src = new MemoryResourceReader();
boolean useProjectClassLoader = kBuilder.getRootClassLoader() instanceof ProjectClassLoader;
List<String> classNames = new ArrayList<String>();
List<String> srcNames = new ArrayList<String>();
for ( Entry<String, byte[]> entry : codeWriter.getMap().entrySet() ) {
String name = entry.getKey();
int dotPos = name.lastIndexOf( '.' );
String pkgName = name.substring( 0, dotPos );
if ( !name.endsWith( "package-info.java" ) ) {
classNames.add( pkgName );
}
dotPos = pkgName.lastIndexOf( '.' );
if ( dotPos != -1 ) {
pkgName = pkgName.substring( 0, dotPos );
}
PackageRegistry pkgReg = kBuilder.getPackageRegistry( pkgName );
if ( pkgReg == null ) {
kBuilder.addPackage( new PackageDescr( pkgName ) );
pkgReg = kBuilder.getPackageRegistry( pkgName );
}
if (useProjectClassLoader) {
String srcName = convertToResource( entry.getKey() );
src.add( srcName, entry.getValue() );
srcNames.add( srcName );
} else {
JavaDialect dialect = (JavaDialect) pkgReg.getDialectCompiletimeRegistry().getDialect( "java" );
dialect.addSrc( convertToResource( entry.getKey() ),
entry.getValue() );
}
}
if (useProjectClassLoader) {
ProjectJavaCompiler compiler = new ProjectJavaCompiler(kBuilder.getBuilderConfiguration());
List<KnowledgeBuilderResult> results = compiler.compileAll((ProjectClassLoader)kBuilder.getRootClassLoader(),
srcNames,
src);
for (String className : classNames) {
Class<?> clazz = null;
try {
clazz = Class.forName( className, true, kBuilder.getRootClassLoader() );
} catch (ClassNotFoundException e) {
continue;
}
String pkgName = className.substring( 0, className.lastIndexOf( '.' ) );
PackageRegistry pkgReg = kBuilder.getPackageRegistry(pkgName);
pkgReg.getPackage().addTypeDeclaration( new TypeDeclaration( clazz ) );
}
kBuilder.updateResults(results);
} else {
kBuilder.compileAll();
kBuilder.updateResults();
}
return classNames.toArray( new String[classNames.size()] );
}
public static JAXBContext createDroolsJaxbContext(List<String> classNames, Map<String, ?> properties) throws ClassNotFoundException, JAXBException {
int i = 0;
Class<?>[] classes = new Class[classNames.size() + JAXB_ANNOTATED_CMD.length];
for (i = 0; i < classNames.size(); i++) {
classes[i] = Class.forName(classNames.get(i));
}
int j = 0;
for (i = classNames.size(); i < classes.length; i++, j++) {
classes[i] = Class.forName(JAXB_ANNOTATED_CMD[j]);
}
return JAXBContext.newInstance(classes, properties);
}
public String[] addXsdModel(Resource resource,
KnowledgeBuilder kbuilder,
Options xjcOpts,
String systemId) throws IOException {
return addXsdModel( resource, (KnowledgeBuilderImpl)kbuilder, xjcOpts, systemId );
}
public JAXBContext newJAXBContext(String[] classNames,
KnowledgeBase kbase) throws JAXBException {
return newJAXBContext( classNames,
Collections.<String, Object> emptyMap(),
kbase );
}
public JAXBContext newJAXBContext(String[] classNames,
Map<String, ? > properties,
KnowledgeBase kbase) throws JAXBException {
ClassLoader classLoader = ((InternalKnowledgeBase) kbase).getRootClassLoader();
int i = 0;
try {
Class<?>[] classes = new Class[classNames.length
+ JAXB_ANNOTATED_CMD.length];
for (i = 0; i < classNames.length; i++) {
classes[i] = classLoader.loadClass(classNames[i]);
}
int j = 0;
for (i = classNames.length; i < classes.length; i++, j++) {
classes[i] = classLoader.loadClass(JAXB_ANNOTATED_CMD[j]);
}
return JAXBContext.newInstance(classes, properties);
} catch (ClassNotFoundException e) {
throw new JAXBException("Unable to resolve class '" + classNames[i] + "'", e);
}
}
private static String convertToResource(String string) {
int lastDot = string.lastIndexOf( '.' );
return string.substring( 0,
lastDot ).replace( '.',
'/' ) + string.substring( lastDot,
string.length() );
}
public static class MapVfsCodeWriter extends CodeWriter {
private final Map<String, byte[]> map;
private ByteArrayOutputStream currentBaos;
private String currentPath;
public MapVfsCodeWriter() {
this.map = new LinkedHashMap<String, byte[]>();
}
public OutputStream openBinary(JPackage pkg,
String fileName) throws IOException {
String pkgName = pkg.name();
if ( pkgName.length() != 0 ) {
pkgName += '.';
}
if ( this.currentBaos != null ) {
this.currentBaos.close();
this.map.put( this.currentPath,
this.currentBaos.toByteArray() );
}
this.currentPath = pkgName + fileName;
this.currentBaos = new ByteArrayOutputStream();
return new FilterOutputStream( this.currentBaos ) {
public void close() {
// don't let this stream close
}
};
}
public void close() throws IOException {
if ( this.currentBaos != null ) {
this.currentBaos.close();
this.map.put( this.currentPath,
this.currentBaos.toByteArray() );
}
}
public Map<String, byte[]> getMap() {
return this.map;
}
}
public static class JaxbErrorReceiver4Drools extends ErrorReceiver {
public String stage = "processing";
public void warning(SAXParseException e) {
e.printStackTrace();
}
public void error(SAXParseException e) {
e.printStackTrace();
}
public void fatalError(SAXParseException e) {
e.printStackTrace();
}
public void info(SAXParseException e) {
e.printStackTrace();
}
}
public static class CachingRewindableReader extends Reader {
private Reader source;
private boolean sourceClosed;
private RewindableStringReader cache;
private StringBuilder strBuilder;
public CachingRewindableReader(Reader source) {
this.source = source;
this.strBuilder = new StringBuilder();
}
public int read(char[] cbuf,
int off,
int len) throws IOException {
int value = 0;
if ( this.cache == null ) {
value = this.source.read( cbuf,
off,
len );
if ( value != -1 ) {
// keep appening to the stringBuilder until we are at the end
this.strBuilder.append( cbuf,
off,
value );
} else {
// we are at the end, so switch to cache
this.cache = new RewindableStringReader( strBuilder.toString() );
}
} else {
value = this.cache.read( cbuf,
off,
len );
}
return value;
}
public void close() throws IOException {
if ( !sourceClosed ) {
// close the source, we only do this once.
this.source.close();
this.sourceClosed = true;
}
if ( cache == null ) {
// switch to cache if we haven't already
this.cache = new RewindableStringReader( strBuilder.toString() );
} else {
// reset the cache, so it can be read again.
this.cache.reset();
}
}
}
public static class RewindableStringReader extends StringReader {
public RewindableStringReader(String s) {
super( s );
}
public void close() {
try {
reset();
} catch ( IOException e ) {
e.printStackTrace();
}
}
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search;
import static java.util.Collections.unmodifiableMap;
import static org.elasticsearch.common.unit.TimeValue.timeValueMillis;
import static org.elasticsearch.common.unit.TimeValue.timeValueMinutes;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.TopDocs;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.common.util.concurrent.ConcurrentMapLong;
import org.elasticsearch.common.util.concurrent.FutureUtils;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentLocation;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.InnerHitBuilder;
import org.elasticsearch.index.shard.IndexEventListener;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.SearchOperationListener;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.aggregations.AggregationInitializationException;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.AggregatorParsers;
import org.elasticsearch.search.aggregations.SearchContextAggregations;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.dfs.DfsPhase;
import org.elasticsearch.search.dfs.DfsSearchResult;
import org.elasticsearch.search.fetch.FetchPhase;
import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.QueryFetchSearchResult;
import org.elasticsearch.search.fetch.ScrollQueryFetchSearchResult;
import org.elasticsearch.search.fetch.ShardFetchRequest;
import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsContext;
import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsContext.FieldDataField;
import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsFetchSubPhase;
import org.elasticsearch.search.fetch.script.ScriptFieldsContext.ScriptField;
import org.elasticsearch.search.highlight.HighlightBuilder;
import org.elasticsearch.search.internal.DefaultSearchContext;
import org.elasticsearch.search.internal.InternalScrollSearchRequest;
import org.elasticsearch.search.internal.ScrollContext;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.SearchContext.Lifetime;
import org.elasticsearch.search.internal.ShardSearchRequest;
import org.elasticsearch.search.profile.Profilers;
import org.elasticsearch.search.query.QueryPhase;
import org.elasticsearch.search.query.QuerySearchRequest;
import org.elasticsearch.search.query.QuerySearchResult;
import org.elasticsearch.search.query.QuerySearchResultProvider;
import org.elasticsearch.search.query.ScrollQuerySearchResult;
import org.elasticsearch.search.rescore.RescoreBuilder;
import org.elasticsearch.search.searchafter.SearchAfterBuilder;
import org.elasticsearch.search.sort.SortAndFormats;
import org.elasticsearch.search.sort.SortBuilder;
import org.elasticsearch.search.suggest.Suggesters;
import org.elasticsearch.threadpool.ThreadPool;
import com.carrotsearch.hppc.ObjectFloatHashMap;
/**
*
*/
public class SearchService extends AbstractLifecycleComponent<SearchService> implements IndexEventListener {
// we can have 5 minutes here, since we make sure to clean with search requests and when shard/index closes
public static final Setting<TimeValue> DEFAULT_KEEPALIVE_SETTING =
Setting.positiveTimeSetting("search.default_keep_alive", timeValueMinutes(5), Property.NodeScope);
public static final Setting<TimeValue> KEEPALIVE_INTERVAL_SETTING =
Setting.positiveTimeSetting("search.keep_alive_interval", timeValueMinutes(1), Property.NodeScope);
public static final TimeValue NO_TIMEOUT = timeValueMillis(-1);
public static final Setting<TimeValue> DEFAULT_SEARCH_TIMEOUT_SETTING =
Setting.timeSetting("search.default_search_timeout", NO_TIMEOUT, Property.Dynamic, Property.NodeScope);
private final ThreadPool threadPool;
private final ClusterService clusterService;
private final IndicesService indicesService;
private final ScriptService scriptService;
private final BigArrays bigArrays;
private final DfsPhase dfsPhase;
private final QueryPhase queryPhase;
private final FetchPhase fetchPhase;
private final long defaultKeepAlive;
private volatile TimeValue defaultSearchTimeout;
private final ScheduledFuture<?> keepAliveReaper;
private final AtomicLong idGenerator = new AtomicLong();
private final ConcurrentMapLong<SearchContext> activeContexts = ConcurrentCollections.newConcurrentMapLongWithAggressiveConcurrency();
private final Map<String, SearchParseElement> elementParsers;
private final ParseFieldMatcher parseFieldMatcher;
private final AggregatorParsers aggParsers;
private final Suggesters suggesters;
    /**
     * Wires the search phases together, merges their parse elements, schedules the
     * keep-alive reaper that frees idle contexts, and registers for dynamic updates of
     * the default search timeout.
     */
    @Inject
    public SearchService(Settings settings, ClusterSettings clusterSettings, ClusterService clusterService, IndicesService indicesService,
                         ThreadPool threadPool, ScriptService scriptService, BigArrays bigArrays, DfsPhase dfsPhase,
                         QueryPhase queryPhase, FetchPhase fetchPhase, AggregatorParsers aggParsers, Suggesters suggesters) {
        super(settings);
        this.aggParsers = aggParsers;
        this.suggesters = suggesters;
        this.parseFieldMatcher = new ParseFieldMatcher(settings);
        this.threadPool = threadPool;
        this.clusterService = clusterService;
        this.indicesService = indicesService;
        this.scriptService = scriptService;
        this.bigArrays = bigArrays;
        this.dfsPhase = dfsPhase;
        this.queryPhase = queryPhase;
        this.fetchPhase = fetchPhase;
        TimeValue keepAliveInterval = KEEPALIVE_INTERVAL_SETTING.get(settings);
        this.defaultKeepAlive = DEFAULT_KEEPALIVE_SETTING.get(settings).millis();
        // each phase contributes its own parse elements; a later putAll wins on a key clash
        Map<String, SearchParseElement> elementParsers = new HashMap<>();
        elementParsers.putAll(dfsPhase.parseElements());
        elementParsers.putAll(queryPhase.parseElements());
        elementParsers.putAll(fetchPhase.parseElements());
        this.elementParsers = unmodifiableMap(elementParsers);
        this.keepAliveReaper = threadPool.scheduleWithFixedDelay(new Reaper(), keepAliveInterval);
        defaultSearchTimeout = DEFAULT_SEARCH_TIMEOUT_SETTING.get(settings);
        clusterSettings.addSettingsUpdateConsumer(DEFAULT_SEARCH_TIMEOUT_SETTING, this::setDefaultSearchTimeout);
    }
    // Dynamic-setting consumer for search.default_search_timeout; the field is volatile
    // so subsequent searches pick up the new value.
    private void setDefaultSearchTimeout(TimeValue defaultSearchTimeout) {
        this.defaultSearchTimeout = defaultSearchTimeout;
    }
    /**
     * Frees all search contexts of an index once it is genuinely closed; the metadata
     * state check guards against relocations that merely make an index look closed.
     */
    @Override
    public void afterIndexClosed(Index index, Settings indexSettings) {
        // once an index is closed we can just clean up all the pending search context information
        // to release memory and let references to the filesystem go etc.
        IndexMetaData idxMeta = SearchService.this.clusterService.state().metaData().index(index);
        if (idxMeta != null && idxMeta.getState() == IndexMetaData.State.CLOSE) {
            // we need to check if it's really closed
            // since sometimes due to a relocation we already closed the shard and that causes the index to be closed
            // if we then close all the contexts we can get some search failures along the way which are not expected.
            // it's fine to keep the contexts open if the index is still "alive"
            // unfortunately we don't have a clear way to signal today why an index is closed.
            afterIndexDeleted(index, indexSettings);
        }
    }
    /** Frees every active search context that belongs to the deleted index. */
    @Override
    public void afterIndexDeleted(Index index, Settings indexSettings) {
        freeAllContextForIndex(index);
    }
    /** Registers a freshly created context; ids are unique, so there must be no previous mapping. */
    protected void putContext(SearchContext context) {
        final SearchContext previous = activeContexts.put(context.id(), context);
        assert previous == null;
    }
    /** Removes and returns the context with the given id, or null if it was already freed. */
    protected SearchContext removeContext(long id) {
        return activeContexts.remove(id);
    }
    @Override
    protected void doStart() {
    }
    /** Frees every still-active search context on shutdown. */
    @Override
    protected void doStop() {
        for (final SearchContext context : activeContexts.values()) {
            freeContext(context.id());
        }
    }
    @Override
    protected void doClose() {
        doStop();
        // cancel the keep-alive reaper scheduled in the constructor
        FutureUtils.cancel(keepAliveReaper);
    }
    /**
     * Runs the distributed-frequencies (DFS) phase for a shard request in a newly created
     * context. On success the context stays registered for the following query phase; on
     * failure it is freed and the error is rethrown as a runtime exception.
     */
    public DfsSearchResult executeDfsPhase(ShardSearchRequest request) throws IOException {
        final SearchContext context = createAndPutContext(request);
        try {
            contextProcessing(context);
            dfsPhase.execute(context);
            contextProcessedSuccessfully(context);
            return context.dfsResult();
        } catch (Throwable e) {
            logger.trace("Dfs phase failed", e);
            processFailure(context, e);
            throw ExceptionsHelper.convertToRuntime(e);
        } finally {
            cleanContext(context);
        }
    }
/**
* Try to load the query results from the cache or execute the query phase directly if the cache cannot be used.
*/
private void loadOrExecuteQueryPhase(final ShardSearchRequest request, final SearchContext context,
final QueryPhase queryPhase) throws Exception {
final boolean canCache = indicesService.canCache(request, context);
if (canCache) {
indicesService.loadIntoContext(request, context, queryPhase);
} else {
queryPhase.execute(context);
}
}
    /**
     * Query phase for an initial (query-then-fetch) shard search. Creates a new context;
     * it is freed immediately when there are no hits and no scroll, otherwise kept alive
     * for the fetch phase. Listener callbacks bracket the phase for stats/plugins.
     */
    public QuerySearchResultProvider executeQueryPhase(ShardSearchRequest request) throws IOException {
        final SearchContext context = createAndPutContext(request);
        final SearchOperationListener operationListener = context.indexShard().getSearchOperationListener();
        try {
            operationListener.onPreQueryPhase(context);
            long time = System.nanoTime();
            contextProcessing(context);
            loadOrExecuteQueryPhase(request, context, queryPhase);
            if (context.queryResult().topDocs().scoreDocs.length == 0 && context.scrollContext() == null) {
                freeContext(context.id());
            } else {
                contextProcessedSuccessfully(context);
            }
            operationListener.onQueryPhase(context, System.nanoTime() - time);
            return context.queryResult();
        } catch (Throwable e) {
            // execution exception can happen while loading the cache, strip it
            if (e instanceof ExecutionException) {
                e = e.getCause();
            }
            operationListener.onFailedQueryPhase(context);
            logger.trace("Query phase failed", e);
            processFailure(context, e);
            throw ExceptionsHelper.convertToRuntime(e);
        } finally {
            cleanContext(context);
        }
    }
    /**
     * Query phase for a scroll continuation: looks up the existing context, applies the
     * scroll parameters, and re-executes the query.
     */
    public ScrollQuerySearchResult executeQueryPhase(InternalScrollSearchRequest request) {
        final SearchContext context = findContext(request.id());
        SearchOperationListener operationListener = context.indexShard().getSearchOperationListener();
        try {
            operationListener.onPreQueryPhase(context);
            long time = System.nanoTime();
            contextProcessing(context);
            processScroll(request, context);
            queryPhase.execute(context);
            contextProcessedSuccessfully(context);
            operationListener.onQueryPhase(context, System.nanoTime() - time);
            return new ScrollQuerySearchResult(context.queryResult(), context.shardTarget());
        } catch (Throwable e) {
            operationListener.onFailedQueryPhase(context);
            logger.trace("Query phase failed", e);
            processFailure(context, e);
            throw ExceptionsHelper.convertToRuntime(e);
        } finally {
            cleanContext(context);
        }
    }
    /**
     * Query phase of a DFS-then-query-then-fetch search: applies the aggregated term
     * frequencies from the DFS phase to the existing context, then runs the query.
     */
    public QuerySearchResult executeQueryPhase(QuerySearchRequest request) {
        final SearchContext context = findContext(request.id());
        contextProcessing(context);
        context.searcher().setAggregatedDfs(request.dfs());
        IndexShard indexShard = context.indexShard();
        SearchOperationListener operationListener = indexShard.getSearchOperationListener();
        try {
            operationListener.onPreQueryPhase(context);
            long time = System.nanoTime();
            queryPhase.execute(context);
            if (context.queryResult().topDocs().scoreDocs.length == 0 && context.scrollContext() == null) {
                // no hits, we can release the context since there will be no fetch phase
                freeContext(context.id());
            } else {
                contextProcessedSuccessfully(context);
            }
            operationListener.onQueryPhase(context, System.nanoTime() - time);
            return context.queryResult();
        } catch (Throwable e) {
            operationListener.onFailedQueryPhase(context);
            logger.trace("Query phase failed", e);
            processFailure(context, e);
            throw ExceptionsHelper.convertToRuntime(e);
        } finally {
            cleanContext(context);
        }
    }
private boolean fetchPhaseShouldFreeContext(SearchContext context) {
if (context.scrollContext() == null) {
// simple search, no scroll
return true;
} else {
// scroll request, but the scroll was not extended
return context.scrollContext().scroll == null;
}
}
    /**
     * Combined query+fetch for a single round-trip search. Creates a new context, runs the
     * query (possibly through the request cache), then fetches; the context is freed after
     * the fetch unless a scroll keeps it alive.
     */
    public QueryFetchSearchResult executeFetchPhase(ShardSearchRequest request) throws IOException {
        final SearchContext context = createAndPutContext(request);
        contextProcessing(context);
        try {
            SearchOperationListener operationListener = context.indexShard().getSearchOperationListener();
            operationListener.onPreQueryPhase(context);
            long time = System.nanoTime();
            try {
                loadOrExecuteQueryPhase(request, context, queryPhase);
            } catch (Throwable e) {
                operationListener.onFailedQueryPhase(context);
                throw ExceptionsHelper.convertToRuntime(e);
            }
            long time2 = System.nanoTime();
            operationListener.onQueryPhase(context, time2 - time);
            operationListener.onPreFetchPhase(context);
            try {
                shortcutDocIdsToLoad(context);
                fetchPhase.execute(context);
                if (fetchPhaseShouldFreeContext(context)) {
                    freeContext(context.id());
                } else {
                    contextProcessedSuccessfully(context);
                }
            } catch (Throwable e) {
                operationListener.onFailedFetchPhase(context);
                throw ExceptionsHelper.convertToRuntime(e);
            }
            operationListener.onFetchPhase(context, System.nanoTime() - time2);
            return new QueryFetchSearchResult(context.queryResult(), context.fetchResult());
        } catch (Throwable e) {
            logger.trace("Fetch phase failed", e);
            processFailure(context, e);
            throw ExceptionsHelper.convertToRuntime(e);
        } finally {
            cleanContext(context);
        }
    }
    /**
     * Combined query+fetch for a DFS-then-query-and-fetch search: applies the aggregated
     * DFS statistics to the existing context, then runs query and fetch back to back.
     */
    public QueryFetchSearchResult executeFetchPhase(QuerySearchRequest request) {
        final SearchContext context = findContext(request.id());
        contextProcessing(context);
        context.searcher().setAggregatedDfs(request.dfs());
        try {
            SearchOperationListener operationListener = context.indexShard().getSearchOperationListener();
            operationListener.onPreQueryPhase(context);
            long time = System.nanoTime();
            try {
                queryPhase.execute(context);
            } catch (Throwable e) {
                operationListener.onFailedQueryPhase(context);
                throw ExceptionsHelper.convertToRuntime(e);
            }
            long time2 = System.nanoTime();
            operationListener.onQueryPhase(context, time2 - time);
            operationListener.onPreFetchPhase(context);
            try {
                shortcutDocIdsToLoad(context);
                fetchPhase.execute(context);
                if (fetchPhaseShouldFreeContext(context)) {
                    freeContext(request.id());
                } else {
                    contextProcessedSuccessfully(context);
                }
            } catch (Throwable e) {
                operationListener.onFailedFetchPhase(context);
                throw ExceptionsHelper.convertToRuntime(e);
            }
            operationListener.onFetchPhase(context, System.nanoTime() - time2);
            return new QueryFetchSearchResult(context.queryResult(), context.fetchResult());
        } catch (Throwable e) {
            logger.trace("Fetch phase failed", e);
            processFailure(context, e);
            throw ExceptionsHelper.convertToRuntime(e);
        } finally {
            cleanContext(context);
        }
    }
    /**
     * Combined query+fetch for a scroll continuation: looks up the existing context,
     * applies the scroll parameters, then runs query and fetch back to back.
     */
    public ScrollQueryFetchSearchResult executeFetchPhase(InternalScrollSearchRequest request) {
        final SearchContext context = findContext(request.id());
        contextProcessing(context);
        try {
            SearchOperationListener operationListener = context.indexShard().getSearchOperationListener();
            processScroll(request, context);
            operationListener.onPreQueryPhase(context);
            long time = System.nanoTime();
            try {
                queryPhase.execute(context);
            } catch (Throwable e) {
                operationListener.onFailedQueryPhase(context);
                throw ExceptionsHelper.convertToRuntime(e);
            }
            long time2 = System.nanoTime();
            operationListener.onQueryPhase(context, time2 - time);
            operationListener.onPreFetchPhase(context);
            try {
                shortcutDocIdsToLoad(context);
                fetchPhase.execute(context);
                if (fetchPhaseShouldFreeContext(context)) {
                    freeContext(request.id());
                } else {
                    contextProcessedSuccessfully(context);
                }
            } catch (Throwable e) {
                operationListener.onFailedFetchPhase(context);
                throw ExceptionsHelper.convertToRuntime(e);
            }
            operationListener.onFetchPhase(context, System.nanoTime() - time2);
            return new ScrollQueryFetchSearchResult(new QueryFetchSearchResult(context.queryResult(), context.fetchResult()), context.shardTarget());
        } catch (Throwable e) {
            logger.trace("Fetch phase failed", e);
            processFailure(context, e);
            throw ExceptionsHelper.convertToRuntime(e);
        } finally {
            cleanContext(context);
        }
    }
    /**
     * Fetch phase of a query-then-fetch search: loads the documents selected by the
     * coordinating node from the existing context.
     * NOTE(review): lastEmittedDoc() is only dereferenced through scrollContext(); this
     * presumably is non-null only for scroll requests — verify against the coordinator.
     */
    public FetchSearchResult executeFetchPhase(ShardFetchRequest request) {
        final SearchContext context = findContext(request.id());
        contextProcessing(context);
        final SearchOperationListener operationListener = context.indexShard().getSearchOperationListener();
        try {
            if (request.lastEmittedDoc() != null) {
                context.scrollContext().lastEmittedDoc = request.lastEmittedDoc();
            }
            context.docIdsToLoad(request.docIds(), 0, request.docIdsSize());
            operationListener.onPreFetchPhase(context);
            long time = System.nanoTime();
            fetchPhase.execute(context);
            if (fetchPhaseShouldFreeContext(context)) {
                freeContext(request.id());
            } else {
                contextProcessedSuccessfully(context);
            }
            operationListener.onFetchPhase(context, System.nanoTime() - time);
            return context.fetchResult();
        } catch (Throwable e) {
            operationListener.onFailedFetchPhase(context);
            logger.trace("Fetch phase failed", e);
            processFailure(context, e);
            throw ExceptionsHelper.convertToRuntime(e);
        } finally {
            cleanContext(context);
        }
    }
private SearchContext findContext(long id) throws SearchContextMissingException {
SearchContext context = activeContexts.get(id);
if (context == null) {
throw new SearchContextMissingException(id);
}
SearchContext.setCurrent(context);
return context;
}
    /**
     * Creates a context and registers it in the active map, notifying the shard's search
     * operation listener; if registration or notification fails the context is freed again
     * so nothing leaks.
     */
    final SearchContext createAndPutContext(ShardSearchRequest request) throws IOException {
        SearchContext context = createContext(request, null);
        boolean success = false;
        try {
            putContext(context);
            if (request.scroll() != null) {
                context.indexShard().getSearchOperationListener().onNewScrollContext(context);
            }
            context.indexShard().getSearchOperationListener().onNewContext(context);
            success = true;
            return context;
        } finally {
            if (!success) {
                freeContext(context.id());
            }
        }
    }
    /**
     * Builds a DefaultSearchContext for the request: resolves index and shard, acquires an
     * engine searcher (unless one is supplied), parses the search source — optionally
     * rendered from a template script first — applies the from/size defaults (0/10), lets
     * each phase pre-process, and sets the keep-alive. The context is closed on any failure.
     */
    final SearchContext createContext(ShardSearchRequest request, @Nullable Engine.Searcher searcher) throws IOException {
        IndexService indexService = indicesService.indexServiceSafe(request.shardId().getIndex());
        IndexShard indexShard = indexService.getShard(request.shardId().getId());
        SearchShardTarget shardTarget = new SearchShardTarget(clusterService.localNode().getId(), indexShard.shardId());
        // a caller-supplied searcher (e.g. for validation) takes precedence over acquiring one
        Engine.Searcher engineSearcher = searcher == null ? indexShard.acquireSearcher("search") : searcher;
        DefaultSearchContext context = new DefaultSearchContext(idGenerator.incrementAndGet(), request, shardTarget, engineSearcher,
            indexService,
            indexShard, scriptService, bigArrays, threadPool.estimatedTimeInMillisCounter(), parseFieldMatcher,
            defaultSearchTimeout, fetchPhase);
        SearchContext.setCurrent(context);
        try {
            request.rewrite(context.getQueryShardContext());
            // reset that we have used nowInMillis from the context since it may
            // have been rewritten so its no longer in the query and the request can
            // be cached. If it is still present in the request (e.g. in a range
            // aggregation) it will still be caught when the aggregation is
            // evaluated.
            context.resetNowInMillisUsed();
            if (request.scroll() != null) {
                context.scrollContext(new ScrollContext());
                context.scrollContext().scroll = request.scroll();
            }
            if (request.template() != null) {
                // render the stored/inline template into a concrete search source first
                ExecutableScript executable = this.scriptService.executable(request.template(), ScriptContext.Standard.SEARCH,
                    Collections.emptyMap(), context.getQueryShardContext().getClusterState());
                BytesReference run = (BytesReference) executable.run();
                try (XContentParser parser = XContentFactory.xContent(run).createParser(run)) {
                    QueryParseContext queryParseContext = new QueryParseContext(indicesService.getIndicesQueryRegistry(), parser,
                        parseFieldMatcher);
                    parseSource(context, SearchSourceBuilder.fromXContent(queryParseContext, aggParsers, suggesters));
                }
            }
            parseSource(context, request.source());
            // if the from and size are still not set, default them
            if (context.from() == -1) {
                context.from(0);
            }
            if (context.size() == -1) {
                context.size(10);
            }
            // pre process
            dfsPhase.preProcess(context);
            queryPhase.preProcess(context);
            fetchPhase.preProcess(context);
            // compute the context keep alive
            long keepAlive = defaultKeepAlive;
            if (request.scroll() != null && request.scroll().keepAlive() != null) {
                keepAlive = request.scroll().keepAlive().millis();
            }
            context.keepAlive(keepAlive);
        } catch (Throwable e) {
            // close releases the engine searcher acquired above
            context.close();
            throw ExceptionsHelper.convertToRuntime(e);
        }
        return context;
    }
private void freeAllContextForIndex(Index index) {
assert index != null;
for (SearchContext ctx : activeContexts.values()) {
if (index.equals(ctx.indexShard().shardId().getIndex())) {
freeContext(ctx.id());
}
}
}
    /**
     * Frees the context with the given id: notifies the shard's operation listener (plus
     * the scroll listener when a scroll is attached) and always closes the context.
     *
     * @return true if a context was found and freed, false if none existed for the id
     */
    public boolean freeContext(long id) {
        final SearchContext context = removeContext(id);
        if (context != null) {
            try {
                context.indexShard().getSearchOperationListener().onFreeContext(context);
                if (context.scrollContext() != null) {
                    context.indexShard().getSearchOperationListener().onFreeScrollContext(context);
                }
            } finally {
                // close even if a listener throws, so the underlying searcher is released
                context.close();
            }
            return true;
        }
        return false;
    }
public void freeAllScrollContexts() {
for (SearchContext searchContext : activeContexts.values()) {
if (searchContext.scrollContext() != null) {
freeContext(searchContext.id());
}
}
}
    /** Marks the context as in-flight so the keep-alive reaper will not free it mid-search. */
    private void contextProcessing(SearchContext context) {
        // disable timeout while executing a search
        context.accessed(-1);
    }
    /** Re-arms the idle timeout by stamping the context with the current time. */
    private void contextProcessedSuccessfully(SearchContext context) {
        context.accessed(threadPool.estimatedTimeInMillis());
    }
    /** Detaches the context from the current thread and releases phase-scoped resources. */
    private void cleanContext(SearchContext context) {
        assert context == SearchContext.current();
        context.clearReleasables(Lifetime.PHASE);
        SearchContext.removeCurrent();
    }
    /** Frees the failed context and fails the shard when the error indicates index corruption. */
    private void processFailure(SearchContext context, Throwable t) {
        freeContext(context.id());
        try {
            if (Lucene.isCorruptionException(t)) {
                context.indexShard().failShard("search execution corruption failure", t);
            }
        } catch (Throwable e) {
            logger.warn("failed to process shard failure to (potentially) send back shard failure on corruption", e);
        }
    }
    /**
     * Transfers every element of the request's {@code SearchSourceBuilder} onto
     * the given search context: paging, query and post filter (including inner
     * hits), sorting, aggregations, suggestions, rescoring, highlighting, field
     * extraction, scripted fields, legacy {@code ext} elements, search_after and
     * slice settings.
     *
     * @param context the context being prepared for execution
     * @param source the parsed request body; {@code null} means an empty request
     * @throws SearchContextException if any element cannot be converted into its
     *         executable form
     */
    private void parseSource(DefaultSearchContext context, SearchSourceBuilder source) throws SearchContextException {
        // nothing to parse...
        if (source == null) {
            return;
        }
        QueryShardContext queryShardContext = context.getQueryShardContext();
        // paging window
        context.from(source.from());
        context.size(source.size());
        // per-index query boost targeting this shard's index, if one was requested
        ObjectFloatHashMap<String> indexBoostMap = source.indexBoost();
        if (indexBoostMap != null) {
            Float indexBoost = indexBoostMap.get(context.shardTarget().index());
            if (indexBoost != null) {
                context.queryBoost(indexBoost);
            }
        }
        // inner hits are collected from both the main query and the post filter
        Map<String, InnerHitBuilder> innerHitBuilders = new HashMap<>();
        if (source.query() != null) {
            InnerHitBuilder.extractInnerHits(source.query(), innerHitBuilders);
            context.parsedQuery(queryShardContext.toQuery(source.query()));
        }
        if (source.postFilter() != null) {
            InnerHitBuilder.extractInnerHits(source.postFilter(), innerHitBuilders);
            context.parsedPostFilter(queryShardContext.toQuery(source.postFilter()));
        }
        if (innerHitBuilders.size() > 0) {
            for (Map.Entry<String, InnerHitBuilder> entry : innerHitBuilders.entrySet()) {
                try {
                    entry.getValue().build(context, context.innerHits());
                } catch (IOException e) {
                    throw new SearchContextException(context, "failed to build inner_hits", e);
                }
            }
        }
        if (source.sorts() != null) {
            try {
                // an absent Optional means "no explicit sort" (e.g. sort by _score only)
                Optional<SortAndFormats> optionalSort = SortBuilder.buildSort(source.sorts(), context.getQueryShardContext());
                if (optionalSort.isPresent()) {
                    context.sort(optionalSort.get());
                }
            } catch (IOException e) {
                throw new SearchContextException(context, "failed to create sort elements", e);
            }
        }
        context.trackScores(source.trackScores());
        if (source.minScore() != null) {
            context.minimumScore(source.minScore());
        }
        if (source.profile()) {
            context.setProfilers(new Profilers(context.searcher()));
        }
        context.timeoutInMillis(source.timeoutInMillis());
        context.terminateAfter(source.terminateAfter());
        if (source.aggregations() != null) {
            try {
                AggregationContext aggContext = new AggregationContext(context);
                AggregatorFactories factories = source.aggregations().build(aggContext, null);
                factories.validate();
                context.aggregations(new SearchContextAggregations(factories));
            } catch (IOException e) {
                throw new AggregationInitializationException("Failed to create aggregators", e);
            }
        }
        if (source.suggest() != null) {
            try {
                context.suggest(source.suggest().build(queryShardContext));
            } catch (IOException e) {
                throw new SearchContextException(context, "failed to create SuggestionSearchContext", e);
            }
        }
        if (source.rescores() != null) {
            try {
                for (RescoreBuilder<?> rescore : source.rescores()) {
                    context.addRescore(rescore.build(queryShardContext));
                }
            } catch (IOException e) {
                throw new SearchContextException(context, "failed to create RescoreSearchContext", e);
            }
        }
        if (source.fields() != null) {
            context.fieldNames().addAll(source.fields());
        }
        if (source.explain() != null) {
            context.explain(source.explain());
        }
        if (source.fetchSource() != null) {
            context.fetchSourceContext(source.fetchSource());
        }
        if (source.fieldDataFields() != null) {
            FieldDataFieldsContext fieldDataFieldsContext = context.getFetchSubPhaseContext(FieldDataFieldsFetchSubPhase.CONTEXT_FACTORY);
            for (String field : source.fieldDataFields()) {
                fieldDataFieldsContext.add(new FieldDataField(field));
            }
            fieldDataFieldsContext.setHitExecutionNeeded(true);
        }
        if (source.highlighter() != null) {
            HighlightBuilder highlightBuilder = source.highlighter();
            try {
                context.highlight(highlightBuilder.build(queryShardContext));
            } catch (IOException e) {
                throw new SearchContextException(context, "failed to create SearchContextHighlighter", e);
            }
        }
        if (source.scriptFields() != null) {
            for (org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField field : source.scriptFields()) {
                SearchScript searchScript = context.scriptService().search(context.lookup(), field.script(), ScriptContext.Standard.SEARCH,
                        Collections.emptyMap(), context.getQueryShardContext().getClusterState());
                context.scriptFields().add(new ScriptField(field.fieldName(), searchScript, field.ignoreFailure()));
            }
        }
        if (source.ext() != null) {
            // legacy pluggable "ext" elements are dispatched to registered SearchParseElements
            XContentParser extParser = null;
            try {
                extParser = XContentFactory.xContent(source.ext()).createParser(source.ext());
                XContentParser.Token token = extParser.nextToken();
                String currentFieldName = null;
                while ((token = extParser.nextToken()) != XContentParser.Token.END_OBJECT) {
                    if (token == XContentParser.Token.FIELD_NAME) {
                        currentFieldName = extParser.currentName();
                    } else {
                        SearchParseElement parseElement = this.elementParsers.get(currentFieldName);
                        if (parseElement == null) {
                            // "suggest" gets a dedicated message pointing at the supported API
                            if (currentFieldName != null && currentFieldName.equals("suggest")) {
                                throw new SearchParseException(context,
                                    "suggest is not supported in [ext], please use SearchSourceBuilder#suggest(SuggestBuilder) instead",
                                    extParser.getTokenLocation());
                            }
                            throw new SearchParseException(context, "Unknown element [" + currentFieldName + "] in [ext]",
                                extParser.getTokenLocation());
                        } else {
                            parseElement.parse(extParser, context);
                        }
                    }
                }
            } catch (Exception e) {
                // best effort: include the source in the failure message if it can be rendered
                String sSource = "_na_";
                try {
                    sSource = source.toString();
                } catch (Throwable e1) {
                    // ignore
                }
                XContentLocation location = extParser != null ? extParser.getTokenLocation() : null;
                throw new SearchParseException(context, "failed to parse ext source [" + sSource + "]", location, e);
            } finally {
                if (extParser != null) {
                    extParser.close();
                }
            }
        }
        if (source.version() != null) {
            context.version(source.version());
        }
        if (source.stats() != null) {
            context.groupStats(source.stats());
        }
        if (source.searchAfter() != null && source.searchAfter().length > 0) {
            // search_after is incompatible with scrolling and with a non-zero from
            if (context.scrollContext() != null) {
                throw new SearchContextException(context, "`search_after` cannot be used in a scroll context.");
            }
            if (context.from() > 0) {
                throw new SearchContextException(context, "`from` parameter must be set to 0 when `search_after` is used.");
            }
            FieldDoc fieldDoc = SearchAfterBuilder.buildFieldDoc(context.sort(), source.searchAfter());
            context.searchAfter(fieldDoc);
        }
        if (source.slice() != null) {
            // slicing is only meaningful inside a scroll
            if (context.scrollContext() == null) {
                throw new SearchContextException(context, "`slice` cannot be used outside of a scroll context");
            }
            context.sliceFilter(source.slice().toFilter(queryShardContext,
                context.shardTarget().getShardId().getId(),
                queryShardContext.getIndexSettings().getNumberOfShards()));
        }
    }
private static final int[] EMPTY_DOC_IDS = new int[0];
/**
* Shortcut ids to load, we load only "from" and up to "size". The phase controller
* handles this as well since the result is always size * shards for Q_A_F
*/
private void shortcutDocIdsToLoad(SearchContext context) {
if (context.request().scroll() != null) {
TopDocs topDocs = context.queryResult().topDocs();
int[] docIdsToLoad = new int[topDocs.scoreDocs.length];
for (int i = 0; i < topDocs.scoreDocs.length; i++) {
docIdsToLoad[i] = topDocs.scoreDocs[i].doc;
}
context.docIdsToLoad(docIdsToLoad, 0, docIdsToLoad.length);
} else {
TopDocs topDocs = context.queryResult().topDocs();
if (topDocs.scoreDocs.length < context.from()) {
// no more docs...
context.docIdsToLoad(EMPTY_DOC_IDS, 0, 0);
return;
}
int totalSize = context.from() + context.size();
int[] docIdsToLoad = new int[Math.min(topDocs.scoreDocs.length - context.from(), context.size())];
int counter = 0;
for (int i = context.from(); i < totalSize; i++) {
if (i < topDocs.scoreDocs.length) {
docIdsToLoad[counter] = topDocs.scoreDocs[i].doc;
} else {
break;
}
counter++;
}
context.docIdsToLoad(docIdsToLoad, 0, counter);
}
}
private void shortcutDocIdsToLoadForScanning(SearchContext context) {
TopDocs topDocs = context.queryResult().topDocs();
if (topDocs.scoreDocs.length == 0) {
// no more docs...
context.docIdsToLoad(EMPTY_DOC_IDS, 0, 0);
return;
}
int[] docIdsToLoad = new int[topDocs.scoreDocs.length];
for (int i = 0; i < docIdsToLoad.length; i++) {
docIdsToLoad[i] = topDocs.scoreDocs[i].doc;
}
context.docIdsToLoad(docIdsToLoad, 0, docIdsToLoad.length);
}
    /**
     * Advances the paging window of a scroll context to the next page and
     * refreshes the scroll and keep-alive settings from the incoming request.
     */
    private void processScroll(InternalScrollSearchRequest request, SearchContext context) {
        // process scroll
        context.from(context.from() + context.size());
        context.scrollContext().scroll = request.scroll();
        // update the context keep alive based on the new scroll value
        if (request.scroll() != null && request.scroll().keepAlive() != null) {
            context.keepAlive(request.scroll().keepAlive().millis());
        }
    }
    /**
     * Returns the number of search contexts currently held open by this
     * service (in-flight requests and live scrolls).
     */
    public int getActiveContexts() {
        return this.activeContexts.size();
    }
    /**
     * Periodic task that scans all active contexts and frees those whose
     * keep-alive interval has elapsed since their last access. Contexts whose
     * last-access time is {@code -1} are currently executing (or have timeouts
     * disabled) and are skipped.
     */
    class Reaper implements Runnable {
        @Override
        public void run() {
            final long time = threadPool.estimatedTimeInMillis();
            for (SearchContext context : activeContexts.values()) {
                // Use the same value for both checks since lastAccessTime can
                // be modified by another thread between checks!
                final long lastAccessTime = context.lastAccessTime();
                if (lastAccessTime == -1L) { // its being processed or timeout is disabled
                    continue;
                }
                if ((time - lastAccessTime > context.keepAlive())) {
                    logger.debug("freeing search context [{}], time [{}], lastAccessTime [{}], keepAlive [{}]", context.id(), time, lastAccessTime, context.keepAlive());
                    freeContext(context.id());
                }
            }
        }
    }
}
| |
/**
* Copyright 2011-2021 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.dmdl.directio.json.driver;
import static com.asakusafw.dmdl.directio.json.driver.JsonFormatConstants.*;
import java.time.ZoneId;
import java.util.Map;
import java.util.function.Function;
import com.asakusafw.dmdl.directio.util.AttributeAnalyzer;
import com.asakusafw.dmdl.directio.util.ClassName;
import com.asakusafw.dmdl.directio.util.DatePattern;
import com.asakusafw.dmdl.directio.util.Value;
import com.asakusafw.dmdl.model.AstAttribute;
import com.asakusafw.dmdl.model.AstAttributeElement;
import com.asakusafw.dmdl.semantics.DmdlSemantics;
import com.asakusafw.runtime.io.json.ErrorAction;
import com.asakusafw.runtime.io.json.value.ValueOptionPropertyAdapter;
/**
 * Settings of JSON property.
 * @since 0.10.3
 */
public class JsonPropertySettings {

    private Value<ClassName> adapterClass = Value.undefined();

    private Value<DatePattern> dateFormat = Value.undefined();

    private Value<DatePattern> dateTimeFormat = Value.undefined();

    private Value<ZoneId> timeZone = Value.undefined();

    private Value<ErrorAction> missingInputAction = Value.undefined();

    private Value<ErrorAction> malformedInputAction = Value.undefined();

    private Value<ValueOptionPropertyAdapter.NullStyle> nullStyle = Value.undefined();

    /**
     * Returns the adapter class.
     * @return the adapter class
     */
    public Value<ClassName> getAdapterClass() {
        return adapterClass;
    }

    /**
     * Returns the date format.
     * @return the date format
     */
    public Value<DatePattern> getDateFormat() {
        return dateFormat;
    }

    /**
     * Returns the date-time format.
     * @return the date-time format
     */
    public Value<DatePattern> getDateTimeFormat() {
        return dateTimeFormat;
    }

    /**
     * Returns the time zone.
     * @return the time zone
     */
    public Value<ZoneId> getTimeZone() {
        return timeZone;
    }

    /**
     * Returns the missing input action.
     * @return the action
     */
    public Value<ErrorAction> getMissingInputAction() {
        return missingInputAction;
    }

    /**
     * Returns the malformed input action.
     * @return the action
     */
    public Value<ErrorAction> getMalformedInputAction() {
        return malformedInputAction;
    }

    /**
     * Returns the null style.
     * @return the null style
     */
    public Value<ValueOptionPropertyAdapter.NullStyle> getNullStyle() {
        return nullStyle;
    }

    /**
     * Merges this object and the given default settings into a new object.
     * Each entry of the result takes this object's value when it is defined,
     * and otherwise falls back to the corresponding default.
     * @param defaults the default settings
     * @return the created settings
     */
    public JsonPropertySettings mergeDefaults(JsonPropertySettings defaults) {
        JsonPropertySettings merged = new JsonPropertySettings();
        merged.adapterClass = adapterClass.orDefault(defaults.adapterClass);
        merged.dateFormat = dateFormat.orDefault(defaults.dateFormat);
        merged.dateTimeFormat = dateTimeFormat.orDefault(defaults.dateTimeFormat);
        merged.timeZone = timeZone.orDefault(defaults.timeZone);
        merged.missingInputAction = missingInputAction.orDefault(defaults.missingInputAction);
        merged.malformedInputAction = malformedInputAction.orDefault(defaults.malformedInputAction);
        merged.nullStyle = nullStyle.orDefault(defaults.nullStyle);
        return merged;
    }

    /**
     * Consumes attribute elements about property settings.
     * Each recognized element is removed from the given map; unrecognized
     * elements are left for the caller to report.
     * @param environment the current environment
     * @param attribute the attribute
     * @param elements the element map to be consumed
     * @return consumed settings
     */
    public static JsonPropertySettings consume(
            DmdlSemantics environment, AstAttribute attribute,
            Map<String, AstAttributeElement> elements) {
        AttributeAnalyzer analyzer = new AttributeAnalyzer(environment, attribute);
        JsonPropertySettings result = new JsonPropertySettings();
        AstAttributeElement element = elements.remove(ELEMENT_FIELD_ADAPTER);
        if (element != null) {
            result.adapterClass = analyzer.toClassName(element);
        }
        element = elements.remove(ELEMENT_DATE_FORMAT);
        if (element != null) {
            result.dateFormat = analyzer.toDatePattern(element);
        }
        element = elements.remove(ELEMENT_DATETIME_FORMAT);
        if (element != null) {
            result.dateTimeFormat = analyzer.toDatePattern(element);
        }
        element = elements.remove(ELEMENT_TIME_ZONE);
        if (element != null) {
            result.timeZone = analyzer.toZoneIdWithNull(element);
        }
        element = elements.remove(ELEMENT_MALFORMED_INPUT_ACTION);
        if (element != null) {
            result.malformedInputAction = analyzer.toEnumConstant(element, ErrorAction.class);
        }
        element = elements.remove(ELEMENT_MISSING_PROPERTY_ACTION);
        if (element != null) {
            result.missingInputAction = analyzer.toEnumConstant(element, ErrorAction.class);
        }
        element = elements.remove(ELEMENT_NULL_STYLE);
        if (element != null) {
            result.nullStyle = analyzer.toEnumConstant(element, ValueOptionPropertyAdapter.NullStyle.class);
        }
        return result;
    }

    /**
     * Verifies this settings.
     * @param environment the current environment
     * @param attribute the original attribute
     * @return {@code true} if the settings seems valid, otherwise {@code false}
     */
    public boolean verify(DmdlSemantics environment, AstAttribute attribute) {
        // no cross-element constraints are enforced at the moment
        return true;
    }
}
| |
/*
* Copyright (c) 2005-2012, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.siddhi.core.query;
import org.wso2.siddhi.core.config.SiddhiContext;
import org.wso2.siddhi.core.partition.executor.PartitionExecutor;
import org.wso2.siddhi.core.query.creator.QueryCreator;
import org.wso2.siddhi.core.query.creator.QueryCreatorFactiory;
import org.wso2.siddhi.core.query.output.callback.OutputCallback;
import org.wso2.siddhi.core.query.output.callback.QueryCallback;
import org.wso2.siddhi.core.query.output.ratelimit.OutputRateManager;
import org.wso2.siddhi.core.query.output.ratelimit.snapshot.WrappedSnapshotOutputRateManager;
import org.wso2.siddhi.core.query.processor.handler.HandlerProcessor;
import org.wso2.siddhi.core.query.processor.handler.PartitionHandlerProcessor;
import org.wso2.siddhi.core.query.processor.handler.TableHandlerProcessor;
import org.wso2.siddhi.core.query.selector.QuerySelector;
import org.wso2.siddhi.core.stream.StreamJunction;
import org.wso2.siddhi.core.table.EventTable;
import org.wso2.siddhi.core.util.parser.QueryOutputParser;
import org.wso2.siddhi.query.api.condition.ConditionValidator;
import org.wso2.siddhi.query.api.definition.AbstractDefinition;
import org.wso2.siddhi.query.api.definition.StreamDefinition;
import org.wso2.siddhi.query.api.definition.partition.PartitionDefinition;
import org.wso2.siddhi.query.api.expression.Expression;
import org.wso2.siddhi.query.api.expression.ExpressionValidator;
import org.wso2.siddhi.query.api.query.Query;
import org.wso2.siddhi.query.api.query.input.JoinStream;
import org.wso2.siddhi.query.api.query.input.WindowInputStream;
import org.wso2.siddhi.query.api.query.input.WindowStream;
import org.wso2.siddhi.query.api.query.input.handler.Filter;
import org.wso2.siddhi.query.api.query.input.handler.StreamFunction;
import org.wso2.siddhi.query.api.query.input.handler.StreamHandler;
import org.wso2.siddhi.query.api.query.input.handler.Window;
import java.util.*;
import java.util.concurrent.ConcurrentMap;
/**
 * Builds and wires the runtime processors for a single Siddhi query:
 * validates the query, constructs the output rate manager and callbacks,
 * partitions the query if required, and registers the resulting handler
 * processors with their stream junctions.
 */
public class QueryManager {
    private String queryId;
    private Query query;
    private final StreamDefinition outputStreamDefinition;
    private List<HandlerProcessor> handlerProcessors = new ArrayList<HandlerProcessor>();
    private ArrayList<QuerySelector> querySelectorList = new ArrayList<QuerySelector>();
    private List<QueryCallback> queryCallbackList = new ArrayList<QueryCallback>();
    private OutputCallback outputCallback = null;
    private final OutputRateManager outputRateManager;

    public QueryManager(Query query, ConcurrentMap<String, AbstractDefinition> streamTableDefinitionMap,
                        ConcurrentMap<String, StreamJunction> streamJunctionMap,
                        ConcurrentMap<String, EventTable> eventTableMap, SiddhiContext siddhiContext) {
        // query id: explicit name > derived from output stream > random
        if (query.getName() != null) {
            this.queryId = query.getName();
        } else if (query.getOutputStream() != null) {
            this.queryId = query.getOutputStream().getStreamId() + "-" + UUID.randomUUID();
        } else {
            this.queryId = UUID.randomUUID().toString();
        }
        this.query = query;
        QueryValidator.validate(query,streamTableDefinitionMap);
        // verify that every attribute referenced by filters, windows and stream
        // functions exists on the input stream definition
        if (query.getInputStream() instanceof WindowInputStream) {
            WindowInputStream stream = (WindowInputStream) query.getInputStream();
            AbstractDefinition inputStreamDefinition = streamTableDefinitionMap.get(stream.getStreamId());
            for (StreamHandler streamHandler : stream.getStreamHandlers()) {
                if (streamHandler instanceof Filter) {
                    Filter filter = (Filter) streamHandler;
                    Map<String, Set<String>> dependencies = ConditionValidator.getDependency(filter.getFilterCondition());
                    checkAttribute(stream.getStreamId(), stream.getStreamId(), inputStreamDefinition, dependencies);
                    checkAttribute(stream.getStreamReferenceId(), stream.getStreamId(), inputStreamDefinition, dependencies);
                    checkAttribute(null, stream.getStreamId(), inputStreamDefinition, dependencies);
                } else if (streamHandler instanceof Window) {
                    Window window = (Window) streamHandler;
                    for (Expression expression : window.getParameters()) {
                        Map<String, Set<String>> dependencies = ExpressionValidator.getDependency(expression);
                        checkAttribute(stream.getStreamId(), stream.getStreamId(), inputStreamDefinition, dependencies);
                        checkAttribute(stream.getStreamReferenceId(), stream.getStreamId(), inputStreamDefinition, dependencies);
                        checkAttribute(null, stream.getStreamId(), inputStreamDefinition, dependencies);
                    }
                } else if (streamHandler instanceof StreamFunction) {
                    StreamFunction streamFunction = (StreamFunction) streamHandler;
                    for (Expression expression : streamFunction.getParameters()) {
                        Map<String, Set<String>> dependencies = ExpressionValidator.getDependency(expression);
                        checkAttribute(stream.getStreamId(), stream.getStreamId(), inputStreamDefinition, dependencies);
                        checkAttribute(stream.getStreamReferenceId(), stream.getStreamId(), inputStreamDefinition, dependencies);
                        checkAttribute(null, stream.getStreamId(), inputStreamDefinition, dependencies);
                    }
                }
            }
        }
        //todo check
        outputRateManager = QueryOutputParser.constructOutputRateManager(query.getOutputRate(), siddhiContext.getScheduledExecutorService(),
                query.getSelector().getGroupByList().size() != 0,
                query.getInputStream() instanceof WindowStream||query.getInputStream() instanceof JoinStream);
        QueryCreator queryCreator = QueryCreatorFactiory.constructQueryCreator(queryId, query, streamTableDefinitionMap, streamJunctionMap, eventTableMap,outputRateManager, siddhiContext);
        outputStreamDefinition = queryCreator.getOutputStreamDefinition();
        if (query.getOutputStream() != null) {
            outputCallback = QueryOutputParser.constructOutputCallback(query.getOutputStream(), streamJunctionMap, eventTableMap, siddhiContext, queryCreator.getOutputStreamDefinition());
            outputRateManager.setOutputCallback(outputCallback);
            if(outputRateManager instanceof WrappedSnapshotOutputRateManager){
                ((WrappedSnapshotOutputRateManager) outputRateManager).init();
            }
        }
        // NOTE(review): partitionDefinitionMap and checkAttribute are not declared in
        // this class as shown — confirm they are provided by the full source file.
        QueryPartitioner queryPartitioner = new QueryPartitioner(query.getPartitionId(), queryCreator, queryCallbackList, outputCallback, querySelectorList, partitionDefinitionMap,siddhiContext);
        List<HandlerProcessor> handlerProcessorList = queryPartitioner.constructPartition();
        if (query.getPartitionId() == null) {
            handlerProcessors = handlerProcessorList;
        } else {
            // partitioned query: wrap each non-table processor with a partition handler
            List<List<PartitionExecutor>> partitionExecutors = queryPartitioner.getPartitionExecutors();
            for (int i = 0; i < handlerProcessorList.size(); i++) {
                HandlerProcessor queryStreamProcessor = handlerProcessorList.get(i);
                if ((!(queryStreamProcessor instanceof TableHandlerProcessor))) {
                    handlerProcessors.add(new PartitionHandlerProcessor(queryStreamProcessor.getStreamId(), queryPartitioner, i,partitionExecutors.get(i)));
                }
            }
        }
        // subscribe the stream processors to their junctions (tables are not event flows)
        for (HandlerProcessor handlerProcessor : handlerProcessors) {
            if (!(handlerProcessor instanceof TableHandlerProcessor)) {
                streamJunctionMap.get(handlerProcessor.getStreamId()).addEventFlow(handlerProcessor);
            }
        }
    }

    /** Returns the unique id of this query. */
    public String getQueryId() {
        return queryId;
    }

    /** Returns the query definition this manager was built from. */
    public Query getQuery() {
        return query;
    }

    /** Equality is based solely on {@link #getQueryId()}. */
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof QueryManager)) {
            return false;
        }
        QueryManager that = (QueryManager) o;
        // null-safe comparison; replaces the hand-rolled ternary null juggling
        return Objects.equals(queryId, that.queryId);
    }

    @Override
    public int hashCode() {
        // Objects.hashCode(null) == 0, matching the previous behavior
        return Objects.hashCode(queryId);
    }

    /** Returns the output callback, or {@code null} if the query has no output stream. */
    public OutputCallback getOutputCallback() {
        return outputCallback;
    }

    /**
     * Detaches this query's processors from their stream junctions and removes
     * the query's output stream definition from the definition map.
     */
    public void removeQuery(ConcurrentMap<String, StreamJunction> streamJunctionMap,
                            ConcurrentMap<String, AbstractDefinition> streamTableDefinitionMap) {
        for (HandlerProcessor queryStreamProcessor : handlerProcessors) {
            StreamJunction junction = streamJunctionMap.get(queryStreamProcessor.getStreamId());
            if (junction != null) {
                junction.removeEventFlow(queryStreamProcessor);
            }
        }
        streamTableDefinitionMap.remove(query.getOutputStream().getStreamId());
    }

    /** Returns the definition of the stream this query emits. */
    public StreamDefinition getOutputStreamDefinition() {
        return outputStreamDefinition;
    }

    /** Registers a callback that receives this query's rate-limited output. */
    public void addCallback(QueryCallback callback) {
        outputRateManager.addQueryCallback(callback);
    }
}
| |
/**
* Copyright (c) 2015 MapR, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ojai;
import java.math.BigDecimal;
import java.nio.ByteBuffer;
import java.sql.Date;
import java.sql.Time;
import java.sql.Timestamp;
import java.util.Map;
import org.ojai.annotation.API;
import org.ojai.types.Interval;
@API.Public
public interface DocumentBuilder {
/* ===========
* Map Methods
* ===========
*/
/**
* Associates the specified {@code boolean} value with the specified
* {@code field} in the current map. Any previous association will be
* overwritten.
*
* @param field The name of the field.
* @param value The {@code boolean} value to append.
* @return {@code this} for chained invocation.
* @throws IllegalStateException If the builder is not in a MAP segment.
*/
DocumentBuilder put(String field, boolean value);
/**
* Associates the specified {@code String} value with the specified
* {@code field} in the current map. Any previous association will be
* overwritten.
*
* @param field The name of the field.
* @param value The {@code String} value to append.
* @return {@code this} for chained invocation.
* @throws IllegalStateException If the builder is not in a MAP segment.
*/
DocumentBuilder put(String field, String value);
  /** Associates the specified {@code byte} value with {@code field} in the current map. */
  DocumentBuilder put(String field, byte value);

  /** Associates the specified {@code short} value with {@code field} in the current map. */
  DocumentBuilder put(String field, short value);

  /** Associates the specified {@code int} value with {@code field} in the current map. */
  DocumentBuilder put(String field, int value);

  /** Associates the specified {@code long} value with {@code field} in the current map. */
  DocumentBuilder put(String field, long value);

  /** Associates the specified {@code float} value with {@code field} in the current map. */
  DocumentBuilder put(String field, float value);

  /** Associates the specified {@code double} value with {@code field} in the current map. */
  DocumentBuilder put(String field, double value);

  /** Associates the specified {@code BigDecimal} value with {@code field} in the current map. */
  DocumentBuilder put(String field, BigDecimal value);

  /** Associates the given long, as a {@code DECIMAL} value, with {@code field} in the current map. */
  DocumentBuilder putDecimal(String field, long decimalValue);

  /** Associates the given double, as a {@code DECIMAL} value, with {@code field} in the current map. */
  DocumentBuilder putDecimal(String field, double decimalValue);

  /**
   * Associates the {@code DECIMAL} value <tt>(unscaledValue &times; 10<sup>-scale</sup>)</tt>
   * with {@code field} in the current map.
   */
  DocumentBuilder putDecimal(String field, int unscaledValue, int scale);

  /**
   * Associates the {@code DECIMAL} value <tt>(unscaledValue &times; 10<sup>-scale</sup>)</tt>
   * with {@code field} in the current map.
   */
  DocumentBuilder putDecimal(String field, long unscaledValue, int scale);

  /**
   * Associates the {@code DECIMAL} value formed from the big-endian two's-complement
   * {@code unscaledValue} and {@code scale} with {@code field} in the current map.
   */
  DocumentBuilder putDecimal(String field, byte[] unscaledValue, int scale);

  /** Associates the byte array, as a {@code BINARY} value, with {@code field} in the current map. */
  DocumentBuilder put(String field, byte[] value);

  /**
   * Associates the slice of the byte array bounded by {@code offset} and {@code length},
   * as a {@code BINARY} value, with {@code field} in the current map.
   */
  DocumentBuilder put(String field, byte[] value, int offset, int length);

  /** Associates the {@code ByteBuffer}, as a {@code BINARY} value, with {@code field} in the current map. */
  DocumentBuilder put(String field, ByteBuffer value);

  /** Associates the specified {@code Date} value with {@code field} in the current map. */
  DocumentBuilder put(String field, Date value);
  /**
   * Associates the specified {@code date} value represented as number of
   * days since epoch with the specified {@code field} in the
   * current map. Any previous association will be overwritten.
   *
   * @param field The name of the field.
   * @param days The {@code date} value to append, as days since the epoch.
   * @return {@code this} for chained invocation.
   * @throws IllegalStateException If the builder is not in a MAP segment.
   */
  DocumentBuilder putDate(String field, int days);
  /** Associates the specified {@code Time} value with {@code field} in the current map. */
  DocumentBuilder put(String field, Time value);

  /**
   * Associates the specified {@code time} value represented as number of
   * milliseconds since midnight with the specified {@code field} in the
   * current map. Any previous association will be overwritten.
   *
   * @param field The name of the field.
   * @param millis The {@code time} value to append, as milliseconds since midnight.
   * @return {@code this} for chained invocation.
   * @throws IllegalStateException If the builder is not in a MAP segment.
   * @throws IllegalArgumentException If the value of {@code millis} is greater
   *         than 86400000.
   */
  DocumentBuilder putTime(String field, int millis);
  /** Associates the specified {@code Timestamp} value with {@code field} in the current map. */
  DocumentBuilder put(String field, Timestamp value);

  /**
   * Associates the specified {@code timestamp} value represented as number of
   * milliseconds since epoch with the specified {@code field} in the
   * current map. Any previous association will be overwritten.
   *
   * @param field The name of the field.
   * @param timeMillis The {@code timestamp} value to append, as milliseconds since the epoch.
   * @return {@code this} for chained invocation.
   * @throws IllegalStateException If the builder is not in a MAP segment.
   */
  DocumentBuilder putTimestamp(String field, long timeMillis);
  /** Associates the specified {@code Interval} value with {@code field} in the current map. */
  DocumentBuilder put(String field, Interval value);

  /** Associates an {@code INTERVAL} of the given duration, in milliseconds, with {@code field}. */
  DocumentBuilder putInterval(String field, long durationInMs);

  /** Associates an {@code INTERVAL} built from months, days and milliseconds with {@code field}. */
  DocumentBuilder putInterval(String field, int months, int days, int milliseconds);

  /** Associates a new, empty map with {@code field} and makes it the current segment. */
  DocumentBuilder putNewMap(String field);

  /** Associates a new, empty array with {@code field} and makes it the current segment. */
  DocumentBuilder putNewArray(String field);

  /** Associates a {@code NULL} value with {@code field} in the current map. */
  DocumentBuilder putNull(String field);

  /** Associates the specified {@code Value} with {@code field} in the current map. */
  DocumentBuilder put(String field, Value value);

  /** Associates the specified {@code Document} with {@code field} in the current map. */
  DocumentBuilder put(String field, Document value);

  /** Associates the specified {@code Map} with {@code field} in the current map. */
  DocumentBuilder put(String field, Map<String, Object> value);
/* =============
* Array Methods
* =============
*/
/**
* Appends a {@code boolean} value to the current array.
* @param value The {@code boolean} value to append.
* @return {@code this} for chained invocation.
* @throws IllegalStateException If the builder is not in an ARRAY segment.
*/
DocumentBuilder add(boolean value);
/**
* Appends a {@code String} value to the current array.
* @param value The {@code String} value to append.
* @return {@code this} for chained invocation.
* @throws IllegalStateException If the builder is not in an ARRAY segment.
*/
DocumentBuilder add(String value);
/**
* Appends a {@code byte} value to the current array.
* @param value The {@code byte} value to append.
* @return {@code this} for chained invocation.
* @throws IllegalStateException If the builder is not in an ARRAY segment.
*/
DocumentBuilder add(byte value);
/**
* Appends a {@code short} value to the current array.
* @param value The {@code short} value to append.
* @return {@code this} for chained invocation.
* @throws IllegalStateException If the builder is not in an ARRAY segment.
*/
DocumentBuilder add(short value);
/**
* Appends a {@code int} value to the current array.
* @param value The {@code int} value to append.
* @return {@code this} for chained invocation.
* @throws IllegalStateException If the builder is not in an ARRAY segment.
*/
DocumentBuilder add(int value);
/**
* Appends a {@code long} value to the current array.
* @param value The {@code long} value to append.
* @return {@code this} for chained invocation.
* @throws IllegalStateException If the builder is not in an ARRAY segment.
*/
DocumentBuilder add(long value);
/**
* Appends a {@code float} value to the current array.
* @param value The {@code float} value to append.
* @return {@code this} for chained invocation.
* @throws IllegalStateException If the builder is not in an ARRAY segment.
*/
DocumentBuilder add(float value);
/**
* Appends a {@code double} value to the current array.
* @param value The {@code double} value to append.
* @return {@code this} for chained invocation.
* @throws IllegalStateException If the builder is not in an ARRAY segment.
*/
DocumentBuilder add(double value);
/**
* Appends a {@code BigDecimal} value to the current array.
* @param value The {@code BigDecimal} value to append.
* @return {@code this} for chained invocation.
* @throws IllegalStateException If the builder is not in an ARRAY segment.
*/
DocumentBuilder add(BigDecimal value);
  /**
   * Appends a long number as a {@code DECIMAL} value to the current array.
   * @param decimalValue The {@code long} value to append.
   * @return {@code this} for chained invocation.
   * @throws IllegalStateException If the builder is not in an ARRAY segment.
   */
  DocumentBuilder addDecimal(long decimalValue);
  /**
   * Appends a double number as a {@code DECIMAL} value to the current array.
   * @param decimalValue The {@code double} value to append.
   * @return {@code this} for chained invocation.
   * @throws IllegalStateException If the builder is not in an ARRAY segment.
   */
  DocumentBuilder addDecimal(double decimalValue);
/**
 * Appends an {@code int} unscaled value and an {@code int} scale as a
 * {@code DECIMAL} value. The {@code DECIMAL} value is
 * <tt>(unscaledValue &times; 10<sup>-scale</sup>)</tt>.
 *
 * @param unscaledValue unscaled value of the {@code DECIMAL}.
 * @param scale scale of the {@code DECIMAL}.
 * @return {@code this} for chained invocation.
 * @throws IllegalStateException If the builder is not in an ARRAY segment.
 */
DocumentBuilder addDecimal(int unscaledValue, int scale);
/**
 * Appends a {@code long} unscaled value and an {@code int} scale as a
 * {@code DECIMAL} value. The {@code DECIMAL} value is
 * <tt>(unscaledValue &times; 10<sup>-scale</sup>)</tt>.
 *
 * @param unscaledValue unscaled value of the {@code DECIMAL}.
 * @param scale scale of the {@code DECIMAL}.
 * @return {@code this} for chained invocation.
 * @throws IllegalStateException If the builder is not in an ARRAY segment.
 */
DocumentBuilder addDecimal(long unscaledValue, int scale);
/**
 * Appends a byte array containing the two's-complement binary representation
 * and an {@code int} scale as a {@code DECIMAL} value. The input array is
 * assumed to be in <i>big-endian</i> byte-order: the most significant
 * byte is in the zeroth element.
 *
 * @param unscaledValue unscaled value of the {@code DECIMAL}.
 * @param scale scale of the {@code DECIMAL}.
 * @return {@code this} for chained invocation.
 * @throws IllegalStateException If the builder is not in an ARRAY segment.
 */
DocumentBuilder addDecimal(byte[] unscaledValue, int scale);
/**
 * Appends the byte array as a {@code BINARY} value to the current array.
 * @param value The byte array to append.
 * @return {@code this} for chained invocation.
 * @throws IllegalStateException If the builder is not in an ARRAY segment.
 */
DocumentBuilder add(byte[] value);
/**
 * Appends the byte array bounded by offset and length as a {@code BINARY}
 * value to the current array.
 * @param value The byte array to append.
 * @param offset The start offset in the byte array.
 * @param length The length from the offset.
 * @return {@code this} for chained invocation.
 * @throws IllegalStateException If the builder is not in an ARRAY segment.
 * @throws IndexOutOfBoundsException If the offset or offset+length are outside
 *         the byte array range.
 */
DocumentBuilder add(byte[] value, int offset, int length);
/**
 * Appends the {@code ByteBuffer} as a {@code BINARY} value to the current array.
 * @param value The {@code ByteBuffer} to append.
 * @return {@code this} for chained invocation.
 * @throws IllegalStateException If the builder is not in an ARRAY segment.
 */
DocumentBuilder add(ByteBuffer value);
/**
 * Appends a {@code NULL} value to the current array.
 * @return {@code this} for chained invocation.
 * @throws IllegalStateException If the builder is not in an ARRAY segment.
 */
DocumentBuilder addNull();
/**
 * Appends the {@code Value} to the current array.
 * @param value The {@code Value} to append.
 * @return {@code this} for chained invocation.
 * @throws IllegalStateException If the builder is not in an ARRAY segment.
 */
DocumentBuilder add(Value value);
/**
 * Appends the {@code Document} to the current array.
 * @param value The {@code Document} to append.
 * @return {@code this} for chained invocation.
 * @throws IllegalStateException If the builder is not in an ARRAY segment.
 */
DocumentBuilder add(Document value);
/* Advanced Array Methods */
/**
 * Starts a nested array inside the current array.
 * NOTE(review): presumably the builder is then positioned inside the new ARRAY
 * segment until {@link #endArray()} is called — confirm against the implementation.
 */
DocumentBuilder addNewArray();
/**
 * Starts a nested map inside the current array.
 * NOTE(review): presumably the builder is then positioned inside the new MAP
 * segment until {@link #endMap()} is called — confirm against the implementation.
 */
DocumentBuilder addNewMap();
/**
 * Appends the {@code Time} value to the current array.
 * @param value The {@code Time} value to append.
 * @return {@code this} for chained invocation.
 * @throws IllegalStateException If the builder is not in an ARRAY segment.
 */
DocumentBuilder add(Time value);
/**
 * Appends the specified {@code time} value represented as number of
 * milliseconds since midnight to the current array.
 *
 * @param millis The {@code time} value to append, in milliseconds since midnight.
 * @return {@code this} for chained invocation.
 * @throws IllegalStateException If the builder is not in an ARRAY segment.
 * @throws IllegalArgumentException If the value of {@code millis} is greater
 *         than 86400000.
 */
DocumentBuilder addTime(int millis);
/**
 * Appends the {@code Date} value to the current array.
 * @param value The {@code Date} value to append.
 * @return {@code this} for chained invocation.
 * @throws IllegalStateException If the builder is not in an ARRAY segment.
 */
DocumentBuilder add(Date value);
/**
 * Appends the specified {@code date} value represented as number of
 * days since epoch to the current array.
 *
 * @param days The {@code date} value to append, as days since the epoch.
 * @return {@code this} for chained invocation.
 * @throws IllegalStateException If the builder is not in an ARRAY segment.
 */
DocumentBuilder addDate(int days);
/**
 * Appends the {@code Timestamp} value to the current array.
 * @param value The {@code Timestamp} value to append.
 * @return {@code this} for chained invocation.
 * @throws IllegalStateException If the builder is not in an ARRAY segment.
 */
DocumentBuilder add(Timestamp value);
/**
 * Appends a timestamp given as a millisecond value to the current array.
 * @param timeMillis The timestamp value in milliseconds.
 * @return {@code this} for chained invocation.
 * @throws IllegalStateException If the builder is not in an ARRAY segment.
 */
DocumentBuilder addTimestamp(long timeMillis);
/**
 * Appends the {@code Interval} value to the current array.
 * @param value The {@code Interval} value to append.
 * @return {@code this} for chained invocation.
 * @throws IllegalStateException If the builder is not in an ARRAY segment.
 */
DocumentBuilder add(Interval value);
/**
 * Appends an interval given as a duration in milliseconds to the current array.
 * @param durationInMs The interval duration in milliseconds.
 * @return {@code this} for chained invocation.
 * @throws IllegalStateException If the builder is not in an ARRAY segment.
 */
DocumentBuilder addInterval(long durationInMs);
/* Lifecycle methods */
/** Ends the current ARRAY segment. */
DocumentBuilder endArray();
/** Ends the current MAP segment. */
DocumentBuilder endMap();
/** Returns the built {@link Document}. */
Document getDocument();
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.hyracks.algebricks.core.algebra.operators.logical.visitors;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.commons.lang3.mutable.Mutable;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.common.utils.Pair;
import org.apache.hyracks.algebricks.common.utils.Triple;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalPlan;
import org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractUnnestNonMapOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.ExchangeOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.ExtensionOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.IndexInsertDeleteUpsertOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteUpsertOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.IntersectOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.LeftOuterJoinOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.LeftOuterUnnestMapOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.LimitOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.MaterializeOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.OuterUnnestOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.PartitioningSplitOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.ReplicateOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.RunningAggregateOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.ScriptOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.SinkOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.TokenizeOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.WriteOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.WriteResultOperator;
import org.apache.hyracks.algebricks.core.algebra.visitors.ILogicalOperatorVisitor;
/**
 * Visitor that collects the variables <em>produced</em> by a logical operator, i.e.
 * variables the operator introduces itself rather than merely propagates from its
 * inputs. All produced variables are accumulated into the collection handed to the
 * constructor; operators that produce nothing leave it untouched.
 */
public class ProducedVariableVisitor implements ILogicalOperatorVisitor<Void, Void> {

    /** Sink supplied by the caller; every visit method appends produced variables to it. */
    private final Collection<LogicalVariable> producedVariables;

    public ProducedVariableVisitor(Collection<LogicalVariable> producedVariables) throws AlgebricksException {
        this.producedVariables = producedVariables;
    }

    @Override
    public Void visitAggregateOperator(AggregateOperator op, Void arg) throws AlgebricksException {
        producedVariables.addAll(op.getVariables());
        return null;
    }

    @Override
    public Void visitAssignOperator(AssignOperator op, Void arg) throws AlgebricksException {
        producedVariables.addAll(op.getVariables());
        return null;
    }

    @Override
    public Void visitDataScanOperator(DataSourceScanOperator op, Void arg) throws AlgebricksException {
        producedVariables.addAll(op.getVariables());
        return null;
    }

    @Override
    public Void visitDistinctOperator(DistinctOperator op, Void arg) {
        return null;
    }

    @Override
    public Void visitEmptyTupleSourceOperator(EmptyTupleSourceOperator op, Void arg) throws AlgebricksException {
        return null;
    }

    @Override
    public Void visitExchangeOperator(ExchangeOperator op, Void arg) throws AlgebricksException {
        return null;
    }

    @Override
    public Void visitGroupByOperator(GroupByOperator op, Void arg) throws AlgebricksException {
        // Everything live at the roots of the nested plans counts as produced here.
        for (ILogicalPlan nestedPlan : op.getNestedPlans()) {
            for (Mutable<ILogicalOperator> root : nestedPlan.getRoots()) {
                VariableUtilities.getLiveVariables(root.getValue(), producedVariables);
            }
        }
        // Group-by and decoration assignments each introduce their left-hand variable.
        collectAssignedVariables(op.getGroupByList());
        collectAssignedVariables(op.getDecorList());
        return null;
    }

    @Override
    public Void visitInnerJoinOperator(InnerJoinOperator op, Void arg) throws AlgebricksException {
        return null;
    }

    @Override
    public Void visitLeftOuterJoinOperator(LeftOuterJoinOperator op, Void arg) throws AlgebricksException {
        return null;
    }

    @Override
    public Void visitLimitOperator(LimitOperator op, Void arg) throws AlgebricksException {
        return null;
    }

    @Override
    public Void visitNestedTupleSourceOperator(NestedTupleSourceOperator op, Void arg) throws AlgebricksException {
        return null;
    }

    @Override
    public Void visitOrderOperator(OrderOperator op, Void arg) throws AlgebricksException {
        return null;
    }

    @Override
    public Void visitPartitioningSplitOperator(PartitioningSplitOperator op, Void arg) throws AlgebricksException {
        return null;
    }

    @Override
    public Void visitProjectOperator(ProjectOperator op, Void arg) throws AlgebricksException {
        return null;
    }

    @Override
    public Void visitRunningAggregateOperator(RunningAggregateOperator op, Void arg) throws AlgebricksException {
        producedVariables.addAll(op.getVariables());
        return null;
    }

    @Override
    public Void visitScriptOperator(ScriptOperator op, Void arg) throws AlgebricksException {
        // Output variables that the script does not itself consume are produced by it.
        List<LogicalVariable> used = new ArrayList<>();
        VariableUtilities.getUsedVariables(op, used);
        for (LogicalVariable outputVar : op.getOutputVariables()) {
            if (!used.contains(outputVar)) {
                producedVariables.add(outputVar);
            }
        }
        return null;
    }

    @Override
    public Void visitSelectOperator(SelectOperator op, Void arg) throws AlgebricksException {
        return null;
    }

    @Override
    public Void visitSubplanOperator(SubplanOperator op, Void arg) throws AlgebricksException {
        // Only subplan-produced variables that remain live at the subplan roots escape.
        Set<LogicalVariable> subplanProduced = new HashSet<>();
        Set<LogicalVariable> subplanLive = new HashSet<>();
        for (ILogicalPlan nestedPlan : op.getNestedPlans()) {
            for (Mutable<ILogicalOperator> root : nestedPlan.getRoots()) {
                VariableUtilities.getProducedVariablesInDescendantsAndSelf(root.getValue(), subplanProduced);
                VariableUtilities.getSubplanLocalLiveVariables(root.getValue(), subplanLive);
            }
        }
        subplanProduced.retainAll(subplanLive);
        producedVariables.addAll(subplanProduced);
        return null;
    }

    @Override
    public Void visitUnionOperator(UnionAllOperator op, Void arg) throws AlgebricksException {
        // The third element of each mapping is the union's output variable.
        for (Triple<LogicalVariable, LogicalVariable, LogicalVariable> mapping : op.getVariableMappings()) {
            producedVariables.add(mapping.third);
        }
        return null;
    }

    @Override
    public Void visitIntersectOperator(IntersectOperator op, Void arg) throws AlgebricksException {
        producedVariables.addAll(op.getOutputVars());
        return null;
    }

    @Override
    public Void visitUnnestMapOperator(UnnestMapOperator op, Void arg) throws AlgebricksException {
        producedVariables.addAll(op.getVariables());
        return null;
    }

    @Override
    public Void visitLeftOuterUnnestMapOperator(LeftOuterUnnestMapOperator op, Void arg) throws AlgebricksException {
        producedVariables.addAll(op.getVariables());
        return null;
    }

    @Override
    public Void visitUnnestOperator(UnnestOperator op, Void arg) throws AlgebricksException {
        return visitUnnestNonMapOperator(op);
    }

    @Override
    public Void visitWriteOperator(WriteOperator op, Void arg) throws AlgebricksException {
        return null;
    }

    @Override
    public Void visitDistributeResultOperator(DistributeResultOperator op, Void arg) throws AlgebricksException {
        return null;
    }

    @Override
    public Void visitWriteResultOperator(WriteResultOperator op, Void arg) throws AlgebricksException {
        return null;
    }

    @Override
    public Void visitReplicateOperator(ReplicateOperator op, Void arg) throws AlgebricksException {
        return null;
    }

    @Override
    public Void visitMaterializeOperator(MaterializeOperator op, Void arg) throws AlgebricksException {
        return null;
    }

    @Override
    public Void visitInsertDeleteUpsertOperator(InsertDeleteUpsertOperator op, Void arg) throws AlgebricksException {
        op.getProducedVariables(producedVariables);
        return null;
    }

    @Override
    public Void visitIndexInsertDeleteUpsertOperator(IndexInsertDeleteUpsertOperator op, Void arg)
            throws AlgebricksException {
        return null;
    }

    @Override
    public Void visitTokenizeOperator(TokenizeOperator op, Void arg) throws AlgebricksException {
        producedVariables.addAll(op.getTokenizeVars());
        return null;
    }

    @Override
    public Void visitSinkOperator(SinkOperator op, Void arg) throws AlgebricksException {
        return null;
    }

    @Override
    public Void visitExtensionOperator(ExtensionOperator op, Void arg) throws AlgebricksException {
        op.getDelegate().getProducedVariables(producedVariables);
        return null;
    }

    @Override
    public Void visitOuterUnnestOperator(OuterUnnestOperator op, Void arg) throws AlgebricksException {
        return visitUnnestNonMapOperator(op);
    }

    /** Records the unnest variables plus the positional variable, if any, added at most once. */
    private Void visitUnnestNonMapOperator(AbstractUnnestNonMapOperator op) {
        producedVariables.addAll(op.getVariables());
        LogicalVariable posVar = op.getPositionalVariable();
        if (posVar != null && !producedVariables.contains(posVar)) {
            producedVariables.add(posVar);
        }
        return null;
    }

    /** Adds the assigned (left-hand-side) variable of each mapping whose variable is non-null. */
    private void collectAssignedVariables(List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> mappings) {
        for (Pair<LogicalVariable, Mutable<ILogicalExpression>> mapping : mappings) {
            if (mapping.first != null) {
                producedVariables.add(mapping.first);
            }
        }
    }
}
| |
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.vmware;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URL;
import java.rmi.RemoteException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLSession;
import org.apache.log4j.xml.DOMConfigurator;
import com.cloud.utils.PropertiesUtil;
import com.vmware.apputils.version.ExtendedAppUtil;
import com.vmware.vim25.HostIpConfig;
import com.vmware.vim25.HostVirtualNicSpec;
import com.vmware.vim25.HostConfigManager;
import com.vmware.vim25.HostPortGroupSpec;
import com.vmware.vim25.HttpNfcLeaseDeviceUrl;
import com.vmware.vim25.HttpNfcLeaseInfo;
import com.vmware.vim25.HttpNfcLeaseState;
import com.vmware.vim25.OvfCreateImportSpecParams;
import com.vmware.vim25.OvfCreateImportSpecResult;
import com.vmware.vim25.OvfFileItem;
import com.vmware.vim25.OvfNetworkMapping;
import com.vmware.vim25.VirtualMachineConfigSpec;
import com.vmware.vim25.VirtualDeviceConfigSpecOperation;
import com.vmware.vim25.VirtualEthernetCard;
import com.vmware.vim25.VirtualEthernetCardNetworkBackingInfo;
import com.vmware.vim25.VirtualNicManagerNetConfig;
import com.vmware.vim25.VirtualPCNet32;
import com.vmware.vim25.VirtualDeviceConfigSpec;
import com.vmware.vim25.VirtualMachineCloneSpec;
import com.vmware.vim25.VirtualMachineRelocateSpec;
import com.vmware.vim25.ArrayOfManagedObjectReference;
import com.vmware.vim25.DatastoreInfo;
import com.vmware.vim25.DynamicProperty;
import com.vmware.vim25.InvalidProperty;
import com.vmware.vim25.ManagedObjectReference;
import com.vmware.vim25.ObjectContent;
import com.vmware.vim25.ObjectSpec;
import com.vmware.vim25.PropertyFilterSpec;
import com.vmware.vim25.PropertySpec;
import com.vmware.vim25.RuntimeFault;
import com.vmware.vim25.SelectionSpec;
import com.vmware.vim25.TraversalSpec;
public class TestVMWare {
private static ExtendedAppUtil cb;
static {
    // Install a trust-all SSL context so this test client can talk to hosts that
    // present self-signed certificates. Test-only code: this disables server
    // certificate validation entirely and must never be used in production.
    try {
        javax.net.ssl.TrustManager[] trustAllCerts = new javax.net.ssl.TrustManager[1];
        trustAllCerts[0] = new TrustAllManager();
        javax.net.ssl.SSLContext sc = javax.net.ssl.SSLContext.getInstance("SSL");
        sc.init(null, trustAllCerts, null);
        javax.net.ssl.HttpsURLConnection.setDefaultSSLSocketFactory(sc.getSocketFactory());
    } catch (Exception e) {
        // FIX: this exception was previously swallowed silently, hiding SSL setup
        // failures until the first mysterious connection error. Report it instead.
        System.err.println("Failed to install trust-all SSL socket factory: " + e);
        e.printStackTrace();
    }
}
/** Configures log4j from log4j-cloud.xml when present on the config path; otherwise keeps defaults. */
private static void setupLog4j() {
    File configFile = PropertiesUtil.findConfigFile("log4j-cloud.xml");
    if (configFile == null) {
        System.out.println("Configure log4j with default properties");
        return;
    }
    System.out.println("Log4j configuration from : " + configFile.getAbsolutePath());
    // Re-reads the configuration every 10 seconds so logging changes take effect live.
    DOMConfigurator.configureAndWatch(configFile.getAbsolutePath(), 10000);
}
/**
 * Retrieves and prints the whole inventory: the type, reference value and "name"
 * property of every ManagedEntity reachable from the root folder. The TraversalSpecs
 * below form the inventory navigation graph (folders to children, datacenters to
 * host/vm folders, compute resources to hosts and resource pools, nested pools).
 *
 * @throws Exception if the PropertyCollector query fails.
 */
private void getAndPrintInventoryContents() throws Exception {
    // ResourcePool -> nested resource pools (self-recursive by name).
    TraversalSpec resourcePoolTraversalSpec = new TraversalSpec();
    resourcePoolTraversalSpec.setName("resourcePoolTraversalSpec");
    resourcePoolTraversalSpec.setType("ResourcePool");
    resourcePoolTraversalSpec.setPath("resourcePool");
    resourcePoolTraversalSpec.setSkip(new Boolean(false));
    resourcePoolTraversalSpec.setSelectSet(
        new SelectionSpec [] { new SelectionSpec(null,null,"resourcePoolTraversalSpec") });
    // ComputeResource -> its resource pool.
    TraversalSpec computeResourceRpTraversalSpec = new TraversalSpec();
    computeResourceRpTraversalSpec.setName("computeResourceRpTraversalSpec");
    computeResourceRpTraversalSpec.setType("ComputeResource");
    computeResourceRpTraversalSpec.setPath("resourcePool");
    computeResourceRpTraversalSpec.setSkip(new Boolean(false));
    computeResourceRpTraversalSpec.setSelectSet(
        new SelectionSpec [] { new SelectionSpec(null,null,"resourcePoolTraversalSpec") });
    // ComputeResource -> its hosts.
    TraversalSpec computeResourceHostTraversalSpec = new TraversalSpec();
    computeResourceHostTraversalSpec.setName("computeResourceHostTraversalSpec");
    computeResourceHostTraversalSpec.setType("ComputeResource");
    computeResourceHostTraversalSpec.setPath("host");
    computeResourceHostTraversalSpec.setSkip(new Boolean(false));
    // Datacenter -> hostFolder, continuing with the folder recursion.
    TraversalSpec datacenterHostTraversalSpec = new TraversalSpec();
    datacenterHostTraversalSpec.setName("datacenterHostTraversalSpec");
    datacenterHostTraversalSpec.setType("Datacenter");
    datacenterHostTraversalSpec.setPath("hostFolder");
    datacenterHostTraversalSpec.setSkip(new Boolean(false));
    datacenterHostTraversalSpec.setSelectSet(
        new SelectionSpec [] { new SelectionSpec(null,null,"folderTraversalSpec") });
    // Datacenter -> vmFolder, continuing with the folder recursion.
    TraversalSpec datacenterVmTraversalSpec = new TraversalSpec();
    datacenterVmTraversalSpec.setName("datacenterVmTraversalSpec");
    datacenterVmTraversalSpec.setType("Datacenter");
    datacenterVmTraversalSpec.setPath("vmFolder");
    datacenterVmTraversalSpec.setSkip(new Boolean(false));
    datacenterVmTraversalSpec.setSelectSet(
        new SelectionSpec [] { new SelectionSpec(null,null,"folderTraversalSpec") });
    // Folder -> childEntity; branches into all of the specs above.
    TraversalSpec folderTraversalSpec = new TraversalSpec();
    folderTraversalSpec.setName("folderTraversalSpec");
    folderTraversalSpec.setType("Folder");
    folderTraversalSpec.setPath("childEntity");
    folderTraversalSpec.setSkip(new Boolean(false));
    folderTraversalSpec.setSelectSet(
        new SelectionSpec [] { new SelectionSpec(null,null,"folderTraversalSpec"),
            datacenterHostTraversalSpec,
            datacenterVmTraversalSpec,
            computeResourceRpTraversalSpec,
            computeResourceHostTraversalSpec,
            resourcePoolTraversalSpec });
    // Fetch only the "name" property of each ManagedEntity.
    PropertySpec[] propspecary = new PropertySpec[] { new PropertySpec() };
    propspecary[0].setAll(new Boolean(false));
    propspecary[0].setPathSet(new String[] { "name" });
    propspecary[0].setType("ManagedEntity");
    PropertyFilterSpec spec = new PropertyFilterSpec();
    spec.setPropSet(propspecary);
    spec.setObjectSet(new ObjectSpec[] { new ObjectSpec() });
    spec.getObjectSet(0).setObj(cb.getServiceConnection3().getRootFolder());
    spec.getObjectSet(0).setSkip(new Boolean(false));
    spec.getObjectSet(0).setSelectSet(
        new SelectionSpec[] { folderTraversalSpec });
    // Recursively get all ManagedEntity ManagedObjectReferences
    // and the "name" property for all ManagedEntities retrieved
    ObjectContent[] ocary =
        cb.getServiceConnection3().getService().retrieveProperties(
            cb.getServiceConnection3().getServiceContent().getPropertyCollector(),
            new PropertyFilterSpec[] { spec }
        );
    // If we get contents back, print them out.
    if (ocary != null) {
        for (int oci = 0; oci < ocary.length; oci++) {
            ObjectContent oc = ocary[oci];
            ManagedObjectReference mor = oc.getObj();
            DynamicProperty[] pcary = oc.getPropSet();
            System.out.println("Object Type : " + mor.getType());
            System.out.println("Reference Value : " + mor.get_value());
            if (pcary != null) {
                for (int pci = 0; pci < pcary.length; pci++) {
                    DynamicProperty pc = pcary[pci];
                    // FIX: null-check before dereferencing; the original called
                    // pc.getName() first and only tested pc for null afterwards,
                    // which would NPE on a null property entry.
                    if (pc != null) {
                        System.out.println(" Property Name : " + pc.getName());
                        if (!pc.getVal().getClass().isArray()) {
                            System.out.println(" Property Value : " + pc.getVal());
                        }
                        else {
                            Object[] ipcary = (Object[])pc.getVal();
                            System.out.println("Val : " + pc.getVal());
                            for (int ii = 0; ii < ipcary.length; ii++) {
                                Object oval = ipcary[ii];
                                if (oval.getClass().getName().indexOf("ManagedObjectReference") >= 0) {
                                    ManagedObjectReference imor = (ManagedObjectReference)oval;
                                    System.out.println("Inner Object Type : " + imor.getType());
                                    System.out.println("Inner Reference Value : " + imor.get_value());
                                }
                                else {
                                    System.out.println("Inner Property Value : " + oval);
                                }
                            }
                        }
                    }
                }
            }
        }
    } else {
        System.out.println("No Managed Entities retrieved!");
    }
}
/**
 * Prints every datacenter reachable from the root folder together with its basic
 * properties, its clusters (and their hosts), standalone hosts, datastores and VMs.
 * RuntimeFault/RemoteException are reported to stderr; the method never throws.
 */
private void listDataCenters() {
    try {
        ManagedObjectReference[] morDatacenters = getDataCenterMors();
        if(morDatacenters != null) {
            for(ManagedObjectReference mor : morDatacenters) {
                System.out.println("Datacenter : " + mor.get_value());
                Map<String, Object> properties = new HashMap<String, Object>();
                properties.put("name", null);
                properties.put("vmFolder", null);
                properties.put("hostFolder", null);
                getProperites(mor, properties);
                for(Map.Entry<String, Object> entry : properties.entrySet()) {
                    if(entry.getValue() instanceof ManagedObjectReference) {
                        ManagedObjectReference morProp = (ManagedObjectReference)entry.getValue();
                        System.out.println("\t" + entry.getKey() + ":(" + morProp.getType() + ", " + morProp.get_value() + ")");
                    } else {
                        System.out.println("\t" + entry.getKey() + ":" + entry.getValue());
                    }
                }
                System.out.println("Datacenter clusters");
                ManagedObjectReference[] clusters = getDataCenterClusterMors(mor);
                if(clusters != null) {
                    for(ManagedObjectReference morCluster : clusters) {
                        Object[] props = this.getProperties(morCluster, new String[] {"name"});
                        System.out.println("cluster : " + props[0]);
                        System.out.println("cluster hosts");
                        ManagedObjectReference[] hosts = getClusterHostMors(morCluster);
                        if(hosts != null) {
                            for(ManagedObjectReference morHost : hosts) {
                                Object[] props2 = this.getProperties(morHost, new String[] {"name"});
                                System.out.println("host : " + props2[0]);
                            }
                        }
                    }
                }
                System.out.println("Datacenter standalone hosts");
                ManagedObjectReference[] hosts = getDataCenterStandaloneHostMors(mor);
                if(hosts != null) {
                    for(ManagedObjectReference morHost : hosts) {
                        Object[] props = this.getProperties(morHost, new String[] {"name"});
                        System.out.println("host : " + props[0]);
                    }
                }
                System.out.println("Datacenter datastores");
                ManagedObjectReference[] stores = getDataCenterDatastoreMors(mor);
                if(stores != null) {
                    for(ManagedObjectReference morStore : stores) {
                        // data store name property does not work for some reason;
                        // read the name off the "info" property instead
                        Object[] props = getProperties(morStore, new String[] {"info" });
                        System.out.println(morStore.getType() + ": " + ((DatastoreInfo)props[0]).getName());
                    }
                }
                System.out.println("Datacenter VMs");
                ManagedObjectReference[] vms = getDataCenterVMMors(mor);
                // FIX: this guard previously tested "stores != null" (copy/paste bug),
                // which could NPE here when vms is null while stores is not.
                if(vms != null) {
                    for(ManagedObjectReference morVm : vms) {
                        Object[] props = this.getProperties(morVm, new String[] {"name"});
                        System.out.println("VM name: " + props[0] + ", ref val: " + morVm.get_value());
                    }
                }
            }
        }
    } catch(RuntimeFault e) {
        e.printStackTrace();
    } catch(RemoteException e) {
        e.printStackTrace();
    }
}
/**
 * Recursively walks the folder hierarchy from the root folder and prints the name of
 * every managed entity found. Failures are reported to stderr; the method never throws.
 */
private void listInventoryFolders() {
    // Folder -> childEntity, referring back to itself by name for recursion.
    TraversalSpec folderTraversal = new TraversalSpec();
    folderTraversal.setName("folderTraversalSpec");
    folderTraversal.setType("Folder");
    folderTraversal.setPath("childEntity");
    folderTraversal.setSkip(new Boolean(false));
    folderTraversal.setSelectSet(
        new SelectionSpec[] { new SelectionSpec(null, null, "folderTraversalSpec") });
    // Fetch only the "name" property of each ManagedEntity.
    PropertySpec nameSpec = new PropertySpec();
    nameSpec.setAll(new Boolean(false));
    nameSpec.setPathSet(new String[] { "name" });
    nameSpec.setType("ManagedEntity");
    ObjectSpec rootSpec = new ObjectSpec();
    rootSpec.setObj(cb.getServiceConnection3().getRootFolder());
    rootSpec.setSkip(new Boolean(false));
    rootSpec.setSelectSet(new SelectionSpec[] { folderTraversal });
    PropertyFilterSpec filterSpec = new PropertyFilterSpec();
    filterSpec.setPropSet(new PropertySpec[] { nameSpec });
    filterSpec.setObjectSet(new ObjectSpec[] { rootSpec });
    try {
        ObjectContent[] contents = cb.getServiceConnection3().getService().retrieveProperties(
            cb.getServiceConnection3().getServiceContent().getPropertyCollector(),
            new PropertyFilterSpec[] { filterSpec });
        printContent(contents);
    } catch (InvalidProperty e) {
        e.printStackTrace();
    } catch (RuntimeFault e) {
        e.printStackTrace();
    } catch (RemoteException e) {
        e.printStackTrace();
    }
}
/**
 * Builds a TraversalSpec that follows Folder.childEntity and re-selects itself by name,
 * so a query using it descends through the entire folder subtree.
 */
private TraversalSpec getFolderRecursiveTraversalSpec() {
    String specName = "folder2childEntity";
    SelectionSpec recurseRef = new SelectionSpec();
    recurseRef.setName(specName);
    TraversalSpec folderTraversal = new TraversalSpec();
    folderTraversal.setName(specName);
    folderTraversal.setType("Folder");
    folderTraversal.setPath("childEntity");
    folderTraversal.setSelectSet(new SelectionSpec[] { recurseRef });
    return folderTraversal;
}
/**
 * Returns the MORs of all datacenters reachable from the root folder, or null when the
 * property collector returns nothing.
 */
private ManagedObjectReference[] getDataCenterMors() throws RuntimeFault, RemoteException {
    PropertySpec propSpec = new PropertySpec();
    propSpec.setType("Datacenter");
    propSpec.setPathSet(new String[] { "name" });
    ObjectSpec objSpec = new ObjectSpec();
    objSpec.setObj(cb.getServiceConnection3().getRootFolder());
    objSpec.setSkip(Boolean.TRUE);
    objSpec.setSelectSet(new SelectionSpec[] { getFolderRecursiveTraversalSpec() });
    PropertyFilterSpec filterSpec = new PropertyFilterSpec();
    filterSpec.setPropSet(new PropertySpec[] { propSpec });
    filterSpec.setObjectSet(new ObjectSpec[] { objSpec });
    ObjectContent[] contents = cb.getServiceConnection3().getService().retrieveProperties(
        cb.getServiceConnection3().getServiceContent().getPropertyCollector(),
        new PropertyFilterSpec[] { filterSpec });
    if (contents == null) {
        return null;
    }
    ManagedObjectReference[] morDatacenters = new ManagedObjectReference[contents.length];
    for (int i = 0; i < contents.length; i++) {
        morDatacenters[i] = contents[i].getObj();
    }
    return morDatacenters;
}
/**
 * Returns the MORs of all virtual machines under the given datacenter's vmFolder
 * subtree, or null when the property collector returns nothing.
 */
private ManagedObjectReference[] getDataCenterVMMors(ManagedObjectReference morDatacenter) throws RuntimeFault, RemoteException {
    PropertySpec propSpec = new PropertySpec();
    propSpec.setType("VirtualMachine");
    propSpec.setPathSet(new String[] { "name" });
    // Hop from the datacenter into its vmFolder, then recurse through sub-folders.
    TraversalSpec dcToVmFolder = new TraversalSpec();
    dcToVmFolder.setName("dc2VMFolder");
    dcToVmFolder.setType("Datacenter");
    dcToVmFolder.setPath("vmFolder");
    dcToVmFolder.setSelectSet(new SelectionSpec[] { getFolderRecursiveTraversalSpec() });
    ObjectSpec objSpec = new ObjectSpec();
    objSpec.setObj(morDatacenter);
    objSpec.setSkip(Boolean.TRUE);
    objSpec.setSelectSet(new SelectionSpec[] { dcToVmFolder });
    PropertyFilterSpec filterSpec = new PropertyFilterSpec();
    filterSpec.setPropSet(new PropertySpec[] { propSpec });
    filterSpec.setObjectSet(new ObjectSpec[] { objSpec });
    ObjectContent[] contents = cb.getServiceConnection3().getService().retrieveProperties(
        cb.getServiceConnection3().getServiceContent().getPropertyCollector(),
        new PropertyFilterSpec[] { filterSpec });
    if (contents == null) {
        return null;
    }
    ManagedObjectReference[] morVMs = new ManagedObjectReference[contents.length];
    for (int i = 0; i < contents.length; i++) {
        morVMs[i] = contents[i].getObj();
    }
    return morVMs;
}
/**
 * Returns the datastore MORs attached to the datacenter, read from its "datastore"
 * property, or null when the property lookup does not yield exactly one value.
 */
private ManagedObjectReference[] getDataCenterDatastoreMors(ManagedObjectReference morDatacenter) throws RuntimeFault, RemoteException {
    Object[] datastoreProps = getProperties(morDatacenter, new String[] { "datastore" });
    if (datastoreProps == null || datastoreProps.length != 1) {
        return null;
    }
    return ((ArrayOfManagedObjectReference) datastoreProps[0]).getManagedObjectReference();
}
/**
 * Returns the MORs of all ClusterComputeResources under the given datacenter's
 * hostFolder subtree, or null when the property collector returns nothing.
 */
private ManagedObjectReference[] getDataCenterClusterMors(ManagedObjectReference morDatacenter) throws RuntimeFault, RemoteException {
    PropertySpec propSpec = new PropertySpec();
    propSpec.setType("ClusterComputeResource");
    propSpec.setPathSet(new String[] { "name" });
    // Hop from the datacenter into its hostFolder, then recurse through sub-folders.
    TraversalSpec dcToHostFolder = new TraversalSpec();
    dcToHostFolder.setName("traversalHostFolder");
    dcToHostFolder.setType("Datacenter");
    dcToHostFolder.setPath("hostFolder");
    dcToHostFolder.setSkip(false);
    dcToHostFolder.setSelectSet(new SelectionSpec[] { getFolderRecursiveTraversalSpec() });
    ObjectSpec objSpec = new ObjectSpec();
    objSpec.setObj(morDatacenter);
    objSpec.setSkip(Boolean.TRUE);
    objSpec.setSelectSet(new SelectionSpec[] { dcToHostFolder });
    PropertyFilterSpec filterSpec = new PropertyFilterSpec();
    filterSpec.setPropSet(new PropertySpec[] { propSpec });
    filterSpec.setObjectSet(new ObjectSpec[] { objSpec });
    ObjectContent[] contents = cb.getServiceConnection3().getService().retrieveProperties(
        cb.getServiceConnection3().getServiceContent().getPropertyCollector(),
        new PropertyFilterSpec[] { filterSpec });
    if (contents == null) {
        return null;
    }
    // Renamed from the original's misleading "morDatacenters": these are clusters.
    ManagedObjectReference[] morClusters = new ManagedObjectReference[contents.length];
    for (int i = 0; i < contents.length; i++) {
        morClusters[i] = contents[i].getObj();
    }
    return morClusters;
}
/**
 * Returns the host MORs of standalone (non-clustered) compute resources in
 * the given datacenter. The property filter matches ComputeResource and its
 * subtypes, so results whose exact type is not "ComputeResource" (i.e.
 * ClusterComputeResource) are filtered out before collecting hosts.
 */
private ManagedObjectReference[] getDataCenterStandaloneHostMors(ManagedObjectReference morDatacenter) throws RuntimeFault, RemoteException {
    PropertySpec propSpec = new PropertySpec();
    propSpec.setType("ComputeResource");
    propSpec.setPathSet(new String[] { "name" });

    TraversalSpec hostFolderTraversal = new TraversalSpec();
    hostFolderTraversal.setName("traversalHostFolder");
    hostFolderTraversal.setType("Datacenter");
    hostFolderTraversal.setPath("hostFolder");
    hostFolderTraversal.setSkip(false);
    hostFolderTraversal.setSelectSet(new SelectionSpec[] { getFolderRecursiveTraversalSpec() });

    ObjectSpec objSpec = new ObjectSpec();
    objSpec.setObj(morDatacenter);
    objSpec.setSkip(Boolean.TRUE);
    objSpec.setSelectSet(new TraversalSpec[] { hostFolderTraversal });

    PropertyFilterSpec filterSpec = new PropertyFilterSpec();
    filterSpec.setPropSet(new PropertySpec[] { propSpec });
    filterSpec.setObjectSet(new ObjectSpec[] { objSpec });

    ObjectContent[] contents = cb.getServiceConnection3().getService().retrieveProperties(
        cb.getServiceConnection3().getServiceContent().getPropertyCollector(),
        new PropertyFilterSpec[] { filterSpec });
    if (contents == null) {
        return null;
    }
    // Keep only plain ComputeResource results (clusters report their own type).
    List<ManagedObjectReference> standaloneComputes = new ArrayList<ManagedObjectReference>();
    for (ObjectContent content : contents) {
        if (content.getObj().getType().equalsIgnoreCase("ComputeResource")) {
            standaloneComputes.add(content.getObj());
        }
    }
    // Expand each standalone compute resource into its host list.
    List<ManagedObjectReference> hosts = new ArrayList<ManagedObjectReference>();
    for (ManagedObjectReference compute : standaloneComputes) {
        ManagedObjectReference[] computeHosts = getComputeResourceHostMors(compute);
        if (computeHosts != null) {
            for (ManagedObjectReference host : computeHosts) {
                hosts.add(host);
            }
        }
    }
    return hosts.toArray(new ManagedObjectReference[0]);
}
/**
 * Returns the HostSystem MORs that belong to the given compute resource
 * (traverses the ComputeResource "host" property), or null when the query
 * yields no contents.
 */
private ManagedObjectReference[] getComputeResourceHostMors(ManagedObjectReference morCompute) throws RuntimeFault, RemoteException {
    PropertySpec propSpec = new PropertySpec();
    propSpec.setType("HostSystem");
    propSpec.setPathSet(new String[] { "name" });

    TraversalSpec computeToHost = new TraversalSpec();
    computeToHost.setName("computeResource2Host");
    computeToHost.setType("ComputeResource");
    computeToHost.setPath("host");
    computeToHost.setSkip(false);

    ObjectSpec objSpec = new ObjectSpec();
    objSpec.setObj(morCompute);
    objSpec.setSkip(true);
    objSpec.setSelectSet(new TraversalSpec[] { computeToHost });

    PropertyFilterSpec filterSpec = new PropertyFilterSpec();
    filterSpec.setPropSet(new PropertySpec[] { propSpec });
    filterSpec.setObjectSet(new ObjectSpec[] { objSpec });

    ObjectContent[] contents = cb.getServiceConnection3().getService().retrieveProperties(
        cb.getServiceConnection3().getServiceContent().getPropertyCollector(),
        new PropertyFilterSpec[] { filterSpec });
    if (contents == null) {
        return null;
    }
    ManagedObjectReference[] morHosts = new ManagedObjectReference[contents.length];
    for (int i = 0; i < contents.length; i++) {
        morHosts[i] = contents[i].getObj();
    }
    return morHosts;
}
/**
 * Returns the host MORs that belong to the given cluster.
 * ClusterComputeResource inherits from ComputeResource, so the generic
 * compute-resource-to-host traversal applies unchanged.
 */
private ManagedObjectReference[] getClusterHostMors(ManagedObjectReference morCluster) throws RuntimeFault, RemoteException {
    // ClusterComputeResource inherits from ComputeResource
    return getComputeResourceHostMors(morCluster);
}
/**
 * Retrieves the requested properties for every Datacenter reachable from the
 * root folder, recursing through nested folders via childEntity.
 * (Method name typo "Properites" retained for source compatibility.)
 */
private ObjectContent[] getDataCenterProperites(String[] properites) throws RuntimeFault, RemoteException {
    PropertySpec propSpec = new PropertySpec();
    propSpec.setType("Datacenter");
    propSpec.setPathSet(properites);

    // A self-referencing traversal: Folder -> childEntity, recursing by name.
    SelectionSpec recursionRef = new SelectionSpec();
    recursionRef.setName("folder2childEntity");
    TraversalSpec folderToChild = new TraversalSpec();
    folderToChild.setType("Folder");
    folderToChild.setPath("childEntity");
    folderToChild.setName(recursionRef.getName());
    folderToChild.setSelectSet(new SelectionSpec[] { recursionRef });

    ObjectSpec objSpec = new ObjectSpec();
    objSpec.setObj(cb.getServiceConnection3().getRootFolder());
    objSpec.setSkip(Boolean.TRUE);
    objSpec.setSelectSet(new SelectionSpec[] { folderToChild });

    PropertyFilterSpec filterSpec = new PropertyFilterSpec();
    filterSpec.setPropSet(new PropertySpec[] { propSpec });
    filterSpec.setObjectSet(new ObjectSpec[] { objSpec });

    return cb.getServiceConnection3().getService().retrieveProperties(
        cb.getServiceConnection3().getServiceContent().getPropertyCollector(),
        new PropertyFilterSpec[] { filterSpec });
}
/**
 * Dumps retrieved object contents to stdout: one "Object type" line per
 * object, then one line per property. Array-valued properties are printed
 * as a bracketed, comma-separated list; ManagedObjectReference elements are
 * rendered as "(type,value)" pairs.
 */
private void printContent(ObjectContent[] objContent) {
    if (objContent == null) {
        return;
    }
    for (ObjectContent oc : objContent) {
        ManagedObjectReference mor = oc.getObj();
        System.out.println("Object type: " + mor.getType());
        DynamicProperty[] objProps = oc.getPropSet();
        if (objProps == null) {
            continue;
        }
        for (DynamicProperty objProp : objProps) {
            Object val = objProp.getVal();
            // BUG FIX: the original tested objProp.getClass().isArray(), which is
            // always false for a DynamicProperty instance, so the array-printing
            // branch below was dead code. The check must apply to the VALUE.
            if (val != null && val.getClass().isArray()) {
                Object[] ipcary = (Object[]) val;
                System.out.print("\t" + objProp.getName() + "=[");
                int i = 0;
                for (Object item : ipcary) {
                    if (item.getClass().getName().indexOf("ManagedObjectReference") >= 0) {
                        ManagedObjectReference imor = (ManagedObjectReference) item;
                        System.out.print("(" + imor.getType() + "," + imor.get_value() + ")");
                    } else {
                        System.out.print(item);
                    }
                    if (i < ipcary.length - 1)
                        System.out.print(", ");
                    i++;
                }
                System.out.println("]");
            } else {
                System.out.println("\t" + objProp.getName() + "=" + val);
            }
        }
    }
}
/**
 * Fetches the properties named by the map's keys from the given managed
 * object and stores each returned value back into the map under its name.
 * Keys with no returned value are left untouched.
 */
private void getProperites(ManagedObjectReference mor, Map<String, Object> properties) throws RuntimeFault, RemoteException {
    PropertySpec propSpec = new PropertySpec();
    propSpec.setType(mor.getType());
    propSpec.setPathSet(properties.keySet().toArray(new String[0]));

    ObjectSpec objSpec = new ObjectSpec();
    objSpec.setObj(mor);

    PropertyFilterSpec filterSpec = new PropertyFilterSpec();
    filterSpec.setPropSet(new PropertySpec[] { propSpec });
    filterSpec.setObjectSet(new ObjectSpec[] { objSpec });

    ObjectContent[] contents = cb.getServiceConnection3().getService().retrieveProperties(
        cb.getServiceConnection3().getServiceContent().getPropertyCollector(),
        new PropertyFilterSpec[] { filterSpec });
    if (contents == null) {
        return;
    }
    for (ObjectContent content : contents) {
        DynamicProperty[] props = content.getPropSet();
        if (props == null) {
            continue;
        }
        for (DynamicProperty prop : props) {
            properties.put(prop.getName(), prop.getVal());
        }
    }
}
/**
 * Fetches the named properties from a managed object and returns their
 * values in an array aligned with the {@code properties} argument; entries
 * with no returned value stay null.
 */
private Object[] getProperties(ManagedObjectReference moRef, String[] properties) throws RuntimeFault, RemoteException {
    PropertySpec propSpec = new PropertySpec();
    propSpec.setType(moRef.getType());
    propSpec.setPathSet(properties);

    // The query starts (and ends) at the given object; no traversal needed.
    ObjectSpec objSpec = new ObjectSpec();
    objSpec.setObj(moRef);

    PropertyFilterSpec filterSpec = new PropertyFilterSpec();
    filterSpec.setPropSet(new PropertySpec[] { propSpec });
    filterSpec.setObjectSet(new ObjectSpec[] { objSpec });

    ObjectContent[] contents = cb.getServiceConnection3().getService().retrieveProperties(
        cb.getServiceConnection3().getServiceContent().getPropertyCollector(),
        new PropertyFilterSpec[] { filterSpec });

    Object[] values = new Object[properties.length];
    if (contents == null) {
        return values;
    }
    for (ObjectContent content : contents) {
        DynamicProperty[] props = content.getPropSet();
        if (props == null) {
            continue;
        }
        // Slot each returned value into the position of its requested name.
        for (DynamicProperty prop : props) {
            for (int p = 0; p < values.length; ++p) {
                if (properties[p].equals(prop.getName())) {
                    values[p] = prop.getVal();
                }
            }
        }
    }
    return values;
}
/**
 * Fires a power-on task for a hard-coded lab VM (vm-480) without waiting
 * for completion.
 */
private void powerOnVm() throws Exception {
    ManagedObjectReference vmRef = new ManagedObjectReference();
    vmRef.setType("VirtualMachine");
    vmRef.set_value("vm-480"); // hard-coded test VM id
    cb.getServiceConnection3().getService().powerOnVM_Task(vmRef, null);
}
/**
 * Fires a power-off task for a hard-coded lab VM (vm-66) without waiting
 * for completion.
 */
private void powerOffVm() throws Exception {
    ManagedObjectReference vmRef = new ManagedObjectReference();
    vmRef.setType("VirtualMachine");
    vmRef.set_value("vm-66"); // hard-coded test VM id
    cb.getServiceConnection3().getService().powerOffVM_Task(vmRef);
}
/**
 * Starts a snapshot task ("RunningSnapshotProg", no memory dump, no
 * quiesce) on a hard-coded lab VM (vm-66) without waiting for completion.
 */
private void createSnapshot() throws Exception {
    ManagedObjectReference vmRef = new ManagedObjectReference();
    vmRef.setType("VirtualMachine");
    vmRef.set_value("vm-66"); // hard-coded test VM id
    cb.getServiceConnection3().getService().createSnapshot_Task(vmRef, "RunningSnapshotProg", "", false, false);
}
/**
 * Registers the Fedora template's .vmtx from the NFS datastore into the
 * hard-coded folder group-v3 on host host-48, then waits for the task and
 * reports the outcome.
 */
private void registerTemplate() throws Exception {
    ManagedObjectReference morFolder = new ManagedObjectReference();
    morFolder.setType("Folder");
    morFolder.set_value("group-v3");
    ManagedObjectReference morHost = new ManagedObjectReference();
    morHost.setType("HostSystem");
    morHost.set_value("host-48");

    System.out.println("Begin registerVM_Task");
    ManagedObjectReference taskmor = cb.getServiceConnection3().getService().registerVM_Task(
        morFolder, "[NFS datastore] Template-Fedora/Template-Fedora.vmtx", "Template-Fedora", true,
        null, morHost);
    System.out.println("End registerVM_Task");

    // waitForTask reports success as "sucess" (sic); comparison is case-insensitive.
    String result = cb.getServiceUtil3().waitForTask(taskmor);
    System.out.println(result.equalsIgnoreCase("Sucess")
        ? "Registering The Virtual Machine ..........Done"
        : "Some Exception While Registering The VM");
}
/**
 * Clones the hard-coded template vm-76 into folder group-v3 as
 * "Fedora-clone-test", relocating it onto datastore-30 / host-48 /
 * resgroup-41, then waits for the clone task and reports the outcome.
 */
private void createVmFromTemplate() throws Exception {
    ManagedObjectReference morDatastore = new ManagedObjectReference();
    morDatastore.setType("Datastore");
    morDatastore.set_value("datastore-30");
    ManagedObjectReference morHost = new ManagedObjectReference();
    morHost.setType("HostSystem");
    morHost.set_value("host-48");
    ManagedObjectReference morPool = new ManagedObjectReference();
    morPool.setType("ResourcePool");
    morPool.set_value("resgroup-41");

    // Where the clone lands.
    VirtualMachineRelocateSpec relocSpec = new VirtualMachineRelocateSpec();
    relocSpec.setDatastore(morDatastore);
    relocSpec.setHost(morHost);
    relocSpec.setPool(morPool);

    // Clone as a regular, powered-off VM (not another template).
    VirtualMachineCloneSpec cloneSpec = new VirtualMachineCloneSpec();
    cloneSpec.setLocation(relocSpec);
    cloneSpec.setPowerOn(false);
    cloneSpec.setTemplate(false);

    ManagedObjectReference morTemplate = new ManagedObjectReference();
    morTemplate.setType("VirtualMachine");
    morTemplate.set_value("vm-76");
    ManagedObjectReference morFolder = new ManagedObjectReference();
    morFolder.setType("Folder");
    morFolder.set_value("group-v3");

    ManagedObjectReference cloneTask = cb.getServiceConnection3().getService().cloneVM_Task(
        morTemplate, morFolder, "Fedora-clone-test", cloneSpec);
    String status = cb.getServiceUtil3().waitForTask(cloneTask);
    if (status.equalsIgnoreCase("failure")) {
        System.out.println("Failure -: Virtual Machine cannot be cloned");
    }
    if (status.equalsIgnoreCase("sucess")) {
        System.out.println("Virtual Machine Cloned successfully.");
    }
}
/**
 * Adds a PCNet32 virtual NIC, backed by distributed virtual portgroup
 * dvportgroup-56, to the hard-coded VM vm-77 via reconfigVM_Task, then
 * waits for the task and reports the outcome.
 */
private void addNic() throws Exception {
    ManagedObjectReference morVm = new ManagedObjectReference();
    morVm.setType("VirtualMachine");
    morVm.set_value("vm-77");
    ManagedObjectReference morNetwork = new ManagedObjectReference();
    morNetwork.setType("DistributedVirtualPortgroup");
    morNetwork.set_value("dvportgroup-56");

    VirtualDeviceConfigSpec nicSpec = new VirtualDeviceConfigSpec();
    nicSpec.setOperation(VirtualDeviceConfigSpecOperation.add);
    VirtualEthernetCard nic = new VirtualPCNet32();
    VirtualEthernetCardNetworkBackingInfo nicBacking
        = new VirtualEthernetCardNetworkBackingInfo();
    nicBacking.setDeviceName("Adapter to dSwitch-vlan26");
    nicBacking.setNetwork(morNetwork);
    nic.setAddressType("generated"); // let vCenter assign the MAC
    nic.setBacking(nicBacking);
    nic.setKey(4);
    nicSpec.setDevice(nic);

    VirtualMachineConfigSpec vmConfigSpec = new VirtualMachineConfigSpec();
    vmConfigSpec.setDeviceChange(new VirtualDeviceConfigSpec[] { nicSpec });
    ManagedObjectReference tmor
        = cb.getServiceConnection3().getService().reconfigVM_Task(
            morVm, vmConfigSpec);
    String status = cb.getServiceUtil3().waitForTask(tmor);
    // BUG FIX: the status messages were copy-pasted from the clone scenario
    // ("cannot be cloned" / "Cloned successfully"); they now describe the
    // NIC reconfigure operation actually performed here.
    if (status.equalsIgnoreCase("failure")) {
        System.out.println("Failure -: NIC could not be added to the Virtual Machine");
    }
    if (status.equalsIgnoreCase("sucess")) {
        System.out.println("NIC added to the Virtual Machine successfully.");
    }
}
/**
 * Adds a VMkernel virtual NIC with a static IP (192.168.26.177/24) on the
 * network system of the hard-coded host host-48.
 *
 * NOTE(review): the NIC spec is configured with portgroup
 * "VM Network vlan26" while addVirtualNic is called with
 * "dvPortGroup-vlan26" — confirm which portgroup is actually intended.
 */
private void addNicToNetwork() throws Exception {
    ManagedObjectReference morHost = new ManagedObjectReference();
    morHost.setType("HostSystem");
    morHost.set_value("host-48");
    // Resolve the host's network system through its config manager.
    Object cmobj = cb.getServiceUtil3().getDynamicProperty(morHost, "configManager");
    HostConfigManager configMgr = (HostConfigManager) cmobj;
    ManagedObjectReference nwSystem = configMgr.getNetworkSystem();

    HostIpConfig ipConfig = new HostIpConfig();
    ipConfig.setDhcp(false);
    ipConfig.setIpAddress("192.168.26.177");
    ipConfig.setSubnetMask("255.255.255.0");
    HostVirtualNicSpec vNicSpec = new HostVirtualNicSpec();
    vNicSpec.setIp(ipConfig);
    vNicSpec.setPortgroup("VM Network vlan26");
    // Removed an unused HostPortGroupSpec local that was created but never used.
    cb.getServiceConnection3().getService().addVirtualNic(nwSystem,
        "dvPortGroup-vlan26", vNicSpec);
}
/**
 * Creates a datacenter named "cloud.dc.test" directly under the service's
 * root folder.
 */
private void createDatacenter() throws Exception {
    cb.getServiceConnection3().getService().createDatacenter(
        cb.getServiceConnection3().getRootFolder(),
        "cloud.dc.test");
}
/**
 * Demonstrates fetching a nested property by dotted path
 * ("config.virtualNicManagerInfo.netConfig") from a hard-coded host.
 * The result is intentionally unused; this method only exercises the call.
 */
private void getPropertyWithPath() throws Exception {
    ManagedObjectReference morHost = new ManagedObjectReference();
    morHost.setType("HostSystem");
    morHost.set_value("host-161"); // hard-coded test host
    VirtualNicManagerNetConfig[] netConfigs = (VirtualNicManagerNetConfig[])cb.getServiceUtil3().getDynamicProperty(morHost, "config.virtualNicManagerInfo.netConfig");
}
/**
 * Lists the VMs on the hard-coded host host-48 — name, power state and
 * template flag — and prints them via printContent.
 */
private void getHostVMs() throws Exception {
    ManagedObjectReference hostRef = new ManagedObjectReference();
    hostRef.setType("HostSystem");
    hostRef.set_value("host-48");

    PropertySpec propSpec = new PropertySpec();
    propSpec.setType("VirtualMachine");
    propSpec.setPathSet(new String[] { "name", "runtime.powerState", "config.template" });

    // Traverse from the host to its "vm" property.
    TraversalSpec hostToVm = new TraversalSpec();
    hostToVm.setType("HostSystem");
    hostToVm.setPath("vm");
    hostToVm.setName("host2VmTraversal");

    ObjectSpec objSpec = new ObjectSpec();
    objSpec.setObj(hostRef);
    objSpec.setSkip(Boolean.TRUE);
    objSpec.setSelectSet(new SelectionSpec[] { hostToVm });

    PropertyFilterSpec filterSpec = new PropertyFilterSpec();
    filterSpec.setPropSet(new PropertySpec[] { propSpec });
    filterSpec.setObjectSet(new ObjectSpec[] { objSpec });

    ObjectContent[] contents = cb.getServiceConnection3().getService().retrieveProperties(
        cb.getServiceConnection3().getServiceContent().getPropertyCollector(),
        new PropertyFilterSpec[] { filterSpec });
    this.printContent(contents);
}
/**
 * Creates a Fault Tolerance secondary VM for the hard-coded VM vm-480 on
 * host host-470 and waits for the task to finish.
 */
private void testFT() throws Exception {
    ManagedObjectReference morVm = new ManagedObjectReference();
    morVm.setType("VirtualMachine");
    morVm.set_value("vm-480");
    ManagedObjectReference morHost = new ManagedObjectReference();
    morHost.setType("HostSystem");
    morHost.set_value("host-470");
    System.out.println("Create secondary VM");
    ManagedObjectReference morTask = cb.getServiceConnection3().getService().createSecondaryVM_Task(morVm, morHost);
    String result = cb.getServiceUtil3().waitForTask(morTask);
    // Fixed typo in the log message ("resutl" -> "result").
    System.out.println("Create secondary VM result : " + result);
}
/**
 * Enables Fault Tolerance by re-enabling the hard-coded secondary VM
 * vm-485 of primary vm-480 on host host-470, then waits for the task.
 */
private void testFTEnable() throws Exception {
    ManagedObjectReference morVm = new ManagedObjectReference();
    morVm.setType("VirtualMachine");
    morVm.set_value("vm-480");
    ManagedObjectReference morHost = new ManagedObjectReference();
    morHost.setType("HostSystem");
    morHost.set_value("host-470");
    ManagedObjectReference morSecondaryVm = new ManagedObjectReference();
    morSecondaryVm.setType("VirtualMachine");
    morSecondaryVm.set_value("vm-485");
    System.out.println("Enable FT");
    ManagedObjectReference morTask = cb.getServiceConnection3().getService().enableSecondaryVM_Task(morVm,
        morSecondaryVm, morHost);
    String result = cb.getServiceUtil3().waitForTask(morTask);
    // Fixed typo in the log message ("resutl" -> "result").
    System.out.println("Enable FT result : " + result);
}
/**
 * Deploys an OVF package from the local disk via the HTTP NFC lease flow:
 * 1. builds an ImportSpec from the on-disk OVF descriptor,
 * 2. acquires an NFC lease via importVApp and polls until it is ready,
 * 3. uploads each referenced VMDK to the URL the lease hands out,
 * 4. reports 100% progress and completes the lease.
 * All MOR ids and file paths are hard-coded for the lab setup.
 */
private void importOVF() throws Exception {
    ManagedObjectReference morHost = new ManagedObjectReference();
    morHost.setType("HostSystem");
    morHost.set_value("host-223");
    ManagedObjectReference morRp = new ManagedObjectReference();
    morRp.setType("ResourcePool");
    morRp.set_value("resgroup-222");
    ManagedObjectReference morDs = new ManagedObjectReference();
    morDs.setType("Datastore");
    morDs.set_value("datastore-30");
    ManagedObjectReference morVmFolder = new ManagedObjectReference();
    morVmFolder.setType("Folder");
    morVmFolder.set_value("group-v3");
    ManagedObjectReference morNetwork = new ManagedObjectReference();
    morNetwork.setType("Network");
    morNetwork.set_value("network-32");
    ManagedObjectReference morOvf = cb.getServiceConnection3().getServiceContent().getOvfManager();

    OvfCreateImportSpecParams importSpecParams = new OvfCreateImportSpecParams();
    importSpecParams.setHostSystem(morHost);
    importSpecParams.setLocale("US");
    importSpecParams.setEntityName("winxpsp3-ovf-deployed");
    importSpecParams.setDeploymentOption("");
    importSpecParams.setDiskProvisioning("thin");
    /*
    OvfNetworkMapping networkMapping = new OvfNetworkMapping();
    networkMapping.setName("VM Network");
    networkMapping.setNetwork(morNetwork); // network);
    importSpecParams.setNetworkMapping(new OvfNetworkMapping[] { networkMapping });
    */
    importSpecParams.setPropertyMapping(null);

    String ovfDescriptor = readOvfContent("C:\\research\\vmware\\winxpsp3-ovf\\winxpsp3-ovf.ovf");
    OvfCreateImportSpecResult ovfImportResult = cb.getServiceConnection3().getService().createImportSpec(
        morOvf, ovfDescriptor, morRp, morDs, importSpecParams);
    if (ovfImportResult == null) {
        return;
    }
    long totalBytes = addTotalBytes(ovfImportResult);
    ManagedObjectReference morLease = cb.getServiceConnection3().getService().importVApp(morRp,
        ovfImportResult.getImportSpec(), morVmFolder, morHost);

    // Poll the lease until it is ready or errors out.
    // BUG FIX: the original polled in a tight busy loop with no delay,
    // hammering the server; sleep briefly between polls.
    HttpNfcLeaseState state;
    for (;;) {
        state = (HttpNfcLeaseState) cb.getServiceUtil3().getDynamicProperty(morLease, "state");
        if (state == HttpNfcLeaseState.ready || state == HttpNfcLeaseState.error)
            break;
        Thread.sleep(100);
    }
    if (state != HttpNfcLeaseState.ready) {
        return;
    }
    HttpNfcLeaseInfo httpNfcLeaseInfo = (HttpNfcLeaseInfo) cb.getServiceUtil3().getDynamicProperty(morLease, "info");
    HttpNfcLeaseDeviceUrl[] deviceUrls = httpNfcLeaseInfo.getDeviceUrl();
    OvfFileItem[] fileItems = ovfImportResult.getFileItem();
    long bytesAlreadyWritten = 0;
    // Guard against null arrays (the original NPE'd when getFileItem() was null).
    if (deviceUrls != null && fileItems != null) {
        for (HttpNfcLeaseDeviceUrl deviceUrl : deviceUrls) {
            String deviceKey = deviceUrl.getImportKey();
            for (OvfFileItem ovfFileItem : fileItems) {
                if (deviceKey.equals(ovfFileItem.getDeviceId())) {
                    System.out.println("Import key==OvfFileItem device id: " + deviceKey);
                    System.out.println("device URL: " + deviceUrl.getUrl());
                    String absoluteFile = "C:\\research\\vmware\\winxpsp3-ovf\\" + ovfFileItem.getPath();
                    // The lease URL contains a "*" placeholder for the host name.
                    String urlToPost = deviceUrl.getUrl().replace("*", "esxhost-1.lab.vmops.com");
                    uploadVmdkFile(ovfFileItem.isCreate(), absoluteFile, urlToPost, bytesAlreadyWritten, totalBytes);
                    bytesAlreadyWritten += ovfFileItem.getSize();
                    System.out.println("Completed uploading the VMDK file:" + absoluteFile);
                }
            }
        }
    }
    cb.getServiceConnection3().getService().httpNfcLeaseProgress(morLease, 100);
    cb.getServiceConnection3().getService().httpNfcLeaseComplete(morLease);
}
/**
 * Streams a local VMDK file to the HTTPS URL handed out by an NFC lease,
 * using chunked transfer encoding.
 *
 * @param put                 true to PUT (overwrite existing device), false to POST (create)
 * @param diskFilePath        local path of the VMDK to upload
 * @param urlStr              lease device URL to upload to
 * @param bytesAlreadyWritten bytes uploaded by previous files (progress bookkeeping only)
 * @param totalBytes          total bytes across all files (progress bookkeeping only)
 */
private static void uploadVmdkFile(boolean put, String diskFilePath, String urlStr, long bytesAlreadyWritten, long totalBytes) throws IOException {
    // Test harness: accept any server host name on the TLS connection.
    HttpsURLConnection.setDefaultHostnameVerifier(new HostnameVerifier() {
        public boolean verify(String urlHostName, SSLSession session) {
            return true;
        }
    });
    HttpsURLConnection conn = (HttpsURLConnection) new URL(urlStr).openConnection();
    conn.setDoOutput(true);
    conn.setUseCaches(false);
    final int CHUNK_LEN = 64 * 1024;
    conn.setChunkedStreamingMode(CHUNK_LEN);
    conn.setRequestMethod(put ? "PUT" : "POST"); // PUT overwrites, POST creates
    conn.setRequestProperty("Connection", "Keep-Alive");
    conn.setRequestProperty("Content-Type", "application/x-vnd.vmware-streamVmdk");
    conn.setRequestProperty("Content-Length", Long.toString(new File(diskFilePath).length()));
    BufferedOutputStream bos = null;
    BufferedInputStream diskis = null;
    try {
        bos = new BufferedOutputStream(conn.getOutputStream());
        diskis = new BufferedInputStream(new FileInputStream(diskFilePath));
        byte[] buffer = new byte[CHUNK_LEN];
        long totalBytesWritten = 0;
        while (true) {
            int bytesRead = diskis.read(buffer, 0, buffer.length);
            if (bytesRead == -1) {
                System.out.println("Total bytes written: " + totalBytesWritten);
                break;
            }
            totalBytesWritten += bytesRead;
            // BUG FIX: the original wrote a fixed bufferSize bytes regardless of
            // how many were actually read, corrupting the upload on short reads.
            bos.write(buffer, 0, bytesRead);
            bos.flush();
            System.out.println("Total bytes written: " + totalBytesWritten);
            /*
            int progressPercent = (int) (((bytesAlreadyWritten + totalBytesWritten) * 100) / totalBytes);
            leaseUpdater.setPercent(progressPercent);
            */
        }
        bos.flush();
    } finally {
        // Close streams even on exception (the original leaked them on failure).
        if (diskis != null) {
            try { diskis.close(); } catch (IOException ignored) { /* best effort */ }
        }
        if (bos != null) {
            try { bos.close(); } catch (IOException ignored) { /* best effort */ }
        }
        conn.disconnect();
    }
}
/**
 * Sums the sizes of all file items in an import-spec result, printing each
 * item as a side effect. Returns 0 when there are no file items.
 */
public static long addTotalBytes(OvfCreateImportSpecResult ovfImportResult) {
    long totalBytes = 0;
    OvfFileItem[] items = ovfImportResult.getFileItem();
    if (items == null) {
        return totalBytes;
    }
    for (OvfFileItem item : items) {
        printOvfFileItem(item);
        totalBytes += item.getSize();
    }
    return totalBytes;
}
/** Prints one OVF file item's fields to stdout in a fixed banner format. */
private static void printOvfFileItem(OvfFileItem fi) {
    System.out.println("================ OvfFileItem ================");
    System.out.println("chunkSize: " + fi.getChunkSize());
    System.out.println("create: " + fi.isCreate());
    System.out.println("deviceId: " + fi.getDeviceId());
    System.out.println("path: " + fi.getPath());
    System.out.println("size: " + fi.getSize());
    System.out.println("==============================================");
}
/**
 * Reads the OVF descriptor at the given path into a single string.
 *
 * NOTE(review): readLine() strips line terminators and they are not
 * re-appended, so the returned descriptor is the file's lines concatenated
 * without newlines — preserved here for backward compatibility. The file
 * is read with the platform default charset, as before.
 *
 * @param ovfFilePath local path of the .ovf descriptor
 * @return the descriptor contents, newline-stripped
 * @throws IOException if the file cannot be opened or read
 */
public static String readOvfContent(String ovfFilePath) throws IOException {
    StringBuilder strContent = new StringBuilder();
    BufferedReader in = new BufferedReader(new InputStreamReader(new FileInputStream(ovfFilePath)));
    try {
        String lineStr;
        while ((lineStr = in.readLine()) != null) {
            strContent.append(lineStr);
        }
    } finally {
        // Close even when readLine throws (the original leaked the reader).
        in.close();
    }
    return strContent.toString();
}
/**
 * Escapes '&lt;' and '&gt;' as XML entities in the given string.
 * '&amp;' and '"' are deliberately left unescaped.
 */
public static String escapeSpecialChars(String str) {
    // BUG FIX: as committed, the method replaced "<" with "<" and ">" with
    // ">" — a no-op (the entity replacement text had been lost). Restore
    // the intended &lt;/&gt; entity substitutions.
    str = str.replaceAll("<", "&lt;");
    return str.replaceAll(">", "&gt;"); // do not escape "&" or "\""
}
/**
 * Entry point. args[0]=vCenter/ESX host, args[1]=username, args[2]=password.
 * Connects to the SDK endpoint, runs the selected scenario (importOVF by
 * default; alternatives are listed commented out), then disconnects.
 */
public static void main(String[] args) throws Exception {
    setupLog4j();
    TestVMWare client = new TestVMWare();
    // Skip server certificate validation (test harness only).
    System.setProperty("axis.socketSecureFactory", "org.apache.axis.components.net.SunFakeTrustSocketFactory");
    String serviceUrl = "https://" + args[0] + "/sdk/vimService";
    try {
        String[] params = new String[] { "--url", serviceUrl, "--username", args[1], "--password", args[2] };
        cb = ExtendedAppUtil.initialize("Connect", params);
        cb.connect();
        System.out.println("Connection Succesful.");
        // Scenarios that can be enabled instead of importOVF():
        // client.listInventoryFolders();
        // client.listDataCenters();
        // client.powerOnVm();
        // client.createSnapshot();
        // client.registerTemplate();
        // client.createVmFromTemplate();
        // client.addNic();
        // client.addNicToNetwork();
        // client.createDatacenter();
        // client.getPropertyWithPath();
        // client.getHostVMs();
        // client.testFT();
        // client.testFTEnable();
        client.importOVF();
        cb.disConnect();
    } catch (Exception e) {
        e.printStackTrace();
    }
}
/**
 * Trust manager that accepts every client and server certificate.
 *
 * SECURITY NOTE(review): this disables all TLS certificate validation and
 * must never be used outside lab/test code.
 */
public static class TrustAllManager implements javax.net.ssl.TrustManager, javax.net.ssl.X509TrustManager {
    /** No trusted issuers are advertised. */
    public java.security.cert.X509Certificate[] getAcceptedIssuers() {
        return null;
    }
    /** Always reports the server chain as trusted. */
    public boolean isServerTrusted(java.security.cert.X509Certificate[] certs) {
        return true;
    }
    /** Always reports the client chain as trusted. */
    public boolean isClientTrusted(java.security.cert.X509Certificate[] certs) {
        return true;
    }
    /** Accepts any server certificate chain without checks. */
    public void checkServerTrusted(java.security.cert.X509Certificate[] certs, String authType)
        throws java.security.cert.CertificateException {
        return;
    }
    /** Accepts any client certificate chain without checks. */
    public void checkClientTrusted(java.security.cert.X509Certificate[] certs, String authType)
        throws java.security.cert.CertificateException {
        return;
    }
}
}
| |
package org.usfirst.frc3824.BetaBot;
import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard;
/**
 * Tuning constants for the FRC team 3824 BetaBot.
 *
 * Two physical robots share this code base; {@link #IS_COMP_ROBOT} selects
 * which set of measured calibration values {@link #InitConstants()} loads
 * ("Thing 1" = competition robot, "Thing 2" = practice robot). All other
 * constants are shared between the two robots.
 *
 * NOTE(review): several public names carry misspellings ("BOUDLER" for
 * BOULDER, "TERRIAN" for TERRAIN); they are kept unchanged so existing
 * references keep compiling.
 */
public class Constants
{
    // Set the robot configuration (competition or practice)
    // Thing 1 - true (Competition)
    // Thing 2 - false
    static final boolean IS_COMP_ROBOT = true;

    // Per-robot calibration values; assigned in InitConstants().
    // Quadratic fit coefficients relating shooter elevation angle and
    // potentiometer reading (A, B, C terms of the fit).
    public static double SHOOTER_ELEVATION_ANGLE_A;
    public static double SHOOTER_ELEVATION_ANGLE_B;
    public static double SHOOTER_ELEVATION_ANGLE_C;
    public static double SHOOTER_ELEVATION_POT_A;
    public static double SHOOTER_ELEVATION_POT_B;
    public static double SHOOTER_ELEVATION_POT_C;
    // Boulder intake measured positions (left/right, retracted/extended).
    public static double BOUDLER_INTAKE_RETRACTED_RIGHT;
    public static double BOUDLER_INTAKE_RETRACTED_LEFT;
    public static double BOUDLER_INTAKE_EXTENDED_RIGHT;
    public static double BOUDLER_INTAKE_EXTENDED_LEFT;

    /**
     * Loads the per-robot calibration values selected by IS_COMP_ROBOT and
     * reports the active robot on the SmartDashboard.
     */
    public static void InitConstants()
    {
        if (IS_COMP_ROBOT) // Thing 1
        {
            SmartDashboard.putString("Active Robot Values", "Thing 1");
            SHOOTER_ELEVATION_ANGLE_A = 66.893;
            SHOOTER_ELEVATION_ANGLE_B = 46.043;
            SHOOTER_ELEVATION_ANGLE_C = -17.92;
            SHOOTER_ELEVATION_POT_A = -0.00005;
            SHOOTER_ELEVATION_POT_B = 0.0115;
            SHOOTER_ELEVATION_POT_C = 0.2713;
            BOUDLER_INTAKE_RETRACTED_RIGHT = 0.524;
            BOUDLER_INTAKE_RETRACTED_LEFT = 0.021;
            BOUDLER_INTAKE_EXTENDED_RIGHT = 4.214;
            BOUDLER_INTAKE_EXTENDED_LEFT = 4.474;
        }
        else // Thing 2
        {
            SmartDashboard.putString("Active Robot Values", "Thing 2");
            SHOOTER_ELEVATION_ANGLE_A = +49.75;
            SHOOTER_ELEVATION_ANGLE_B = +65.00;
            SHOOTER_ELEVATION_ANGLE_C = -15.00;
            SHOOTER_ELEVATION_POT_A = -0.00004;
            SHOOTER_ELEVATION_POT_B = +0.0114;
            SHOOTER_ELEVATION_POT_C = +0.1949;
            BOUDLER_INTAKE_RETRACTED_RIGHT = 0.234;
            BOUDLER_INTAKE_RETRACTED_LEFT = 0.078;
            BOUDLER_INTAKE_EXTENDED_RIGHT = 3.76;
            BOUDLER_INTAKE_EXTENDED_LEFT = 3.84;
        }
    }

    // ***************************************************************************************
    // Autonomous parameters
    // Defense selection ids used by the autonomous chooser.
    public static final int DEFENSE_DO_NOTHING = 0;
    public static final int DEFENSE_LOW_BAR = 1;
    public static final int DEFENSE_ROUGH_TERRIAN = 2;
    public static final int DEFENSE_RAMPARTS = 3;
    public static final int DEFENSE_ROCK_WALL = 4;
    public static final int DEFENSE_MOAT = 5;
    public static final int DEFENSE_CHEVAL_DE_FRISE = 6;
    public static final int DEFENSE_PORTCULLIS = 7;
    // Field starting-position ids.
    public static int STARTING_POSITION_SPY = 1;
    public static int STARTING_POSITION_2 = 2;
    public static int STARTING_POSITION_3 = 3;
    public static int STARTING_POSITION_4 = 4;
    public static int STARTING_POSITION_5 = 5;
    public static int STARTING_POSITION_6 = 6;
    // Autonomous goal selection.
    public static int NO_GOAL = 0;
    // public static int LOW_GOAL = 1;
    public static int HIGH_GOAL = 2;
    public static int SHOT_RETURN = 3;

    // ***************************************************************************************
    // Drive train turn constants
    // PID gains and output clamps for driving straight.
    public static double DRIVETRAIN_DRIVE_STRAIGHT_P = 0.1;
    public static double DRIVETRAIN_DRIVE_STRAIGHT_I = 0.0;
    public static double DRIVETRAIN_DRIVE_STRAIGHT_D = 0.0;
    public static double DRIVETRAIN_DRIVE_MINIMUM_OUTPUT = -1.0;
    public static double DRIVETRAIN_DRIVE_MAXIMUM_OUTPUT = 1.0;

    // ***************************************************************************************
    // Chassis turn constants
    // PID gains, output clamps and on-target threshold for turning in place.
    public static double TURN_ANGLE_P = 0.05;
    public static double TURN_ANGLE_I = 0.0;
    public static double TURN_ANGLE_D = 0.0;
    public static double TURN_ANGLE_MINIMUM_OUTPUT = -0.9;
    public static double TURN_ANGLE_MAXIMUM_OUTPUT = 0.9;
    public static double TURN_THRESHOLD = 5.0;

    // ***************************************************************************************
    // Image turn constants
    // PID gains and clamps for vision-guided turning, plus camera geometry.
    public static double IMAGE_TURN_P = 0.07;
    public static double IMAGE_TURN_I = 0.004;
    public static double IMAGE_TURN_D = 0.008;
    public static double IMAGE_TURN_MINIMUM_OUTPUT = -0.4;
    public static double IMAGE_TURN_MAXIMUM_OUTPUT = 0.4;
    public static int IMAGE_WIDTH = 320; // default image width
    public static int IMAGE_HEIGHT = 240; // default image height
    public static double CAM_FOV = 48.0;
    // Target selection ids.
    public static int TARGET_LEFT = 0;
    public static int TARGET_CENTER = 1;
    public static int TARGET_RIGHT = 2;

    // ***************************************************************************************
    // Shooter Elevation constants
    public static double SHOOTER_ELEVATION_SETPOINT_MIN = 10.0;
    public static double SHOOTER_ELEVATION_SETPOINT_MAX = 60.0;
    public static double SHOOTER_ELEVATION_BOULDER_INTAKE = -7.0;
    public static double SHOOTER_ELEVATION_HOME = -4.7;
    public static double SHOOTER_ELEVATION_POSITION1 = 43.5;
    public static double SHOOTER_ELEVATION_POSITION2 = 18.0;
    public static double SHOOTER_ELEVATION_POSITION3 = 65.0;
    public static double SHOOTER_ELEVATION_POSITION4 = 69.0;
    public static double SHOOTER_ELEVATION_POSITION5 = 45.0;
    // Joystick/console button codes (encoded as negative hundreds).
    public static double SHOOTER_JOG_UP_BUTTON = -1100.0; // Button 11
    public static double SHOOTER_JOG_DOWN_BUTTON = -1000.0; // Button 10
    public static double SHOOTER_BOULDER_INTAKE_BUTTON = -1800.0; // Button 18
    public static double SHOOTER_HOME_BUTTON = -200.0; // Button 2
    public static double SHOOTER_SHOOT_1_BUTTON = -1700.0; // Button 17
    public static double SHOOTER_SHOOT_2_BUTTON = -300.0; // Button 3
    public static double SHOOTER_SHOOT_3_BUTTON = -400.0; // Button 4
    public static double SHOOTER_SHOOT_4_BUTTON = -500.0; // Button 5
    public static double SHOOTER_SHOOT_5_BUTTON = -100.0; // Button 1

    // ***************************************************************************************
    // Chassis Turn Jog constants
    public static double JOG_TURN_WATCHDOG_TIME = 1.0;
    public static double JOG_TURN_WHEEL_POWER = 0.3;
    public static double JOG_TURN_ENCODER_TURN_VALUE = 15; // integer

    // ***************************************************************************************
    // Autonomous constants
    public static double AUTONOMOUS_LIDAR_RANGE_SHOOTER_ANGLE = 32.0;
    public static double AUTONOMOUS_LIDAR_DISTANCE_TO_TARGET = 240.0;

    // ***************************************************************************************
    // Automated aim and shoot constants
    public static double IMAGE_SHOOTER_WHEEL_SPINUP_TIME = 1.5;
    // Pixel-offset bands and the corresponding step sizes for the Y axis.
    public static int IMAGE_LARGE_PIXEL_OFFSET_Y = 40;
    public static int IMAGE_MEDIUM_PIXEL_OFFSET_Y = 20;
    public static int IMAGE_SMALL_PIXEL_OFFSET_Y = 0;
    public static double IMAGE_LARGE_STEP_ANGLE_Y = 2.0;
    public static double IMAGE_MEDIUM_STEP_ANGLE_Y = 1.0;
    public static double IMAGE_SMALL_STEP_ANGLE_Y = 0.1;
    // Pixel-offset bands and the corresponding step sizes for the X axis.
    public static int IMAGE_LARGE_PIXEL_OFFSET_X = 40;
    public static int IMAGE_MEDIUM_PIXEL_OFFSET_X = 20;
    public static int IMAGE_SMALL_PIXEL_OFFSET_X = 0;
    public static double IMAGE_LARGE_STEP_ANGLE_X = 1.50; // mm
    public static double IMAGE_MEDIUM_STEP_ANGLE_X = 0.50; // mm
    public static double IMAGE_SMALL_STEP_ANGLE_X = 0.15; // mm
    // PID gains and input/output clamps for encoder-based angle control.
    public static double IMAGE_ANGLE_ENCODER_P = 0.3;
    public static double IMAGE_ANGLE_ENCODER_I = 0.0005;
    public static double IMAGE_ANGLE_ENCODER_D = 0.0;
    public static double IMAGE_ANGLE_MINIMUM_INPUT = -1000.0;
    public static double IMAGE_ANGLE_MAXIMUM_INPUT = 1000.0;
    public static double IMAGE_ANGLE_MINIMUM_OUTPUT = -0.4;
    public static double IMAGE_ANGLE_MAXIMUM_OUTPUT = 0.4;
    public static int IMAGE_TURN_TO_TARGET_X = 20;
    public static int IMAGE_TURN_TO_TARGET_Y = 50;
    public static double IMAGE_SEARCH_MIN_SHOOTER_POSITION = 30;
    public static double IMAGE_SEARCH_MAX_SHOOTER_POSITION = 60;
    // Competition
    // On-target pixel tolerances by distance band (trailing comments record
    // previously tried values).
    public static int IMAGE_ON_TARGET_X_FAR = 3; // 4 // 4 // 2 // 2 // 1
    public static int IMAGE_ON_TARGET_Y_FAR = 6; // 8 // 10 // 8 // 6 // 2
    public static double IMAGE_DISTANCE_MEDIUM = 275;
    public static int IMAGE_ON_TARGET_X_MEDIUM = 4; // 6 // 6 // 5 // 4 // 2
    public static int IMAGE_ON_TARGET_Y_MEDIUM = 8; // 10 // 14 // 12 // 10 // 4
    public static double IMAGE_DISTANCE_CLOSE = 200;
    public static int IMAGE_ON_TARGET_X_CLOSE = 6; // 8 // 8 // 7 // 6 // 3
    public static int IMAGE_ON_TARGET_Y_CLOSE = 10; // 12 // 18 // 15 // 10 // 6
    // Alternate (tighter) tolerances kept for reference:
    //public static int IMAGE_ON_TARGET_X_FAR = 1;
    //public static int IMAGE_ON_TARGET_Y_FAR = 2;
    //
    //public static double IMAGE_DISTANCE_MEDIUM = 275;
    //public static int IMAGE_ON_TARGET_X_MEDIUM = 1;
    //public static int IMAGE_ON_TARGET_Y_MEDIUM = 2;
    //
    //public static double IMAGE_DISTANCE_CLOSE = 200;
    //public static int IMAGE_ON_TARGET_X_CLOSE = 1;
    //public static int IMAGE_ON_TARGET_Y_CLOSE = 2;
    // Quadratic fit for distance estimation (A*x^2 + B*x + C).
    public static double DISTANCE_A = 0.0839; // 0.0838; // x^2
    public static double DISTANCE_B = -16.762; // -16.665; // x
    public static double DISTANCE_C = 967.84; // 952.25; // offset
    // y = 0.0003x2 - 0.0462x + 183.46
    public static double IMAGE_Y_A = 0.0003; // 0.0001; //0.00005; // 0.0001; // x^2
    public static double IMAGE_Y_B = -0.0462; // 0.0534; //0.1887; // 0.0534; // x
    public static double IMAGE_Y_C = 183.46; // 172.81; //141.11; // 172.81; // offset
    // Thing 1 // Thing 2 // Thing 1
    public static int IMAGE_ON_TARGET_X_POSITION = 170; // 170; // X location of the "onTarget" position
    public static int IMAGE_ON_TARGET_Y_OFFSET = -25; // -25;

    // ***************************************************************************************
    // Chassis Auto Defense Drive
    public static double AUTO_DEFENSE_DRIVE_CLOSE_RANGE = 50.0; // inches
    public static double AUTO_DEFENSE_DRIVE_NUDGE = 1.0; // degree

    // ***************************************************************************************
    // Climber
    public static double CLIMBER_LEFT_WINCH_SPEED = 1.0;
    public static double CLIMBER_RIGHT_WINCH_SPEED = 1.0;
}
| |
package net.floodlightcontroller.statistics;
import com.google.common.primitives.UnsignedLong;
import com.google.common.util.concurrent.ListenableFuture;
import net.floodlightcontroller.core.IOFSwitch;
import net.floodlightcontroller.core.internal.IOFSwitchService;
import net.floodlightcontroller.core.module.FloodlightModuleContext;
import net.floodlightcontroller.core.module.FloodlightModuleException;
import net.floodlightcontroller.core.module.IFloodlightModule;
import net.floodlightcontroller.core.module.IFloodlightService;
import net.floodlightcontroller.core.types.NodePortTuple;
import net.floodlightcontroller.restserver.IRestApiService;
import net.floodlightcontroller.statistics.web.SwitchStatisticsWebRoutable;
import net.floodlightcontroller.threadpool.IThreadPoolService;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.match.Match;
import org.projectfloodlight.openflow.protocol.ver13.OFMeterSerializerVer13;
import org.projectfloodlight.openflow.types.DatapathId;
import org.projectfloodlight.openflow.types.OFPort;
import org.projectfloodlight.openflow.types.TableId;
import org.projectfloodlight.openflow.types.U64;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.lang.Thread.State;
import java.util.*;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
public class StatisticsCollector implements IFloodlightModule, IStatisticsService {
    private static final Logger log = LoggerFactory.getLogger(StatisticsCollector.class);

    private static IOFSwitchService switchService;
    private static IThreadPoolService threadPoolService;
    private static IRestApiService restApiService;

    private static boolean isEnabled = false;

    private static int portStatsInterval = 10; /* could be set by REST API, so not final */
    private static ScheduledFuture<?> portStatsCollector;

    private static final long BITS_PER_BYTE = 8;
    private static final long MILLIS_PER_SEC = 1000;

    private static final String INTERVAL_PORT_STATS_STR = "collectionIntervalPortStatsSeconds";
    private static final String ENABLED_STR = "enable";

    /*
     * FIX: these maps are written by the scheduled collector thread and read
     * concurrently by REST API threads via getBandwidthConsumption(). A plain
     * HashMap gives no safe publication between those threads, so use
     * ConcurrentHashMap. All writes still happen on the single collector
     * thread, so the non-atomic containsKey/get/remove sequences below are
     * still safe.
     */
    private static final Map<NodePortTuple, SwitchPortBandwidth> portStats = new ConcurrentHashMap<NodePortTuple, SwitchPortBandwidth>();
    private static final Map<NodePortTuple, SwitchPortBandwidth> tentativePortStats = new ConcurrentHashMap<NodePortTuple, SwitchPortBandwidth>();

    /**
     * Run periodically to collect all port statistics. This only collects
     * bandwidth stats right now, but it could be expanded to record other
     * information as well. The difference between the most recent and the
     * current RX/TX bytes is used to determine the "elapsed" bytes. A
     * timestamp is saved each time stats results are saved to compute the
     * bits per second over the elapsed time. There isn't a better way to
     * compute the precise bandwidth unless the switch were to include a
     * timestamp in the stats reply message, which would be nice but isn't
     * likely to happen. It would be even better if the switch recorded
     * bandwidth and reported bandwidth directly.
     *
     * Stats are not reported unless at least two iterations have occurred
     * for a single switch's reply. This must happen to compare the byte
     * counts and to get an elapsed time.
     *
     * @author Ryan Izard, ryan.izard@bigswitch.com, rizard@g.clemson.edu
     *
     */
    protected class PortStatsCollector implements Runnable {

        @Override
        public void run() {
            Map<DatapathId, List<OFStatsReply>> replies = getSwitchStatistics(switchService.getAllSwitchDpids(), OFStatsType.PORT);
            for (Entry<DatapathId, List<OFStatsReply>> e : replies.entrySet()) {
                /*
                 * FIX: a switch that disconnected or timed out produces a null
                 * reply list (see getSwitchStatistics(DatapathId, ...)), which
                 * previously caused an NPE that killed this scheduled task.
                 */
                if (e.getValue() == null) {
                    log.debug("No port stats reply from switch {}; skipping", e.getKey());
                    continue;
                }
                for (OFStatsReply r : e.getValue()) {
                    OFPortStatsReply psr = (OFPortStatsReply) r;
                    for (OFPortStatsEntry pse : psr.getEntries()) {
                        NodePortTuple npt = new NodePortTuple(e.getKey(), pse.getPortNo());
                        SwitchPortBandwidth spb;
                        if (portStats.containsKey(npt) || tentativePortStats.containsKey(npt)) {
                            if (portStats.containsKey(npt)) { /* update */
                                spb = portStats.get(npt);
                            } else if (tentativePortStats.containsKey(npt)) { /* finish */
                                spb = tentativePortStats.get(npt);
                                tentativePortStats.remove(npt);
                            } else {
                                log.error("Inconsistent state between tentative and official port stats lists.");
                                return;
                            }

                            /* Get counted bytes over the elapsed period. Check for counter overflow. */
                            U64 rxBytesCounted;
                            U64 txBytesCounted;
                            if (spb.getPriorByteValueRx().compareTo(pse.getRxBytes()) > 0) { /* overflow */
                                U64 upper = U64.NO_MASK.subtract(spb.getPriorByteValueRx());
                                U64 lower = pse.getRxBytes();
                                rxBytesCounted = upper.add(lower);
                            } else {
                                rxBytesCounted = pse.getRxBytes().subtract(spb.getPriorByteValueRx());
                            }
                            if (spb.getPriorByteValueTx().compareTo(pse.getTxBytes()) > 0) { /* overflow */
                                U64 upper = U64.NO_MASK.subtract(spb.getPriorByteValueTx());
                                U64 lower = pse.getTxBytes();
                                txBytesCounted = upper.add(lower);
                            } else {
                                txBytesCounted = pse.getTxBytes().subtract(spb.getPriorByteValueTx());
                            }
                            long speed = getSpeed(npt);
                            /*
                             * FIX: clamp to at least 1s. If two replies for the
                             * same port land within the same wall-clock second,
                             * the integer division yields 0 and the bandwidth
                             * computation below threw ArithmeticException,
                             * silently cancelling this scheduled task.
                             */
                            long timeDifSec = Math.max(1, (System.currentTimeMillis() - spb.getUpdateTime()) / MILLIS_PER_SEC);
                            portStats.put(npt, SwitchPortBandwidth.of(npt.getNodeId(), npt.getPortId(),
                                    U64.ofRaw(speed),
                                    U64.ofRaw((rxBytesCounted.getValue() * BITS_PER_BYTE) / timeDifSec),
                                    U64.ofRaw((txBytesCounted.getValue() * BITS_PER_BYTE) / timeDifSec),
                                    pse.getRxBytes(), pse.getTxBytes())
                                    );
                        } else { /* initialize; bandwidth reported only after a second sample exists */
                            tentativePortStats.put(npt, SwitchPortBandwidth.of(npt.getNodeId(), npt.getPortId(), U64.ZERO, U64.ZERO, U64.ZERO, pse.getRxBytes(), pse.getTxBytes()));
                        }
                    }
                }
            }
        }

        /**
         * Look up the configured speed of a switch port, in the units
         * reported by the switch (kbps for OpenFlow port descriptions).
         * Returns 0 if the switch or port is unknown/disconnected or the
         * OpenFlow version does not report a speed.
         */
        protected long getSpeed(NodePortTuple npt) {
            IOFSwitch sw = switchService.getSwitch(npt.getNodeId());
            long speed = 0;

            if (sw == null) return speed; /* could have disconnected; we'll assume zero-speed then */
            if (sw.getPort(npt.getPortId()) == null) return speed;

            /* getCurrSpeed() should handle different OpenFlow Version */
            OFVersion detectedVersion = sw.getOFFactory().getVersion();
            switch (detectedVersion) {
                case OF_10:
                    log.debug("Port speed statistics not supported in OpenFlow 1.0");
                    break;
                case OF_11:
                case OF_12:
                case OF_13:
                    speed = sw.getPort(npt.getPortId()).getCurrSpeed();
                    break;
                case OF_14:
                case OF_15:
                    /* OpenFlow 1.4/1.5 moved the speed into port-desc properties;
                     * getCurrSpeed() on the port itself would return zero. */
                    for (OFPortDescProp p : sw.getPort(npt.getPortId()).getProperties()) {
                        if (p.getType() == 0) { /* type 0 == ethernet property */
                            speed = ((OFPortDescPropEthernet) p).getCurrSpeed();
                        }
                    }
                    break;
                default:
                    break;
            }
            return speed;
        }
    }

    /**
     * Single thread for collecting switch statistics and
     * containing the reply.
     *
     * @author Ryan Izard, ryan.izard@bigswitch.com, rizard@g.clemson.edu
     *
     */
    private class GetStatisticsThread extends Thread {
        private List<OFStatsReply> statsReply;
        private DatapathId switchId;
        private OFStatsType statType;

        public GetStatisticsThread(DatapathId switchId, OFStatsType statType) {
            this.switchId = switchId;
            this.statType = statType;
            this.statsReply = null;
        }

        /** @return the reply collected by run(), or null if the switch never answered */
        public List<OFStatsReply> getStatisticsReply() {
            return statsReply;
        }

        public DatapathId getSwitchId() {
            return switchId;
        }

        @Override
        public void run() {
            statsReply = getSwitchStatistics(switchId, statType);
        }
    }

    /*
     * IFloodlightModule implementation
     */

    @Override
    public Collection<Class<? extends IFloodlightService>> getModuleServices() {
        Collection<Class<? extends IFloodlightService>> l =
                new ArrayList<Class<? extends IFloodlightService>>();
        l.add(IStatisticsService.class);
        return l;
    }

    @Override
    public Map<Class<? extends IFloodlightService>, IFloodlightService> getServiceImpls() {
        Map<Class<? extends IFloodlightService>, IFloodlightService> m =
                new HashMap<Class<? extends IFloodlightService>, IFloodlightService>();
        m.put(IStatisticsService.class, this);
        return m;
    }

    @Override
    public Collection<Class<? extends IFloodlightService>> getModuleDependencies() {
        Collection<Class<? extends IFloodlightService>> l =
                new ArrayList<Class<? extends IFloodlightService>>();
        l.add(IOFSwitchService.class);
        l.add(IThreadPoolService.class);
        l.add(IRestApiService.class);
        return l;
    }

    /**
     * Reads the module configuration: whether collection is enabled
     * ("enable") and the port-stats collection interval in seconds
     * ("collectionIntervalPortStatsSeconds"). Unparsable values fall
     * back to the compiled-in defaults.
     */
    @Override
    public void init(FloodlightModuleContext context)
            throws FloodlightModuleException {
        switchService = context.getServiceImpl(IOFSwitchService.class);
        threadPoolService = context.getServiceImpl(IThreadPoolService.class);
        restApiService = context.getServiceImpl(IRestApiService.class);

        Map<String, String> config = context.getConfigParams(this);
        if (config.containsKey(ENABLED_STR)) {
            try {
                isEnabled = Boolean.parseBoolean(config.get(ENABLED_STR).trim());
            } catch (Exception e) {
                log.error("Could not parse '{}'. Using default of {}", ENABLED_STR, isEnabled);
            }
        }
        log.info("Statistics collection {}", isEnabled ? "enabled" : "disabled");

        if (config.containsKey(INTERVAL_PORT_STATS_STR)) {
            try {
                portStatsInterval = Integer.parseInt(config.get(INTERVAL_PORT_STATS_STR).trim());
            } catch (Exception e) {
                log.error("Could not parse '{}'. Using default of {}", INTERVAL_PORT_STATS_STR, portStatsInterval);
            }
        }
        log.info("Port statistics collection interval set to {}s", portStatsInterval);
    }

    @Override
    public void startUp(FloodlightModuleContext context)
            throws FloodlightModuleException {
        restApiService.addRestletRoutable(new SwitchStatisticsWebRoutable());
        if (isEnabled) {
            startStatisticsCollection();
        }
    }

    /*
     * IStatisticsService implementation
     */

    @Override
    public SwitchPortBandwidth getBandwidthConsumption(DatapathId dpid, OFPort p) {
        return portStats.get(new NodePortTuple(dpid, p));
    }

    @Override
    public Map<NodePortTuple, SwitchPortBandwidth> getBandwidthConsumption() {
        return Collections.unmodifiableMap(portStats);
    }

    /** Enable or disable collection at runtime (idempotent). */
    @Override
    public synchronized void collectStatistics(boolean collect) {
        if (collect && !isEnabled) {
            startStatisticsCollection();
            isEnabled = true;
        } else if (!collect && isEnabled) {
            stopStatisticsCollection();
            isEnabled = false;
        }
        /* otherwise, state is not changing; no-op */
    }

    /*
     * Helper functions
     */

    /**
     * Start all stats threads.
     */
    private void startStatisticsCollection() {
        portStatsCollector = threadPoolService.getScheduledExecutor().scheduleAtFixedRate(new PortStatsCollector(), portStatsInterval, portStatsInterval, TimeUnit.SECONDS);
        tentativePortStats.clear(); /* must clear out, otherwise might have huge BW result if present and wait a long time before re-enabling stats */
        log.warn("Statistics collection thread(s) started");
    }

    /**
     * Stop all stats threads.
     */
    private void stopStatisticsCollection() {
        if (!portStatsCollector.cancel(false)) {
            log.error("Could not cancel port stats thread");
        } else {
            log.warn("Statistics collection thread(s) stopped");
        }
    }

    /**
     * Retrieve the statistics from all switches in parallel.
     * Switches that do not answer within roughly portStatsInterval seconds
     * are left out of the result; switches that answered with nothing are
     * present with a null value.
     * @param dpids
     * @param statsType
     * @return map of DPID to its (possibly null) stats replies
     */
    private Map<DatapathId, List<OFStatsReply>> getSwitchStatistics(Set<DatapathId> dpids, OFStatsType statsType) {
        HashMap<DatapathId, List<OFStatsReply>> model = new HashMap<DatapathId, List<OFStatsReply>>();

        List<GetStatisticsThread> activeThreads = new ArrayList<GetStatisticsThread>(dpids.size());
        List<GetStatisticsThread> pendingRemovalThreads = new ArrayList<GetStatisticsThread>();
        GetStatisticsThread t;
        for (DatapathId d : dpids) {
            t = new GetStatisticsThread(d, statsType);
            activeThreads.add(t);
            t.start();
        }

        /* Join all the threads after the timeout. Set a hard timeout
         * of 12 seconds for the threads to finish. If the thread has not
         * finished the switch has not replied yet and therefore we won't
         * add the switch's stats to the reply.
         */
        for (int iSleepCycles = 0; iSleepCycles < portStatsInterval; iSleepCycles++) {
            for (GetStatisticsThread curThread : activeThreads) {
                if (curThread.getState() == State.TERMINATED) {
                    model.put(curThread.getSwitchId(), curThread.getStatisticsReply());
                    pendingRemovalThreads.add(curThread);
                }
            }

            /* remove the threads that have completed the queries to the switches */
            for (GetStatisticsThread curThread : pendingRemovalThreads) {
                activeThreads.remove(curThread);
            }
            /* clear the list so we don't try to double remove them */
            pendingRemovalThreads.clear();

            /* if we are done finish early */
            if (activeThreads.isEmpty()) {
                break;
            }

            try {
                Thread.sleep(1000);
            } catch (InterruptedException e) {
                log.error("Interrupted while waiting for statistics", e);
                Thread.currentThread().interrupt(); /* restore interrupt status */
            }
        }
        return model;
    }

    /**
     * Get statistics from a switch.
     * @param switchId
     * @param statsType
     * @return the replies, or null if the switch is unknown, the stats type
     *         is unsupported, or the request timed out
     */
    @SuppressWarnings("unchecked")
    public List<OFStatsReply> getSwitchStatistics(DatapathId switchId, OFStatsType statsType) {
        IOFSwitch sw = switchService.getSwitch(switchId);
        ListenableFuture<?> future;
        List<OFStatsReply> values = null;
        Match match;
        if (sw != null) {
            OFStatsRequest<?> req = null;
            switch (statsType) {
                case FLOW:
                    match = sw.getOFFactory().buildMatch().build();
                    req = sw.getOFFactory().buildFlowStatsRequest()
                            .setMatch(match)
                            .setOutPort(OFPort.ANY)
                            .setTableId(TableId.ALL)
                            .build();
                    /* FIX: removed stray System.out.println("flow") debug output */
                    break;
                case AGGREGATE:
                    match = sw.getOFFactory().buildMatch().build();
                    req = sw.getOFFactory().buildAggregateStatsRequest()
                            .setMatch(match)
                            .setOutPort(OFPort.ANY)
                            .setTableId(TableId.ALL)
                            .build();
                    break;
                case PORT:
                    req = sw.getOFFactory().buildPortStatsRequest()
                            .setPortNo(OFPort.ANY)
                            .build();
                    break;
                case QUEUE:
                    req = sw.getOFFactory().buildQueueStatsRequest()
                            .setPortNo(OFPort.ANY)
                            .setQueueId(UnsignedLong.MAX_VALUE.longValue())
                            .build();
                    break;
                case DESC:
                    req = sw.getOFFactory().buildDescStatsRequest()
                            .build();
                    break;
                case GROUP:
                    if (sw.getOFFactory().getVersion().compareTo(OFVersion.OF_10) > 0) {
                        req = sw.getOFFactory().buildGroupStatsRequest()
                                .build();
                    }
                    break;
                case METER:
                    if (sw.getOFFactory().getVersion().compareTo(OFVersion.OF_13) >= 0) {
                        req = sw.getOFFactory().buildMeterStatsRequest()
                                .setMeterId(OFMeterSerializerVer13.ALL_VAL)
                                .build();
                    }
                    break;
                case GROUP_DESC:
                    if (sw.getOFFactory().getVersion().compareTo(OFVersion.OF_10) > 0) {
                        req = sw.getOFFactory().buildGroupDescStatsRequest()
                                .build();
                    }
                    break;
                case GROUP_FEATURES:
                    if (sw.getOFFactory().getVersion().compareTo(OFVersion.OF_10) > 0) {
                        req = sw.getOFFactory().buildGroupFeaturesStatsRequest()
                                .build();
                    }
                    break;
                case METER_CONFIG:
                    if (sw.getOFFactory().getVersion().compareTo(OFVersion.OF_13) >= 0) {
                        req = sw.getOFFactory().buildMeterConfigStatsRequest()
                                .build();
                    }
                    break;
                case METER_FEATURES:
                    if (sw.getOFFactory().getVersion().compareTo(OFVersion.OF_13) >= 0) {
                        req = sw.getOFFactory().buildMeterFeaturesStatsRequest()
                                .build();
                    }
                    break;
                case TABLE:
                    if (sw.getOFFactory().getVersion().compareTo(OFVersion.OF_10) > 0) {
                        req = sw.getOFFactory().buildTableStatsRequest()
                                .build();
                    }
                    break;
                case TABLE_FEATURES:
                    if (sw.getOFFactory().getVersion().compareTo(OFVersion.OF_10) > 0) {
                        req = sw.getOFFactory().buildTableFeaturesStatsRequest()
                                .build();
                    }
                    break;
                case PORT_DESC:
                    if (sw.getOFFactory().getVersion().compareTo(OFVersion.OF_13) >= 0) {
                        req = sw.getOFFactory().buildPortDescStatsRequest()
                                .build();
                    }
                    break;
                case EXPERIMENTER:
                default:
                    log.error("Stats Request Type {} not implemented yet", statsType.name());
                    break;
            }

            try {
                if (req != null) {
                    future = sw.writeStatsRequest(req);
                    /*
                     * FIX: clamp the timeout to at least 1 second. With
                     * portStatsInterval == 1 the integer division yields a
                     * zero-second timeout, which times out immediately.
                     */
                    values = (List<OFStatsReply>) future.get(Math.max(1, portStatsInterval / 2), TimeUnit.SECONDS);
                }
            } catch (Exception e) {
                /* FIX: pass the exception as the last arg so SLF4J logs the stack trace */
                log.error("Failure retrieving statistics from switch {}", sw, e);
            }
        }
        return values;
    }

    /** Not implemented yet; always returns null. */
    @Override
    public List<OFStatsReply> getValues() {
        // TODO Auto-generated method stub
        log.info("getValues() not implemented; called on {}", this.getClass().getName());
        return null;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.tests.integration.server;
import java.util.Arrays;
import java.util.Collection;
import org.apache.activemq.artemis.api.core.Message;
import org.apache.activemq.artemis.api.core.QueueConfiguration;
import org.apache.activemq.artemis.api.core.SimpleString;
import org.apache.activemq.artemis.api.core.client.ActiveMQClient;
import org.apache.activemq.artemis.api.core.client.ClientConsumer;
import org.apache.activemq.artemis.api.core.client.ClientMessage;
import org.apache.activemq.artemis.api.core.client.ClientProducer;
import org.apache.activemq.artemis.api.core.client.ClientSession;
import org.apache.activemq.artemis.api.core.client.ClientSessionFactory;
import org.apache.activemq.artemis.core.postoffice.impl.LocalQueueBinding;
import org.apache.activemq.artemis.core.server.impl.ScaleDownHandler;
import org.apache.activemq.artemis.core.settings.impl.AddressSettings;
import org.apache.activemq.artemis.tests.integration.cluster.distribution.ClusterTestBase;
import org.apache.activemq.artemis.tests.util.Wait;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
/**
* On this test we will run ScaleDown directly as an unit-test in several cases,
* simulating what would happen during a real scale down.
*/
@RunWith(value = Parameterized.class)
public class ScaleDownDirectTest extends ClusterTestBase {

   // Run every test twice: once over in-VM transport, once over Netty.
   @Parameterized.Parameters(name = "isNetty={0}")
   public static Collection getParameters() {
      return Arrays.asList(new Object[][]{{false}, {true}});
   }

   private final boolean isNetty;

   public ScaleDownDirectTest(boolean isNetty) {
      this.isNetty = isNetty;
   }

   /**
    * Starts two live servers (node 0 scales down into node 1) and a
    * session factory for each.
    */
   @Override
   @Before
   public void setUp() throws Exception {
      super.setUp();
      setupLiveServer(0, isFileStorage(), isNetty, true);
      setupLiveServer(1, isFileStorage(), isNetty, true);
      startServers(0, 1);
      setupSessionFactory(0, isNetty);
      setupSessionFactory(1, isNetty);
   }

   @Test
   public void testSendMixedSmallMessages() throws Exception {
      internalTest(100, 100);
   }

   @Test
   public void testSendMixedLargelMessages() throws Exception {
      // 2x the large-message threshold forces the large-message code path.
      internalTest(2 * ActiveMQClient.DEFAULT_MIN_LARGE_MESSAGE_SIZE, 100);
   }

   /**
    * Sends {@code numberOfMessages} to "ad1" while only queue1 is bound,
    * then binds queue2 and sends another {@code numberOfMessages}. After
    * scaling node 0 down into node 1, queue1 must hold all 2n messages
    * (it existed for every send) and queue2 only the second n.
    */
   protected void internalTest(int bufferSize, int numberOfMessages) throws Exception {
      ClientSessionFactory sf = sfs[0];

      ClientSession session = sf.createSession(true, true);

      session.createQueue(new QueueConfiguration("queue1").setAddress("ad1"));

      ClientProducer producer = session.createProducer("ad1");

      // Deterministic payload so the body can be verified after scale-down.
      byte[] buffer = new byte[bufferSize];
      for (int i = 0; i < bufferSize; i++) {
         buffer[i] = getSamplebyte(i);
      }

      for (int i = 0; i < numberOfMessages; i++) {
         ClientMessage message = session.createMessage(true);
         message.putIntProperty("i", i);
         message.getBodyBuffer().writeBytes(buffer);
         producer.send(message);
      }

      // queue2 is bound only now, so it receives just the second batch.
      session.createQueue(new QueueConfiguration("queue2").setAddress("ad1"));

      for (int i = numberOfMessages; i < (numberOfMessages * 2); i++) {
         ClientMessage message = session.createMessage(true);
         message.putIntProperty("i", i);
         message.getBodyBuffer().writeBytes(buffer);
         producer.send(message);
      }

      // scaleDownMessages reports the number of messages moved off node 0.
      assertEquals(numberOfMessages * 2, performScaledown());

      sfs[0].close();

      session.close();

      stopServers(0);

      session = sfs[1].createSession(true, true);

      ClientConsumer consumer1 = session.createConsumer("queue1");
      session.start();

      // queue1 saw every send: expect i = 0 .. 2n-1 in order.
      for (int i = 0; i < numberOfMessages * 2; i++) {
         ClientMessage message = consumer1.receive(5000);
         assertNotNull(message);
         assertEquals(i, message.getIntProperty("i").intValue());
         //            message.acknowledge();

         checkBody(message, bufferSize);

      }

      ClientMessage messageCheckNull = consumer1.receiveImmediate();

      assertNull(messageCheckNull);

      // queue2 only existed for the second batch: expect i = n .. 2n-1.
      ClientConsumer consumer2 = session.createConsumer("queue2");
      for (int i = numberOfMessages; i < numberOfMessages * 2; i++) {
         ClientMessage message = consumer2.receive(5000);
         assertNotNull(message);
         assertEquals(i, message.getIntProperty("i").intValue());
         //            message.acknowledge();
         checkBody(message, bufferSize);
      }

      messageCheckNull = consumer2.receiveImmediate();

      assertNull(messageCheckNull);
   }

   /**
    * Drives node 0's address into paging, then verifies scale-down also
    * transfers the paged messages to node 1.
    */
   @Test
   public void testPaging() throws Exception {
      final int CHUNK_SIZE = 50;
      int messageCount = 0;
      final String addressName = "testAddress";
      final String queueName = "testQueue";

      createQueue(0, addressName, queueName, null, true);
      createQueue(1, addressName, queueName, null, true);

      ClientSessionFactory sf = sfs[0];
      ClientSession session = addClientSession(sf.createSession(false, false));
      ClientProducer producer = addClientProducer(session.createProducer(addressName));

      // Small page/max sizes so paging kicks in after a few chunks.
      AddressSettings defaultSetting = new AddressSettings().setPageSizeBytes(10 * 1024).setMaxSizeBytes(20 * 1024);
      servers[0].getAddressSettingsRepository().addMatch("#", defaultSetting);

      // Send committed chunks until the page store reports it is paging.
      while (!servers[0].getPagingManager().getPageStore(new SimpleString(addressName)).isPaging()) {
         for (int i = 0; i < CHUNK_SIZE; i++) {
            Message message = session.createMessage(true);
            message.getBodyBuffer().writeBytes(new byte[1024]);
            // The only purpose of this count here is for eventually debug messages on print-data / print-pages
            // message.putIntProperty("count", messageCount);
            producer.send(message);
            messageCount++;
         }
         session.commit();
      }

      assertEquals(messageCount, performScaledown());

      servers[0].stop();

      // Every message (paged or not) must now be consumable on node 1.
      addConsumer(0, 1, queueName, null);
      for (int i = 0; i < messageCount; i++) {
         ClientMessage message = consumers[0].getConsumer().receive(500);
         Assert.assertNotNull(message);
         //         Assert.assertEquals(i, message.getIntProperty("count").intValue());
      }

      Assert.assertNull(consumers[0].getConsumer().receiveImmediate());
      removeConsumer(0);
   }

   /**
    * Two queues on the same address; one message is consumed from queue2
    * before scale-down, so afterwards node 1 must hold 2 messages on
    * queue1 and 1 on queue2.
    */
   @Test
   public void testBasicScaleDown() throws Exception {
      final int TEST_SIZE = 2;
      final String addressName = "testAddress";
      final String queueName1 = "testQueue1";
      final String queueName2 = "testQueue2";

      // create 2 queues on each node mapped to the same address
      createQueue(0, addressName, queueName1, null, true);
      createQueue(0, addressName, queueName2, null, true);
      createQueue(1, addressName, queueName1, null, true);
      createQueue(1, addressName, queueName2, null, true);

      // send messages to node 0
      send(0, addressName, TEST_SIZE, true, null);

      // consume a message from queue 2
      addConsumer(1, 0, queueName2, null, false);
      ClientMessage clientMessage = consumers[1].getConsumer().receive(250);
      Assert.assertNotNull(clientMessage);
      clientMessage.acknowledge();
      consumers[1].getSession().commit();
      removeConsumer(1);

      // at this point on node 0 there should be 2 messages in testQueue1 and 1 message in testQueue2
      Wait.assertEquals(TEST_SIZE, () -> getMessageCount(((LocalQueueBinding) servers[0].getPostOffice().getBinding(new SimpleString(queueName1))).getQueue()));
      Wait.assertEquals(TEST_SIZE - 1, () -> getMessageCount(((LocalQueueBinding) servers[0].getPostOffice().getBinding(new SimpleString(queueName2))).getQueue()));

      assertEquals(TEST_SIZE, performScaledown());
      // trigger scaleDown from node 0 to node 1
      servers[0].stop();

      // get the 2 messages from queue 1
      addConsumer(0, 1, queueName1, null);
      clientMessage = consumers[0].getConsumer().receive(250);
      Assert.assertNotNull(clientMessage);
      clientMessage.acknowledge();
      clientMessage = consumers[0].getConsumer().receive(250);
      Assert.assertNotNull(clientMessage);
      clientMessage.acknowledge();

      // ensure there are no more messages on queue 1
      clientMessage = consumers[0].getConsumer().receive(250);
      Assert.assertNull(clientMessage);
      removeConsumer(0);

      // get the 1 message from queue 2
      addConsumer(0, 1, queueName2, null);
      clientMessage = consumers[0].getConsumer().receive(250);
      Assert.assertNotNull(clientMessage);
      clientMessage.acknowledge();

      // ensure there are no more messages on queue 2
      clientMessage = consumers[0].getConsumer().receive(250);
      Assert.assertNull(clientMessage);
      removeConsumer(0);
   }

   /**
    * Verifies the message body matches the deterministic getSamplebyte()
    * pattern for the given size.
    */
   private void checkBody(ClientMessage message, int bufferSize) {
      assertEquals(bufferSize, message.getBodySize());
      byte[] body = new byte[message.getBodySize()];
      message.getBodyBuffer().readBytes(body);
      for (int bpos = 0; bpos < bufferSize; bpos++) {
         if (getSamplebyte(bpos) != body[bpos]) {
            fail("body comparison failure at " + message);
         }
      }
   }

   /**
    * Runs the ScaleDownHandler directly (no server restart), moving all
    * messages from node 0 into node 1 over sfs[1].
    *
    * @return the number of messages scaled down
    */
   private long performScaledown() throws Exception {
      ScaleDownHandler handler = new ScaleDownHandler(servers[0].getPagingManager(), servers[0].getPostOffice(), servers[0].getNodeManager(), servers[0].getClusterManager().getClusterController(), servers[0].getStorageManager());
      return handler.scaleDownMessages(sfs[1], servers[1].getNodeID(), servers[0].getConfiguration().getClusterUser(), servers[0].getConfiguration().getClusterPassword());
   }
}
| |
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbpm.services.ejb.test;
import java.io.File;
import java.io.FileOutputStream;
import java.util.ArrayList;
import java.util.List;
import javax.ejb.EJB;
import javax.ejb.EJBException;
import org.drools.compiler.kie.builder.impl.InternalKieModule;
import org.jboss.arquillian.container.test.api.Deployment;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.jboss.shrinkwrap.api.spec.WebArchive;
import org.jbpm.kie.services.impl.KModuleDeploymentUnit;
import org.jbpm.runtime.manager.impl.deploy.DeploymentDescriptorImpl;
import org.jbpm.services.api.model.DeploymentUnit;
import org.jbpm.services.ejb.api.DefinitionServiceEJBLocal;
import org.jbpm.services.ejb.api.DeploymentServiceEJBLocal;
import org.jbpm.services.ejb.api.ProcessServiceEJBLocal;
import org.jbpm.services.ejb.api.RuntimeDataServiceEJBLocal;
import org.junit.After;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.kie.api.KieServices;
import org.kie.api.builder.ReleaseId;
import org.kie.api.runtime.process.ProcessInstance;
import org.kie.internal.runtime.conf.DeploymentDescriptor;
import org.kie.internal.runtime.conf.RuntimeStrategy;
import org.kie.internal.runtime.manager.SessionNotFoundException;
import org.kie.scanner.KieMavenRepository;
import static org.junit.Assert.*;
import static org.kie.scanner.KieMavenRepository.getKieMavenRepository;
@RunWith(Arquillian.class)
public class MultipleDeploymentsProcessServiceEJBIntegrationTest extends AbstractTestSupport {

    protected static final String ARTIFACT_ID2 = "second-test-module";
    protected static final String GROUP_ID2 = "org.jbpm.test";
    protected static final String VERSION2 = "1.0.0-SNAPSHOT";

    /**
     * Builds the Arquillian deployment from the pre-built sample WAR and
     * installs the two test kjars into the local Maven repository before
     * the container starts.
     */
    @Deployment
    public static WebArchive createDeployment() {

        File archive = new File("target/sample-war-ejb-app.war");
        if (!archive.exists()) {
            throw new IllegalStateException("There is no archive yet generated, run maven build or mvn assembly:assembly");
        }
        WebArchive war = ShrinkWrap.createFromZipFile(WebArchive.class, archive);
        war.addPackage("org.jbpm.services.ejb.test"); // test cases

        // deploy test kjar
        deployKjar();

        return war;
    }

    /**
     * Creates two kjars (signal/import processes and customtask/humanTask
     * processes) and installs each, with its pom, into the local Maven
     * repository.
     */
    protected static void deployKjar() {
        KieServices ks = KieServices.Factory.get();

        // first kjar: signal + import processes
        ReleaseId releaseId = ks.newReleaseId(GROUP_ID, ARTIFACT_ID, VERSION);
        List<String> processes = new ArrayList<String>();
        processes.add("processes/signal.bpmn");
        processes.add("processes/import.bpmn");
        InternalKieModule kJar1 = createKieJar(ks, releaseId, processes);
        File pom = writePom("target/kmodule", releaseId);

        // second kjar: customtask + humanTask processes
        ReleaseId releaseId2 = ks.newReleaseId(GROUP_ID2, ARTIFACT_ID2, VERSION2);
        List<String> processes2 = new ArrayList<String>();
        processes2.add("processes/customtask.bpmn");
        processes2.add("processes/humanTask.bpmn");
        InternalKieModule kJar2 = createKieJar(ks, releaseId2, processes2);
        File pom2 = writePom("target/kmodule2", releaseId2);

        KieMavenRepository repository = getKieMavenRepository();
        repository.installArtifact(releaseId, kJar1, pom);
        repository.installArtifact(releaseId2, kJar2, pom2);
    }

    /**
     * Writes a pom.xml for the given release id into the given directory
     * and returns it.
     *
     * FIX: the two inlined copies of this logic used an empty catch block,
     * silently swallowing write failures (which later surfaced as obscure
     * installArtifact errors), and used mkdir() which cannot create nested
     * directories. Failures now fail fast and the stream is closed via
     * try-with-resources even when the write throws.
     */
    private static File writePom(String directory, ReleaseId releaseId) {
        File pom = new File(directory, "pom.xml");
        pom.getParentFile().mkdirs();
        try (FileOutputStream fs = new FileOutputStream(pom)) {
            fs.write(getPom(releaseId).getBytes());
        } catch (Exception e) {
            throw new RuntimeException("Unable to write " + pom.getAbsolutePath(), e);
        }
        return pom;
    }

    private List<DeploymentUnit> units = new ArrayList<DeploymentUnit>();

    /** Undeploys every unit deployed by the test so deployments don't leak between tests. */
    @After
    public void cleanup() {
        cleanupSingletonSessionId();
        if (units != null && !units.isEmpty()) {
            for (DeploymentUnit unit : units) {
                deploymentService.undeploy(unit);
            }
            units.clear();
        }
    }

    @EJB
    private DefinitionServiceEJBLocal bpmn2Service;

    @EJB
    private DeploymentServiceEJBLocal deploymentService;

    @EJB
    private ProcessServiceEJBLocal processService;

    @EJB
    private RuntimeDataServiceEJBLocal runtimeDataService;

    /**
     * Deploys two units, each with the PER_PROCESS_INSTANCE strategy, and
     * starts one process from each. With that strategy the session is gone
     * once the process completes, so getProcessInstance() either returns
     * null or throws a SessionNotFoundException wrapped in an EJBException.
     */
    @Test
    public void testStartProcessFromDifferentDeployments() {
        assertNotNull(deploymentService);

        DeploymentDescriptor customDescriptor = new DeploymentDescriptorImpl("org.jbpm.domain");
        customDescriptor.getBuilder()
                .runtimeStrategy(RuntimeStrategy.PER_PROCESS_INSTANCE);

        KModuleDeploymentUnit deploymentUnit = new KModuleDeploymentUnit(GROUP_ID, ARTIFACT_ID, VERSION);
        deploymentUnit.setDeploymentDescriptor(customDescriptor);
        deploymentService.deploy(deploymentUnit);
        units.add(deploymentUnit);

        DeploymentDescriptor customDescriptor2 = new DeploymentDescriptorImpl("org.jbpm.domain");
        // FIX: this previously called customDescriptor.getBuilder() (copy-paste
        // error), leaving the second deployment on the default runtime strategy.
        customDescriptor2.getBuilder()
                .runtimeStrategy(RuntimeStrategy.PER_PROCESS_INSTANCE);

        KModuleDeploymentUnit deploymentUnit2 = new KModuleDeploymentUnit(GROUP_ID2, ARTIFACT_ID2, VERSION2);
        deploymentUnit2.setDeploymentDescriptor(customDescriptor2);
        deploymentService.deploy(deploymentUnit2);
        units.add(deploymentUnit2);

        boolean isDeployed = deploymentService.isDeployed(deploymentUnit.getIdentifier());
        assertTrue(isDeployed);
        isDeployed = deploymentService.isDeployed(deploymentUnit2.getIdentifier());
        assertTrue(isDeployed);

        assertNotNull(processService);

        // first process from deployment 1
        long processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "Import");
        // FIX: assertNotNull on a primitive long is always true (autoboxing);
        // assert a valid (positive) process instance id instead.
        assertTrue(processInstanceId > 0);
        try {
            ProcessInstance pi = processService.getProcessInstance(processInstanceId);
            assertNull(pi);
        } catch (EJBException e) {
            if (e.getCause() instanceof SessionNotFoundException) {
                // ignore as this is expected when per process instance is used
            } else {
                throw e;
            }
        }

        // second process from deployment 2
        long processInstanceId2 = processService.startProcess(deploymentUnit2.getIdentifier(), "customtask");
        assertTrue(processInstanceId2 > 0);
        try {
            ProcessInstance pi2 = processService.getProcessInstance(processInstanceId2);
            assertNull(pi2);
        } catch (EJBException e) {
            if (e.getCause() instanceof SessionNotFoundException) {
                // ignore as this is expected when per process instance is used
            } else {
                throw e;
            }
        }
    }
}
| |
/*
* Copyright 2014-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.python;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import com.facebook.buck.cli.FakeBuckConfig;
import com.facebook.buck.cxx.CxxBuckConfig;
import com.facebook.buck.cxx.CxxPlatform;
import com.facebook.buck.cxx.DefaultCxxPlatforms;
import com.facebook.buck.io.AlwaysFoundExecutableFinder;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargetFactory;
import com.facebook.buck.model.Flavor;
import com.facebook.buck.model.FlavorDomain;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildRuleParams;
import com.facebook.buck.rules.BuildRuleParamsFactory;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.BuildTargetSourcePath;
import com.facebook.buck.rules.FakeBuildContext;
import com.facebook.buck.rules.FakeBuildRuleParamsBuilder;
import com.facebook.buck.rules.FakeBuildableContext;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.TargetGraph;
import com.facebook.buck.rules.TestSourcePath;
import com.facebook.buck.shell.Genrule;
import com.facebook.buck.shell.GenruleBuilder;
import com.facebook.buck.step.Step;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSortedSet;
import org.hamcrest.Matchers;
import org.junit.Test;
import java.nio.file.Path;
import java.nio.file.Paths;
public class PythonBinaryDescriptionTest {
private static final PythonBuckConfig PYTHON_BUCK_CONFIG =
new PythonBuckConfig(
new FakeBuckConfig(),
new AlwaysFoundExecutableFinder());
private static final PythonEnvironment PYTHON_ENV =
new PythonEnvironment(
Paths.get("python"),
PythonVersion.of("2.6"));
private static final CxxPlatform CXX_PLATFORM = DefaultCxxPlatforms.build(
new CxxBuckConfig(new FakeBuckConfig()));
private static final FlavorDomain<CxxPlatform> CXX_PLATFORMS =
new FlavorDomain<>("platform", ImmutableMap.<Flavor, CxxPlatform>of());
@Test
public void thatComponentSourcePathDepsPropagateProperly() {
BuildRuleResolver resolver = new BuildRuleResolver();
Genrule genrule = (Genrule) GenruleBuilder
.newGenruleBuilder(BuildTargetFactory.newInstance("//:gen"))
.setOut("blah.py")
.build(resolver);
BuildRuleParams libParams = BuildRuleParamsFactory.createTrivialBuildRuleParams(
BuildTargetFactory.newInstance("//:lib"));
PythonLibrary lib = new PythonLibrary(
libParams,
new SourcePathResolver(resolver),
ImmutableMap.<Path, SourcePath>of(
Paths.get("hello"),
new BuildTargetSourcePath(genrule.getBuildTarget())),
ImmutableMap.<Path, SourcePath>of(),
Optional.<Boolean>absent());
BuildRuleParams params =
new FakeBuildRuleParamsBuilder(BuildTargetFactory.newInstance("//:bin"))
.setDeps(ImmutableSortedSet.<BuildRule>of(lib))
.build();
PythonBinaryDescription desc =
new PythonBinaryDescription(
PYTHON_BUCK_CONFIG,
PYTHON_ENV,
CXX_PLATFORM,
CXX_PLATFORMS);
PythonBinaryDescription.Arg arg = desc.createUnpopulatedConstructorArg();
arg.deps = Optional.of(ImmutableSortedSet.<BuildTarget>of());
arg.mainModule = Optional.absent();
arg.main = Optional.<SourcePath>of(new TestSourcePath("blah.py"));
arg.baseModule = Optional.absent();
arg.zipSafe = Optional.absent();
arg.buildArgs = Optional.absent();
BuildRule rule = desc.createBuildRule(TargetGraph.EMPTY, params, resolver, arg);
assertEquals(
ImmutableSortedSet.<BuildRule>of(genrule),
rule.getDeps());
}
@Test
public void thatMainSourcePathPropagatesToDeps() {
BuildRuleResolver resolver = new BuildRuleResolver();
Genrule genrule = (Genrule) GenruleBuilder
.newGenruleBuilder(BuildTargetFactory.newInstance("//:gen"))
.setOut("blah.py")
.build(resolver);
BuildRuleParams params = BuildRuleParamsFactory.createTrivialBuildRuleParams(
BuildTargetFactory.newInstance("//:bin"));
PythonBinaryDescription desc =
new PythonBinaryDescription(
PYTHON_BUCK_CONFIG,
PYTHON_ENV,
CXX_PLATFORM,
CXX_PLATFORMS);
PythonBinaryDescription.Arg arg = desc.createUnpopulatedConstructorArg();
arg.deps = Optional.of(ImmutableSortedSet.<BuildTarget>of());
arg.mainModule = Optional.absent();
arg.main =
Optional.<SourcePath>of(
new BuildTargetSourcePath(genrule.getBuildTarget()));
arg.baseModule = Optional.absent();
arg.zipSafe = Optional.absent();
arg.buildArgs = Optional.absent();
BuildRule rule = desc.createBuildRule(TargetGraph.EMPTY, params, resolver, arg);
assertEquals(
ImmutableSortedSet.<BuildRule>of(genrule),
rule.getDeps());
}
@Test
public void baseModule() {
BuildRuleResolver resolver = new BuildRuleResolver();
BuildTarget target = BuildTargetFactory.newInstance("//foo:bin");
BuildRuleParams params = BuildRuleParamsFactory.createTrivialBuildRuleParams(target);
String mainName = "main.py";
PythonBinaryDescription desc =
new PythonBinaryDescription(
PYTHON_BUCK_CONFIG,
PYTHON_ENV,
CXX_PLATFORM,
CXX_PLATFORMS);
PythonBinaryDescription.Arg arg = desc.createUnpopulatedConstructorArg();
arg.deps = Optional.of(ImmutableSortedSet.<BuildTarget>of());
arg.mainModule = Optional.absent();
arg.main = Optional.<SourcePath>of(new TestSourcePath("foo/" + mainName));
arg.zipSafe = Optional.absent();
arg.buildArgs = Optional.absent();
// Run without a base module set and verify it defaults to using the build target
// base name.
arg.baseModule = Optional.absent();
PythonBinary normalRule = desc
.createBuildRule(TargetGraph.EMPTY, params, resolver, arg);
assertEquals(
PythonUtil.toModuleName(target, target.getBasePath().resolve(mainName).toString()),
normalRule.getMainModule());
// Run *with* a base module set and verify it gets used to build the main module path.
arg.baseModule = Optional.of("blah");
PythonBinary baseModuleRule = desc
.createBuildRule(TargetGraph.EMPTY, params, resolver, arg);
assertEquals(
PythonUtil.toModuleName(
target,
Paths.get(arg.baseModule.get()).resolve(mainName).toString()),
baseModuleRule.getMainModule());
}
@Test
public void mainModule() {
BuildRuleResolver resolver = new BuildRuleResolver();
BuildTarget target = BuildTargetFactory.newInstance("//foo:bin");
BuildRuleParams params = BuildRuleParamsFactory.createTrivialBuildRuleParams(target);
String mainModule = "foo.main";
PythonBinaryDescription desc =
new PythonBinaryDescription(
PYTHON_BUCK_CONFIG,
PYTHON_ENV,
CXX_PLATFORM,
CXX_PLATFORMS);
PythonBinaryDescription.Arg arg = desc.createUnpopulatedConstructorArg();
arg.deps = Optional.of(ImmutableSortedSet.<BuildTarget>of());
arg.mainModule = Optional.of(mainModule);
arg.main = Optional.absent();
arg.baseModule = Optional.absent();
arg.zipSafe = Optional.absent();
arg.buildArgs = Optional.absent();
PythonBinary rule = desc
.createBuildRule(TargetGraph.EMPTY, params, resolver, arg);
assertEquals(mainModule, rule.getMainModule());
}
@Test
public void pexExtension() {
BuildRuleResolver resolver = new BuildRuleResolver();
BuildTarget target = BuildTargetFactory.newInstance("//foo:bin");
BuildRuleParams params = BuildRuleParamsFactory.createTrivialBuildRuleParams(target);
PythonBuckConfig config =
new PythonBuckConfig(
new FakeBuckConfig(
ImmutableMap.of(
"python",
ImmutableMap.of("pex_extension", ".different_extension"))),
new AlwaysFoundExecutableFinder());
PythonBinaryDescription desc =
new PythonBinaryDescription(
config,
PYTHON_ENV,
CXX_PLATFORM,
CXX_PLATFORMS);
PythonBinaryDescription.Arg arg = desc.createUnpopulatedConstructorArg();
arg.deps = Optional.of(ImmutableSortedSet.<BuildTarget>of());
arg.mainModule = Optional.of("main");
arg.main = Optional.absent();
arg.baseModule = Optional.absent();
arg.zipSafe = Optional.absent();
arg.buildArgs = Optional.absent();
PythonBinary rule = desc
.createBuildRule(TargetGraph.EMPTY, params, resolver, arg);
assertThat(
Preconditions.checkNotNull(rule.getPathToOutput()).toString(),
Matchers.endsWith(".different_extension"));
}
@Test
public void buildArgs() {
BuildTarget target = BuildTargetFactory.newInstance("//foo:bin");
BuildRuleResolver resolver = new BuildRuleResolver();
ImmutableList<String> buildArgs = ImmutableList.of("--some", "--args");
PythonBinary binary =
(PythonBinary) PythonBinaryBuilder.create(target)
.setMainModule("main")
.setBuildArgs(buildArgs)
.build(resolver);
ImmutableList<Step> buildSteps =
binary.getBuildSteps(FakeBuildContext.NOOP_CONTEXT, new FakeBuildableContext());
PexStep pexStep = FluentIterable.from(buildSteps)
.filter(PexStep.class)
.get(0);
assertThat(
pexStep.getCommandPrefix(),
Matchers.hasItems(buildArgs.toArray(new String[buildArgs.size()])));
}
}
| |
package com.fsck.k9.activity.compose;
import java.util.List;
import android.content.ContentResolver;
import android.content.Context;
import android.database.MatrixCursor;
import android.net.Uri;
import android.provider.ContactsContract;
import android.provider.ContactsContract.CommonDataKinds.Email;
import com.fsck.k9.RobolectricTest;
import com.fsck.k9.mail.Address;
import com.fsck.k9.view.RecipientSelectView.Recipient;
import com.fsck.k9.view.RecipientSelectView.RecipientCryptoStatus;
import org.junit.Before;
import org.junit.Test;
import org.robolectric.RuntimeEnvironment;
import static android.provider.ContactsContract.CommonDataKinds.Email.TYPE_HOME;
import static org.junit.Assert.assertEquals;
import static org.mockito.AdditionalMatchers.aryEq;
import static org.mockito.ArgumentMatchers.nullable;
import static org.mockito.Matchers.eq;
import static org.mockito.Matchers.isNull;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@SuppressWarnings("WeakerAccess")
public class RecipientLoaderTest extends RobolectricTest {
    static final String CRYPTO_PROVIDER = "cryptoProvider";
    // Column projections mirroring what RecipientLoader queries.
    static final String[] PROJECTION = {
            ContactsContract.CommonDataKinds.Email._ID,
            ContactsContract.Contacts.DISPLAY_NAME_PRIMARY,
            ContactsContract.Contacts.LOOKUP_KEY,
            ContactsContract.CommonDataKinds.Email.ADDRESS,
            ContactsContract.CommonDataKinds.Email.TYPE,
            ContactsContract.CommonDataKinds.Email.LABEL,
            ContactsContract.CommonDataKinds.Email.CONTACT_ID,
            ContactsContract.Contacts.PHOTO_THUMBNAIL_URI,
            ContactsContract.CommonDataKinds.Email.TIMES_CONTACTED,
            ContactsContract.Contacts.SORT_KEY_PRIMARY
    };
    static final String[] PROJECTION_NICKNAME = {
            ContactsContract.Data.CONTACT_ID,
            ContactsContract.CommonDataKinds.Nickname.NAME
    };
    static final String[] PROJECTION_CRYPTO_ADDRESSES = { "address", "uid_address" };
    static final String[] PROJECTION_CRYPTO_STATUS = { "address", "uid_key_status", "autocrypt_key_status" };
    static final Address CONTACT_ADDRESS_1 = Address.parse("Contact Name <address@example.org>")[0];
    static final Address CONTACT_ADDRESS_2 = Address.parse("Other Contact Name <address_two@example.org>")[0];
    static final String TYPE = "" + TYPE_HOME;
    // Rows matching PROJECTION above; last-but-one column is TIMES_CONTACTED.
    static final String[] CONTACT_1 =
            new String[] { "0", "Bob", "bob", "bob@host.com", TYPE, null, "1", null, "100", "Bob" };
    static final String[] CONTACT_2 =
            new String[] { "2", "Bob2", "bob2", "bob2@host.com", TYPE, null, "2", null, "99", "Bob2" };
    static final String[] CONTACT_NO_EMAIL =
            new String[] { "0", "Bob", "bob", null, TYPE, null, "1", null, "10", "Bob_noMail" };
    static final String[] CONTACT_WITH_NICKNAME_NOT_CONTACTED =
            new String[] { "0", "Eve_notContacted", "eve_notContacted", "eve_notContacted@host.com", TYPE, null, "2",
                    null, "0", "Eve" };
    static final String[] NICKNAME_NOT_CONTACTED = new String[] { "2", "Eves_Nickname_Bob" };
    static final String QUERYSTRING = "querystring";

    Context context;
    ContentResolver contentResolver;

    @Before
    public void setUp() throws Exception {
        context = mock(Context.class);
        contentResolver = mock(ContentResolver.class);
        when(context.getApplicationContext()).thenReturn(RuntimeEnvironment.application);
        when(context.getContentResolver()).thenReturn(contentResolver);
    }

    /**
     * Builds an in-memory cursor with the given projection and rows.
     * Extracted because every setup*() method repeated the same loop.
     */
    private static MatrixCursor newCursor(String[] projection, String[]... rows) {
        MatrixCursor cursor = new MatrixCursor(projection);
        for (String[] row : rows) {
            cursor.addRow(row);
        }
        return cursor;
    }

    /** Content URI under which the crypto provider exports its autocrypt status table. */
    private static Uri cryptoStatusUri() {
        return Uri.parse("content://" + CRYPTO_PROVIDER + ".provider.exported/autocrypt_status");
    }

    @Test
    public void queryCryptoProvider() throws Exception {
        RecipientLoader recipientLoader = new RecipientLoader(context, CRYPTO_PROVIDER, QUERYSTRING);
        setupQueryCryptoProvider("%" + QUERYSTRING + "%", CONTACT_ADDRESS_1, CONTACT_ADDRESS_2);
        List<Recipient> recipients = recipientLoader.loadInBackground();
        assertEquals(2, recipients.size());
        assertEquals(CONTACT_ADDRESS_1, recipients.get(0).address);
        assertEquals(CONTACT_ADDRESS_2, recipients.get(1).address);
        assertEquals(RecipientCryptoStatus.UNAVAILABLE, recipients.get(0).getCryptoStatus());
    }

    @Test
    public void queryCryptoStatus_unavailable() throws Exception {
        RecipientLoader recipientLoader = new RecipientLoader(context, CRYPTO_PROVIDER, CONTACT_ADDRESS_1);
        // uid status 0, autocrypt status 0 -> no key available.
        setupCryptoProviderStatus(CONTACT_ADDRESS_1, "0", "0");
        List<Recipient> recipients = recipientLoader.loadInBackground();
        assertEquals(1, recipients.size());
        Recipient recipient = recipients.get(0);
        assertEquals(CONTACT_ADDRESS_1, recipient.address);
        assertEquals(RecipientCryptoStatus.UNAVAILABLE, recipient.getCryptoStatus());
    }

    @Test
    public void queryCryptoStatus_autocrypt_untrusted() throws Exception {
        RecipientLoader recipientLoader = new RecipientLoader(context, CRYPTO_PROVIDER, CONTACT_ADDRESS_1);
        setupCryptoProviderStatus(CONTACT_ADDRESS_1, "0", "1");
        List<Recipient> recipients = recipientLoader.loadInBackground();
        assertEquals(1, recipients.size());
        Recipient recipient = recipients.get(0);
        assertEquals(CONTACT_ADDRESS_1, recipient.address);
        assertEquals(RecipientCryptoStatus.AVAILABLE_UNTRUSTED, recipient.getCryptoStatus());
    }

    @Test
    public void queryCryptoStatus_autocrypt_trusted() throws Exception {
        RecipientLoader recipientLoader = new RecipientLoader(context, CRYPTO_PROVIDER, CONTACT_ADDRESS_1);
        setupCryptoProviderStatus(CONTACT_ADDRESS_1, "0", "2");
        List<Recipient> recipients = recipientLoader.loadInBackground();
        assertEquals(1, recipients.size());
        Recipient recipient = recipients.get(0);
        assertEquals(CONTACT_ADDRESS_1, recipient.address);
        assertEquals(RecipientCryptoStatus.AVAILABLE_TRUSTED, recipient.getCryptoStatus());
    }

    @Test
    public void queryCryptoStatus_withHigherUidStatus() throws Exception {
        RecipientLoader recipientLoader = new RecipientLoader(context, CRYPTO_PROVIDER, CONTACT_ADDRESS_1);
        // The higher of the two status columns wins (uid 2 beats autocrypt 1).
        setupCryptoProviderStatus(CONTACT_ADDRESS_1, "2", "1");
        List<Recipient> recipients = recipientLoader.loadInBackground();
        assertEquals(1, recipients.size());
        Recipient recipient = recipients.get(0);
        assertEquals(CONTACT_ADDRESS_1, recipient.address);
        assertEquals(RecipientCryptoStatus.AVAILABLE_TRUSTED, recipient.getCryptoStatus());
    }

    /** Stubs the crypto provider's address query for {@code queriedAddress}. */
    private void setupQueryCryptoProvider(String queriedAddress, Address... contactAddresses) {
        String[][] rows = new String[contactAddresses.length][];
        for (int i = 0; i < contactAddresses.length; i++) {
            rows[i] = new String[] { queriedAddress, contactAddresses[i].toString() };
        }
        MatrixCursor cursor = newCursor(PROJECTION_CRYPTO_ADDRESSES, rows);
        when(contentResolver
                .query(eq(cryptoStatusUri()),
                        aryEq(PROJECTION_CRYPTO_ADDRESSES), nullable(String.class),
                        aryEq(new String[] { queriedAddress }),
                        nullable(String.class))).thenReturn(cursor);
    }

    /** Stubs the crypto provider's status query for a single address. */
    private void setupCryptoProviderStatus(Address address, String uidStatus, String autocryptStatus) {
        MatrixCursor cursorCryptoStatus = newCursor(PROJECTION_CRYPTO_STATUS,
                new String[] { address.getAddress(), uidStatus, autocryptStatus });
        when(contentResolver
                .query(eq(cryptoStatusUri()),
                        aryEq(PROJECTION_CRYPTO_STATUS), nullable(String.class),
                        aryEq(new String[] { address.getAddress() }),
                        nullable(String.class))).thenReturn(cursorCryptoStatus);
    }

    /** Stubs the contacts provider for a query string (matched against name and address). */
    private void setupContactProvider(String queriedAddress, String[]... contacts) {
        when(contentResolver
                .query(eq(Email.CONTENT_URI),
                        aryEq(PROJECTION),
                        nullable(String.class),
                        aryEq(new String[] { queriedAddress, queriedAddress }),
                        nullable(String.class))).thenReturn(newCursor(PROJECTION, contacts));
    }

    /** Stubs the nickname lookup in the contacts provider. */
    private void setupNicknameContactProvider(String[]... contactsWithNickname) {
        when(contentResolver
                .query(eq(ContactsContract.Data.CONTENT_URI),
                        aryEq(PROJECTION_NICKNAME),
                        nullable(String.class),
                        nullable(String[].class),
                        nullable(String.class))).thenReturn(newCursor(PROJECTION_NICKNAME, contactsWithNickname));
    }

    /** Stubs the contacts provider for a lookup by contact id. */
    private void setupContactProviderForId(String id, String[]... contacts) {
        when(contentResolver
                .query(eq(Email.CONTENT_URI),
                        aryEq(PROJECTION),
                        nullable(String.class),
                        aryEq(new String[] { id }),
                        nullable(String.class))).thenReturn(newCursor(PROJECTION, contacts));
    }

    @Test
    public void queryContactProvider() throws Exception {
        RecipientLoader recipientLoader = new RecipientLoader(context, CRYPTO_PROVIDER, QUERYSTRING);
        setupContactProvider("%" + QUERYSTRING + "%", CONTACT_1);
        List<Recipient> recipients = recipientLoader.loadInBackground();
        assertEquals(1, recipients.size());
        assertEquals("bob@host.com", recipients.get(0).address.getAddress());
        assertEquals(RecipientCryptoStatus.UNAVAILABLE, recipients.get(0).getCryptoStatus());
    }

    @Test
    public void queryContactProvider_ignoresRecipientWithNoEmail() throws Exception {
        RecipientLoader recipientLoader = new RecipientLoader(context, CRYPTO_PROVIDER, QUERYSTRING);
        setupContactProvider("%" + QUERYSTRING + "%", CONTACT_NO_EMAIL);
        List<Recipient> recipients = recipientLoader.loadInBackground();
        assertEquals(0, recipients.size());
    }

    @Test
    public void queryContactProvider_sortByTimesContactedForNickname() throws Exception {
        RecipientLoader recipientLoader = new RecipientLoader(context, null, QUERYSTRING);
        setupContactProvider("%" + QUERYSTRING + "%", CONTACT_1);
        setupNicknameContactProvider(NICKNAME_NOT_CONTACTED);
        setupContactProviderForId(NICKNAME_NOT_CONTACTED[0], CONTACT_WITH_NICKNAME_NOT_CONTACTED);
        List<Recipient> recipients = recipientLoader.loadInBackground();
        // CONTACT_1 (times contacted 100) sorts before the never-contacted nickname match.
        assertEquals(2, recipients.size());
        assertEquals("bob@host.com", recipients.get(0).address.getAddress());
        assertEquals("eve_notContacted@host.com", recipients.get(1).address.getAddress());
    }

    @Test
    public void getMostContactedFoundMore() throws Exception {
        int maxTargets = 1;
        setupContactProvider(CONTACT_1, CONTACT_2);
        RecipientLoader recipientLoader = RecipientLoader.getMostContactedRecipientLoader(context, maxTargets);
        List<Recipient> recipients = recipientLoader.loadInBackground();
        // Result is truncated to maxTargets.
        assertEquals(maxTargets, recipients.size());
        assertEquals("bob@host.com", recipients.get(0).address.getAddress());
        assertEquals(RecipientCryptoStatus.UNDEFINED, recipients.get(0).getCryptoStatus());
    }

    @Test
    public void getMostContactedFoundLess() throws Exception {
        int maxTargets = 5;
        setupContactProvider(CONTACT_1, CONTACT_2);
        RecipientLoader recipientLoader = RecipientLoader.getMostContactedRecipientLoader(context, maxTargets);
        List<Recipient> recipients = recipientLoader.loadInBackground();
        assertEquals(2, recipients.size());
        assertEquals("bob@host.com", recipients.get(0).address.getAddress());
        assertEquals(RecipientCryptoStatus.UNDEFINED, recipients.get(0).getCryptoStatus());
        assertEquals("bob2@host.com", recipients.get(1).address.getAddress());
        assertEquals(RecipientCryptoStatus.UNDEFINED, recipients.get(1).getCryptoStatus());
    }

    @Test
    public void getMostContactedFoundNothing() throws Exception {
        int maxTargets = 5;
        setupContactProvider();
        RecipientLoader recipientLoader = RecipientLoader.getMostContactedRecipientLoader(context, maxTargets);
        List<Recipient> recipients = recipientLoader.loadInBackground();
        assertEquals(0, recipients.size());
    }

    /** Stubs the contacts provider for the no-selection "most contacted" query. */
    private void setupContactProvider(String[]... contacts) {
        when(contentResolver
                .query(eq(Email.CONTENT_URI),
                        aryEq(PROJECTION),
                        isNull(String.class),
                        isNull(String[].class),
                        nullable(String.class))).thenReturn(newCursor(PROJECTION, contacts));
    }
}
| |
/*
* Copyright 2011 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.buffer;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.channels.ClosedChannelException;
import java.nio.channels.GatheringByteChannel;
import java.nio.channels.ScatteringByteChannel;
/**
 * A skeletal implementation for Java heap buffers.
 */
public abstract class HeapChannelBuffer extends AbstractChannelBuffer {

    /**
     * The underlying heap byte array that this buffer is wrapping.
     */
    protected final byte[] array;

    /**
     * A reusable NIO view over {@link #array}, used by the channel transfer
     * methods.  NOTE(review): because the single view's position/limit are
     * mutated in place, those methods are not safe for concurrent use on the
     * same buffer — confirm callers serialize access.
     */
    protected final ByteBuffer nioBuf;

    /**
     * Creates a new heap buffer with a newly allocated byte array.
     *
     * @param length the length of the new byte array
     */
    public HeapChannelBuffer(int length) {
        this(new byte[length], 0, 0);
    }

    /**
     * Creates a new heap buffer with an existing byte array.
     *
     * @param array the byte array to wrap
     */
    public HeapChannelBuffer(byte[] array) {
        this(array, 0, array.length);
    }

    /**
     * Creates a new heap buffer with an existing byte array.
     *
     * @param array the byte array to wrap
     * @param readerIndex the initial reader index of this buffer
     * @param writerIndex the initial writer index of this buffer
     * @throws NullPointerException if {@code array} is {@code null}
     */
    protected HeapChannelBuffer(byte[] array, int readerIndex, int writerIndex) {
        if (array == null) {
            throw new NullPointerException("array");
        }
        this.array = array;
        setIndex(readerIndex, writerIndex);
        nioBuf = ByteBuffer.wrap(array);
    }

    @Override
    public boolean isDirect() {
        // Heap buffers are backed by a Java array, never by native memory.
        return false;
    }

    @Override
    public int capacity() {
        return array.length;
    }

    @Override
    public boolean hasArray() {
        return true;
    }

    @Override
    public byte[] array() {
        return array;
    }

    @Override
    public int arrayOffset() {
        return 0;
    }

    @Override
    public byte getByte(int index) {
        return array[index];
    }

    @Override
    public void getBytes(int index, ChannelBuffer dst, int dstIndex, int length) {
        if (dst instanceof HeapChannelBuffer) {
            // Fast path: copy array-to-array directly.
            getBytes(index, ((HeapChannelBuffer) dst).array, dstIndex, length);
        } else {
            dst.setBytes(dstIndex, array, index, length);
        }
    }

    @Override
    public void getBytes(int index, byte[] dst, int dstIndex, int length) {
        System.arraycopy(array, index, dst, dstIndex, length);
    }

    @Override
    public void getBytes(int index, ByteBuffer dst) {
        // Copies as much as fits in dst without running past this buffer's capacity.
        dst.put(array, index, Math.min(capacity() - index, dst.remaining()));
    }

    @Override
    public void getBytes(int index, OutputStream out, int length)
            throws IOException {
        out.write(array, index, length);
    }

    @Override
    public int getBytes(int index, GatheringByteChannel out, int length)
            throws IOException {
        // Reposition the shared NIO view onto [index, index + length) and write it.
        return out.write((ByteBuffer) nioBuf.clear().position(index).limit(index + length));
    }

    @Override
    public void setByte(int index, int value) {
        array[index] = (byte) value;
    }

    @Override
    public void setBytes(int index, ChannelBuffer src, int srcIndex, int length) {
        if (src instanceof HeapChannelBuffer) {
            // Fast path: copy array-to-array directly.
            setBytes(index, ((HeapChannelBuffer) src).array, srcIndex, length);
        } else {
            src.getBytes(srcIndex, array, index, length);
        }
    }

    @Override
    public void setBytes(int index, byte[] src, int srcIndex, int length) {
        System.arraycopy(src, srcIndex, array, index, length);
    }

    @Override
    public void setBytes(int index, ByteBuffer src) {
        src.get(array, index, src.remaining());
    }

    @Override
    public int setBytes(int index, InputStream in, int length) throws IOException {
        // Returns the number of bytes actually read, or -1 on end of stream.
        return in.read(array, index, length);
    }

    @Override
    public int setBytes(int index, ScatteringByteChannel in, int length) throws IOException {
        try {
            // Reposition the shared NIO view onto [index, index + length) and read into it.
            return in.read((ByteBuffer) nioBuf.clear().position(index).limit(index + length));
        } catch (ClosedChannelException e) {
            // Treat a closed channel like end-of-stream rather than propagating.
            return -1;
        }
    }

    @Override
    public ChannelBuffer slice(int index, int length) {
        // An empty slice is always the shared empty buffer, regardless of index.
        // (Hoisted: the original duplicated this check in both branches below.)
        if (length == 0) {
            return ChannelBuffers.EMPTY_BUFFER;
        }
        if (index == 0) {
            if (length == array.length) {
                // Slicing the whole buffer: a duplicate with adjusted indices suffices.
                ChannelBuffer slice = duplicate();
                slice.setIndex(0, length);
                return slice;
            }
            return new TruncatedChannelBuffer(this, length);
        }
        return new SlicedChannelBuffer(this, index, length);
    }

    @Override
    public boolean hasNioBuffer() {
        return true;
    }

    @Override
    public ByteBuffer nioBuffer(int index, int length) {
        // A fresh wrapper (not the shared nioBuf) carrying this buffer's byte order.
        return ByteBuffer.wrap(array, index, length).order(order());
    }
}
| |
// Copyright 2018 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.skyframe;
import static com.google.common.truth.Truth.assertThat;
import static org.mockito.Mockito.when;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.devtools.build.lib.analysis.util.BuildViewTestCase;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.cmdline.PackageIdentifier;
import com.google.devtools.build.lib.packages.NoSuchPackageException;
import com.google.devtools.build.lib.packages.NoSuchTargetException;
import com.google.devtools.build.lib.packages.Package;
import com.google.devtools.build.lib.skyframe.TransitiveBaseTraversalFunction.TargetAndErrorIfAnyImpl;
import com.google.devtools.build.lib.skyframe.util.SkyframeExecutorTestUtils;
import com.google.devtools.build.lib.util.GroupedList;
import com.google.devtools.build.lib.util.GroupedList.GroupedListHelper;
import com.google.devtools.build.skyframe.EvaluationResult;
import com.google.devtools.build.skyframe.SkyFunction;
import com.google.devtools.build.skyframe.SkyKey;
import com.google.devtools.build.skyframe.SkyframeLookupResult;
import com.google.devtools.build.skyframe.ValueOrUntypedException;
import java.util.concurrent.atomic.AtomicBoolean;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.mockito.Mockito;
/** Test for {@link TransitiveTraversalFunction}. */
@RunWith(JUnit4.class)
public class TransitiveTraversalFunctionTest extends BuildViewTestCase {
/**
 * Verifies that when {@code compute} is re-invoked with deps already recorded in the
 * environment's temporary direct deps, the previously-requested dep group is re-fetched
 * in a single {@code getValuesAndExceptions} batch call (the "optimization"), and that
 * {@code compute} returns null while values are still missing.
 */
@Test
public void noRepeatedLabelVisitationForTransitiveTraversalFunction() throws Exception {
  // Create a basic package with a target //foo:foo.
  Label label = Label.parseAbsolute("//foo:foo", ImmutableMap.of());
  scratch.file("foo/BUILD", "sh_library(name = '" + label.getName() + "')");
  Package pkg = loadPackage(label.getPackageIdentifier());
  TargetAndErrorIfAnyImpl targetAndErrorIfAny =
      new TargetAndErrorIfAnyImpl(
          /*packageLoadedSuccessfully=*/ true,
          /*errorLoadingTarget=*/ null,
          pkg.getTarget(label.getName()));
  // Stub out target loading so compute() does not touch the real package machinery.
  TransitiveTraversalFunction function =
      new TransitiveTraversalFunction() {
        @Override
        TargetAndErrorIfAny loadTarget(Environment env, Label label) {
          return targetAndErrorIfAny;
        }
      };
  // Create the GroupedList saying we had already requested two targets the last time we called
  // #compute.
  GroupedListHelper<SkyKey> helper = new GroupedListHelper<>();
  SkyKey fakeDep1 = function.getKey(Label.parseAbsolute("//foo:bar", ImmutableMap.of()));
  SkyKey fakeDep2 = function.getKey(Label.parseAbsolute("//foo:baz", ImmutableMap.of()));
  helper.add(PackageValue.key(label.getPackageIdentifier()));
  helper.startGroup();
  // Note that these targets don't actually exist in the package we created initially. It doesn't
  // matter for the purpose of this test, the original package was just to create some objects
  // that we needed.
  helper.add(fakeDep1);
  helper.add(fakeDep2);
  helper.endGroup();
  GroupedList<SkyKey> groupedList = new GroupedList<>();
  groupedList.append(helper);
  AtomicBoolean wasOptimizationUsed = new AtomicBoolean(false);
  SkyFunction.Environment mockEnv = Mockito.mock(SkyFunction.Environment.class);
  when(mockEnv.getTemporaryDirectDeps()).thenReturn(groupedList);
  // Expect the second dep group (the two fake deps) to be fetched in one batch call.
  when(mockEnv.getValuesAndExceptions(groupedList.get(1)))
      .thenAnswer(
          (invocationOnMock) -> {
            wasOptimizationUsed.set(true);
            // It doesn't matter what this SkyframeLookupResult is, we'll return true in the
            // valuesMissing() call.
            return new SkyframeLookupResult(
                /* valuesMissingCallback= */ () -> {},
                k -> {
                  throw new IllegalStateException("Shouldn't have been called: " + k);
                });
          });
  when(mockEnv.valuesMissing()).thenReturn(true);
  // Run the compute function and check that we returned null.
  assertThat(function.compute(function.getKey(label), mockEnv)).isNull();
  // Verify that the mock was called with the arguments we expected.
  assertThat(wasOptimizationUsed.get()).isTrue();
}
/**
 * When several deps of a target fail to load, the computed {@link TransitiveTraversalValue}
 * carries the error message of the first failing dep ("bad bar"), not the later ones.
 */
@Test
public void multipleErrorsForTransitiveTraversalFunction() throws Exception {
  Label label = Label.parseAbsolute("//foo:foo", ImmutableMap.of());
  scratch.file(
      "foo/BUILD", "sh_library(name = '" + label.getName() + "', deps = [':bar', ':baz'])");
  Package pkg = loadPackage(label.getPackageIdentifier());
  TargetAndErrorIfAnyImpl targetAndErrorIfAny =
      new TargetAndErrorIfAnyImpl(
          /*packageLoadedSuccessfully=*/ true,
          /*errorLoadingTarget=*/ null,
          pkg.getTarget(label.getName()));
  // Stub out target loading so compute() does not touch the real package machinery.
  TransitiveTraversalFunction function =
      new TransitiveTraversalFunction() {
        @Override
        TargetAndErrorIfAny loadTarget(Environment env, Label label) {
          return targetAndErrorIfAny;
        }
      };
  SkyKey dep1 = function.getKey(Label.parseAbsolute("//foo:bar", ImmutableMap.of()));
  SkyKey dep2 = function.getKey(Label.parseAbsolute("//foo:baz", ImmutableMap.of()));
  SkyFunction.Environment mockEnv = Mockito.mock(SkyFunction.Environment.class);
  NoSuchTargetException exp1 = new NoSuchTargetException("bad bar");
  NoSuchTargetException exp2 = new NoSuchTargetException("bad baz");
  // Both deps resolve to exceptions rather than values.
  SkyframeLookupResult returnedDeps =
      new SkyframeLookupResult(
          () -> {},
          key ->
              key.equals(dep1)
                  ? ValueOrUntypedException.ofExn(exp1)
                  : key.equals(dep2) ? ValueOrUntypedException.ofExn(exp2) : null);
  when(mockEnv.getValuesAndExceptions(ImmutableSet.of(dep1, dep2))).thenReturn(returnedDeps);
  when(mockEnv.valuesMissing()).thenReturn(false);
  assertThat(
          ((TransitiveTraversalValue) function.compute(function.getKey(label), mockEnv))
              .getErrorMessage())
      .isEqualTo("bad bar");
}
/**
 * A target's own loading error ("self error is long and last") takes precedence over an
 * error from one of its deps ("bad bar") in the resulting value's error message.
 */
@Test
public void selfErrorWins() throws Exception {
  Label label = Label.parseAbsolute("//foo:foo", ImmutableMap.of());
  scratch.file("foo/BUILD", "sh_library(name = '" + label.getName() + "', deps = [':bar'])");
  Package pkg = loadPackage(label.getPackageIdentifier());
  // Unlike the other tests here, this stub reports an error for the target itself.
  TargetAndErrorIfAnyImpl targetAndErrorIfAny =
      new TargetAndErrorIfAnyImpl(
          /*packageLoadedSuccessfully=*/ true,
          /*errorLoadingTarget=*/ new NoSuchTargetException("self error is long and last"),
          pkg.getTarget(label.getName()));
  TransitiveTraversalFunction function =
      new TransitiveTraversalFunction() {
        @Override
        TargetAndErrorIfAny loadTarget(Environment env, Label label) {
          return targetAndErrorIfAny;
        }
      };
  SkyKey dep = function.getKey(Label.parseAbsolute("//foo:bar", ImmutableMap.of()));
  NoSuchTargetException exp = new NoSuchTargetException("bad bar");
  // The single dep resolves to an exception rather than a value.
  SkyframeLookupResult returnedDep =
      new SkyframeLookupResult(
          () -> {}, key -> key.equals(dep) ? ValueOrUntypedException.ofExn(exp) : null);
  SkyFunction.Environment mockEnv = Mockito.mock(SkyFunction.Environment.class);
  when(mockEnv.getValuesAndExceptions(ImmutableSet.of(dep))).thenReturn(returnedDep);
  when(mockEnv.valuesMissing()).thenReturn(false);
  TransitiveTraversalValue transitiveTraversalValue =
      (TransitiveTraversalValue) function.compute(function.getKey(label), mockEnv);
  assertThat(transitiveTraversalValue.getErrorMessage()).isEqualTo("self error is long and last");
}
/**
 * Verifies that an error loading the dep {@code //test:bad}, declared on an attribute that
 * carries an aspect, is surfaced as the computed value's error message, and that the lookup
 * completed without reporting missing values.  NOTE(review): the exact role of the aspect's
 * own {@code _extra_deps} label here is not asserted — confirm against
 * TransitiveTraversalFunction's aspect-dep collection if extending this test.
 */
@Test
public void getStrictLabelAspectKeys() throws Exception {
  Label label = Label.parseAbsolute("//test:foo", ImmutableMap.of());
  // A rule whose 'attr' attribute applies MyAspect to its deps.
  scratch.file(
      "test/aspect.bzl",
      "def _aspect_impl(target, ctx):",
      "   return struct()",
      "def _rule_impl(ctx):",
      "   return struct()",
      "",
      "MyAspect = aspect(",
      "   implementation=_aspect_impl,",
      "   attr_aspects=['deps'],",
      "   attrs = { '_extra_deps' : attr.label(default = Label('//foo:bar'))},",
      ")",
      "my_rule = rule(",
      "   implementation=_rule_impl,",
      "   attrs = { 'attr' : ",
      "             attr.label_list(mandatory=True, aspects = [MyAspect]) ",
      "   },",
      ")");
  scratch.file(
      "test/BUILD",
      "load('//test:aspect.bzl', 'my_rule')",
      "my_rule(name = 'foo',attr = [':bad'])");
  Package pkg = loadPackage(label.getPackageIdentifier());
  TargetAndErrorIfAnyImpl targetAndErrorIfAny =
      new TargetAndErrorIfAnyImpl(
          /*packageLoadedSuccessfully=*/ true,
          /*errorLoadingTarget=*/ null,
          pkg.getTarget(label.getName()));
  // Stub out target loading so compute() does not touch the real package machinery.
  TransitiveTraversalFunction function =
      new TransitiveTraversalFunction() {
        @Override
        TargetAndErrorIfAny loadTarget(Environment env, Label label) {
          return targetAndErrorIfAny;
        }
      };
  SkyKey badDep = function.getKey(Label.parseAbsolute("//test:bad", ImmutableMap.of()));
  NoSuchTargetException exp = new NoSuchTargetException("bad test");
  AtomicBoolean valuesMissing = new AtomicBoolean(false);
  // The dep resolves to an exception; the missing-values callback would flip the flag.
  SkyframeLookupResult returnedDep =
      new SkyframeLookupResult(
          () -> valuesMissing.set(true),
          key -> key.equals(badDep) ? ValueOrUntypedException.ofExn(exp) : null);
  SkyFunction.Environment mockEnv = Mockito.mock(SkyFunction.Environment.class);
  when(mockEnv.getValuesAndExceptions(ImmutableSet.of(badDep))).thenReturn(returnedDep);
  TransitiveTraversalValue transitiveTraversalValue =
      (TransitiveTraversalValue) function.compute(function.getKey(label), mockEnv);
  assertThat(transitiveTraversalValue.getErrorMessage()).isEqualTo("bad test");
  assertThat(valuesMissing.get()).isFalse();
}
/**
 * Invokes the loading phase for {@code pkgid} via Skyframe and returns the loaded package.
 *
 * @throws NoSuchPackageException if evaluation produced an error for the package key (the error's
 *     exception is assumed to be a {@link NoSuchPackageException} in these tests)
 */
private Package loadPackage(PackageIdentifier pkgid)
    throws InterruptedException, NoSuchPackageException {
  SkyKey packageKey = PackageValue.key(pkgid);
  EvaluationResult<PackageValue> evalResult =
      SkyframeExecutorTestUtils.evaluate(
          getSkyframeExecutor(), packageKey, /*keepGoing=*/ false, reporter);
  if (evalResult.hasError()) {
    throw (NoSuchPackageException) evalResult.getError(packageKey).getException();
  }
  return evalResult.get(packageKey).getPackage();
}
}